fix(mid-section-defect): include non-charge-off rejects, fix forward tracing, remove auto-refresh
- Add DEFECTQTY to reject SUM in station_detection, station_detection_by_ids, and downstream_rejects SQL so KPI/charts include both charge-off and non-charge-off reject quantities
- Wire forward direction through events-based trace pipeline so downstream pareto charts and detail table populate correctly
- Remove inappropriate 5-min auto-refresh from query tool page; replace useAutoRefresh with local createAbortSignal for request cancellation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -2,7 +2,6 @@
|
|||||||
import { computed, reactive, ref } from 'vue';
|
import { computed, reactive, ref } from 'vue';
|
||||||
|
|
||||||
import { apiGet, ensureMesApiAvailable } from '../core/api.js';
|
import { apiGet, ensureMesApiAvailable } from '../core/api.js';
|
||||||
import { useAutoRefresh } from '../shared-composables/useAutoRefresh.js';
|
|
||||||
import { useTraceProgress } from '../shared-composables/useTraceProgress.js';
|
import { useTraceProgress } from '../shared-composables/useTraceProgress.js';
|
||||||
import TraceProgressBar from '../shared-composables/TraceProgressBar.vue';
|
import TraceProgressBar from '../shared-composables/TraceProgressBar.vue';
|
||||||
|
|
||||||
@@ -381,10 +380,6 @@ async function loadAnalysis() {
|
|||||||
saveSession();
|
saveSession();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!autoRefreshStarted) {
|
|
||||||
autoRefreshStarted = true;
|
|
||||||
startAutoRefresh();
|
|
||||||
}
|
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (err?.name === 'AbortError') {
|
if (err?.name === 'AbortError') {
|
||||||
return;
|
return;
|
||||||
@@ -434,16 +429,14 @@ function exportCsv() {
|
|||||||
document.body.removeChild(link);
|
document.body.removeChild(link);
|
||||||
}
|
}
|
||||||
|
|
||||||
let autoRefreshStarted = false;
|
// One live AbortController per logical request key; a new request under the
// same key cancels whichever request was previously in flight.
const _abortControllers = new Map();

/**
 * Create an AbortSignal for the request identified by `key`, aborting any
 * earlier request still registered under that key.
 *
 * @param {string} [key='default'] - Logical name of the request slot.
 * @returns {AbortSignal} Signal to pass to the outgoing request.
 */
function createAbortSignal(key = 'default') {
  // Cancel the superseded in-flight request, if any.
  _abortControllers.get(key)?.abort();
  const controller = new AbortController();
  _abortControllers.set(key, controller);
  return controller.signal;
}
|
||||||
refreshOnVisible: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
function saveSession() {
|
function saveSession() {
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -412,6 +412,7 @@ def _build_msd_aggregation(
|
|||||||
seed_container_ids = _normalize_strings(list(lineage_ancestors.keys()))
|
seed_container_ids = _normalize_strings(list(lineage_ancestors.keys()))
|
||||||
|
|
||||||
upstream_events = domain_results.get("upstream_history", {})
|
upstream_events = domain_results.get("upstream_history", {})
|
||||||
|
downstream_events = domain_results.get("downstream_rejects", {})
|
||||||
station = str(params.get("station") or "測試").strip()
|
station = str(params.get("station") or "測試").strip()
|
||||||
direction = str(params.get("direction") or "backward").strip()
|
direction = str(params.get("direction") or "backward").strip()
|
||||||
|
|
||||||
@@ -422,6 +423,7 @@ def _build_msd_aggregation(
|
|||||||
seed_container_ids=seed_container_ids,
|
seed_container_ids=seed_container_ids,
|
||||||
lineage_ancestors=lineage_ancestors,
|
lineage_ancestors=lineage_ancestors,
|
||||||
upstream_events_by_cid=upstream_events,
|
upstream_events_by_cid=upstream_events,
|
||||||
|
downstream_events_by_cid=downstream_events,
|
||||||
station=station,
|
station=station,
|
||||||
direction=direction,
|
direction=direction,
|
||||||
mode=mode,
|
mode=mode,
|
||||||
|
|||||||
@@ -297,6 +297,7 @@ def build_trace_aggregation_from_events(
|
|||||||
seed_container_ids: Optional[List[str]] = None,
|
seed_container_ids: Optional[List[str]] = None,
|
||||||
lineage_ancestors: Optional[Dict[str, Any]] = None,
|
lineage_ancestors: Optional[Dict[str, Any]] = None,
|
||||||
upstream_events_by_cid: Optional[Dict[str, List[Dict[str, Any]]]] = None,
|
upstream_events_by_cid: Optional[Dict[str, List[Dict[str, Any]]]] = None,
|
||||||
|
downstream_events_by_cid: Optional[Dict[str, List[Dict[str, Any]]]] = None,
|
||||||
station: str = '測試',
|
station: str = '測試',
|
||||||
direction: str = 'backward',
|
direction: str = 'backward',
|
||||||
mode: str = 'date_range',
|
mode: str = 'date_range',
|
||||||
@@ -308,6 +309,7 @@ def build_trace_aggregation_from_events(
|
|||||||
seed_container_ids=seed_container_ids,
|
seed_container_ids=seed_container_ids,
|
||||||
lineage_ancestors=lineage_ancestors,
|
lineage_ancestors=lineage_ancestors,
|
||||||
upstream_events_by_cid=upstream_events_by_cid,
|
upstream_events_by_cid=upstream_events_by_cid,
|
||||||
|
downstream_events_by_cid=downstream_events_by_cid,
|
||||||
station=station,
|
station=station,
|
||||||
direction=direction,
|
direction=direction,
|
||||||
)
|
)
|
||||||
@@ -348,6 +350,43 @@ def build_trace_aggregation_from_events(
|
|||||||
filtered_df = detection_df
|
filtered_df = detection_df
|
||||||
|
|
||||||
detection_data = _build_detection_lookup(filtered_df)
|
detection_data = _build_detection_lookup(filtered_df)
|
||||||
|
|
||||||
|
seed_ids = [
|
||||||
|
cid for cid in (seed_container_ids or list(detection_data.keys()))
|
||||||
|
if isinstance(cid, str) and cid.strip()
|
||||||
|
]
|
||||||
|
genealogy_status = 'ready'
|
||||||
|
if seed_ids and lineage_ancestors is None:
|
||||||
|
genealogy_status = 'error'
|
||||||
|
|
||||||
|
# Forward direction: use forward pipeline
|
||||||
|
if direction == 'forward':
|
||||||
|
station_order = get_group_order(station)
|
||||||
|
defect_cids = filtered_df.loc[
|
||||||
|
filtered_df['REJECTQTY'] > 0, 'CONTAINERID'
|
||||||
|
].unique().tolist()
|
||||||
|
|
||||||
|
wip_by_cid = _normalize_upstream_event_records(upstream_events_by_cid or {})
|
||||||
|
downstream_rejects = _normalize_downstream_event_records(downstream_events_by_cid or {})
|
||||||
|
|
||||||
|
forward_attr = _attribute_forward_defects(
|
||||||
|
detection_data, defect_cids, wip_by_cid, downstream_rejects, station_order,
|
||||||
|
)
|
||||||
|
detail = _build_forward_detail_table(
|
||||||
|
filtered_df, defect_cids, wip_by_cid, downstream_rejects, station_order,
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'kpi': _build_forward_kpi(detection_data, forward_attr),
|
||||||
|
'charts': _build_forward_charts(forward_attr, detection_data),
|
||||||
|
'daily_trend': _build_daily_trend(filtered_df, normalized_loss_reasons),
|
||||||
|
'available_loss_reasons': available_loss_reasons,
|
||||||
|
'genealogy_status': genealogy_status,
|
||||||
|
'detail_total_count': len(detail),
|
||||||
|
'attribution': [],
|
||||||
|
}
|
||||||
|
|
||||||
|
# Backward direction
|
||||||
normalized_ancestors = _normalize_lineage_ancestors(
|
normalized_ancestors = _normalize_lineage_ancestors(
|
||||||
lineage_ancestors,
|
lineage_ancestors,
|
||||||
seed_container_ids=seed_container_ids,
|
seed_container_ids=seed_container_ids,
|
||||||
@@ -363,14 +402,6 @@ def build_trace_aggregation_from_events(
|
|||||||
)
|
)
|
||||||
detail = _build_detail_table(filtered_df, normalized_ancestors, normalized_upstream)
|
detail = _build_detail_table(filtered_df, normalized_ancestors, normalized_upstream)
|
||||||
|
|
||||||
seed_ids = [
|
|
||||||
cid for cid in (seed_container_ids or list(detection_data.keys()))
|
|
||||||
if isinstance(cid, str) and cid.strip()
|
|
||||||
]
|
|
||||||
genealogy_status = 'ready'
|
|
||||||
if seed_ids and lineage_ancestors is None:
|
|
||||||
genealogy_status = 'error'
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'kpi': _build_kpi(filtered_df, attribution, normalized_loss_reasons),
|
'kpi': _build_kpi(filtered_df, attribution, normalized_loss_reasons),
|
||||||
'charts': _build_all_charts(attribution, detection_data),
|
'charts': _build_all_charts(attribution, detection_data),
|
||||||
@@ -388,6 +419,7 @@ def _build_trace_aggregation_container_mode(
|
|||||||
seed_container_ids: Optional[List[str]] = None,
|
seed_container_ids: Optional[List[str]] = None,
|
||||||
lineage_ancestors: Optional[Dict[str, Any]] = None,
|
lineage_ancestors: Optional[Dict[str, Any]] = None,
|
||||||
upstream_events_by_cid: Optional[Dict[str, List[Dict[str, Any]]]] = None,
|
upstream_events_by_cid: Optional[Dict[str, List[Dict[str, Any]]]] = None,
|
||||||
|
downstream_events_by_cid: Optional[Dict[str, List[Dict[str, Any]]]] = None,
|
||||||
station: str = '測試',
|
station: str = '測試',
|
||||||
direction: str = 'backward',
|
direction: str = 'backward',
|
||||||
) -> Optional[Dict[str, Any]]:
|
) -> Optional[Dict[str, Any]]:
|
||||||
@@ -437,6 +469,43 @@ def _build_trace_aggregation_container_mode(
|
|||||||
filtered_df = detection_df
|
filtered_df = detection_df
|
||||||
|
|
||||||
detection_data = _build_detection_lookup(filtered_df)
|
detection_data = _build_detection_lookup(filtered_df)
|
||||||
|
|
||||||
|
seed_ids = [
|
||||||
|
cid for cid in seed_container_ids
|
||||||
|
if isinstance(cid, str) and cid.strip()
|
||||||
|
]
|
||||||
|
genealogy_status = 'ready'
|
||||||
|
if seed_ids and lineage_ancestors is None:
|
||||||
|
genealogy_status = 'error'
|
||||||
|
|
||||||
|
# Forward direction
|
||||||
|
if direction == 'forward':
|
||||||
|
station_order = get_group_order(station)
|
||||||
|
defect_cids = filtered_df.loc[
|
||||||
|
filtered_df['REJECTQTY'] > 0, 'CONTAINERID'
|
||||||
|
].unique().tolist()
|
||||||
|
|
||||||
|
wip_by_cid = _normalize_upstream_event_records(upstream_events_by_cid or {})
|
||||||
|
downstream_rejects = _normalize_downstream_event_records(downstream_events_by_cid or {})
|
||||||
|
|
||||||
|
forward_attr = _attribute_forward_defects(
|
||||||
|
detection_data, defect_cids, wip_by_cid, downstream_rejects, station_order,
|
||||||
|
)
|
||||||
|
detail = _build_forward_detail_table(
|
||||||
|
filtered_df, defect_cids, wip_by_cid, downstream_rejects, station_order,
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'kpi': _build_forward_kpi(detection_data, forward_attr),
|
||||||
|
'charts': _build_forward_charts(forward_attr, detection_data),
|
||||||
|
'daily_trend': [],
|
||||||
|
'available_loss_reasons': available_loss_reasons,
|
||||||
|
'genealogy_status': genealogy_status,
|
||||||
|
'detail_total_count': len(detail),
|
||||||
|
'attribution': [],
|
||||||
|
}
|
||||||
|
|
||||||
|
# Backward direction
|
||||||
normalized_ancestors = _normalize_lineage_ancestors(
|
normalized_ancestors = _normalize_lineage_ancestors(
|
||||||
lineage_ancestors,
|
lineage_ancestors,
|
||||||
seed_container_ids=seed_container_ids,
|
seed_container_ids=seed_container_ids,
|
||||||
@@ -452,14 +521,6 @@ def _build_trace_aggregation_container_mode(
|
|||||||
)
|
)
|
||||||
detail = _build_detail_table(filtered_df, normalized_ancestors, normalized_upstream)
|
detail = _build_detail_table(filtered_df, normalized_ancestors, normalized_upstream)
|
||||||
|
|
||||||
seed_ids = [
|
|
||||||
cid for cid in seed_container_ids
|
|
||||||
if isinstance(cid, str) and cid.strip()
|
|
||||||
]
|
|
||||||
genealogy_status = 'ready'
|
|
||||||
if seed_ids and lineage_ancestors is None:
|
|
||||||
genealogy_status = 'error'
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'kpi': _build_kpi(filtered_df, attribution, normalized_loss_reasons),
|
'kpi': _build_kpi(filtered_df, attribution, normalized_loss_reasons),
|
||||||
'charts': _build_all_charts(attribution, detection_data),
|
'charts': _build_all_charts(attribution, detection_data),
|
||||||
@@ -1202,6 +1263,29 @@ def _normalize_upstream_event_records(
|
|||||||
return dict(result)
|
return dict(result)
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_downstream_event_records(
    events_by_cid: Dict[str, List[Dict[str, Any]]],
) -> Dict[str, List[Dict[str, Any]]]:
    """Normalize EventFetcher downstream_rejects payload into forward-pipeline-ready records."""
    # Plain dict built lazily: a container id only appears once at least one
    # of its events has a usable workcenter group.
    normalized: Dict[str, List[Dict[str, Any]]] = {}
    for raw_cid, events in events_by_cid.items():
        container_id = _safe_str(raw_cid)
        if not container_id:
            # Skip blank / non-string container ids entirely.
            continue
        for event in events:
            workcenter_group = _safe_str(event.get('WORKCENTER_GROUP'))
            if not workcenter_group:
                # Events without a workcenter group cannot be attributed.
                continue
            record = {
                'workcenter_group': workcenter_group,
                'lossreasonname': _safe_str(event.get('LOSSREASONNAME')),
                'equipment_name': _safe_str(event.get('EQUIPMENTNAME')),
                'reject_total_qty': _safe_int(event.get('REJECT_TOTAL_QTY')),
                'txndate': _safe_str(event.get('TXNDATE')),
            }
            normalized.setdefault(container_id, []).append(record)
    return normalized
|
||||||
|
|
||||||
|
|
||||||
# ============================================================
|
# ============================================================
|
||||||
# Detection Data Lookup
|
# Detection Data Lookup
|
||||||
# ============================================================
|
# ============================================================
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
-- Dynamically built IN clause for descendant CONTAINERIDs ({{ DESCENDANT_FILTER }})
|
-- Dynamically built IN clause for descendant CONTAINERIDs ({{ DESCENDANT_FILTER }})
|
||||||
--
|
--
|
||||||
-- Tables used:
|
-- Tables used:
|
||||||
-- DWH.DW_MES_LOTREJECTHISTORY (reject records)
|
-- DWH.DW_MES_LOTREJECTHISTORY (reject records - charge-off + non-charge-off)
|
||||||
--
|
--
|
||||||
-- Performance:
|
-- Performance:
|
||||||
-- CONTAINERID has index. Batch IN clause (up to 1000 per query).
|
-- CONTAINERID has index. Batch IN clause (up to 1000 per query).
|
||||||
@@ -49,7 +49,8 @@ SELECT
|
|||||||
+ NVL(r.STANDBYQTY, 0)
|
+ NVL(r.STANDBYQTY, 0)
|
||||||
+ NVL(r.QTYTOPROCESS, 0)
|
+ NVL(r.QTYTOPROCESS, 0)
|
||||||
+ NVL(r.INPROCESSQTY, 0)
|
+ NVL(r.INPROCESSQTY, 0)
|
||||||
+ NVL(r.PROCESSEDQTY, 0) AS REJECT_TOTAL_QTY,
|
+ NVL(r.PROCESSEDQTY, 0)
|
||||||
|
+ NVL(r.DEFECTQTY, 0) AS REJECT_TOTAL_QTY,
|
||||||
r.TXNDATE
|
r.TXNDATE
|
||||||
FROM DWH.DW_MES_LOTREJECTHISTORY r
|
FROM DWH.DW_MES_LOTREJECTHISTORY r
|
||||||
WHERE {{ DESCENDANT_FILTER }}
|
WHERE {{ DESCENDANT_FILTER }}
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
--
|
--
|
||||||
-- Tables used:
|
-- Tables used:
|
||||||
-- DWH.DW_MES_LOTWIPHISTORY (detection station records)
|
-- DWH.DW_MES_LOTWIPHISTORY (detection station records)
|
||||||
-- DWH.DW_MES_LOTREJECTHISTORY (defect records - ALL loss reasons)
|
-- DWH.DW_MES_LOTREJECTHISTORY (defect records - charge-off + non-charge-off)
|
||||||
-- DWH.DW_MES_CONTAINER (product info + MFGORDERNAME for genealogy)
|
-- DWH.DW_MES_CONTAINER (product info + MFGORDERNAME for genealogy)
|
||||||
-- DWH.DW_MES_WIP (WORKFLOWNAME)
|
-- DWH.DW_MES_WIP (WORKFLOWNAME)
|
||||||
|
|
||||||
@@ -43,7 +43,8 @@ detection_rejects AS (
|
|||||||
r.CONTAINERID,
|
r.CONTAINERID,
|
||||||
r.LOSSREASONNAME,
|
r.LOSSREASONNAME,
|
||||||
SUM(NVL(r.REJECTQTY, 0) + NVL(r.STANDBYQTY, 0) + NVL(r.QTYTOPROCESS, 0)
|
SUM(NVL(r.REJECTQTY, 0) + NVL(r.STANDBYQTY, 0) + NVL(r.QTYTOPROCESS, 0)
|
||||||
+ NVL(r.INPROCESSQTY, 0) + NVL(r.PROCESSEDQTY, 0)) AS REJECTQTY
|
+ NVL(r.INPROCESSQTY, 0) + NVL(r.PROCESSEDQTY, 0)
|
||||||
|
+ NVL(r.DEFECTQTY, 0)) AS REJECTQTY
|
||||||
FROM DWH.DW_MES_LOTREJECTHISTORY r
|
FROM DWH.DW_MES_LOTREJECTHISTORY r
|
||||||
WHERE r.TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD')
|
WHERE r.TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD')
|
||||||
AND r.TXNDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1
|
AND r.TXNDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
--
|
--
|
||||||
-- Tables used:
|
-- Tables used:
|
||||||
-- DWH.DW_MES_LOTWIPHISTORY (detection station records)
|
-- DWH.DW_MES_LOTWIPHISTORY (detection station records)
|
||||||
-- DWH.DW_MES_LOTREJECTHISTORY (defect records - ALL loss reasons)
|
-- DWH.DW_MES_LOTREJECTHISTORY (defect records - charge-off + non-charge-off)
|
||||||
-- DWH.DW_MES_CONTAINER (product info + MFGORDERNAME for genealogy)
|
-- DWH.DW_MES_CONTAINER (product info + MFGORDERNAME for genealogy)
|
||||||
-- DWH.DW_MES_WIP (WORKFLOWNAME)
|
-- DWH.DW_MES_WIP (WORKFLOWNAME)
|
||||||
|
|
||||||
@@ -42,7 +42,8 @@ detection_rejects AS (
|
|||||||
r.CONTAINERID,
|
r.CONTAINERID,
|
||||||
r.LOSSREASONNAME,
|
r.LOSSREASONNAME,
|
||||||
SUM(NVL(r.REJECTQTY, 0) + NVL(r.STANDBYQTY, 0) + NVL(r.QTYTOPROCESS, 0)
|
SUM(NVL(r.REJECTQTY, 0) + NVL(r.STANDBYQTY, 0) + NVL(r.QTYTOPROCESS, 0)
|
||||||
+ NVL(r.INPROCESSQTY, 0) + NVL(r.PROCESSEDQTY, 0)) AS REJECTQTY
|
+ NVL(r.INPROCESSQTY, 0) + NVL(r.PROCESSEDQTY, 0)
|
||||||
|
+ NVL(r.DEFECTQTY, 0)) AS REJECTQTY
|
||||||
FROM DWH.DW_MES_LOTREJECTHISTORY r
|
FROM DWH.DW_MES_LOTREJECTHISTORY r
|
||||||
WHERE r.CONTAINERID IN ({{ CONTAINER_IDS }})
|
WHERE r.CONTAINERID IN ({{ CONTAINER_IDS }})
|
||||||
AND ({{ STATION_FILTER_REJECTS }})
|
AND ({{ STATION_FILTER_REJECTS }})
|
||||||
|
|||||||
Reference in New Issue
Block a user