+
+
+
+ 關係清單({{ relationRows.length }})
+
+
+
+
+
+ | 來源批次 |
+ 目標批次 |
+ 關係 |
+
+
+
+
+ |
+ {{ row.fromName }}
+ |
+
+ {{ row.toName }}
+ |
+
+ {{ row.edgeLabel }}
+ |
+
+
+
+
+
+ 僅顯示前 200 筆,請搭配上方樹圖與節點點選進一步縮小範圍。
+
+
+
未命中:{{ notFound.join(', ') }}
diff --git a/frontend/src/query-tool/components/LotTraceView.vue b/frontend/src/query-tool/components/LotTraceView.vue
index 4c9b077..2a9e3ba 100644
--- a/frontend/src/query-tool/components/LotTraceView.vue
+++ b/frontend/src/query-tool/components/LotTraceView.vue
@@ -56,6 +56,10 @@ const props = defineProps({
type: Object,
default: () => new Map(),
},
+ graphEdges: {
+ type: Array,
+ default: () => [],
+ },
leafSerials: {
type: Object,
default: () => new Map(),
@@ -154,6 +158,7 @@ const emit = defineEmits([
:name-map="nameMap"
:node-meta-map="nodeMetaMap"
:edge-type-map="edgeTypeMap"
+ :graph-edges="graphEdges"
:leaf-serials="leafSerials"
:selected-container-ids="selectedContainerIds"
:loading="lineageLoading"
diff --git a/frontend/src/query-tool/components/SerialReverseTraceView.vue b/frontend/src/query-tool/components/SerialReverseTraceView.vue
index 0406ad0..6e75675 100644
--- a/frontend/src/query-tool/components/SerialReverseTraceView.vue
+++ b/frontend/src/query-tool/components/SerialReverseTraceView.vue
@@ -56,6 +56,10 @@ const props = defineProps({
type: Object,
default: () => new Map(),
},
+ graphEdges: {
+ type: Array,
+ default: () => [],
+ },
leafSerials: {
type: Object,
default: () => new Map(),
@@ -154,6 +158,7 @@ const emit = defineEmits([
:name-map="nameMap"
:node-meta-map="nodeMetaMap"
:edge-type-map="edgeTypeMap"
+ :graph-edges="graphEdges"
:leaf-serials="leafSerials"
:selected-container-ids="selectedContainerIds"
:loading="lineageLoading"
diff --git a/frontend/src/query-tool/composables/useLotLineage.js b/frontend/src/query-tool/composables/useLotLineage.js
index c26461e..276bcae 100644
--- a/frontend/src/query-tool/composables/useLotLineage.js
+++ b/frontend/src/query-tool/composables/useLotLineage.js
@@ -78,6 +78,7 @@ export function useLotLineage(initial = {}) {
const nameMap = reactive(new Map());
const nodeMetaMap = reactive(new Map());
const edgeTypeMap = reactive(new Map());
+ const graphEdges = ref([]);
const leafSerials = reactive(new Map());
const expandedNodes = ref(new Set());
const selectedContainerId = ref(normalizeText(initial.selectedContainerId));
@@ -255,18 +256,23 @@ export function useLotLineage(initial = {}) {
}
edgeTypeMap.clear();
+ const normalizedEdges = [];
if (Array.isArray(typedEdges)) {
typedEdges.forEach((edge) => {
if (!edge || typeof edge !== 'object') {
return;
}
- const key = edgeKey(edge.from_cid, edge.to_cid);
+ const from = normalizeText(edge.from_cid);
+ const to = normalizeText(edge.to_cid);
+ const key = edgeKey(from, to);
const type = normalizeText(edge.edge_type);
if (key && type) {
edgeTypeMap.set(key, type);
+ normalizedEdges.push({ from_cid: from, to_cid: to, edge_type: type });
}
});
}
+ graphEdges.value = normalizedEdges;
// Store leaf serial numbers
Object.entries(serialsData).forEach(([cid, serials]) => {
@@ -463,6 +469,7 @@ export function useLotLineage(initial = {}) {
nameMap.clear();
nodeMetaMap.clear();
edgeTypeMap.clear();
+ graphEdges.value = [];
leafSerials.clear();
expandedNodes.value = new Set();
selectedContainerIds.value = [];
@@ -508,6 +515,7 @@ export function useLotLineage(initial = {}) {
nameMap,
nodeMetaMap,
edgeTypeMap,
+ graphEdges,
leafSerials,
expandedNodes,
selectedContainerId,
diff --git a/frontend/src/query-tool/composables/useReverseLineage.js b/frontend/src/query-tool/composables/useReverseLineage.js
index 2a376b2..588db38 100644
--- a/frontend/src/query-tool/composables/useReverseLineage.js
+++ b/frontend/src/query-tool/composables/useReverseLineage.js
@@ -71,6 +71,31 @@ function edgeKey(fromCid, toCid) {
return `${from}->${to}`;
}
+function collectAncestors(parentMap, startCid) {
+ const start = normalizeText(startCid);
+ if (!start) {
+ return new Set();
+ }
+
+ const visited = new Set();
+ const stack = [start];
+
+ while (stack.length > 0) {
+ const current = stack.pop();
+ const parents = Array.isArray(parentMap?.[current]) ? parentMap[current] : [];
+ parents.forEach((parentId) => {
+ const parent = normalizeText(parentId);
+ if (!parent || visited.has(parent)) {
+ return;
+ }
+ visited.add(parent);
+ stack.push(parent);
+ });
+ }
+
+ return visited;
+}
+
export function useReverseLineage(initial = {}) {
ensureMesApiAvailable();
@@ -78,6 +103,7 @@ export function useReverseLineage(initial = {}) {
const nameMap = reactive(new Map());
const nodeMetaMap = reactive(new Map());
const edgeTypeMap = reactive(new Map());
+ const graphEdges = ref([]);
const leafSerials = reactive(new Map());
const selectedContainerId = ref(normalizeText(initial.selectedContainerId));
const selectedContainerIds = ref(
@@ -227,6 +253,81 @@ export function useReverseLineage(initial = {}) {
return normalized;
}
+ function deriveDisplayRoots(candidateRoots, parentMap) {
+ const roots = uniqueValues((candidateRoots || []).map((cid) => normalizeText(cid)).filter(Boolean));
+ if (roots.length <= 1) {
+ return roots;
+ }
+
+ const candidateSet = new Set(roots);
+ const groupedRoots = [];
+ const groupsByInput = new Map();
+ const assigned = new Set();
+
+ // Keep reduction within each query input token to avoid cross-token interference.
+ rootRows.value.forEach((row) => {
+ const cid = extractContainerId(row);
+ if (!candidateSet.has(cid)) {
+ return;
+ }
+
+ const inputToken = normalizeText(row?.input_value || row?.inputValue || row?.INPUT_VALUE);
+ const key = inputToken || `__${cid}`;
+
+ if (!groupsByInput.has(key)) {
+ groupsByInput.set(key, []);
+ groupedRoots.push(groupsByInput.get(key));
+ }
+
+ const group = groupsByInput.get(key);
+ if (!group.includes(cid)) {
+ group.push(cid);
+ assigned.add(cid);
+ }
+ });
+
+ // Roots not found in rootRows still need a standalone group.
+ roots.forEach((cid) => {
+ if (assigned.has(cid)) {
+ return;
+ }
+ groupedRoots.push([cid]);
+ });
+
+ const reduced = [];
+
+ groupedRoots.forEach((group) => {
+ if (group.length <= 1) {
+ const only = group[0];
+ if (only && !reduced.includes(only)) {
+ reduced.push(only);
+ }
+ return;
+ }
+
+ const ancestorCache = new Map();
+ const getAncestors = (cid) => {
+ if (!ancestorCache.has(cid)) {
+ ancestorCache.set(cid, collectAncestors(parentMap, cid));
+ }
+ return ancestorCache.get(cid);
+ };
+
+ const kept = group.filter((cid) => !group.some((otherCid) => (
+ otherCid !== cid && getAncestors(otherCid).has(cid)
+ )));
+
+ const finalGroup = kept.length > 0 ? kept : group;
+ finalGroup.forEach((cid) => {
+ if (cid && !reduced.includes(cid)) {
+ reduced.push(cid);
+ }
+ });
+ });
+
+ return reduced;
+ }
+
function populateReverseTree(payload, requestedRoots = []) {
const parentMap = normalizeParentMap(payload);
const names = payload?.names;
@@ -256,18 +357,23 @@ export function useReverseLineage(initial = {}) {
}
edgeTypeMap.clear();
+ const normalizedEdges = [];
if (Array.isArray(typedEdges)) {
typedEdges.forEach((edge) => {
if (!edge || typeof edge !== 'object') {
return;
}
- const key = edgeKey(edge.from_cid, edge.to_cid);
+ const from = normalizeText(edge.from_cid);
+ const to = normalizeText(edge.to_cid);
+ const key = edgeKey(from, to);
const type = normalizeText(edge.edge_type);
if (key && type) {
edgeTypeMap.set(key, type);
+ normalizedEdges.push({ from_cid: from, to_cid: to, edge_type: type });
}
});
}
+ graphEdges.value = normalizedEdges;
Object.entries(parentMap).forEach(([childId, parentIds]) => {
patchEntry(childId, {
@@ -317,7 +423,7 @@ export function useReverseLineage(initial = {}) {
}
});
- treeRoots.value = roots;
+ treeRoots.value = deriveDisplayRoots(roots, parentMap);
}
async function fetchLineage(containerIds, { force = false } = {}) {
@@ -392,6 +498,7 @@ export function useReverseLineage(initial = {}) {
nameMap.clear();
nodeMetaMap.clear();
edgeTypeMap.clear();
+ graphEdges.value = [];
leafSerials.clear();
rootRows.value = [];
rootContainerIds.value = [];
@@ -416,6 +523,7 @@ export function useReverseLineage(initial = {}) {
nameMap,
nodeMetaMap,
edgeTypeMap,
+ graphEdges,
leafSerials,
selectedContainerId,
selectedContainerIds,
diff --git a/src/mes_dashboard/services/lineage_engine.py b/src/mes_dashboard/services/lineage_engine.py
index 5bed20d..a858c6d 100644
--- a/src/mes_dashboard/services/lineage_engine.py
+++ b/src/mes_dashboard/services/lineage_engine.py
@@ -90,7 +90,6 @@ def _build_parent_map(
child_to_parent: Dict[str, str],
merge_child_to_parent: Dict[str, str],
merge_source_map: Dict[str, List[str]],
- cid_to_name: Dict[str, str],
) -> tuple:
"""Build per-node direct parent lists and merge edge lists.
@@ -98,6 +97,9 @@ def _build_parent_map(
(parent_map, merge_edges) where:
- parent_map: {child_cid: [direct_parent_cids]}
- merge_edges: {child_cid: [merge_source_cids]}
+
+ Notes:
+ merge_source_map is keyed by target/child CID.
"""
parent_map: Dict[str, List[str]] = defaultdict(list)
merge_edges: Dict[str, List[str]] = defaultdict(list)
@@ -109,17 +111,18 @@ def _build_parent_map(
if parent not in parent_map[child]:
parent_map[child].append(parent)
- if merge_source_map and cid_to_name:
- name_to_cids: Dict[str, List[str]] = defaultdict(list)
- for cid, name in cid_to_name.items():
- name_to_cids[name].append(cid)
-
- for name, source_cids in merge_source_map.items():
- for owner_cid in name_to_cids.get(name, []):
- for source_cid in source_cids:
- if source_cid != owner_cid and source_cid not in parent_map[owner_cid]:
- parent_map[owner_cid].append(source_cid)
- merge_edges[owner_cid].append(source_cid)
+ if merge_source_map:
+ for owner_cid, source_cids in merge_source_map.items():
+ child = _safe_str(owner_cid)
+ if not child:
+ continue
+ for source_cid in source_cids:
+ source = _safe_str(source_cid)
+ if not source or source == child:
+ continue
+ if source not in parent_map[child]:
+ parent_map[child].append(source)
+ merge_edges[child].append(source)
return dict(parent_map), dict(merge_edges)
@@ -361,23 +364,23 @@ class LineageEngine:
@staticmethod
def resolve_merge_sources(
- container_names: List[str],
+ target_cids: List[str],
) -> Dict[str, List[str]]:
- """Resolve merge source lots from FINISHEDNAME."""
- normalized_names = _normalize_list(container_names)
- if not normalized_names:
+ """Resolve merge source lots by target LOT CID (COMBINE.LOTID)."""
+ normalized_target_cids = _normalize_list(target_cids)
+ if not normalized_target_cids:
return {}
result: Dict[str, Set[str]] = defaultdict(set)
- for i in range(0, len(normalized_names), ORACLE_IN_BATCH_SIZE):
- batch = normalized_names[i:i + ORACLE_IN_BATCH_SIZE]
+ for i in range(0, len(normalized_target_cids), ORACLE_IN_BATCH_SIZE):
+ batch = normalized_target_cids[i:i + ORACLE_IN_BATCH_SIZE]
builder = QueryBuilder()
- builder.add_in_condition("ca.FINISHEDNAME", batch)
+ builder.add_in_condition("ca.LOTID", batch)
sql = SQLLoader.load_with_params(
"lineage/merge_sources",
- FINISHED_NAME_FILTER=builder.get_conditions_sql(),
+ TARGET_CID_FILTER=builder.get_conditions_sql(),
)
df = read_sql_df(sql, builder.params)
@@ -385,16 +388,16 @@ class LineageEngine:
continue
for _, row in df.iterrows():
- finished_name = _safe_str(row.get("FINISHEDNAME"))
+ target_cid = _safe_str(row.get("FINISHED_CID"))
source_cid = _safe_str(row.get("SOURCE_CID"))
- if not finished_name or not source_cid:
+ if not target_cid or not source_cid or source_cid == target_cid:
continue
- result[finished_name].add(source_cid)
+ result[target_cid].add(source_cid)
mapped = {k: sorted(v) for k, v in result.items()}
logger.info(
- "Merge source resolution completed: finished_names=%s, mapped=%s",
- len(normalized_names),
+ "Merge source resolution completed: target_cids=%s, mapped=%s",
+ len(normalized_target_cids),
len(mapped),
)
return mapped
@@ -565,6 +568,7 @@ class LineageEngine:
for parent, children in split_children_map.items():
for child in children:
split_edges.append((parent, child, EDGE_TYPE_SPLIT))
+ split_pairs = {(parent, child) for parent, child, _ in split_edges}
# Collect all nodes in the tree
all_nodes: Set[str] = set(roots)
@@ -578,6 +582,26 @@ class LineageEngine:
# Step 4: Query serial numbers for leaf nodes
leaf_serials = LineageEngine.resolve_leaf_serials(leaf_cids) if leaf_cids else {}
+ # Step 4b: Resolve merge relations for known nodes by target CID.
+ merge_edges: List[Tuple[str, str, str]] = []
+ try:
+ merge_source_map = LineageEngine.resolve_merge_sources(list(all_nodes))
+ for target_cid, source_cids in merge_source_map.items():
+ target = _safe_str(target_cid)
+ if not target:
+ continue
+ for source_cid in source_cids:
+ source = _safe_str(source_cid)
+ if not source or source == target:
+ continue
+ if (source, target) in split_pairs:
+ continue
+ merge_edges.append((source, target, EDGE_TYPE_MERGE))
+ all_nodes.add(source)
+ all_nodes.add(target)
+ except Exception as exc:
+ logger.warning("Forward merge enrichment skipped due to merge lookup error: %s", exc)
+
# Step 5: Build semantic links (wafer origin / GD rework) and augment tree.
snapshots: Dict[str, Dict[str, Optional[str]]] = {}
semantic_edges: List[Tuple[str, str, str]] = []
@@ -614,15 +638,16 @@ class LineageEngine:
roots = sorted([cid for cid in all_nodes if cid not in incoming])
typed_nodes = LineageEngine._build_nodes_payload(all_nodes, snapshots, cid_to_name, wafer_ids)
- typed_edges = _to_edge_payload(split_edges + semantic_edges)
+ typed_edges = _to_edge_payload(split_edges + merge_edges + semantic_edges)
logger.info(
- "Forward tree resolution completed: seeds=%s, roots=%s, nodes=%s, leaves=%s, serials=%s, semantic_edges=%s",
+ "Forward tree resolution completed: seeds=%s, roots=%s, nodes=%s, leaves=%s, serials=%s, merge_edges=%s, semantic_edges=%s",
len(seed_cids),
len(roots),
len(all_nodes),
len(leaf_cids),
len(leaf_serials),
+ len(merge_edges),
len(semantic_edges),
)
@@ -686,18 +711,25 @@ class LineageEngine:
if _safe_str(parent) and _safe_str(child)
]
- all_names = [name for name in cid_to_name.values() if _safe_str(name)]
- merge_source_map = LineageEngine.resolve_merge_sources(all_names)
+ merge_lookup_targets = sorted(
+ {
+ cid
+ for cid in (
+ list(seed_cids)
+ + list(child_to_parent.keys())
+ + list(child_to_parent.values())
+ )
+ if _safe_str(cid)
+ }
+ )
+ merge_source_map = LineageEngine.resolve_merge_sources(merge_lookup_targets)
merge_child_to_parent: Dict[str, str] = {}
merge_source_cids_all: Set[str] = set()
if merge_source_map:
for seed in seed_cids:
self_and_ancestors = ancestors[seed] | {seed}
for cid in list(self_and_ancestors):
- name = cid_to_name.get(cid)
- if not name:
- continue
- for source_cid in merge_source_map.get(name, []):
+ for source_cid in merge_source_map.get(cid, []):
if source_cid == cid or source_cid in self_and_ancestors:
continue
ancestors[seed].add(source_cid)
@@ -722,7 +754,7 @@ class LineageEngine:
ancestors[seed].add(parent)
current = parent
- pm, me = _build_parent_map(child_to_parent, merge_child_to_parent, merge_source_map, cid_to_name)
+ pm, me = _build_parent_map(child_to_parent, merge_child_to_parent, merge_source_map)
for child, parent in merge_child_to_parent.items():
if _safe_str(parent) and _safe_str(child):
diff --git a/src/mes_dashboard/services/query_tool_service.py b/src/mes_dashboard/services/query_tool_service.py
index e75829a..1a9a8db 100644
--- a/src/mes_dashboard/services/query_tool_service.py
+++ b/src/mes_dashboard/services/query_tool_service.py
@@ -503,46 +503,126 @@ def _resolve_by_gd_lot_id(gd_lot_ids: List[str]) -> Dict[str, Any]:
def _resolve_by_serial_number(serial_numbers: List[str]) -> Dict[str, Any]:
- """Resolve serial numbers (FINISHEDNAME) to CONTAINERID.
-
- Note: One serial number may map to multiple CONTAINERIDs.
-
- Args:
- serial_numbers: List of serial numbers
-
- Returns:
- Resolution result dict.
- """
- builder = QueryBuilder()
- _add_exact_or_pattern_condition(builder, "p.FINISHEDNAME", serial_numbers)
- sql = SQLLoader.load_with_params(
- "query_tool/lot_resolve_serial",
- SERIAL_FILTER=builder.get_conditions_sql(),
+ """Resolve serial-related inputs to CONTAINERID.
+
+ Matching sources (in priority order):
+ 1. DW_MES_PJ_COMBINEDASSYLOTS.FINISHEDNAME (new serial path)
+ 2. DW_MES_CONTAINER.CONTAINERNAME (old serial / lot-id style inputs)
+ 3. DW_MES_CONTAINER.FIRSTNAME (bridge from serial to related lots)
+ """
+ tokens = _normalize_search_tokens(serial_numbers)
+ if not tokens:
+ return {
+ 'data': [],
+ 'total': 0,
+ 'input_count': 0,
+ 'not_found': [],
+ 'expansion_info': {},
+ }
+
+ source_configs = [
+ {
+ 'name': 'finished_name',
+ 'priority': 0,
+ 'sql_name': 'query_tool/lot_resolve_serial',
+ 'filter_key': 'SERIAL_FILTER',
+ 'filter_column': 'p.FINISHEDNAME',
+ 'match_key': 'FINISHEDNAME',
+ 'extra_conditions': [],
+ },
+ {
+ 'name': 'container_name',
+ 'priority': 1,
+ 'sql_name': 'query_tool/lot_resolve_id',
+ 'filter_key': 'CONTAINER_FILTER',
+ 'filter_column': 'CONTAINERNAME',
+ 'match_key': 'CONTAINERNAME',
+ 'extra_conditions': ["OBJECTTYPE = 'LOT'"],
+ },
+ {
+ 'name': 'first_name',
+ 'priority': 2,
+ 'sql_name': 'query_tool/lot_resolve_wafer_lot',
+ 'filter_key': 'WAFER_FILTER',
+ 'filter_column': 'FIRSTNAME',
+ 'match_key': 'FIRSTNAME',
+ 'extra_conditions': ["OBJECTTYPE = 'LOT'"],
+ },
+ ]
+
+ best_match_by_key: Dict[Tuple[str, str], Dict[str, Any]] = {}
+
+ for config in source_configs:
+ builder = QueryBuilder()
+ _add_exact_or_pattern_condition(builder, config['filter_column'], tokens)
+ for cond in config['extra_conditions']:
+ builder.add_condition(cond)
+
+ if not builder.conditions:
+ continue
+
+ sql = SQLLoader.load_with_params(
+ config['sql_name'],
+ **{config['filter_key']: builder.get_conditions_sql()},
+ )
+ df = read_sql_df(sql, builder.params)
+ data = _df_to_records(df)
+ matched, _, _ = _match_rows_by_tokens(
+ tokens,
+ data,
+ row_key=config['match_key'],
+ )
+
+ for row in matched:
+ input_value = str(row.get('input_value') or '').strip()
+ cid = str(row.get('CONTAINERID') or '').strip()
+ if not input_value or not cid:
+ continue
+
+ candidate = {
+ 'container_id': cid,
+ 'lot_id': row.get('CONTAINERNAME') or cid,
+ 'input_value': input_value,
+ 'spec_name': row.get('SPECNAME'),
+ 'match_source': config['name'],
+ '_priority': config['priority'],
+ }
+ key = (input_value, cid)
+ existing = best_match_by_key.get(key)
+ if existing is None or candidate['_priority'] < existing['_priority']:
+ best_match_by_key[key] = candidate
+
+ grouped_by_input: Dict[str, List[Dict[str, Any]]] = {}
+ for item in best_match_by_key.values():
+ grouped_by_input.setdefault(item['input_value'], []).append(item)
+
+ results: List[Dict[str, Any]] = []
+ not_found: List[str] = []
+ expansion_info: Dict[str, int] = {}
+
+ for token in tokens:
+ rows = grouped_by_input.get(token, [])
+ rows.sort(key=lambda row: (row.get('_priority', 999), str(row.get('lot_id') or '')))
+ if not rows:
+ not_found.append(token)
+ continue
+
+ expansion_info[token] = len(rows)
+ for row in rows:
+ row.pop('_priority', None)
+ results.append(row)
+
+ logger.info(
+ "Serial number resolution: %s containers from %s inputs (not_found=%s)",
+ len(results),
+ len(tokens),
+ len(not_found),
)
- df = read_sql_df(sql, builder.params)
- data = _df_to_records(df)
- matched, not_found, expansion_info = _match_rows_by_tokens(
- serial_numbers,
- data,
- row_key='FINISHEDNAME',
- )
-
- results = []
- for row in matched:
- results.append({
- 'container_id': row.get('CONTAINERID'),
- 'lot_id': row.get('CONTAINERNAME'),
- 'input_value': row.get('input_value'),
- 'spec_name': row.get('SPECNAME'),
- })
-
- logger.info(f"Serial number resolution: {len(results)} containers from {len(serial_numbers)} inputs")
-
return {
'data': results,
'total': len(results),
- 'input_count': len(serial_numbers),
+ 'input_count': len(tokens),
'not_found': not_found,
'expansion_info': expansion_info,
}
diff --git a/src/mes_dashboard/sql/lineage/merge_sources.sql b/src/mes_dashboard/sql/lineage/merge_sources.sql
index 5662303..d32b788 100644
--- a/src/mes_dashboard/sql/lineage/merge_sources.sql
+++ b/src/mes_dashboard/sql/lineage/merge_sources.sql
@@ -1,8 +1,8 @@
-- Unified LineageEngine - Merge Sources
--- Find source lots merged into finished lots from DW_MES_PJ_COMBINEDASSYLOTS.
+-- Find source lots merged into target LOT CIDs from DW_MES_PJ_COMBINEDASSYLOTS.
--
-- Parameters:
--- FINISHED_NAME_FILTER - QueryBuilder-generated condition on ca.FINISHEDNAME
+-- TARGET_CID_FILTER - QueryBuilder-generated condition on ca.LOTID
--
SELECT
ca.CONTAINERID AS SOURCE_CID,
@@ -10,4 +10,4 @@ SELECT
ca.FINISHEDNAME,
ca.LOTID AS FINISHED_CID
FROM DWH.DW_MES_PJ_COMBINEDASSYLOTS ca
-WHERE {{ FINISHED_NAME_FILTER }}
+WHERE {{ TARGET_CID_FILTER }}
diff --git a/tests/test_lineage_engine.py b/tests/test_lineage_engine.py
index 6fbc0b6..a6270fd 100644
--- a/tests/test_lineage_engine.py
+++ b/tests/test_lineage_engine.py
@@ -67,35 +67,35 @@ def test_resolve_split_ancestors_batches_and_enforces_max_depth(mock_read_sql_df
@patch("mes_dashboard.services.lineage_engine.read_sql_df")
def test_resolve_merge_sources_batches_and_returns_mapping(mock_read_sql_df):
- names = [f"FN{i:04d}" for i in range(1001)]
+ target_cids = [f"T{i:04d}" for i in range(1001)]
mock_read_sql_df.side_effect = [
pd.DataFrame(
[
- {"FINISHEDNAME": "FN0000", "SOURCE_CID": "SRC-A"},
- {"FINISHEDNAME": "FN0000", "SOURCE_CID": "SRC-B"},
+ {"FINISHED_CID": "T0000", "SOURCE_CID": "SRC-A"},
+ {"FINISHED_CID": "T0000", "SOURCE_CID": "SRC-B"},
]
),
pd.DataFrame(
[
- {"FINISHEDNAME": "FN1000", "SOURCE_CID": "SRC-C"},
- {"FINISHEDNAME": "FN1000", "SOURCE_CID": "SRC-C"},
- {"FINISHEDNAME": None, "SOURCE_CID": "SRC-INVALID"},
+ {"FINISHED_CID": "T1000", "SOURCE_CID": "SRC-C"},
+ {"FINISHED_CID": "T1000", "SOURCE_CID": "SRC-C"},
+ {"FINISHED_CID": None, "SOURCE_CID": "SRC-INVALID"},
]
),
]
- result = LineageEngine.resolve_merge_sources(names)
+ result = LineageEngine.resolve_merge_sources(target_cids)
assert mock_read_sql_df.call_count == 2
first_sql, first_params = mock_read_sql_df.call_args_list[0].args
second_sql, second_params = mock_read_sql_df.call_args_list[1].args
- assert "{{ FINISHED_NAME_FILTER }}" not in first_sql
- assert "{{ FINISHED_NAME_FILTER }}" not in second_sql
+ assert "{{ TARGET_CID_FILTER }}" not in first_sql
+ assert "{{ TARGET_CID_FILTER }}" not in second_sql
assert len(first_params) == 1000
assert len(second_params) == 1
- assert result["FN0000"] == ["SRC-A", "SRC-B"]
- assert result["FN1000"] == ["SRC-C"]
+ assert result["T0000"] == ["SRC-A", "SRC-B"]
+ assert result["T1000"] == ["SRC-C"]
@patch("mes_dashboard.services.lineage_engine.LineageEngine.resolve_merge_sources")
@@ -126,7 +126,7 @@ def test_resolve_full_genealogy_combines_split_and_merge(
},
},
]
- mock_resolve_merge_sources.return_value = {"LOT-B": ["M1"]}
+ mock_resolve_merge_sources.return_value = {"B": ["M1"]}
result = LineageEngine.resolve_full_genealogy(["A"], {"A": "LOT-A"})
diff --git a/tests/test_query_tool_service.py b/tests/test_query_tool_service.py
index 0eb14e1..494f4c0 100644
--- a/tests/test_query_tool_service.py
+++ b/tests/test_query_tool_service.py
@@ -281,28 +281,68 @@ class TestResolveQueriesUseBindParams:
assert "OBJECTTYPE = 'LOT'" in sql_params['WAFER_FILTER']
def test_resolve_by_serial_number_uses_query_builder_params(self):
- from unittest.mock import patch
- import pandas as pd
-
- with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
- with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
- mock_load.return_value = "SELECT * FROM DUAL"
- mock_read.return_value = pd.DataFrame([
- {
- 'CONTAINERID': 'CID-1',
- 'FINISHEDNAME': 'SN-1',
- 'CONTAINERNAME': 'LOT-1',
- 'SPECNAME': 'SPEC-1',
- }
- ])
-
- result = _resolve_by_serial_number(['SN-1'])
-
- assert result['total'] == 1
- sql_params = mock_load.call_args.kwargs
- assert ':p0' in sql_params['SERIAL_FILTER']
- _, query_params = mock_read.call_args.args
- assert query_params == {'p0': 'SN-1'}
+ from unittest.mock import patch
+ import pandas as pd
+
+ with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
+ with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
+ mock_load.side_effect = [
+ "SELECT * FROM COMBINE",
+ "SELECT * FROM CONTAINER_NAME",
+ "SELECT * FROM FIRSTNAME",
+ ]
+ mock_read.side_effect = [
+ pd.DataFrame([
+ {
+ 'CONTAINERID': 'CID-FIN',
+ 'FINISHEDNAME': 'SN-1',
+ 'CONTAINERNAME': 'LOT-FIN',
+ 'SPECNAME': 'SPEC-1',
+ }
+ ]),
+ pd.DataFrame([
+ {
+ 'CONTAINERID': 'CID-NAME',
+ 'CONTAINERNAME': 'SN-1',
+ 'SPECNAME': 'SPEC-2',
+ 'MFGORDERNAME': None,
+ 'QTY': 1,
+ }
+ ]),
+ pd.DataFrame([
+ {
+ 'CONTAINERID': 'CID-FIRST',
+ 'CONTAINERNAME': 'GD25000001-A01',
+ 'FIRSTNAME': 'SN-1',
+ 'SPECNAME': 'SPEC-3',
+ 'QTY': 1,
+ }
+ ]),
+ ]
+
+ result = _resolve_by_serial_number(['SN-1'])
+
+ assert result['total'] == 3
+ assert {row['match_source'] for row in result['data']} == {
+ 'finished_name',
+ 'container_name',
+ 'first_name',
+ }
+
+ assert [call.args[0] for call in mock_load.call_args_list] == [
+ 'query_tool/lot_resolve_serial',
+ 'query_tool/lot_resolve_id',
+ 'query_tool/lot_resolve_wafer_lot',
+ ]
+ assert ':p0' in mock_load.call_args_list[0].kwargs['SERIAL_FILTER']
+ assert ':p0' in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
+ assert ':p0' in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
+ assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
+ assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
+
+ assert mock_read.call_args_list[0].args[1] == {'p0': 'SN-1'}
+ assert mock_read.call_args_list[1].args[1] == {'p0': 'SN-1'}
+ assert mock_read.call_args_list[2].args[1] == {'p0': 'SN-1'}
def test_resolve_by_work_order_uses_query_builder_params(self):
from unittest.mock import patch