feat(query-tool): align lineage merge semantics and add tree exports

This commit is contained in:
egg
2026-02-23 07:07:36 +08:00
parent 3dc7886c90
commit 57a0b780b1
11 changed files with 789 additions and 132 deletions

View File

@@ -481,6 +481,7 @@ watch(
:name-map="lotLineage.nameMap"
:node-meta-map="lotLineage.nodeMetaMap"
:edge-type-map="lotLineage.edgeTypeMap"
:graph-edges="lotLineage.graphEdges.value"
:leaf-serials="lotLineage.leafSerials"
:lineage-loading="lotLineage.lineageLoading.value"
:selected-container-ids="lotLineage.selectedContainerIds.value"
@@ -520,6 +521,7 @@ watch(
:name-map="reverseLineage.nameMap"
:node-meta-map="reverseLineage.nodeMetaMap"
:edge-type-map="reverseLineage.edgeTypeMap"
:graph-edges="reverseLineage.graphEdges.value"
:leaf-serials="reverseLineage.leafSerials"
:lineage-loading="reverseLineage.lineageLoading.value"
:selected-container-ids="reverseLineage.selectedContainerIds.value"

View File

@@ -1,5 +1,5 @@
<script setup>
import { computed, ref } from 'vue';
import { computed, nextTick, ref } from 'vue';
import VChart from 'vue-echarts';
import { use } from 'echarts/core';
@@ -7,6 +7,7 @@ import { CanvasRenderer } from 'echarts/renderers';
import { TreeChart } from 'echarts/charts';
import { TooltipComponent } from 'echarts/components';
import ExportButton from './ExportButton.vue';
import { normalizeText } from '../utils/values.js';
use([CanvasRenderer, TreeChart, TooltipComponent]);
@@ -30,6 +31,20 @@ const EDGE_STYLES = Object.freeze({
default: { color: '#CBD5E1', type: 'solid', width: 1.5 },
});
const EDGE_TAGS = Object.freeze({
split_from: { forward: '←拆', reverse: '→拆' },
merge_source: { forward: '←併', reverse: '→併' },
wafer_origin: { forward: '←晶', reverse: '→晶' },
gd_rework_source: { forward: '←重', reverse: '→重' },
});
const RELATION_TYPE_LABELS = Object.freeze({
split_from: '拆批',
merge_source: '併批',
wafer_origin: '晶圓來源',
gd_rework_source: '重工來源',
});
const LABEL_BASE_STYLE = Object.freeze({
backgroundColor: 'rgba(255,255,255,0.92)',
borderRadius: 3,
@@ -57,6 +72,10 @@ const props = defineProps({
type: Object,
default: () => new Map(),
},
graphEdges: {
type: Array,
default: () => [],
},
leafSerials: {
type: Object,
default: () => new Map(),
@@ -92,6 +111,10 @@ const props = defineProps({
});
const emit = defineEmits(['select-nodes']);
const chartRef = ref(null);
const exportingTreeImage = ref(false);
const exportingRelationCsv = ref(false);
const exportErrorMessage = ref('');
const selectedSet = computed(() => new Set(props.selectedContainerIds.map(normalizeText).filter(Boolean)));
@@ -109,6 +132,47 @@ const allSerialNames = computed(() => {
return names;
});
// Deduplicated, display-ready relation rows derived from the raw graph edges.
// Each row carries resolved display names and a localized relation label.
const relationRows = computed(() => {
  const edges = Array.isArray(props.graphEdges) ? props.graphEdges : [];
  const rowByKey = new Map();
  for (const edge of edges) {
    if (!edge || typeof edge !== 'object') {
      continue;
    }
    const fromCid = normalizeText(edge.from_cid);
    const toCid = normalizeText(edge.to_cid);
    const edgeType = normalizeText(edge.edge_type);
    if (!fromCid || !toCid || !edgeType) {
      continue;
    }
    const key = `${fromCid}->${toCid}:${edgeType}`;
    if (rowByKey.has(key)) {
      continue;
    }
    rowByKey.set(key, {
      key,
      fromCid,
      toCid,
      fromName: normalizeText(props.nameMap?.get?.(fromCid) || fromCid),
      toName: normalizeText(props.nameMap?.get?.(toCid) || toCid),
      edgeType,
      edgeLabel: RELATION_TYPE_LABELS[edgeType] || edgeType,
    });
  }
  // Stable zh-Hant ordering: relation label, then source name, then target name.
  const rows = [...rowByKey.values()];
  rows.sort((a, b) => (
    a.edgeLabel.localeCompare(b.edgeLabel, 'zh-Hant')
    || a.fromName.localeCompare(b.fromName, 'zh-Hant')
    || a.toName.localeCompare(b.toName, 'zh-Hant')
  ));
  return rows;
});
function detectNodeType(cid, entry, serials) {
const explicitType = normalizeText(props.nodeMetaMap?.get?.(cid)?.node_type).toUpperCase();
if (explicitType === 'WAFER') {
@@ -137,17 +201,62 @@ function detectNodeType(cid, entry, serials) {
return 'branch';
}
/**
 * Look up the typed edge between a tree parent and child.
 *
 * Tries the forward key `parent->child` first, then the reversed key
 * `child->parent`; `reversed` tells the caller which direction matched so
 * relation tags and sentences can be phrased in the right direction.
 *
 * @param {string} parentCid - CID of the parent node in the rendered tree.
 * @param {string} childCid - CID of the child node in the rendered tree.
 * @returns {{edgeType: string, reversed: boolean}} edgeType is '' when no
 *   typed edge exists between the pair.
 */
function lookupEdgeMeta(parentCid, childCid) {
  const parent = normalizeText(parentCid);
  const child = normalizeText(childCid);
  if (!parent || !child) {
    return { edgeType: '', reversed: false };
  }
  const direct = normalizeText(props.edgeTypeMap?.get?.(`${parent}->${child}`));
  if (direct) {
    return { edgeType: direct, reversed: false };
  }
  const reverse = normalizeText(props.edgeTypeMap?.get?.(`${child}->${parent}`));
  if (reverse) {
    return { edgeType: reverse, reversed: true };
  }
  return { edgeType: '', reversed: false };
}
// Short arrow tag for an edge (e.g. "←拆"); empty string for unknown types.
function relationTag(edgeType, reversed) {
  const entry = EDGE_TAGS[normalizeText(edgeType)];
  if (!entry) {
    return '';
  }
  const { forward, reverse } = entry;
  return reversed ? reverse : forward;
}
/**
 * Build a human-readable zh-Hant sentence describing one lineage edge for
 * the tooltip ("讀法" line). `reversed` flips which node is subject/object,
 * mirroring the direction resolved by lookupEdgeMeta.
 *
 * Returns '' when the edge type or either name is missing.
 *
 * NOTE(review): the merge/rework/fallback branches concatenate
 * `${left}${current}` with no connective between the interpolations —
 * possibly characters were lost in transit; confirm the intended copy.
 */
function relationSentence({ edgeType, reversed, leftName, currentName }) {
  const left = normalizeText(leftName);
  const current = normalizeText(currentName);
  if (!edgeType || !left || !current) {
    return '';
  }
  if (edgeType === 'split_from') {
    return reversed
      ? `${left} 拆自 ${current}`
      : `${current} 拆自 ${left}`;
  }
  if (edgeType === 'merge_source') {
    return reversed
      ? `${left}${current} 併批而來`
      : `${current}${left} 併批而來`;
  }
  if (edgeType === 'wafer_origin') {
    return reversed
      ? `${left} 對應 Wafer ${current}`
      : `${current} 源自 Wafer ${left}`;
  }
  if (edgeType === 'gd_rework_source') {
    return reversed
      ? `${left}${current} 重工而來`
      : `${current}${left} 重工而來`;
  }
  // Unknown edge types fall back to a generic pairing with the raw type code.
  return reversed
    ? `${left}${current}${edgeType}`
    : `${current}${left}${edgeType}`;
}
function buildNode(cid, visited, parentCid = '') {
@@ -194,12 +303,24 @@ function buildNode(cid, visited, parentCid = '') {
&& allSerialNames.value.has(name);
const effectiveType = isSerialLike ? 'serial' : nodeType;
const color = NODE_COLORS[effectiveType] || NODE_COLORS.branch;
const incomingEdgeType = lookupEdgeType(parentCid, id);
const incomingMeta = lookupEdgeMeta(parentCid, id);
const incomingEdgeType = incomingMeta.edgeType;
const incomingEdgeReversed = incomingMeta.reversed;
const incomingEdgeStyle = EDGE_STYLES[incomingEdgeType] || EDGE_STYLES.default;
const parentName = normalizeText(props.nameMap?.get?.(normalizeText(parentCid)) || parentCid);
const shortTag = relationTag(incomingEdgeType, incomingEdgeReversed);
const displayLabel = shortTag ? `${shortTag} ${name}` : name;
return {
name,
value: { cid: id, type: effectiveType, edgeType: incomingEdgeType || '' },
value: {
cid: id,
type: effectiveType,
edgeType: incomingEdgeType || '',
edgeReversed: incomingEdgeReversed,
parentName,
relationTag: shortTag,
},
children,
itemStyle: {
color,
@@ -213,6 +334,7 @@ function buildNode(cid, visited, parentCid = '') {
fontWeight: isSelected ? 'bold' : 'normal',
fontSize: isSerialLike ? 10 : 11,
color: isSelected ? '#1E3A8A' : (isSerialLike ? '#64748B' : '#334155'),
formatter: () => displayLabel,
},
symbol: isSerialLike ? 'diamond' : (nodeType === 'root' ? 'roundRect' : 'circle'),
symbolSize: isSerialLike ? 6 : (nodeType === 'root' ? 14 : 10),
@@ -226,9 +348,8 @@ const treesData = computed(() => {
return [];
}
const globalVisited = new Set();
return props.treeRoots
.map((rootId) => buildNode(rootId, globalVisited))
.map((rootId) => buildNode(rootId, new Set()))
.filter(Boolean);
});
@@ -247,6 +368,73 @@ const chartHeight = computed(() => {
return `${base}px`;
});
// Count Unicode code points (not UTF-16 units) so CJK/astral characters
// contribute one unit each to label-width estimates.
function countGraphemes(text) {
  return [...normalizeText(text)].length;
}
/**
 * Depth-first scan of one ECharts tree that updates `metrics` in place:
 * - metrics.maxDepth: deepest level reached (root = 1)
 * - metrics.maxLabelChars: widest rendered label, in code points
 *
 * The measured label mirrors what the chart draws: an optional relation-tag
 * prefix followed by the node name. (Renamed the local that previously
 * shadowed the module-level relationTag() helper.)
 *
 * @param {object} node - tree node built by buildNode().
 * @param {number} depth - 1-based depth of `node`.
 * @param {{maxDepth: number, maxLabelChars: number}} metrics - accumulator.
 */
function walkTreeMetrics(node, depth, metrics) {
  if (!node || typeof node !== 'object') {
    return;
  }
  metrics.maxDepth = Math.max(metrics.maxDepth, depth);
  const tagText = normalizeText(node?.value?.relationTag);
  const nameText = normalizeText(node.name);
  const labelText = tagText ? `${tagText} ${nameText}` : nameText;
  metrics.maxLabelChars = Math.max(metrics.maxLabelChars, countGraphemes(labelText));
  const children = Array.isArray(node.children) ? node.children : [];
  children.forEach((child) => walkTreeMetrics(child, depth + 1, metrics));
}
// Aggregate layout metrics across every rendered tree: the deepest level and
// the longest label (in code points) drive label width / depth spacing below.
const treeMetrics = computed(() => {
  const metrics = {
    maxDepth: 1,
    // Floor of 12 chars so narrow trees still get a readable label box
    // (walkTreeMetrics only ever raises these via Math.max).
    maxLabelChars: 12,
  };
  treesData.value.forEach((tree) => walkTreeMetrics(tree, 1, metrics));
  return metrics;
});
// Constrain `value` to the inclusive range [min, max].
function clampNumber(value, min, max) {
  const capped = Math.min(max, value);
  return Math.max(min, capped);
}
// Pixel width reserved for node labels (~7px per code point + padding),
// clamped so tiny and very long lot names both stay usable.
const labelWidthPx = computed(() => clampNumber(
  treeMetrics.value.maxLabelChars * 7 + 14,
  120,
  360,
));
// Horizontal distance between tree depth levels; widened slightly when
// labels are long so adjacent columns do not collide.
const depthSpacingPx = computed(() => clampNumber(
  88 + Math.round(treeMetrics.value.maxLabelChars * 1.4),
  96,
  132,
));
// Left gutter sized to the widest root name (assumes at least 8 chars).
const rootLabelWidthPx = computed(() => {
  const maxChars = props.treeRoots.reduce((max, rootCid) => {
    const rootId = normalizeText(rootCid);
    const rootName = normalizeText(props.nameMap?.get?.(rootId) || rootId);
    return Math.max(max, countGraphemes(rootName));
  }, 8);
  return clampNumber(maxChars * 7 + 24, 72, 260);
});
// Overall chart geometry: left/right gutters plus per-depth spacing determine
// the minimum canvas width (clamped to 760-3000px); the template's
// overflow-x-auto container scrolls when the viewport is narrower.
const chartLayout = computed(() => {
  const left = rootLabelWidthPx.value;
  const right = labelWidthPx.value + 18;
  const depthSpacing = depthSpacingPx.value;
  const depthCount = Math.max(1, treeMetrics.value.maxDepth - 1);
  const requiredWidth = left + right + (depthCount * depthSpacing) + 120;
  const minWidth = clampNumber(requiredWidth, 760, 3000);
  return { left, right, minWidth };
});
// CSS min-width string bound on the VChart element in the template.
const chartMinWidth = computed(() => `${chartLayout.value.minWidth}px`);
const TREE_SERIES_DEFAULTS = Object.freeze({
type: 'tree',
layout: 'orthogonal',
@@ -262,9 +450,7 @@ const TREE_SERIES_DEFAULTS = Object.freeze({
distance: 6,
fontSize: 11,
color: '#334155',
overflow: 'truncate',
ellipsis: '…',
width: 160,
overflow: 'break',
...LABEL_BASE_STYLE,
},
lineStyle: {
@@ -315,7 +501,17 @@ const chartOption = computed(() => {
lines.push('<span style="color:#10B981">中間節點</span>');
}
if (val.edgeType) {
lines.push(`<span style="color:#94A3B8;font-size:11px">關係: ${val.edgeType}</span>`);
const sentence = relationSentence({
edgeType: val.edgeType,
reversed: Boolean(val.edgeReversed),
leftName: val.parentName,
currentName: data.name,
});
if (sentence) {
lines.push(`<span style="color:#0F172A;font-size:11px">讀法: ${sentence}</span>`);
}
const directionTag = val.relationTag ? `${val.relationTag}` : '';
lines.push(`<span style="color:#94A3B8;font-size:11px">關係型別: ${val.edgeType}${directionTag}</span>`);
}
if (val.cid && val.cid !== data.name) {
lines.push(`<span style="color:#94A3B8;font-size:11px">CID: ${val.cid}</span>`);
@@ -330,10 +526,14 @@ const chartOption = computed(() => {
tooltip,
series: [{
...TREE_SERIES_DEFAULTS,
left: 40,
right: 180,
left: chartLayout.value.left,
right: chartLayout.value.right,
top: 20,
bottom: 20,
label: {
...TREE_SERIES_DEFAULTS.label,
width: labelWidthPx.value,
},
data: [trees[0]],
}],
};
@@ -355,10 +555,14 @@ const chartOption = computed(() => {
return {
...TREE_SERIES_DEFAULTS,
left: 40,
right: 180,
left: chartLayout.value.left,
right: chartLayout.value.right,
top: `${topPercent}%`,
height: `${heightPercent}%`,
label: {
...TREE_SERIES_DEFAULTS.label,
width: labelWidthPx.value,
},
data: [tree],
};
});
@@ -384,6 +588,122 @@ function handleNodeClick(params) {
}
emit('select-nodes', [...current]);
}
// Build "<sanitized title>_<yyyyMMddHHmmss>.<ext>"; filesystem-unsafe
// characters become '-' and whitespace runs become '_'.
function buildExportFileName(ext = 'png') {
  const now = new Date();
  const pad = (num, width = 2) => String(num).padStart(width, '0');
  const ts = pad(now.getFullYear(), 4)
    + pad(now.getMonth() + 1)
    + pad(now.getDate())
    + pad(now.getHours())
    + pad(now.getMinutes())
    + pad(now.getSeconds());
  const base = (normalizeText(props.title) || 'lineage_tree')
    .replace(/[\\/:*?"<>|]/g, '-')
    .replace(/\s+/g, '_');
  return `${base}_${ts}.${ext}`;
}
// Programmatic browser download: mount a temporary <a download> element,
// click it, then remove it from the DOM.
function triggerDownloadByUrl(url, filename) {
  const anchor = document.createElement('a');
  anchor.href = url;
  anchor.download = filename;
  anchor.rel = 'noopener';
  const { body } = document;
  body.appendChild(anchor);
  anchor.click();
  body.removeChild(anchor);
}
// Resolve the raw ECharts instance from the vue-echarts component ref,
// supporting both the getEchartsInstance() API and the legacy .chart field.
function getChartInstance() {
  const component = chartRef.value;
  if (!component) {
    return null;
  }
  return typeof component.getEchartsInstance === 'function'
    ? component.getEchartsInstance()
    : (component.chart || null);
}
// RFC 4180-style CSV quoting: wrap the field and double embedded quotes
// only when it contains a quote, comma, or line break.
function escapeCsvField(value) {
  const text = normalizeText(value);
  const needsQuoting = text !== '' && /[",\n\r]/.test(text);
  if (!needsQuoting) {
    return text;
  }
  const doubled = text.replaceAll('"', '""');
  return `"${doubled}"`;
}
// Serialize relationRows to CSV text: UTF-8 BOM prefix and CRLF row breaks
// so Excel opens the file with correct encoding.
function buildCsvContent() {
  const headers = ['來源批次', '來源CID', '目標批次', '目標CID', '關係', '關係代碼'];
  const dataLines = relationRows.value.map((row) => [
    row.fromName,
    row.fromCid,
    row.toName,
    row.toCid,
    row.edgeLabel,
    row.edgeType,
  ].map(escapeCsvField).join(','));
  const allLines = [headers.join(','), ...dataLines];
  return `\uFEFF${allLines.join('\r\n')}`;
}
// Render the current lineage tree to a hi-DPI PNG and trigger a download.
// Guards against re-entry while a previous export is still in flight.
async function exportTreeAsPng() {
  if (!hasData.value || exportingTreeImage.value) {
    return;
  }
  exportingTreeImage.value = true;
  exportErrorMessage.value = '';
  try {
    // Let pending reactive updates flush so the chart reflects latest props.
    await nextTick();
    const instance = getChartInstance();
    if (!instance || typeof instance.getDataURL !== 'function') {
      throw new Error('無法取得樹圖實例');
    }
    const ratio = Math.max(2, Math.min(4, window.devicePixelRatio || 2));
    const dataUrl = instance.getDataURL({
      type: 'png',
      pixelRatio: ratio,
      backgroundColor: '#FFFFFF',
    });
    triggerDownloadByUrl(dataUrl, buildExportFileName('png'));
  } catch (error) {
    exportErrorMessage.value = error?.message || '樹圖匯出失敗';
  } finally {
    exportingTreeImage.value = false;
  }
}
/**
 * Export the deduplicated relation rows as a UTF-8 (BOM) CSV download.
 * No-op when there is no data, no rows, or an export is already running.
 *
 * Fixes vs previous version: the object URL is now revoked in `finally`
 * (previously leaked if triggerDownloadByUrl threw), and revocation is
 * deferred one tick so the browser can start the download before the URL
 * becomes invalid.
 */
function exportRelationCsv() {
  if (!hasData.value || exportingRelationCsv.value || relationRows.value.length === 0) {
    return;
  }
  exportingRelationCsv.value = true;
  exportErrorMessage.value = '';
  let href = '';
  try {
    const csv = buildCsvContent();
    const blob = new Blob([csv], { type: 'text/csv;charset=utf-8;' });
    href = URL.createObjectURL(blob);
    triggerDownloadByUrl(href, buildExportFileName('csv'));
  } catch (error) {
    exportErrorMessage.value = error?.message || '關係 CSV 匯出失敗';
  } finally {
    if (href) {
      // Defer revocation so the click-initiated download can begin first.
      setTimeout(() => URL.revokeObjectURL(href), 0);
    }
    exportingRelationCsv.value = false;
  }
}
</script>
<template>
@@ -392,9 +712,25 @@ function handleNodeClick(params) {
<div>
<h3 class="text-sm font-semibold text-slate-800">{{ title }}</h3>
<p class="text-xs text-slate-500">{{ description }}</p>
<p class="text-[11px] text-slate-500">
讀圖方向由左至右;節點前綴 <code>←拆/←併/←晶/←重</code> 代表本節點由左側來源而來,
<code>→拆/→併/→晶/→重</code> 代表左側節點由本節點而來。
</p>
</div>
<div class="flex items-center gap-3">
<ExportButton
:disabled="!hasData || loading"
:loading="exportingTreeImage"
label="匯出樹圖 PNG"
@click="exportTreeAsPng"
/>
<ExportButton
:disabled="!hasData || loading || relationRows.length === 0"
:loading="exportingRelationCsv"
label="匯出關係 CSV"
@click="exportRelationCsv"
/>
<div class="flex items-center gap-2 text-[10px] text-slate-500">
<span class="inline-flex items-center gap-1">
<span class="inline-block size-2.5 rounded-sm" :style="{ background: NODE_COLORS.wafer }" />
@@ -422,23 +758,26 @@ function handleNodeClick(params) {
</span>
<span class="inline-flex items-center gap-1">
<span class="inline-block h-0.5 w-3 bg-slate-300" />
split
split(拆批)
</span>
<span class="inline-flex items-center gap-1">
<span class="inline-block h-0.5 w-3 border-t-2 border-dashed border-amber-500" />
merge
merge(併批)
</span>
<span class="inline-flex items-center gap-1">
<span class="inline-block h-0.5 w-3 border-t-2 border-dotted border-blue-600" />
wafer
wafer(晶圓來源)
</span>
<span class="inline-flex items-center gap-1">
<span class="inline-block h-0.5 w-3 border-t-2 border-dashed border-red-500" />
gd-rework
gd-rework(重工來源)
</span>
</div>
</div>
</div>
<p v-if="exportErrorMessage" class="mb-2 rounded border border-rose-200 bg-rose-50 px-2 py-1 text-xs text-rose-700">
{{ exportErrorMessage }}
</p>
<!-- Loading overlay -->
<div v-if="loading" class="flex items-center justify-center rounded-card border border-dashed border-stroke-soft bg-surface-muted/40 py-16">
@@ -454,16 +793,54 @@ function handleNodeClick(params) {
</div>
<!-- ECharts Tree -->
<div v-else class="relative">
<div v-else class="relative overflow-x-auto">
<VChart
ref="chartRef"
class="lineage-tree-chart"
:style="{ height: chartHeight }"
:style="{ height: chartHeight, width: '100%', minWidth: chartMinWidth }"
:option="chartOption"
autoresize
@click="handleNodeClick"
/>
</div>
<details v-if="relationRows.length > 0" class="mt-3 rounded-card border border-stroke-soft bg-surface-muted/50 px-3 py-2">
<summary class="cursor-pointer text-xs font-medium text-slate-700">
關係清單{{ relationRows.length }}
</summary>
<div class="mt-2 max-h-56 overflow-auto rounded border border-stroke-soft bg-white">
<table class="min-w-full text-left text-xs text-slate-700">
<thead class="bg-slate-50 text-[11px] text-slate-500">
<tr>
<th class="px-2 py-1.5 font-medium">來源批次</th>
<th class="px-2 py-1.5 font-medium">目標批次</th>
<th class="px-2 py-1.5 font-medium">關係</th>
</tr>
</thead>
<tbody>
<tr
v-for="row in relationRows.slice(0, 200)"
:key="row.key"
class="border-t border-slate-100"
>
<td class="px-2 py-1.5 font-mono text-[11px]">
{{ row.fromName }}
</td>
<td class="px-2 py-1.5 font-mono text-[11px]">
{{ row.toName }}
</td>
<td class="px-2 py-1.5 text-[11px]">
{{ row.edgeLabel }}
</td>
</tr>
</tbody>
</table>
</div>
<p v-if="relationRows.length > 200" class="mt-1 text-[11px] text-slate-500">
僅顯示前 200 請搭配上方樹圖與節點點選進一步縮小範圍
</p>
</details>
<!-- Not found warning -->
<div v-if="notFound.length > 0" class="mt-3 rounded-card border border-state-warning/40 bg-amber-50 px-3 py-2 text-xs text-amber-700">
未命中{{ notFound.join(', ') }}

View File

@@ -56,6 +56,10 @@ const props = defineProps({
type: Object,
default: () => new Map(),
},
graphEdges: {
type: Array,
default: () => [],
},
leafSerials: {
type: Object,
default: () => new Map(),
@@ -154,6 +158,7 @@ const emit = defineEmits([
:name-map="nameMap"
:node-meta-map="nodeMetaMap"
:edge-type-map="edgeTypeMap"
:graph-edges="graphEdges"
:leaf-serials="leafSerials"
:selected-container-ids="selectedContainerIds"
:loading="lineageLoading"

View File

@@ -56,6 +56,10 @@ const props = defineProps({
type: Object,
default: () => new Map(),
},
graphEdges: {
type: Array,
default: () => [],
},
leafSerials: {
type: Object,
default: () => new Map(),
@@ -154,6 +158,7 @@ const emit = defineEmits([
:name-map="nameMap"
:node-meta-map="nodeMetaMap"
:edge-type-map="edgeTypeMap"
:graph-edges="graphEdges"
:leaf-serials="leafSerials"
:selected-container-ids="selectedContainerIds"
:loading="lineageLoading"

View File

@@ -78,6 +78,7 @@ export function useLotLineage(initial = {}) {
const nameMap = reactive(new Map());
const nodeMetaMap = reactive(new Map());
const edgeTypeMap = reactive(new Map());
const graphEdges = ref([]);
const leafSerials = reactive(new Map());
const expandedNodes = ref(new Set());
const selectedContainerId = ref(normalizeText(initial.selectedContainerId));
@@ -255,18 +256,23 @@ export function useLotLineage(initial = {}) {
}
edgeTypeMap.clear();
const normalizedEdges = [];
if (Array.isArray(typedEdges)) {
typedEdges.forEach((edge) => {
if (!edge || typeof edge !== 'object') {
return;
}
const key = edgeKey(edge.from_cid, edge.to_cid);
const from = normalizeText(edge.from_cid);
const to = normalizeText(edge.to_cid);
const key = edgeKey(from, to);
const type = normalizeText(edge.edge_type);
if (key && type) {
edgeTypeMap.set(key, type);
normalizedEdges.push({ from_cid: from, to_cid: to, edge_type: type });
}
});
}
graphEdges.value = normalizedEdges;
// Store leaf serial numbers
Object.entries(serialsData).forEach(([cid, serials]) => {
@@ -463,6 +469,7 @@ export function useLotLineage(initial = {}) {
nameMap.clear();
nodeMetaMap.clear();
edgeTypeMap.clear();
graphEdges.value = [];
leafSerials.clear();
expandedNodes.value = new Set();
selectedContainerIds.value = [];
@@ -508,6 +515,7 @@ export function useLotLineage(initial = {}) {
nameMap,
nodeMetaMap,
edgeTypeMap,
graphEdges,
leafSerials,
expandedNodes,
selectedContainerId,

View File

@@ -71,6 +71,31 @@ function edgeKey(fromCid, toCid) {
return `${from}->${to}`;
}
// Collect every transitive ancestor CID of `startCid` per `parentMap`
// ({child: [parents]}); the start node itself is excluded unless it is
// reachable as its own ancestor through a cycle. Returns a Set of CIDs.
function collectAncestors(parentMap, startCid) {
  const start = normalizeText(startCid);
  const ancestors = new Set();
  if (!start) {
    return ancestors;
  }
  const pending = [start];
  while (pending.length > 0) {
    const current = pending.pop();
    const rawParents = parentMap?.[current];
    if (!Array.isArray(rawParents)) {
      continue;
    }
    for (const rawParent of rawParents) {
      const parent = normalizeText(rawParent);
      if (parent && !ancestors.has(parent)) {
        ancestors.add(parent);
        pending.push(parent);
      }
    }
  }
  return ancestors;
}
export function useReverseLineage(initial = {}) {
ensureMesApiAvailable();
@@ -78,6 +103,7 @@ export function useReverseLineage(initial = {}) {
const nameMap = reactive(new Map());
const nodeMetaMap = reactive(new Map());
const edgeTypeMap = reactive(new Map());
const graphEdges = ref([]);
const leafSerials = reactive(new Map());
const selectedContainerId = ref(normalizeText(initial.selectedContainerId));
const selectedContainerIds = ref(
@@ -227,6 +253,81 @@ export function useReverseLineage(initial = {}) {
return normalized;
}
/**
 * Reduce candidate reverse-lineage roots to the minimal set worth displaying.
 *
 * Candidates are first grouped by the query input token that produced them
 * (via rootRows); within each group, any root that is an ancestor of another
 * root in the same group is dropped, since its subtree would be redundant.
 * Roots never seen in rootRows form their own single-member groups and are
 * always kept. Output order follows group discovery order, deduplicated.
 *
 * @param {string[]} candidateRoots - raw root CIDs from tree resolution.
 * @param {Object<string, string[]>} parentMap - {child: [parents]} used for
 *   ancestor checks via collectAncestors.
 * @returns {string[]} reduced, deduplicated root CIDs.
 */
function deriveDisplayRoots(candidateRoots, parentMap) {
  const roots = uniqueValues((candidateRoots || []).map((cid) => normalizeText(cid)).filter(Boolean));
  if (roots.length <= 1) {
    return roots;
  }
  const candidateSet = new Set(roots);
  const groupedRoots = [];
  const groupsByInput = new Map();
  const assigned = new Set();
  // Keep reduction within each query input token to avoid cross-token interference.
  rootRows.value.forEach((row) => {
    const cid = extractContainerId(row);
    if (!candidateSet.has(cid)) {
      return;
    }
    // Rows may spell the input token under different key casings.
    const inputToken = normalizeText(row?.input_value || row?.inputValue || row?.INPUT_VALUE);
    // Rows without a token fall back to a per-CID synthetic group key.
    const key = inputToken || `__${cid}`;
    if (!groupsByInput.has(key)) {
      groupsByInput.set(key, []);
      groupedRoots.push(groupsByInput.get(key));
    }
    const group = groupsByInput.get(key);
    if (!group.includes(cid)) {
      group.push(cid);
      assigned.add(cid);
    }
  });
  // Roots not found in rootRows still need a standalone group.
  roots.forEach((cid) => {
    if (assigned.has(cid)) {
      return;
    }
    groupedRoots.push([cid]);
  });
  const reduced = [];
  groupedRoots.forEach((group) => {
    // Single-member groups pass through untouched (deduplicated).
    if (group.length <= 1) {
      const only = group[0];
      if (only && !reduced.includes(only)) {
        reduced.push(only);
      }
      return;
    }
    // Cache ancestor sets: collectAncestors walks parentMap, so avoid
    // recomputing it for every pairwise comparison in the group.
    const ancestorCache = new Map();
    const getAncestors = (cid) => {
      if (!ancestorCache.has(cid)) {
        ancestorCache.set(cid, collectAncestors(parentMap, cid));
      }
      return ancestorCache.get(cid);
    };
    // Drop a root if some other root in the group has it as an ancestor.
    const kept = group.filter((cid) => !group.some((otherCid) => (
      otherCid !== cid && getAncestors(otherCid).has(cid)
    )));
    // If everything eliminated everything (e.g. a cycle), keep the whole group.
    const finalGroup = kept.length > 0 ? kept : group;
    finalGroup.forEach((cid) => {
      if (cid && !reduced.includes(cid)) {
        reduced.push(cid);
      }
    });
  });
  return reduced;
}
function populateReverseTree(payload, requestedRoots = []) {
const parentMap = normalizeParentMap(payload);
const names = payload?.names;
@@ -256,18 +357,23 @@ export function useReverseLineage(initial = {}) {
}
edgeTypeMap.clear();
const normalizedEdges = [];
if (Array.isArray(typedEdges)) {
typedEdges.forEach((edge) => {
if (!edge || typeof edge !== 'object') {
return;
}
const key = edgeKey(edge.from_cid, edge.to_cid);
const from = normalizeText(edge.from_cid);
const to = normalizeText(edge.to_cid);
const key = edgeKey(from, to);
const type = normalizeText(edge.edge_type);
if (key && type) {
edgeTypeMap.set(key, type);
normalizedEdges.push({ from_cid: from, to_cid: to, edge_type: type });
}
});
}
graphEdges.value = normalizedEdges;
Object.entries(parentMap).forEach(([childId, parentIds]) => {
patchEntry(childId, {
@@ -317,7 +423,7 @@ export function useReverseLineage(initial = {}) {
}
});
treeRoots.value = roots;
treeRoots.value = deriveDisplayRoots(roots, parentMap);
}
async function fetchLineage(containerIds, { force = false } = {}) {
@@ -392,6 +498,7 @@ export function useReverseLineage(initial = {}) {
nameMap.clear();
nodeMetaMap.clear();
edgeTypeMap.clear();
graphEdges.value = [];
leafSerials.clear();
rootRows.value = [];
rootContainerIds.value = [];
@@ -416,6 +523,7 @@ export function useReverseLineage(initial = {}) {
nameMap,
nodeMetaMap,
edgeTypeMap,
graphEdges,
leafSerials,
selectedContainerId,
selectedContainerIds,

View File

@@ -90,7 +90,6 @@ def _build_parent_map(
child_to_parent: Dict[str, str],
merge_child_to_parent: Dict[str, str],
merge_source_map: Dict[str, List[str]],
cid_to_name: Dict[str, str],
) -> tuple:
"""Build per-node direct parent lists and merge edge lists.
@@ -98,6 +97,9 @@ def _build_parent_map(
(parent_map, merge_edges) where:
- parent_map: {child_cid: [direct_parent_cids]}
- merge_edges: {child_cid: [merge_source_cids]}
Notes:
merge_source_map is keyed by target/child CID.
"""
parent_map: Dict[str, List[str]] = defaultdict(list)
merge_edges: Dict[str, List[str]] = defaultdict(list)
@@ -109,17 +111,18 @@ def _build_parent_map(
if parent not in parent_map[child]:
parent_map[child].append(parent)
if merge_source_map and cid_to_name:
name_to_cids: Dict[str, List[str]] = defaultdict(list)
for cid, name in cid_to_name.items():
name_to_cids[name].append(cid)
for name, source_cids in merge_source_map.items():
for owner_cid in name_to_cids.get(name, []):
for source_cid in source_cids:
if source_cid != owner_cid and source_cid not in parent_map[owner_cid]:
parent_map[owner_cid].append(source_cid)
merge_edges[owner_cid].append(source_cid)
if merge_source_map:
for owner_cid, source_cids in merge_source_map.items():
child = _safe_str(owner_cid)
if not child:
continue
for source_cid in source_cids:
source = _safe_str(source_cid)
if not source or source == child:
continue
if source not in parent_map[child]:
parent_map[child].append(source)
merge_edges[child].append(source)
return dict(parent_map), dict(merge_edges)
@@ -361,23 +364,23 @@ class LineageEngine:
@staticmethod
def resolve_merge_sources(
container_names: List[str],
target_cids: List[str],
) -> Dict[str, List[str]]:
"""Resolve merge source lots from FINISHEDNAME."""
normalized_names = _normalize_list(container_names)
if not normalized_names:
"""Resolve merge source lots by target LOT CID (COMBINE.LOTID)."""
normalized_target_cids = _normalize_list(target_cids)
if not normalized_target_cids:
return {}
result: Dict[str, Set[str]] = defaultdict(set)
for i in range(0, len(normalized_names), ORACLE_IN_BATCH_SIZE):
batch = normalized_names[i:i + ORACLE_IN_BATCH_SIZE]
for i in range(0, len(normalized_target_cids), ORACLE_IN_BATCH_SIZE):
batch = normalized_target_cids[i:i + ORACLE_IN_BATCH_SIZE]
builder = QueryBuilder()
builder.add_in_condition("ca.FINISHEDNAME", batch)
builder.add_in_condition("ca.LOTID", batch)
sql = SQLLoader.load_with_params(
"lineage/merge_sources",
FINISHED_NAME_FILTER=builder.get_conditions_sql(),
TARGET_CID_FILTER=builder.get_conditions_sql(),
)
df = read_sql_df(sql, builder.params)
@@ -385,16 +388,16 @@ class LineageEngine:
continue
for _, row in df.iterrows():
finished_name = _safe_str(row.get("FINISHEDNAME"))
target_cid = _safe_str(row.get("FINISHED_CID"))
source_cid = _safe_str(row.get("SOURCE_CID"))
if not finished_name or not source_cid:
if not target_cid or not source_cid or source_cid == target_cid:
continue
result[finished_name].add(source_cid)
result[target_cid].add(source_cid)
mapped = {k: sorted(v) for k, v in result.items()}
logger.info(
"Merge source resolution completed: finished_names=%s, mapped=%s",
len(normalized_names),
"Merge source resolution completed: target_cids=%s, mapped=%s",
len(normalized_target_cids),
len(mapped),
)
return mapped
@@ -565,6 +568,7 @@ class LineageEngine:
for parent, children in split_children_map.items():
for child in children:
split_edges.append((parent, child, EDGE_TYPE_SPLIT))
split_pairs = {(parent, child) for parent, child, _ in split_edges}
# Collect all nodes in the tree
all_nodes: Set[str] = set(roots)
@@ -578,6 +582,26 @@ class LineageEngine:
# Step 4: Query serial numbers for leaf nodes
leaf_serials = LineageEngine.resolve_leaf_serials(leaf_cids) if leaf_cids else {}
# Step 4b: Resolve merge relations for known nodes by target CID.
merge_edges: List[Tuple[str, str, str]] = []
try:
merge_source_map = LineageEngine.resolve_merge_sources(list(all_nodes))
for target_cid, source_cids in merge_source_map.items():
target = _safe_str(target_cid)
if not target:
continue
for source_cid in source_cids:
source = _safe_str(source_cid)
if not source or source == target:
continue
if (source, target) in split_pairs:
continue
merge_edges.append((source, target, EDGE_TYPE_MERGE))
all_nodes.add(source)
all_nodes.add(target)
except Exception as exc:
logger.warning("Forward merge enrichment skipped due to merge lookup error: %s", exc)
# Step 5: Build semantic links (wafer origin / GD rework) and augment tree.
snapshots: Dict[str, Dict[str, Optional[str]]] = {}
semantic_edges: List[Tuple[str, str, str]] = []
@@ -614,15 +638,16 @@ class LineageEngine:
roots = sorted([cid for cid in all_nodes if cid not in incoming])
typed_nodes = LineageEngine._build_nodes_payload(all_nodes, snapshots, cid_to_name, wafer_ids)
typed_edges = _to_edge_payload(split_edges + semantic_edges)
typed_edges = _to_edge_payload(split_edges + merge_edges + semantic_edges)
logger.info(
"Forward tree resolution completed: seeds=%s, roots=%s, nodes=%s, leaves=%s, serials=%s, semantic_edges=%s",
"Forward tree resolution completed: seeds=%s, roots=%s, nodes=%s, leaves=%s, serials=%s, merge_edges=%s, semantic_edges=%s",
len(seed_cids),
len(roots),
len(all_nodes),
len(leaf_cids),
len(leaf_serials),
len(merge_edges),
len(semantic_edges),
)
@@ -686,18 +711,25 @@ class LineageEngine:
if _safe_str(parent) and _safe_str(child)
]
all_names = [name for name in cid_to_name.values() if _safe_str(name)]
merge_source_map = LineageEngine.resolve_merge_sources(all_names)
merge_lookup_targets = sorted(
{
cid
for cid in (
list(seed_cids)
+ list(child_to_parent.keys())
+ list(child_to_parent.values())
)
if _safe_str(cid)
}
)
merge_source_map = LineageEngine.resolve_merge_sources(merge_lookup_targets)
merge_child_to_parent: Dict[str, str] = {}
merge_source_cids_all: Set[str] = set()
if merge_source_map:
for seed in seed_cids:
self_and_ancestors = ancestors[seed] | {seed}
for cid in list(self_and_ancestors):
name = cid_to_name.get(cid)
if not name:
continue
for source_cid in merge_source_map.get(name, []):
for source_cid in merge_source_map.get(cid, []):
if source_cid == cid or source_cid in self_and_ancestors:
continue
ancestors[seed].add(source_cid)
@@ -722,7 +754,7 @@ class LineageEngine:
ancestors[seed].add(parent)
current = parent
pm, me = _build_parent_map(child_to_parent, merge_child_to_parent, merge_source_map, cid_to_name)
pm, me = _build_parent_map(child_to_parent, merge_child_to_parent, merge_source_map)
for child, parent in merge_child_to_parent.items():
if _safe_str(parent) and _safe_str(child):

View File

@@ -503,46 +503,126 @@ def _resolve_by_gd_lot_id(gd_lot_ids: List[str]) -> Dict[str, Any]:
def _resolve_by_serial_number(serial_numbers: List[str]) -> Dict[str, Any]:
    """Resolve serial-related inputs to CONTAINERID.

    Note: one serial number may map to multiple CONTAINERIDs.

    Matching sources (in priority order; lower priority number wins when the
    same (input, container) pair is matched by more than one source):
        1. DW_MES_PJ_COMBINEDASSYLOTS.FINISHEDNAME (new serial path)
        2. DW_MES_CONTAINER.CONTAINERNAME (old serial / lot-id style inputs)
        3. DW_MES_CONTAINER.FIRSTNAME (bridge from serial to related lots)

    Args:
        serial_numbers: Raw serial-number inputs (exact values or patterns).

    Returns:
        Resolution result dict with keys: ``data`` (matched rows), ``total``,
        ``input_count`` (normalized token count), ``not_found`` (tokens with
        no match), and ``expansion_info`` (token -> match count).
    """
    tokens = _normalize_search_tokens(serial_numbers)
    if not tokens:
        # Nothing usable to search for -- return an empty, well-formed result.
        return {
            'data': [],
            'total': 0,
            'input_count': 0,
            'not_found': [],
            'expansion_info': {},
        }

    # Each source is queried independently with the same tokens; the
    # 'priority' field ranks sources for de-duplication below.
    source_configs = [
        {
            'name': 'finished_name',
            'priority': 0,
            'sql_name': 'query_tool/lot_resolve_serial',
            'filter_key': 'SERIAL_FILTER',
            'filter_column': 'p.FINISHEDNAME',
            'match_key': 'FINISHEDNAME',
            'extra_conditions': [],
        },
        {
            'name': 'container_name',
            'priority': 1,
            'sql_name': 'query_tool/lot_resolve_id',
            'filter_key': 'CONTAINER_FILTER',
            'filter_column': 'CONTAINERNAME',
            'match_key': 'CONTAINERNAME',
            'extra_conditions': ["OBJECTTYPE = 'LOT'"],
        },
        {
            'name': 'first_name',
            'priority': 2,
            'sql_name': 'query_tool/lot_resolve_wafer_lot',
            'filter_key': 'WAFER_FILTER',
            'filter_column': 'FIRSTNAME',
            'match_key': 'FIRSTNAME',
            'extra_conditions': ["OBJECTTYPE = 'LOT'"],
        },
    ]

    # Keyed by (input_value, container_id); only the highest-priority
    # (lowest number) source's candidate survives for each key.
    best_match_by_key: Dict[Tuple[str, str], Dict[str, Any]] = {}
    for config in source_configs:
        builder = QueryBuilder()
        _add_exact_or_pattern_condition(builder, config['filter_column'], tokens)
        for cond in config['extra_conditions']:
            builder.add_condition(cond)
        if not builder.conditions:
            # No valid filter could be built for this source; skip the query.
            continue
        sql = SQLLoader.load_with_params(
            config['sql_name'],
            **{config['filter_key']: builder.get_conditions_sql()},
        )
        df = read_sql_df(sql, builder.params)
        data = _df_to_records(df)
        # not_found / expansion are recomputed globally after all sources run,
        # so only the matched rows are used here.
        matched, _, _ = _match_rows_by_tokens(
            tokens,
            data,
            row_key=config['match_key'],
        )
        for row in matched:
            input_value = str(row.get('input_value') or '').strip()
            cid = str(row.get('CONTAINERID') or '').strip()
            if not input_value or not cid:
                continue
            candidate = {
                'container_id': cid,
                'lot_id': row.get('CONTAINERNAME') or cid,
                'input_value': input_value,
                'spec_name': row.get('SPECNAME'),
                'match_source': config['name'],
                '_priority': config['priority'],
            }
            key = (input_value, cid)
            existing = best_match_by_key.get(key)
            if existing is None or candidate['_priority'] < existing['_priority']:
                best_match_by_key[key] = candidate

    # Group surviving candidates by original input token so the output
    # preserves the caller's input order.
    grouped_by_input: Dict[str, List[Dict[str, Any]]] = {}
    for item in best_match_by_key.values():
        grouped_by_input.setdefault(item['input_value'], []).append(item)

    results: List[Dict[str, Any]] = []
    not_found: List[str] = []
    expansion_info: Dict[str, int] = {}
    for token in tokens:
        rows = grouped_by_input.get(token, [])
        rows.sort(key=lambda row: (row.get('_priority', 999), str(row.get('lot_id') or '')))
        if not rows:
            not_found.append(token)
            continue
        expansion_info[token] = len(rows)
        for row in rows:
            row.pop('_priority', None)  # internal ranking key; not part of the API
            results.append(row)

    logger.info(
        "Serial number resolution: %s containers from %s inputs (not_found=%s)",
        len(results),
        len(tokens),
        len(not_found),
    )
    return {
        'data': results,
        'total': len(results),
        'input_count': len(tokens),
        'not_found': not_found,
        'expansion_info': expansion_info,
    }

View File

@@ -1,8 +1,8 @@
-- Unified LineageEngine - Merge Sources
-- Find source lots merged into finished lots from DW_MES_PJ_COMBINEDASSYLOTS.
-- Find source lots merged into target LOT CIDs from DW_MES_PJ_COMBINEDASSYLOTS.
--
-- Parameters:
-- FINISHED_NAME_FILTER - QueryBuilder-generated condition on ca.FINISHEDNAME
-- TARGET_CID_FILTER - QueryBuilder-generated condition on ca.LOTID
--
SELECT
ca.CONTAINERID AS SOURCE_CID,
@@ -10,4 +10,4 @@ SELECT
ca.FINISHEDNAME,
ca.LOTID AS FINISHED_CID
FROM DWH.DW_MES_PJ_COMBINEDASSYLOTS ca
WHERE {{ FINISHED_NAME_FILTER }}
WHERE {{ TARGET_CID_FILTER }}

View File

@@ -67,35 +67,35 @@ def test_resolve_split_ancestors_batches_and_enforces_max_depth(mock_read_sql_df
@patch("mes_dashboard.services.lineage_engine.read_sql_df")
def test_resolve_merge_sources_batches_and_returns_mapping(mock_read_sql_df):
    """resolve_merge_sources batches >1000 target CIDs into multiple queries.

    Verifies:
    - 1001 inputs produce exactly two SQL calls (1000-param Oracle IN-list
      limit, then the remaining 1).
    - The {{ TARGET_CID_FILTER }} placeholder is substituted in both calls.
    - Duplicate SOURCE_CIDs are de-duplicated and rows with a null
      FINISHED_CID are dropped from the returned mapping.
    """
    target_cids = [f"T{i:04d}" for i in range(1001)]
    mock_read_sql_df.side_effect = [
        pd.DataFrame(
            [
                {"FINISHED_CID": "T0000", "SOURCE_CID": "SRC-A"},
                {"FINISHED_CID": "T0000", "SOURCE_CID": "SRC-B"},
            ]
        ),
        pd.DataFrame(
            [
                {"FINISHED_CID": "T1000", "SOURCE_CID": "SRC-C"},
                {"FINISHED_CID": "T1000", "SOURCE_CID": "SRC-C"},
                {"FINISHED_CID": None, "SOURCE_CID": "SRC-INVALID"},
            ]
        ),
    ]

    result = LineageEngine.resolve_merge_sources(target_cids)

    assert mock_read_sql_df.call_count == 2
    first_sql, first_params = mock_read_sql_df.call_args_list[0].args
    second_sql, second_params = mock_read_sql_df.call_args_list[1].args
    # Template placeholder must have been rendered away in both batches.
    assert "{{ TARGET_CID_FILTER }}" not in first_sql
    assert "{{ TARGET_CID_FILTER }}" not in second_sql
    assert len(first_params) == 1000
    assert len(second_params) == 1
    assert result["T0000"] == ["SRC-A", "SRC-B"]
    assert result["T1000"] == ["SRC-C"]
@patch("mes_dashboard.services.lineage_engine.LineageEngine.resolve_merge_sources")
@@ -126,7 +126,7 @@ def test_resolve_full_genealogy_combines_split_and_merge(
},
},
]
mock_resolve_merge_sources.return_value = {"LOT-B": ["M1"]}
mock_resolve_merge_sources.return_value = {"B": ["M1"]}
result = LineageEngine.resolve_full_genealogy(["A"], {"A": "LOT-A"})

View File

@@ -286,23 +286,63 @@ class TestResolveQueriesUseBindParams:
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
'CONTAINERID': 'CID-1',
'FINISHEDNAME': 'SN-1',
'CONTAINERNAME': 'LOT-1',
'SPECNAME': 'SPEC-1',
}
])
mock_load.side_effect = [
"SELECT * FROM COMBINE",
"SELECT * FROM CONTAINER_NAME",
"SELECT * FROM FIRSTNAME",
]
mock_read.side_effect = [
pd.DataFrame([
{
'CONTAINERID': 'CID-FIN',
'FINISHEDNAME': 'SN-1',
'CONTAINERNAME': 'LOT-FIN',
'SPECNAME': 'SPEC-1',
}
]),
pd.DataFrame([
{
'CONTAINERID': 'CID-NAME',
'CONTAINERNAME': 'SN-1',
'SPECNAME': 'SPEC-2',
'MFGORDERNAME': None,
'QTY': 1,
}
]),
pd.DataFrame([
{
'CONTAINERID': 'CID-FIRST',
'CONTAINERNAME': 'GD25000001-A01',
'FIRSTNAME': 'SN-1',
'SPECNAME': 'SPEC-3',
'QTY': 1,
}
]),
]
result = _resolve_by_serial_number(['SN-1'])
assert result['total'] == 1
sql_params = mock_load.call_args.kwargs
assert ':p0' in sql_params['SERIAL_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'SN-1'}
assert result['total'] == 3
assert {row['match_source'] for row in result['data']} == {
'finished_name',
'container_name',
'first_name',
}
assert [call.args[0] for call in mock_load.call_args_list] == [
'query_tool/lot_resolve_serial',
'query_tool/lot_resolve_id',
'query_tool/lot_resolve_wafer_lot',
]
assert ':p0' in mock_load.call_args_list[0].kwargs['SERIAL_FILTER']
assert ':p0' in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
assert ':p0' in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
assert mock_read.call_args_list[0].args[1] == {'p0': 'SN-1'}
assert mock_read.call_args_list[1].args[1] == {'p0': 'SN-1'}
assert mock_read.call_args_list[2].args[1] == {'p0': 'SN-1'}
def test_resolve_by_work_order_uses_query_builder_params(self):
from unittest.mock import patch