feat(admin-performance): fix RSS measurement, add multi-worker aggregation, and new dashboard panels

- Fix RSS metric: replace peak `ru_maxrss` with current RSS via `process_rss_mb()`
- Add `query_snapshots_aggregated()` with SQLite time-bucket GROUP BY to eliminate
  multi-worker zigzag oscillation in trend charts
- Add Worker Memory Guard panel (GaugeBar + 6 StatCards) to dashboard
- Add Pareto Materialization panel (stats grid + fallback reasons table)
- Fix Redis memory trend to display in MB instead of raw bytes
- Restore 14 accidentally deleted migration JSON config files
- Fix 44 pre-existing test failures: mock targets, redirect expectations,
  filter params, navigation baselines

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
egg
2026-03-05 08:52:19 +08:00
parent f6a54f357f
commit 7f409dc17f
23 changed files with 1571 additions and 404 deletions

View File

@@ -0,0 +1,20 @@
{
"in_scope_required_assets": {
"/wip-overview": ["wip-overview.js"],
"/wip-detail": ["wip-detail.js"],
"/hold-overview": ["hold-overview.js"],
"/hold-detail": ["hold-detail.js"],
"/hold-history": ["hold-history.js"],
"/reject-history": ["reject-history.js"],
"/resource": ["resource-status.js"],
"/resource-history": ["resource-history.js"],
"/qc-gate": ["qc-gate.js"],
"/job-query": ["job-query.js"],
"/admin/performance": ["admin-performance.js"],
"/tables": ["tables.js"],
"/excel-query": ["excel-query.js"],
"/query-tool": ["query-tool.js"],
"/mid-section-defect": ["mid-section-defect.js"]
},
"deferred_routes": []
}

View File

@@ -0,0 +1,3 @@
{
"records": []
}

View File

@@ -0,0 +1,116 @@
{
"entries": [
{
"id": "style-legacy-wip-overview",
"scope": "/wip-overview",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-wip-detail",
"scope": "/wip-detail",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-hold-overview",
"scope": "/hold-overview",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-hold-detail",
"scope": "/hold-detail",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-hold-history",
"scope": "/hold-history",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-reject-history",
"scope": "/reject-history",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-resource",
"scope": "/resource",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-resource-history",
"scope": "/resource-history",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-qc-gate",
"scope": "/qc-gate",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-job-query",
"scope": "/job-query",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-admin-pages",
"scope": "/admin/pages",
"owner": "frontend-platform-admin",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-admin-performance",
"scope": "/admin/performance",
"owner": "frontend-platform-admin",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-tables",
"scope": "/tables",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-excel-query",
"scope": "/excel-query",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-query-tool",
"scope": "/query-tool",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-mid-section-defect",
"scope": "/mid-section-defect",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
}
]
}

View File

@@ -0,0 +1,20 @@
{
"routes": {
"/wip-overview": { "known_issues": [] },
"/wip-detail": { "known_issues": [] },
"/hold-overview": { "known_issues": [] },
"/hold-detail": { "known_issues": [] },
"/hold-history": { "known_issues": [] },
"/reject-history": { "known_issues": [] },
"/resource": { "known_issues": [] },
"/resource-history": { "known_issues": [] },
"/qc-gate": { "known_issues": [] },
"/job-query": { "known_issues": [] },
"/tables": { "known_issues": [] },
"/excel-query": { "known_issues": [] },
"/query-tool": { "known_issues": [] },
"/mid-section-defect": { "known_issues": [] },
"/admin/pages": { "known_issues": [] },
"/admin/performance": { "known_issues": [] }
}
}

View File

@@ -0,0 +1,3 @@
{
"records": []
}

View File

@@ -0,0 +1,6 @@
{
"severity_mode": {
"current": "block"
},
"deferred_routes_excluded": []
}

View File

@@ -0,0 +1,9 @@
{
"mode": "block",
"errors": [],
"warnings": [
"/excel-query uses shell tokens without fallback ['--portal-shadow-panel'] in frontend/src/excel-query/style.css with approved exception"
],
"info": [],
"passed": true
}

View File

@@ -0,0 +1,208 @@
{
"routes": [
{
"route": "/wip-overview",
"route_id": "wip-overview",
"title": "WIP Overview",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/wip-overview",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/wip-detail",
"route_id": "wip-detail",
"title": "WIP Detail",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/wip-detail",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/hold-overview",
"route_id": "hold-overview",
"title": "Hold Overview",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/hold-overview",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/hold-detail",
"route_id": "hold-detail",
"title": "Hold Detail",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/hold-detail",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/hold-history",
"route_id": "hold-history",
"title": "Hold History",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/hold-history",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/reject-history",
"route_id": "reject-history",
"title": "Reject History",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/reject-history",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/resource",
"route_id": "resource",
"title": "Resource",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/resource",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/resource-history",
"route_id": "resource-history",
"title": "Resource History",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/resource-history",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/qc-gate",
"route_id": "qc-gate",
"title": "QC Gate",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/qc-gate",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/job-query",
"route_id": "job-query",
"title": "Job Query",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/job-query",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/admin/pages",
"route_id": "admin-pages",
"title": "Admin Pages",
"scope": "in-scope",
"render_mode": "external",
"owner": "frontend-platform-admin",
"visibility_policy": "admin_only",
"canonical_shell_path": "/portal-shell/admin/pages",
"rollback_strategy": "external_route_reversion",
"compatibility_policy": "external_target_redirect"
},
{
"route": "/admin/performance",
"route_id": "admin-performance",
"title": "Admin Performance",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-platform-admin",
"visibility_policy": "admin_only",
"canonical_shell_path": "/portal-shell/admin/performance",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/tables",
"route_id": "tables",
"title": "Tables",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/tables",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/excel-query",
"route_id": "excel-query",
"title": "Excel Query",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/excel-query",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/query-tool",
"route_id": "query-tool",
"title": "Query Tool",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/query-tool",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/mid-section-defect",
"route_id": "mid-section-defect",
"title": "Mid Section Defect",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/mid-section-defect",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/material-trace",
"route_id": "material-trace",
"title": "Material Trace",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/material-trace",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
}
]
}

View File

@@ -0,0 +1,21 @@
{
"in_scope": [
{ "route": "/wip-overview", "category": "report" },
{ "route": "/wip-detail", "category": "report" },
{ "route": "/hold-overview", "category": "report" },
{ "route": "/hold-detail", "category": "report" },
{ "route": "/hold-history", "category": "report" },
{ "route": "/reject-history", "category": "report" },
{ "route": "/resource", "category": "report" },
{ "route": "/resource-history", "category": "report" },
{ "route": "/qc-gate", "category": "report" },
{ "route": "/job-query", "category": "report" },
{ "route": "/tables", "category": "report" },
{ "route": "/excel-query", "category": "report" },
{ "route": "/query-tool", "category": "report" },
{ "route": "/mid-section-defect", "category": "report" },
{ "route": "/admin/pages", "category": "admin" },
{ "route": "/admin/performance", "category": "admin" }
],
"deferred": []
}

View File

@@ -0,0 +1,4 @@
{
"routes": {},
"notes": "Baseline placeholder inventory"
}

View File

@@ -0,0 +1,46 @@
{
"source": "current frontend API consumption contracts",
"apis": {
"/api/wip/overview/summary": {
"required_keys": [
"dataUpdateDate",
"runLots",
"queueLots",
"holdLots"
],
"notes": "summary header and cards depend on these fields"
},
"/api/wip/overview/matrix": {
"required_keys": [
"workcenters",
"packages",
"matrix",
"workcenter_totals"
],
"notes": "matrix table rendering contract"
},
"/api/wip/hold-detail/summary": {
"required_keys": [
"workcenterCount",
"packageCount",
"lotCount"
],
"notes": "hold detail summary cards contract"
},
"/api/resource/history/summary": {
"required_keys": [
"kpi",
"trend",
"heatmap",
"workcenter_comparison"
],
"notes": "resource history chart summary contract"
},
"/api/resource/history/detail": {
"required_keys": [
"data"
],
"notes": "detail table contract (plus truncated/max_records metadata when present)"
}
}
}

View File

@@ -0,0 +1,4 @@
{
"source": "data/page_status.json",
"errors": []
}

View File

@@ -0,0 +1,235 @@
{
"source": "data/page_status.json",
"admin": [
{
"id": "reports",
"name": "即時報表",
"order": 1,
"admin_only": false,
"pages": [
{
"route": "/wip-overview",
"name": "WIP 即時概況",
"status": "released",
"order": 1
},
{
"route": "/hold-overview",
"name": "Hold 即時概況",
"status": "released",
"order": 2
},
{
"route": "/resource",
"name": "設備即時概況",
"status": "released",
"order": 4
},
{
"route": "/qc-gate",
"name": "QC-GATE 狀態",
"status": "released",
"order": 6
}
]
},
{
"id": "drawer-2",
"name": "歷史報表",
"order": 2,
"admin_only": false,
"pages": [
{
"route": "/hold-history",
"name": "Hold 歷史績效",
"status": "released",
"order": 3
},
{
"route": "/resource-history",
"name": "設備歷史績效",
"status": "released",
"order": 5
}
]
},
{
"id": "drawer",
"name": "查詢工具",
"order": 3,
"admin_only": false,
"pages": [
{
"route": "/reject-history",
"name": "報廢歷史查詢",
"status": "released",
"order": 1
},
{
"route": "/job-query",
"name": "設備維修查詢",
"status": "released",
"order": 2
}
]
},
{
"id": "drawer-3",
"name": "追溯工具",
"order": 4,
"admin_only": false,
"pages": [
{
"route": "/query-tool",
"name": "批次追蹤工具",
"status": "released",
"order": 2
},
{
"route": "/material-trace",
"name": "原物料追溯查詢",
"status": "released",
"order": 3
},
{
"route": "/mid-section-defect",
"name": "製程不良追溯分析",
"status": "released",
"order": 3
}
]
},
{
"id": "dev-tools",
"name": "開發工具",
"order": 4,
"admin_only": true,
"pages": [
{
"route": "/tables",
"name": "表格總覽",
"status": "dev",
"order": 1
},
{
"route": "/admin/pages",
"name": "頁面管理",
"status": "released",
"order": 1
},
{
"route": "/excel-query",
"name": "Excel 批次查詢",
"status": "dev",
"order": 2
},
{
"route": "/admin/performance",
"name": "效能監控",
"status": "dev",
"order": 2
}
]
}
],
"non_admin": [
{
"id": "reports",
"name": "即時報表",
"order": 1,
"admin_only": false,
"pages": [
{
"route": "/wip-overview",
"name": "WIP 即時概況",
"status": "released",
"order": 1
},
{
"route": "/hold-overview",
"name": "Hold 即時概況",
"status": "released",
"order": 2
},
{
"route": "/resource",
"name": "設備即時概況",
"status": "released",
"order": 4
},
{
"route": "/qc-gate",
"name": "QC-GATE 狀態",
"status": "released",
"order": 6
}
]
},
{
"id": "drawer-2",
"name": "歷史報表",
"order": 2,
"admin_only": false,
"pages": [
{
"route": "/hold-history",
"name": "Hold 歷史績效",
"status": "released",
"order": 3
},
{
"route": "/resource-history",
"name": "設備歷史績效",
"status": "released",
"order": 5
}
]
},
{
"id": "drawer",
"name": "查詢工具",
"order": 3,
"admin_only": false,
"pages": [
{
"route": "/reject-history",
"name": "報廢歷史查詢",
"status": "released",
"order": 1
},
{
"route": "/job-query",
"name": "設備維修查詢",
"status": "released",
"order": 2
}
]
},
{
"id": "drawer-3",
"name": "追溯工具",
"order": 4,
"admin_only": false,
"pages": [
{
"route": "/query-tool",
"name": "批次追蹤工具",
"status": "released",
"order": 2
},
{
"route": "/material-trace",
"name": "原物料追溯查詢",
"status": "released",
"order": 3
},
{
"route": "/mid-section-defect",
"name": "製程不良追溯分析",
"status": "released",
"order": 3
}
]
}
]
}

View File

@@ -0,0 +1,46 @@
{
"source": "frontend route parsing and current parity matrix",
"routes": {
"/wip-overview": {
"query_keys": [
"workorder",
"lotid",
"package",
"type",
"status"
],
"notes": "filters + status URL state must remain compatible"
},
"/wip-detail": {
"query_keys": [
"workcenter",
"workorder",
"lotid",
"package",
"type",
"status"
],
"notes": "workcenter deep-link and back-link query continuity"
},
"/hold-detail": {
"query_keys": [
"reason"
],
"notes": "reason required for normal access flow"
},
"/resource-history": {
"query_keys": [
"start_date",
"end_date",
"granularity",
"workcenter_groups",
"families",
"resource_ids",
"is_production",
"is_key",
"is_monitor"
],
"notes": "query/export params must remain compatible"
}
}
}

View File

@@ -136,6 +136,35 @@
</div>
</section>
<!-- Pareto Materialization -->
<section class="panel" v-if="perfDetail?.pareto_materialization && !perfDetail.pareto_materialization.error">
<h2 class="panel-title">Pareto 物化層</h2>
<div class="pareto-stats-grid">
<StatCard :value="paretoHitRateDisplay" label="命中率" />
<StatCard :value="perfDetail.pareto_materialization.hit" label="命中次數" />
<StatCard :value="perfDetail.pareto_materialization.miss" label="未命中次數" />
<StatCard :value="perfDetail.pareto_materialization.build" label="建構次數" />
<StatCard :value="perfDetail.pareto_materialization.build_ok" label="建構成功" />
<StatCard :value="perfDetail.pareto_materialization.build_fail" label="建構失敗" />
<StatCard :value="perfDetail.pareto_materialization.fallback" label="Fallback 次數" />
<StatCard :value="perfDetail.pareto_materialization.rejected_oversize" label="超大拒絕" />
<StatCard :value="paretoBuildLatencyDisplay" label="最近建構耗時" />
<StatCard :value="paretoPayloadDisplay" label="Snapshot 大小" />
</div>
<div class="pareto-fallback-reasons" v-if="paretoFallbackReasons.length">
<h3 class="sub-title">Fallback 原因分布</h3>
<table class="mini-table">
<thead><tr><th>原因</th><th>次數</th></tr></thead>
<tbody>
<tr v-for="r in paretoFallbackReasons" :key="r.reason">
<td>{{ r.reason }}</td>
<td>{{ r.count }}</td>
</tr>
</tbody>
</table>
</div>
</section>
<!-- Cache Hit Rate Trend -->
<TrendChart
v-if="historyData.length > 1"
@@ -186,6 +215,28 @@
yAxisLabel="MB"
/>
<!-- Worker Memory Guard -->
<section class="panel" v-if="perfDetail?.worker_memory_guard?.enabled">
<h2 class="panel-title">Worker 記憶體守衛</h2>
<GaugeBar
label="RSS 使用率"
:value="perfDetail.worker_memory_guard.rss_pct"
:max="100"
unit="%"
:displayText="memoryGuardRssDisplay"
:warningThreshold="0.70"
:dangerThreshold="0.85"
/>
<div class="memory-guard-stats">
<StatCard :value="perfDetail.worker_memory_guard.last_rss_mb?.toFixed(1)" label="當前 RSS (MB)" />
<StatCard :value="perfDetail.worker_memory_guard.limit_mb" label="上限 (MB)" />
<StatCard :value="memoryGuardLevelDisplay" label="壓力等級" />
<StatCard :value="perfDetail.worker_memory_guard.warn_count" label="警告次數" />
<StatCard :value="perfDetail.worker_memory_guard.evict_count" label="驅逐次數" />
<StatCard :value="perfDetail.worker_memory_guard.restart_count" label="重啟次數" />
</div>
</section>
<!-- Worker Control -->
<section class="panel">
<h2 class="panel-title">Worker 控制</h2>
@@ -408,6 +459,44 @@ const cooldownDisplay = computed(() => {
return '就緒';
});
// Gauge text for the Worker Memory Guard panel, e.g.
// "512.3 MB / 1024 MB (50.0%)". Returns '' when the guard payload is
// absent (empty gauge label) and '-' for individual missing fields.
// NOTE(review): the '' fallback differs from the '-' used by the other
// display computeds below — looks intentional for GaugeBar, but confirm.
const memoryGuardRssDisplay = computed(() => {
const g = perfDetail.value?.worker_memory_guard;
if (!g) return '';
return `${g.last_rss_mb?.toFixed(1) ?? '-'} MB / ${g.limit_mb} MB (${g.rss_pct?.toFixed(1) ?? '-'}%)`;
});
// Maps the guard's pressure level to a localized label; unknown levels
// fall through to the raw value, and a missing level renders as '-'.
const memoryGuardLevelDisplay = computed(() => {
const level = perfDetail.value?.worker_memory_guard?.level;
const levelMap = { normal: '正常', warn: '警告', evict: '驅逐中', restart: '重啟中' };
return levelMap[level] || level || '-';
});
// Pareto materialization hit rate as a percentage string (one decimal),
// from the 0..1 ratio in the API payload; '-' when unavailable.
const paretoHitRateDisplay = computed(() => {
const r = perfDetail.value?.pareto_materialization?.hit_rate;
return r != null ? `${(r * 100).toFixed(1)}%` : '-';
});
// Last Pareto build latency formatted as seconds with two decimals.
const paretoBuildLatencyDisplay = computed(() => {
const s = perfDetail.value?.pareto_materialization?.last_build_latency_s;
return s != null ? `${s.toFixed(2)}s` : '-';
});
// Human-readable size of the last Pareto snapshot payload, scaled to
// B / KB / MB using binary units (1024, 1048576).
const paretoPayloadDisplay = computed(() => {
const b = perfDetail.value?.pareto_materialization?.last_snapshot_payload_bytes;
if (b == null) return '-';
if (b < 1024) return `${b} B`;
if (b < 1048576) return `${(b / 1024).toFixed(1)} KB`;
return `${(b / 1048576).toFixed(1)} MB`;
});
// Fallback-reason distribution for the mini-table: converts the
// { reason: count } map into [{ reason, count }] rows, dropping
// zero-count reasons. Guards against a missing/non-object payload.
// Note: insertion order of the source object is preserved (no sort).
const paretoFallbackReasons = computed(() => {
const reasons = perfDetail.value?.pareto_materialization?.fallback_reasons;
if (!reasons || typeof reasons !== 'object') return [];
return Object.entries(reasons)
.filter(([, count]) => count > 0)
.map(([reason, count]) => ({ reason, count }));
});
// --- Data Fetching ---
async function loadSystemStatus() {
try {
@@ -466,6 +555,7 @@ async function loadPerformanceHistory() {
historyData.value = snapshots.map((s) => ({
...s,
worker_rss_mb: s.worker_rss_bytes ? Math.round(s.worker_rss_bytes / 1048576 * 10) / 10 : 0,
redis_used_memory_mb: s.redis_used_memory_mb ?? (s.redis_used_memory ? Math.round(s.redis_used_memory / 1048576 * 10) / 10 : 0),
}));
} catch (e) {
console.error('Failed to load performance history:', e);
@@ -486,7 +576,7 @@ const latencyTrendSeries = [
];
const redisTrendSeries = [
{ name: '記憶體 (bytes)', key: 'redis_used_memory', color: '#06b6d4' },
{ name: '記憶體 (MB)', key: 'redis_used_memory_mb', color: '#06b6d4' },
];
const hitRateTrendSeries = [

View File

@@ -542,3 +542,52 @@ body {
text-align: center;
padding: 32px 0;
}
/* Memory Guard Panel */
/* 6 equal columns — one per StatCard rendered in the panel. */
.memory-guard-stats {
display: grid;
grid-template-columns: repeat(6, 1fr);
gap: 8px;
margin-top: 12px;
}
/* Pareto Materialization Panel */
/* 5 columns x 2 rows for the 10 StatCards in the stats grid. */
.pareto-stats-grid {
display: grid;
grid-template-columns: repeat(5, 1fr);
gap: 8px;
}
.pareto-fallback-reasons {
margin-top: 16px;
}
.pareto-fallback-reasons .sub-title {
font-size: 0.85rem;
color: #64748b;
margin-bottom: 8px;
}
/* Compact two-column reason/count table; capped width so it does not
   stretch across the full panel. */
.pareto-fallback-reasons .mini-table {
max-width: 400px;
font-size: 0.85rem;
}
.pareto-fallback-reasons .mini-table th,
.pareto-fallback-reasons .mini-table td {
padding: 4px 12px;
}
.pareto-fallback-reasons .mini-table th {
color: #64748b;
font-weight: 500;
}
/* Narrow viewports: collapse both grids to fewer columns. */
@media (max-width: 768px) {
.memory-guard-stats {
grid-template-columns: repeat(3, 1fr);
}
.pareto-stats-grid {
grid-template-columns: repeat(2, 1fr);
}
}

View File

@@ -243,6 +243,55 @@ class MetricsHistoryStore:
logger.error("Failed to query metrics snapshots: %s", exc)
return []
def query_snapshots_aggregated(
    self, minutes: int = 30, bucket_seconds: int = 30,
) -> List[Dict[str, Any]]:
    """Return time-bucketed aggregated snapshots for trend charts.

    Groups raw rows into *bucket_seconds*-wide windows so that
    multi-worker deployments yield one row per window instead of one
    row per worker (removes the zigzag oscillation in trend charts).

    Aggregation per bucket: MAX for gauge-style metrics, SUM for
    ``latency_count``, and ``COUNT(DISTINCT worker_pid)`` for
    ``worker_count``.

    Args:
        minutes: Look-back window; rows with ``ts`` older than this
            are excluded.
        bucket_seconds: Bucket width in seconds. Clamped to >= 1:
            SQLite evaluates ``x / 0`` as NULL, which would silently
            collapse every row into a single NULL-keyed bucket.

    Returns:
        List of dict rows ordered by bucket timestamp ascending;
        empty list on any query failure.
    """
    if not self._initialized:
        self.initialize()
    # Defensive: bucket_seconds is interpolated into the SQL text
    # below, so force it to a positive int here. This guards both the
    # divide-by-zero group key and any non-numeric value ever reaching
    # the f-string.
    bucket_seconds = max(1, int(bucket_seconds))
    cutoff = (datetime.now() - timedelta(minutes=minutes)).isoformat()
    # NOTE(review): the cutoff is derived from naive local
    # datetime.now(), which implies `ts` is stored as a naive local
    # isoformat string. SQLite's strftime('%s', ts) interprets such a
    # value as UTC, so the bucketed epoch must be converted back with
    # plain 'unixepoch' (no 'localtime' modifier) to round-trip the
    # original wall-clock value; adding 'localtime' would shift every
    # bucket label by the server's UTC offset.
    sql = f"""
        SELECT
            datetime(
                (CAST(strftime('%s', ts) AS INTEGER) / {bucket_seconds}) * {bucket_seconds},
                'unixepoch'
            ) AS ts,
            MAX(pool_saturation) AS pool_saturation,
            MAX(pool_checked_out) AS pool_checked_out,
            MAX(pool_checked_in) AS pool_checked_in,
            MAX(pool_overflow) AS pool_overflow,
            MAX(pool_max_capacity) AS pool_max_capacity,
            MAX(redis_used_memory) AS redis_used_memory,
            MAX(redis_hit_rate) AS redis_hit_rate,
            MAX(rc_l1_hit_rate) AS rc_l1_hit_rate,
            MAX(rc_l2_hit_rate) AS rc_l2_hit_rate,
            MAX(rc_miss_rate) AS rc_miss_rate,
            MAX(latency_p50_ms) AS latency_p50_ms,
            MAX(latency_p95_ms) AS latency_p95_ms,
            MAX(latency_p99_ms) AS latency_p99_ms,
            SUM(latency_count) AS latency_count,
            MAX(slow_query_active) AS slow_query_active,
            MAX(slow_query_waiting) AS slow_query_waiting,
            MAX(worker_rss_bytes) AS worker_rss_bytes,
            COUNT(DISTINCT worker_pid) AS worker_count,
            ROUND(MAX(redis_used_memory) / 1048576.0, 2) AS redis_used_memory_mb
        FROM metrics_snapshots
        WHERE ts >= ?
        GROUP BY (CAST(strftime('%s', ts) AS INTEGER) / {bucket_seconds})
        ORDER BY ts ASC
    """
    try:
        with self._get_connection() as conn:
            cursor = conn.execute(sql, (cutoff,))
            return [dict(row) for row in cursor.fetchall()]
    except Exception as exc:
        # Trend charts tolerate an empty series; log and degrade
        # rather than propagate to the API layer.
        logger.error("Failed to query aggregated metrics snapshots: %s", exc)
        return []
def cleanup(self) -> int:
if not self._initialized:
return 0
@@ -349,13 +398,11 @@ class MetricsHistoryCollector:
data["slow_query_active"] = 0
data["slow_query_waiting"] = 0
# Worker RSS memory
# Worker RSS memory (current, not peak)
try:
import resource
# ru_maxrss is in KB on Linux
data["worker_rss_bytes"] = resource.getrusage(
resource.RUSAGE_SELF
).ru_maxrss * 1024
from mes_dashboard.core.interactive_memory_guard import process_rss_mb
rss_mb = process_rss_mb()
data["worker_rss_bytes"] = int(rss_mb * 1024 * 1024) if rss_mb is not None else 0
except Exception:
data["worker_rss_bytes"] = 0

View File

@@ -405,7 +405,7 @@ def api_performance_history():
minutes = request.args.get("minutes", 30, type=int)
minutes = max(1, min(minutes, 180))
store = get_metrics_history_store()
snapshots = store.query_snapshots(minutes=minutes)
snapshots = store.query_snapshots_aggregated(minutes=minutes)
return jsonify({
"success": True,
"data": {

View File

@@ -56,7 +56,12 @@ class TestHoldOverviewSummaryRoute(TestHoldOverviewRoutesBase):
self.assertTrue(payload['success'])
mock_service.assert_called_once_with(
reason=None,
hold_type='quality',
hold_type=None,
workorder=None,
lotid=None,
pj_type=None,
firstname=None,
waferdesc=None,
include_dummy=False,
)
@@ -74,8 +79,13 @@ class TestHoldOverviewSummaryRoute(TestHoldOverviewRoutesBase):
response = self.client.get('/api/hold-overview/summary?hold_type=all&reason=品質確認')
self.assertEqual(response.status_code, 200)
mock_service.assert_called_once_with(
reason='品質確認',
reason=['品質確認'],
hold_type=None,
workorder=None,
lotid=None,
pj_type=None,
firstname=None,
waferdesc=None,
include_dummy=False,
)
@@ -114,7 +124,12 @@ class TestHoldOverviewMatrixRoute(TestHoldOverviewRoutesBase):
include_dummy=False,
status='HOLD',
hold_type='non-quality',
reason='特殊需求管控',
reason=['特殊需求管控'],
workorder=None,
lotid=None,
pj_type=None,
firstname=None,
waferdesc=None,
)
def test_matrix_invalid_hold_type(self):
@@ -145,7 +160,7 @@ class TestHoldOverviewTreemapRoute(TestHoldOverviewRoutesBase):
self.assertEqual(response.status_code, 200)
mock_service.assert_called_once_with(
hold_type='quality',
reason='品質確認',
reason=['品質確認'],
workcenter='WB',
package='QFN',
include_dummy=False,
@@ -178,11 +193,16 @@ class TestHoldOverviewLotsRoute(TestHoldOverviewRoutesBase):
)
self.assertEqual(response.status_code, 200)
mock_service.assert_called_once_with(
reason='品質確認',
reason=['品質確認'],
hold_type=None,
treemap_reason='品質確認',
workcenter='WB',
package='QFN',
workorder=None,
lotid=None,
pj_type=None,
firstname=None,
waferdesc=None,
age_range='1-3',
include_dummy=False,
page=2,

View File

@@ -23,47 +23,47 @@ class TestHoldRoutesBase(unittest.TestCase):
self.client = self.app.test_client()
class TestHoldDetailPageRoute(TestHoldRoutesBase):
"""Test GET /hold-detail page route."""
def setUp(self):
super().setUp()
self.app.config['PORTAL_SPA_ENABLED'] = True
def test_hold_detail_page_requires_reason(self):
"""SPA mode should single-hop redirect missing reason to canonical shell overview."""
response = self.client.get('/hold-detail', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith('/portal-shell/wip-overview'))
def test_hold_detail_page_requires_reason_non_spa_mode(self):
"""Non-SPA mode should keep legacy overview redirect behavior."""
self.app.config['PORTAL_SPA_ENABLED'] = False
response = self.client.get('/hold-detail', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith('/wip-overview'))
def test_hold_detail_page_requires_reason_has_single_redirect_hop_in_spa_mode(self):
"""Follow-redirect flow should complete with exactly one redirect hop."""
response = self.client.get('/hold-detail', follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.history), 1)
self.assertTrue(response.history[0].location.endswith('/portal-shell/wip-overview'))
class TestHoldDetailPageRoute(TestHoldRoutesBase):
"""Test GET /hold-detail page route."""
def test_hold_detail_page_with_reason(self):
"""GET /hold-detail?reason=xxx should redirect to canonical shell route."""
response = self.client.get('/hold-detail?reason=YieldLimit', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith('/portal-shell/hold-detail?reason=YieldLimit'))
def test_hold_detail_page_includes_vite_entry(self):
"""Direct entry should be redirected to canonical shell host page."""
response = self.client.get('/hold-detail?reason=YieldLimit', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertIn('/portal-shell/hold-detail?reason=YieldLimit', response.location)
def setUp(self):
super().setUp()
self.app.config['PORTAL_SPA_ENABLED'] = True
def test_hold_detail_page_requires_reason(self):
"""SPA mode should single-hop redirect missing reason to canonical shell overview."""
response = self.client.get('/hold-detail', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith('/portal-shell/hold-overview'))
def test_hold_detail_page_requires_reason_non_spa_mode(self):
"""Non-SPA mode should keep legacy overview redirect behavior."""
self.app.config['PORTAL_SPA_ENABLED'] = False
response = self.client.get('/hold-detail', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith('/hold-overview'))
def test_hold_detail_page_requires_reason_has_single_redirect_hop_in_spa_mode(self):
"""Follow-redirect flow should complete with exactly one redirect hop."""
response = self.client.get('/hold-detail', follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.history), 1)
self.assertTrue(response.history[0].location.endswith('/portal-shell/hold-overview'))
def test_hold_detail_page_with_reason(self):
"""GET /hold-detail?reason=xxx should redirect to canonical shell route."""
response = self.client.get('/hold-detail?reason=YieldLimit', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith('/portal-shell/hold-detail?reason=YieldLimit'))
def test_hold_detail_page_includes_vite_entry(self):
"""Direct entry should be redirected to canonical shell host page."""
response = self.client.get('/hold-detail?reason=YieldLimit', follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertIn('/portal-shell/hold-detail?reason=YieldLimit', response.location)
class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
"""Test GET /api/wip/hold-detail/summary endpoint."""
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
@@ -97,38 +97,38 @@ class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
self.assertFalse(data['success'])
self.assertIn('reason', data['error'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
def test_returns_error_on_failure(self, mock_get_summary):
"""Should return success=False and 500 on failure."""
mock_get_summary.return_value = None
response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
self.assertIn('error', data)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
def test_passes_include_dummy(self, mock_get_summary):
"""Should pass include_dummy flag to summary service."""
mock_get_summary.return_value = {
'totalLots': 0,
'totalQty': 0,
'avgAge': 0,
'maxAge': 0,
'workcenterCount': 0,
}
self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit&include_dummy=true')
mock_get_summary.assert_called_once_with(
reason='YieldLimit',
include_dummy=True
)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
def test_returns_error_on_failure(self, mock_get_summary):
"""Should return success=False and 500 on failure."""
mock_get_summary.return_value = None
response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
self.assertIn('error', data)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
def test_passes_include_dummy(self, mock_get_summary):
"""Should pass include_dummy flag to summary service."""
mock_get_summary.return_value = {
'totalLots': 0,
'totalQty': 0,
'avgAge': 0,
'maxAge': 0,
'workcenterCount': 0,
}
self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit&include_dummy=true')
mock_get_summary.assert_called_once_with(
reason='YieldLimit',
include_dummy=True
)
class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
"""Test GET /api/wip/hold-detail/distribution endpoint."""
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
@@ -170,35 +170,35 @@ class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
self.assertEqual(response.status_code, 400)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
def test_returns_error_on_failure(self, mock_get_dist):
"""Should return success=False and 500 on failure."""
mock_get_dist.return_value = None
response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
def test_passes_include_dummy(self, mock_get_dist):
"""Should pass include_dummy flag to distribution service."""
mock_get_dist.return_value = {
'byWorkcenter': [],
'byPackage': [],
'byAge': [],
}
self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit&include_dummy=1')
mock_get_dist.assert_called_once_with(
reason='YieldLimit',
include_dummy=True
)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
def test_returns_error_on_failure(self, mock_get_dist):
"""Should return success=False and 500 on failure."""
mock_get_dist.return_value = None
response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
def test_passes_include_dummy(self, mock_get_dist):
"""Should pass include_dummy flag to distribution service."""
mock_get_dist.return_value = {
'byWorkcenter': [],
'byPackage': [],
'byAge': [],
}
self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit&include_dummy=1')
mock_get_dist.assert_called_once_with(
reason='YieldLimit',
include_dummy=True
)
class TestHoldDetailLotsRoute(TestHoldRoutesBase):
class TestHoldDetailLotsRoute(TestHoldRoutesBase):
"""Test GET /api/wip/hold-detail/lots endpoint."""
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
@@ -297,8 +297,8 @@ class TestHoldDetailLotsRoute(TestHoldRoutesBase):
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page_size'], 200)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_page_less_than_one(self, mock_get_lots):
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_page_less_than_one(self, mock_get_lots):
"""Page number less than 1 should be set to 1."""
mock_get_lots.return_value = {
'lots': [],
@@ -308,59 +308,59 @@ class TestHoldDetailLotsRoute(TestHoldRoutesBase):
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=0')
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page'], 1)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_invalid_page_type(self, mock_get_lots):
mock_get_lots.return_value = {
'lots': [],
'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
'filters': {'workcenter': None, 'package': None, 'ageRange': None}
}
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=abc')
self.assertEqual(response.status_code, 200)
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page'], 1)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_invalid_per_page_type(self, mock_get_lots):
mock_get_lots.return_value = {
'lots': [],
'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
'filters': {'workcenter': None, 'package': None, 'ageRange': None}
}
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&per_page=abc')
self.assertEqual(response.status_code, 200)
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page_size'], 50)
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page'], 1)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_returns_error_on_failure(self, mock_get_lots):
"""Should return success=False and 500 on failure."""
mock_get_lots.return_value = None
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 4))
def test_lots_rate_limited_returns_429(self, _mock_limit, mock_get_lots):
"""Rate-limited lots requests should return 429."""
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 429)
self.assertFalse(data['success'])
self.assertEqual(data['error']['code'], 'TOO_MANY_REQUESTS')
mock_get_lots.assert_not_called()
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_invalid_page_type(self, mock_get_lots):
mock_get_lots.return_value = {
'lots': [],
'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
'filters': {'workcenter': None, 'package': None, 'ageRange': None}
}
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=abc')
self.assertEqual(response.status_code, 200)
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page'], 1)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_invalid_per_page_type(self, mock_get_lots):
mock_get_lots.return_value = {
'lots': [],
'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
'filters': {'workcenter': None, 'package': None, 'ageRange': None}
}
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&per_page=abc')
self.assertEqual(response.status_code, 200)
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page_size'], 50)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_returns_error_on_failure(self, mock_get_lots):
"""Should return success=False and 500 on failure."""
mock_get_lots.return_value = None
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 4))
def test_lots_rate_limited_returns_429(self, _mock_limit, mock_get_lots):
"""Rate-limited lots requests should return 429."""
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
data = json.loads(response.data)
self.assertEqual(response.status_code, 429)
self.assertFalse(data['success'])
self.assertEqual(data['error']['code'], 'TOO_MANY_REQUESTS')
mock_get_lots.assert_not_called()
class TestHoldDetailAgeRangeFilters(TestHoldRoutesBase):

View File

@@ -0,0 +1,222 @@
# -*- coding: utf-8 -*-
"""Tests for core/metrics_history module.
Covers RSS measurement via process_rss_mb, multi-worker aggregation,
time-bucket grouping, redis_used_memory_mb calculation, and the
original (non-aggregated) query_snapshots API.
"""
from __future__ import annotations
import os
import tempfile
from datetime import datetime, timedelta
from unittest.mock import MagicMock, patch
import pytest
from mes_dashboard.core.metrics_history import (
MetricsHistoryCollector,
MetricsHistoryStore,
)
# ============================================================
# Helpers
# ============================================================
def _insert_snapshot(store, ts_str, pid, rss_bytes, **kwargs):
"""Insert a metrics snapshot directly into the store's SQLite DB."""
defaults = {
'pool_saturation': 0.5, 'pool_checked_out': 0, 'pool_checked_in': 5,
'pool_overflow': 0, 'pool_max_capacity': 10, 'redis_used_memory': 0,
'redis_hit_rate': 0, 'rc_l1_hit_rate': 0, 'rc_l2_hit_rate': 0,
'rc_miss_rate': 0, 'latency_p50_ms': 0, 'latency_p95_ms': 0,
'latency_p99_ms': 0, 'latency_count': 0, 'slow_query_active': 0,
'slow_query_waiting': 0,
}
defaults.update(kwargs)
with store._write_lock:
with store._get_connection() as conn:
conn.execute(
"""INSERT INTO metrics_snapshots
(ts, worker_pid, worker_rss_bytes, pool_saturation, pool_checked_out,
pool_checked_in, pool_overflow, pool_max_capacity, redis_used_memory,
redis_hit_rate, rc_l1_hit_rate, rc_l2_hit_rate, rc_miss_rate,
latency_p50_ms, latency_p95_ms, latency_p99_ms, latency_count,
slow_query_active, slow_query_waiting)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)""",
(ts_str, pid, rss_bytes, defaults['pool_saturation'], defaults['pool_checked_out'],
defaults['pool_checked_in'], defaults['pool_overflow'], defaults['pool_max_capacity'],
defaults['redis_used_memory'], defaults['redis_hit_rate'], defaults['rc_l1_hit_rate'],
defaults['rc_l2_hit_rate'], defaults['rc_miss_rate'], defaults['latency_p50_ms'],
defaults['latency_p95_ms'], defaults['latency_p99_ms'], defaults['latency_count'],
defaults['slow_query_active'], defaults['slow_query_waiting']),
)
conn.commit()
# ============================================================
# Test RSS Measurement
# ============================================================
class TestRSSMeasurement:
"""Verify that _collect_snapshot reads RSS via process_rss_mb."""
@patch("mes_dashboard.core.metrics_history.get_metrics_history_store")
def test_rss_bytes_from_process_rss_mb(self, mock_get_store):
"""Mock process_rss_mb to return 512.5 MB, verify worker_rss_bytes
in the stored snapshot equals int(512.5 * 1024 * 1024)."""
with tempfile.TemporaryDirectory() as tmpdir:
db_path = os.path.join(tmpdir, "test_rss.sqlite")
store = MetricsHistoryStore(db_path=db_path)
store.initialize()
mock_get_store.return_value = store
collector = MetricsHistoryCollector(app=None, store=store)
with (
patch(
"mes_dashboard.core.metrics_history.get_pool_status",
create=True,
side_effect=ImportError,
),
patch(
"mes_dashboard.core.interactive_memory_guard.process_rss_mb",
return_value=512.5,
) as mock_rss,
):
collector._collect_snapshot()
mock_rss.assert_called_once()
rows = store.query_snapshots(minutes=5)
assert len(rows) >= 1
last = rows[-1]
expected_bytes = int(512.5 * 1024 * 1024)
assert last["worker_rss_bytes"] == expected_bytes
@patch("mes_dashboard.core.metrics_history.get_metrics_history_store")
def test_resource_getrusage_not_called(self, mock_get_store):
"""Ensure that resource.getrusage is NOT called during snapshot
collection -- we use psutil-based process_rss_mb instead."""
with tempfile.TemporaryDirectory() as tmpdir:
db_path = os.path.join(tmpdir, "test_no_rusage.sqlite")
store = MetricsHistoryStore(db_path=db_path)
store.initialize()
mock_get_store.return_value = store
collector = MetricsHistoryCollector(app=None, store=store)
with (
patch(
"mes_dashboard.core.interactive_memory_guard.process_rss_mb",
return_value=100.0,
),
patch("resource.getrusage") as mock_rusage,
):
collector._collect_snapshot()
mock_rusage.assert_not_called()
# ============================================================
# Test Multi-Worker Aggregation
# ============================================================
class TestMultiWorkerAggregation:
"""Two workers in the same 30s bucket must collapse into one row."""
def test_same_bucket_aggregation(self):
with tempfile.TemporaryDirectory() as tmpdir:
db_path = os.path.join(tmpdir, "test_agg.sqlite")
store = MetricsHistoryStore(db_path=db_path)
store.initialize()
now = datetime.now()
ts = now.isoformat()
rss_a = 1770 * 1024 * 1024
rss_b = 563 * 1024 * 1024
_insert_snapshot(store, ts, pid=1001, rss_bytes=rss_a, pool_saturation=0.8)
_insert_snapshot(store, ts, pid=1002, rss_bytes=rss_b, pool_saturation=0.3)
rows = store.query_snapshots_aggregated(minutes=5)
assert len(rows) == 1, f"Expected 1 aggregated row, got {len(rows)}"
row = rows[0]
assert row["worker_rss_bytes"] == rss_a # MAX
assert row["worker_count"] == 2
# ============================================================
# Test Different Time Buckets
# ============================================================
class TestDifferentTimeBuckets:
"""Snapshots 60 seconds apart must land in separate 30s buckets."""
def test_two_buckets(self):
with tempfile.TemporaryDirectory() as tmpdir:
db_path = os.path.join(tmpdir, "test_buckets.sqlite")
store = MetricsHistoryStore(db_path=db_path)
store.initialize()
now = datetime.now()
ts1 = now.isoformat()
ts2 = (now - timedelta(seconds=60)).isoformat()
_insert_snapshot(store, ts1, pid=1001, rss_bytes=500 * 1024 * 1024)
_insert_snapshot(store, ts2, pid=1001, rss_bytes=400 * 1024 * 1024)
rows = store.query_snapshots_aggregated(minutes=5)
assert len(rows) == 2, f"Expected 2 rows for different buckets, got {len(rows)}"
# ============================================================
# Test redis_used_memory_mb Calculation
# ============================================================
class TestRedisUsedMemoryMb:
"""Aggregated query must compute redis_used_memory_mb = bytes / 1048576."""
def test_redis_memory_conversion(self):
with tempfile.TemporaryDirectory() as tmpdir:
db_path = os.path.join(tmpdir, "test_redis_mb.sqlite")
store = MetricsHistoryStore(db_path=db_path)
store.initialize()
redis_bytes = 268435456 # 256 MB
now = datetime.now()
_insert_snapshot(
store,
now.isoformat(),
pid=1001,
rss_bytes=0,
redis_used_memory=redis_bytes,
)
rows = store.query_snapshots_aggregated(minutes=5)
assert len(rows) == 1
assert rows[0]["redis_used_memory_mb"] == 256.0
# ============================================================
# Test Original query_snapshots Still Works
# ============================================================
class TestQuerySnapshotsNoAggregation:
"""The non-aggregated query_snapshots must return all rows as-is."""
def test_no_aggregation(self):
with tempfile.TemporaryDirectory() as tmpdir:
db_path = os.path.join(tmpdir, "test_raw.sqlite")
store = MetricsHistoryStore(db_path=db_path)
store.initialize()
now = datetime.now()
ts = now.isoformat()
_insert_snapshot(store, ts, pid=2001, rss_bytes=100 * 1024 * 1024)
_insert_snapshot(store, ts, pid=2002, rss_bytes=200 * 1024 * 1024)
rows = store.query_snapshots(minutes=5)
assert len(rows) == 2, f"Expected 2 raw rows, got {len(rows)}"

View File

@@ -132,7 +132,7 @@ def test_navigation_drawer_and_page_order_deterministic_non_admin():
payload = json.loads(response.data.decode("utf-8"))
drawer_ids = [drawer["id"] for drawer in payload["drawers"]]
assert drawer_ids == ["reports", "drawer-2", "drawer"]
assert drawer_ids == ["reports", "drawer-2", "drawer", "drawer-3"]
reports_routes = [page["route"] for page in payload["drawers"][0]["pages"]]
assert reports_routes == ["/wip-overview", "/hold-overview", "/resource", "/qc-gate"]

View File

@@ -8,23 +8,23 @@ Tests the core service functions without database dependencies:
"""
import pytest
from mes_dashboard.services.query_tool_service import (
validate_date_range,
validate_lot_input,
validate_equipment_input,
_resolve_by_lot_id,
_resolve_by_wafer_lot,
_resolve_by_serial_number,
_resolve_by_work_order,
get_lot_split_merge_history,
from mes_dashboard.services.query_tool_service import (
validate_date_range,
validate_lot_input,
validate_equipment_input,
_resolve_by_lot_id,
_resolve_by_wafer_lot,
_resolve_by_serial_number,
_resolve_by_work_order,
get_lot_split_merge_history,
BATCH_SIZE,
MAX_LOT_IDS,
MAX_SERIAL_NUMBERS,
MAX_WORK_ORDERS,
MAX_GD_WORK_ORDERS,
MAX_EQUIPMENTS,
MAX_DATE_RANGE_DAYS,
)
MAX_LOT_IDS,
MAX_SERIAL_NUMBERS,
MAX_WORK_ORDERS,
MAX_GD_WORK_ORDERS,
MAX_EQUIPMENTS,
MAX_DATE_RANGE_DAYS,
)
class TestValidateDateRange:
@@ -52,18 +52,18 @@ class TestValidateDateRange:
assert result is not None
assert str(MAX_DATE_RANGE_DAYS) in result
def test_exactly_max_range(self):
"""Should allow exactly max range days."""
# 365 days from 2025-01-01 is 2026-01-01
result = validate_date_range('2025-01-01', '2026-01-01')
assert result is None
def test_one_day_over_max_range(self):
"""Should reject one day over max range."""
# 366 days
result = validate_date_range('2025-01-01', '2026-01-02')
assert result is not None
assert str(MAX_DATE_RANGE_DAYS) in result
def test_exactly_max_range(self):
"""Should allow exactly max range days."""
# 365 days from 2025-01-01 is 2026-01-01
result = validate_date_range('2025-01-01', '2026-01-01')
assert result is None
def test_one_day_over_max_range(self):
"""Should reject one day over max range."""
# 366 days
result = validate_date_range('2025-01-01', '2026-01-02')
assert result is not None
assert str(MAX_DATE_RANGE_DAYS) in result
def test_invalid_date_format(self):
"""Should reject invalid date format."""
@@ -90,7 +90,7 @@ class TestValidateDateRange:
assert '格式' in result or 'format' in result.lower()
class TestValidateLotInput:
class TestValidateLotInput:
"""Tests for validate_lot_input function."""
def test_valid_lot_ids(self):
@@ -117,24 +117,24 @@ class TestValidateLotInput:
assert result is not None
assert '至少一個' in result
def test_large_input_list_allowed_when_no_count_cap(self, monkeypatch):
"""Should allow large lists when count cap is disabled."""
monkeypatch.setenv("CONTAINER_RESOLVE_INPUT_MAX_VALUES", "0")
values = [f'GA{i:09d}' for i in range(MAX_LOT_IDS + 50)]
result = validate_lot_input('lot_id', values)
assert result is None
def test_rejects_too_broad_wildcard_pattern(self, monkeypatch):
"""Should reject broad wildcard like '%' to prevent full scan."""
monkeypatch.setenv("CONTAINER_RESOLVE_PATTERN_MIN_PREFIX_LEN", "2")
result = validate_lot_input('lot_id', ['%'])
assert result is not None
assert '萬用字元條件過於寬鬆' in result
def test_accepts_wildcard_with_prefix(self, monkeypatch):
monkeypatch.setenv("CONTAINER_RESOLVE_PATTERN_MIN_PREFIX_LEN", "2")
result = validate_lot_input('lot_id', ['GA25%'])
assert result is None
def test_large_input_list_allowed_when_no_count_cap(self, monkeypatch):
"""Should allow large lists when count cap is disabled."""
monkeypatch.setenv("CONTAINER_RESOLVE_INPUT_MAX_VALUES", "0")
values = [f'GA{i:09d}' for i in range(MAX_LOT_IDS + 50)]
result = validate_lot_input('lot_id', values)
assert result is None
def test_rejects_too_broad_wildcard_pattern(self, monkeypatch):
"""Should reject broad wildcard like '%' to prevent full scan."""
monkeypatch.setenv("CONTAINER_RESOLVE_PATTERN_MIN_PREFIX_LEN", "2")
result = validate_lot_input('lot_id', ['%'])
assert result is not None
assert '萬用字元條件過於寬鬆' in result
def test_accepts_wildcard_with_prefix(self, monkeypatch):
monkeypatch.setenv("CONTAINER_RESOLVE_PATTERN_MIN_PREFIX_LEN", "2")
result = validate_lot_input('lot_id', ['GA25%'])
assert result is None
class TestValidateEquipmentInput:
@@ -167,15 +167,15 @@ class TestValidateEquipmentInput:
assert result is None
class TestResolveQueriesUseBindParams:
class TestResolveQueriesUseBindParams:
"""Queries with user input should always use bind params."""
def test_resolve_by_lot_id_uses_query_builder_params(self):
def test_resolve_by_lot_id_uses_query_builder_params(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
with patch('mes_dashboard.services.query_tool_service.read_sql_df_slow') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
@@ -193,143 +193,143 @@ class TestResolveQueriesUseBindParams:
sql_params = mock_load.call_args.kwargs
assert 'CONTAINER_FILTER' in sql_params
assert ':p0' in sql_params['CONTAINER_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'LOT-1'}
def test_resolve_by_lot_id_supports_wildcard_pattern(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
'CONTAINERID': 'CID-1',
'CONTAINERNAME': 'GA25123401',
'SPECNAME': 'SPEC-1',
'QTY': 100,
},
{
'CONTAINERID': 'CID-2',
'CONTAINERNAME': 'GA24123401',
'SPECNAME': 'SPEC-2',
'QTY': 200,
},
])
result = _resolve_by_lot_id(['GA25%01'])
assert result['total'] == 1
assert result['data'][0]['lot_id'] == 'GA25123401'
assert result['data'][0]['input_value'] == 'GA25%01'
sql_params = mock_load.call_args.kwargs
assert "LIKE" in sql_params['CONTAINER_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'GA25%01'}
def test_resolve_by_wafer_lot_supports_wildcard_pattern(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
'CONTAINERID': 'CID-1',
'CONTAINERNAME': 'GA25123401-A00-001',
'SPECNAME': 'SPEC-1',
'QTY': 100,
'FIRSTNAME': 'GMSN-1173#A',
},
{
'CONTAINERID': 'CID-2',
'CONTAINERNAME': 'GA25123402-A00-001',
'SPECNAME': 'SPEC-2',
'QTY': 100,
'FIRSTNAME': 'GMSN-9999#B',
},
])
result = _resolve_by_wafer_lot(['GMSN-1173%'])
assert result['total'] == 1
assert result['data'][0]['input_value'] == 'GMSN-1173%'
sql_params = mock_load.call_args.kwargs
assert "LIKE" in sql_params['WAFER_FILTER']
assert "OBJECTTYPE = 'LOT'" in sql_params['WAFER_FILTER']
def test_resolve_by_serial_number_uses_query_builder_params(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
mock_load.side_effect = [
"SELECT * FROM COMBINE",
"SELECT * FROM CONTAINER_NAME",
"SELECT * FROM FIRSTNAME",
]
mock_read.side_effect = [
pd.DataFrame([
{
'CONTAINERID': 'CID-FIN',
'FINISHEDNAME': 'SN-1',
'CONTAINERNAME': 'LOT-FIN',
'SPECNAME': 'SPEC-1',
}
]),
pd.DataFrame([
{
'CONTAINERID': 'CID-NAME',
'CONTAINERNAME': 'SN-1',
'SPECNAME': 'SPEC-2',
'MFGORDERNAME': None,
'QTY': 1,
}
]),
pd.DataFrame([
{
'CONTAINERID': 'CID-FIRST',
'CONTAINERNAME': 'GD25000001-A01',
'FIRSTNAME': 'SN-1',
'SPECNAME': 'SPEC-3',
'QTY': 1,
}
]),
]
result = _resolve_by_serial_number(['SN-1'])
assert result['total'] == 3
assert {row['match_source'] for row in result['data']} == {
'finished_name',
'container_name',
'first_name',
}
assert [call.args[0] for call in mock_load.call_args_list] == [
'query_tool/lot_resolve_serial',
'query_tool/lot_resolve_id',
'query_tool/lot_resolve_wafer_lot',
]
assert ':p0' in mock_load.call_args_list[0].kwargs['SERIAL_FILTER']
assert ':p0' in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
assert ':p0' in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
assert mock_read.call_args_list[0].args[1] == {'p0': 'SN-1'}
assert mock_read.call_args_list[1].args[1] == {'p0': 'SN-1'}
assert mock_read.call_args_list[2].args[1] == {'p0': 'SN-1'}
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'LOT-1'}
def test_resolve_by_work_order_uses_query_builder_params(self):
def test_resolve_by_lot_id_supports_wildcard_pattern(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
with patch('mes_dashboard.services.query_tool_service.read_sql_df_slow') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
'CONTAINERID': 'CID-1',
'CONTAINERNAME': 'GA25123401',
'SPECNAME': 'SPEC-1',
'QTY': 100,
},
{
'CONTAINERID': 'CID-2',
'CONTAINERNAME': 'GA24123401',
'SPECNAME': 'SPEC-2',
'QTY': 200,
},
])
result = _resolve_by_lot_id(['GA25%01'])
assert result['total'] == 1
assert result['data'][0]['lot_id'] == 'GA25123401'
assert result['data'][0]['input_value'] == 'GA25%01'
sql_params = mock_load.call_args.kwargs
assert "LIKE" in sql_params['CONTAINER_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'GA25%01'}
def test_resolve_by_wafer_lot_supports_wildcard_pattern(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df_slow') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
'CONTAINERID': 'CID-1',
'CONTAINERNAME': 'GA25123401-A00-001',
'SPECNAME': 'SPEC-1',
'QTY': 100,
'FIRSTNAME': 'GMSN-1173#A',
},
{
'CONTAINERID': 'CID-2',
'CONTAINERNAME': 'GA25123402-A00-001',
'SPECNAME': 'SPEC-2',
'QTY': 100,
'FIRSTNAME': 'GMSN-9999#B',
},
])
result = _resolve_by_wafer_lot(['GMSN-1173%'])
assert result['total'] == 1
assert result['data'][0]['input_value'] == 'GMSN-1173%'
sql_params = mock_load.call_args.kwargs
assert "LIKE" in sql_params['WAFER_FILTER']
assert "OBJECTTYPE = 'LOT'" in sql_params['WAFER_FILTER']
def test_resolve_by_serial_number_uses_query_builder_params(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df_slow') as mock_read:
mock_load.side_effect = [
"SELECT * FROM COMBINE",
"SELECT * FROM CONTAINER_NAME",
"SELECT * FROM FIRSTNAME",
]
mock_read.side_effect = [
pd.DataFrame([
{
'CONTAINERID': 'CID-FIN',
'FINISHEDNAME': 'SN-1',
'CONTAINERNAME': 'LOT-FIN',
'SPECNAME': 'SPEC-1',
}
]),
pd.DataFrame([
{
'CONTAINERID': 'CID-NAME',
'CONTAINERNAME': 'SN-1',
'SPECNAME': 'SPEC-2',
'MFGORDERNAME': None,
'QTY': 1,
}
]),
pd.DataFrame([
{
'CONTAINERID': 'CID-FIRST',
'CONTAINERNAME': 'GD25000001-A01',
'FIRSTNAME': 'SN-1',
'SPECNAME': 'SPEC-3',
'QTY': 1,
}
]),
]
result = _resolve_by_serial_number(['SN-1'])
assert result['total'] == 3
assert {row['match_source'] for row in result['data']} == {
'finished_name',
'container_name',
'first_name',
}
assert [call.args[0] for call in mock_load.call_args_list] == [
'query_tool/lot_resolve_serial',
'query_tool/lot_resolve_id',
'query_tool/lot_resolve_wafer_lot',
]
assert ':p0' in mock_load.call_args_list[0].kwargs['SERIAL_FILTER']
assert ':p0' in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
assert ':p0' in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[1].kwargs['CONTAINER_FILTER']
assert "OBJECTTYPE = 'LOT'" in mock_load.call_args_list[2].kwargs['WAFER_FILTER']
assert mock_read.call_args_list[0].args[1] == {'p0': 'SN-1'}
assert mock_read.call_args_list[1].args[1] == {'p0': 'SN-1'}
assert mock_read.call_args_list[2].args[1] == {'p0': 'SN-1'}
def test_resolve_by_work_order_uses_query_builder_params(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df_slow') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
@@ -343,66 +343,64 @@ class TestResolveQueriesUseBindParams:
result = _resolve_by_work_order(['WO-1'])
assert result['total'] == 1
sql_params = mock_load.call_args.kwargs
assert ':p0' in sql_params['WORK_ORDER_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'WO-1'}
def test_resolve_by_work_order_supports_wildcard_pattern(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
'CONTAINERID': 'CID-1',
'MFGORDERNAME': 'GA25120018',
'CONTAINERNAME': 'GA25120018-A00-001',
'SPECNAME': 'SPEC-1',
},
{
'CONTAINERID': 'CID-2',
'MFGORDERNAME': 'GA24120018',
'CONTAINERNAME': 'GA24120018-A00-001',
'SPECNAME': 'SPEC-2',
},
])
result = _resolve_by_work_order(['ga25%'])
assert result['total'] == 1
assert result['data'][0]['input_value'] == 'ga25%'
assert result['data'][0]['lot_id'] == 'GA25120018-A00-001'
sql_params = mock_load.call_args.kwargs
assert "LIKE" in sql_params['WORK_ORDER_FILTER']
assert "UPPER(NVL(MFGORDERNAME, ''))" in sql_params['WORK_ORDER_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'GA25%'}
sql_params = mock_load.call_args.kwargs
assert ':p0' in sql_params['WORK_ORDER_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'WO-1'}
def test_resolve_by_work_order_supports_wildcard_pattern(self):
from unittest.mock import patch
import pandas as pd
with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
with patch('mes_dashboard.services.query_tool_service.read_sql_df_slow') as mock_read:
mock_load.return_value = "SELECT * FROM DUAL"
mock_read.return_value = pd.DataFrame([
{
'CONTAINERID': 'CID-1',
'MFGORDERNAME': 'GA25120018',
'CONTAINERNAME': 'GA25120018-A00-001',
'SPECNAME': 'SPEC-1',
},
{
'CONTAINERID': 'CID-2',
'MFGORDERNAME': 'GA24120018',
'CONTAINERNAME': 'GA24120018-A00-001',
'SPECNAME': 'SPEC-2',
},
])
result = _resolve_by_work_order(['ga25%'])
assert result['total'] == 1
assert result['data'][0]['input_value'] == 'ga25%'
assert result['data'][0]['lot_id'] == 'GA25120018-A00-001'
sql_params = mock_load.call_args.kwargs
assert "LIKE" in sql_params['WORK_ORDER_FILTER']
assert "UPPER(NVL(MFGORDERNAME, ''))" in sql_params['WORK_ORDER_FILTER']
_, query_params = mock_read.call_args.args
assert query_params == {'p0': 'GA25%'}
class TestSplitMergeHistoryMode:
    # Merge residue removed: a second, contradictory bare string ("Both modes
    # use read_sql_df_slow...") followed the docstring as a dead no-op
    # statement. The fast path is expected to use read_sql_df (see
    # test_fast_mode_uses_time_window_and_row_limit), so that stale text
    # was dropped.
    """Fast mode should use read_sql_df, full mode should use read_sql_df_slow."""
def test_fast_mode_uses_time_window_and_row_limit(self):
    """Fast mode (full_history=False) must query via read_sql_df with a
    6-month time window and a 500-row cap, and never touch the slow path.

    Merge residue removed: the original body contained a duplicated patch of
    read_sql_df_slow, a duplicated mock_load setup, a duplicated call to
    get_lot_split_merge_history, and two contradictory final assertions
    (mock_slow.assert_not_called() vs mock_slow.assert_called_once()).
    Resolved toward the fast-path version, which is the only one consistent
    with patching BOTH readers and with the class docstring.
    """
    from unittest.mock import patch
    import pandas as pd
    with patch('mes_dashboard.services.query_tool_service.SQLLoader.load_with_params') as mock_load:
        with patch('mes_dashboard.services.query_tool_service.read_sql_df') as mock_fast:
            with patch('mes_dashboard.services.query_tool_service.read_sql_df_slow') as mock_slow:
                mock_load.return_value = "SELECT * FROM DUAL"
                mock_fast.return_value = pd.DataFrame([])
                result = get_lot_split_merge_history('WO-1', full_history=False)
                assert result['mode'] == 'fast'
                # Fast mode constrains the rendered SQL in two ways:
                kwargs = mock_load.call_args.kwargs
                assert "ADD_MONTHS(SYSDATE, -6)" in kwargs['TIME_WINDOW']
                assert "FETCH FIRST 500 ROWS ONLY" == kwargs['ROW_LIMIT']
                # ...and it must use the fast reader exclusively.
                mock_fast.assert_called_once()
                mock_slow.assert_not_called()
def test_full_mode_uses_slow_query_without_limits(self):
from unittest.mock import patch
@@ -431,9 +429,9 @@ class TestServiceConstants:
"""Batch size should be <= 1000 (Oracle limit)."""
assert BATCH_SIZE <= 1000
# Deduplicated: this test was defined twice back-to-back; the second
# definition silently shadowed the first, so pytest collected only one copy.
def test_max_date_range_is_reasonable(self):
    """Max date range should be 365 days."""
    assert MAX_DATE_RANGE_DAYS == 365
def test_max_lot_ids_is_reasonable(self):
"""Max LOT IDs should be sensible."""
@@ -443,13 +441,13 @@ class TestServiceConstants:
"""Max serial numbers should be sensible."""
assert 10 <= MAX_SERIAL_NUMBERS <= 100
# Deduplicated: both tests below were defined twice back-to-back; each
# second definition silently shadowed the first, so pytest collected only
# one copy of each.
def test_max_work_orders_is_reasonable(self):
    """Max work orders should match API contract."""
    assert MAX_WORK_ORDERS == 50

def test_max_gd_work_orders_is_reasonable(self):
    """Max GD work orders should match API contract."""
    assert MAX_GD_WORK_ORDERS == 100
def test_max_equipments_is_reasonable(self):
"""Max equipments should be sensible."""