feat: dimension pareto cache-based computation, filter propagation, and MSD events cache isolation
Reject History:
- Compute dimension pareto (package/type/workflow/workcenter/equipment) from cached DataFrame instead of re-querying Oracle per dimension change
- Propagate supplementary filters and trend date selection to dimension pareto
- Add staleness tracking to prevent race conditions on rapid dimension switches
- Add WORKFLOWNAME to detail and export outputs
- Fix button hover visibility with CSS specificity

MSD (製程不良追溯分析):
- Separate raw events caching from aggregation computation so changing loss_reasons uses the EventFetcher per-domain cache (fast) and recomputes aggregation with current filters instead of returning stale cached results
- Exclude loss_reasons from the MSD seed cache key since seed resolution does not use it, avoiding unnecessary Oracle re-queries
- Add suspect context panel, analysis summary, upstream station/spec filters
- Add machine bar click drill-down and filtered attribution charts

Query Tool:
- Support batch container_ids in lot CSV export (history/materials/rejects/holds)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -8,6 +8,8 @@ from unittest.mock import patch
|
||||
import pandas as pd
|
||||
|
||||
from mes_dashboard.services.mid_section_defect_service import (
|
||||
_attribute_materials,
|
||||
_attribute_wafer_roots,
|
||||
build_trace_aggregation_from_events,
|
||||
query_analysis,
|
||||
query_analysis_detail,
|
||||
@@ -251,3 +253,112 @@ def test_query_station_options_returns_ordered_list():
|
||||
assert result[0]['order'] == 0
|
||||
assert result[-1]['name'] == '測試'
|
||||
assert result[-1]['order'] == 11
|
||||
|
||||
|
||||
# --- _attribute_materials tests ---
|
||||
|
||||
def _make_detection_data(entries):
|
||||
"""Helper: build detection_data dict from simplified entries."""
|
||||
data = {}
|
||||
for e in entries:
|
||||
data[e['cid']] = {
|
||||
'containername': e.get('name', e['cid']),
|
||||
'trackinqty': e['trackinqty'],
|
||||
'rejectqty_by_reason': e.get('reasons', {}),
|
||||
}
|
||||
return data
|
||||
|
||||
|
||||
def test_attribute_materials_basic_rate_calculation():
    """Two detection lots sharing one ancestor material roll up into one row."""
    detection_data = _make_detection_data([
        {'cid': 'C1', 'trackinqty': 100, 'reasons': {'R1': 5}},
        {'cid': 'C2', 'trackinqty': 200, 'reasons': {'R1': 10}},
    ])
    ancestors = {cid: {'A1'} for cid in ('C1', 'C2')}
    materials_by_cid = {
        'A1': [{'MATERIALPARTNAME': 'PART-A', 'MATERIALLOTNAME': 'LOT-X'}],
    }

    rows = _attribute_materials(detection_data, ancestors, materials_by_cid)

    assert len(rows) == 1
    row = rows[0]
    assert row['MATERIAL_KEY'] == 'PART-A (LOT-X)'
    assert row['INPUT_QTY'] == 300
    assert row['DEFECT_QTY'] == 15
    # 15 defects over 300 inputs -> 5% defect rate.
    assert abs(row['DEFECT_RATE'] - 5.0) < 0.01
    assert row['DETECTION_LOT_COUNT'] == 2
|
||||
|
||||
|
||||
def test_attribute_materials_null_lot_name():
    """A null MATERIALLOTNAME collapses the key to the part name alone."""
    detection_data = _make_detection_data([
        {'cid': 'C1', 'trackinqty': 100, 'reasons': {'R1': 3}},
    ])

    rows = _attribute_materials(
        detection_data,
        {'C1': {'A1'}},
        {'A1': [{'MATERIALPARTNAME': 'PART-B', 'MATERIALLOTNAME': None}]},
    )

    assert len(rows) == 1
    assert rows[0]['MATERIAL_KEY'] == 'PART-B'
    assert rows[0]['MATERIAL_LOT_NAME'] == ''
|
||||
|
||||
|
||||
def test_attribute_materials_with_loss_reason_filter():
    """Only reject quantities for the selected loss reasons are counted."""
    detection_data = _make_detection_data([
        {'cid': 'C1', 'trackinqty': 100, 'reasons': {'R1': 5, 'R2': 3}},
    ])

    rows = _attribute_materials(
        detection_data,
        {'C1': {'A1'}},
        {'A1': [{'MATERIALPARTNAME': 'P', 'MATERIALLOTNAME': 'L'}]},
        loss_reasons=['R1'],
    )

    # R2's quantity (3) is excluded by the filter.
    assert rows[0]['DEFECT_QTY'] == 5
|
||||
|
||||
|
||||
# --- _attribute_wafer_roots tests ---
|
||||
|
||||
def test_attribute_wafer_roots_basic():
    """Two lots mapped to the same root aggregate into a single row."""
    detection_data = _make_detection_data([
        {'cid': 'C1', 'name': 'LOT-1', 'trackinqty': 100, 'reasons': {'R1': 5}},
        {'cid': 'C2', 'name': 'LOT-2', 'trackinqty': 200, 'reasons': {'R1': 10}},
    ])

    rows = _attribute_wafer_roots(detection_data, {'C1': 'ROOT-A', 'C2': 'ROOT-A'})

    assert len(rows) == 1
    root_row = rows[0]
    assert root_row['ROOT_CONTAINER_NAME'] == 'ROOT-A'
    assert root_row['INPUT_QTY'] == 300
    assert root_row['DEFECT_QTY'] == 15
|
||||
|
||||
|
||||
def test_attribute_wafer_roots_self_root():
    """LOTs with no root mapping should use their own container name."""
    detection_data = _make_detection_data([
        {'cid': 'C1', 'name': 'LOT-SELF', 'trackinqty': 100, 'reasons': {'R1': 2}},
    ])

    rows = _attribute_wafer_roots(detection_data, {})  # C1 has no root entry

    assert len(rows) == 1
    assert rows[0]['ROOT_CONTAINER_NAME'] == 'LOT-SELF'
|
||||
|
||||
|
||||
def test_attribute_wafer_roots_multiple_roots():
    """Distinct roots stay separate and come back sorted by DEFECT_RATE desc."""
    detection_data = _make_detection_data([
        {'cid': 'C1', 'name': 'L1', 'trackinqty': 100, 'reasons': {'R1': 5}},
        {'cid': 'C2', 'name': 'L2', 'trackinqty': 200, 'reasons': {'R1': 20}},
    ])

    rows = _attribute_wafer_roots(detection_data, {'C1': 'ROOT-A', 'C2': 'ROOT-B'})

    assert len(rows) == 2
    # ROOT-B (10% rate) outranks ROOT-A (5%) under DEFECT_RATE-descending order.
    assert rows[0]['ROOT_CONTAINER_NAME'] == 'ROOT-B'
    assert rows[1]['ROOT_CONTAINER_NAME'] == 'ROOT-A'
|
||||
|
||||
@@ -1152,6 +1152,62 @@ class TestWorkcenterGroupsEndpoint:
|
||||
assert 'error' in data
|
||||
|
||||
|
||||
class TestEquipmentRecentJobsEndpoint:
    """Tests for /api/query-tool/equipment-recent-jobs/<equipment_id> endpoint."""

    @patch('mes_dashboard.core.database.read_sql_df')
    @patch('mes_dashboard.sql.SQLLoader.load', return_value='SELECT 1')
    def test_returns_recent_jobs(self, _mock_sql, mock_read_sql, client):
        """Should return recent JOB records for given equipment."""
        import pandas as pd
        job_row = {
            'JOBID': 'JOB-001',
            'JOBSTATUS': 'Complete',
            'JOBMODELNAME': 'MODEL-A',
            'CREATEDATE': '2026-02-01 10:00:00',
            'COMPLETEDATE': '2026-02-01 12:00:00',
            'CAUSECODENAME': 'CAUSE-1',
            'REPAIRCODENAME': 'REPAIR-1',
            'RESOURCENAME': 'EQ-001',
        }
        mock_read_sql.return_value = pd.DataFrame([job_row])

        response = client.get('/api/query-tool/equipment-recent-jobs/EQ001')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert len(payload['data']) == 1
        assert payload['data'][0]['JOBID'] == 'JOB-001'
        assert payload['total'] == 1

    @patch('mes_dashboard.core.database.read_sql_df')
    @patch('mes_dashboard.sql.SQLLoader.load', return_value='SELECT 1')
    def test_returns_empty_when_no_jobs(self, _mock_sql, mock_read_sql, client):
        """Should return empty list when no jobs found."""
        import pandas as pd
        mock_read_sql.return_value = pd.DataFrame()

        response = client.get('/api/query-tool/equipment-recent-jobs/EQ002')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload['data'] == []
        assert payload['total'] == 0

    @patch('mes_dashboard.core.database.read_sql_df')
    @patch('mes_dashboard.sql.SQLLoader.load', return_value='SELECT 1')
    def test_handles_db_error(self, _mock_sql, mock_read_sql, client):
        """Should return 500 on database error."""
        mock_read_sql.side_effect = Exception('DB connection failed')

        response = client.get('/api/query-tool/equipment-recent-jobs/EQ003')

        assert response.status_code == 500
        payload = json.loads(response.data)
        assert 'error' in payload
|
||||
|
||||
|
||||
class TestLotHistoryWithWorkcenterFilter:
|
||||
"""Tests for /api/query-tool/lot-history with workcenter filter."""
|
||||
|
||||
|
||||
@@ -166,7 +166,7 @@ class TestRejectHistoryApiRoutes(TestRejectHistoryRoutesBase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertFalse(payload['success'])
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.query_reason_pareto')
|
||||
@patch('mes_dashboard.routes.reject_history_routes.query_dimension_pareto')
|
||||
def test_reason_pareto_defaults_top80(self, mock_pareto):
|
||||
mock_pareto.return_value = {'items': [], 'metric_mode': 'reject_total', 'pareto_scope': 'top80', 'meta': {}}
|
||||
|
||||
@@ -176,6 +176,47 @@ class TestRejectHistoryApiRoutes(TestRejectHistoryRoutesBase):
|
||||
_, kwargs = mock_pareto.call_args
|
||||
self.assertEqual(kwargs['pareto_scope'], 'top80')
|
||||
self.assertEqual(kwargs['metric_mode'], 'reject_total')
|
||||
self.assertEqual(kwargs['dimension'], 'reason')
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.query_dimension_pareto')
def test_dimension_pareto_accepts_package(self, mock_pareto):
    """dimension=package in the query string is forwarded to query_dimension_pareto."""
    mock_pareto.return_value = {
        'items': [{'reason': 'PKG-A', 'metric_value': 100, 'pct': 50, 'cumPct': 50}],
        'dimension': 'package',
        'metric_mode': 'reject_total',
        'pareto_scope': 'all',
        'meta': {},
    }

    response = self.client.get(
        '/api/reject-history/reason-pareto?start_date=2026-02-01&end_date=2026-02-07&dimension=package&pareto_scope=all'
    )
    body = json.loads(response.data)

    self.assertEqual(response.status_code, 200)
    self.assertTrue(body['success'])
    _, kwargs = mock_pareto.call_args
    self.assertEqual(kwargs['dimension'], 'package')
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.query_dimension_pareto')
def test_dimension_pareto_accepts_equipment(self, mock_pareto):
    """dimension=equipment in the query string is forwarded to query_dimension_pareto."""
    mock_pareto.return_value = {
        'items': [{'reason': 'EQ-01', 'metric_value': 50, 'pct': 100, 'cumPct': 100}],
        'dimension': 'equipment',
        'metric_mode': 'reject_total',
        'pareto_scope': 'top80',
        'meta': {},
    }

    response = self.client.get(
        '/api/reject-history/reason-pareto?start_date=2026-02-01&end_date=2026-02-07&dimension=equipment'
    )
    body = json.loads(response.data)

    self.assertEqual(response.status_code, 200)
    self.assertTrue(body['success'])
    _, kwargs = mock_pareto.call_args
    self.assertEqual(kwargs['dimension'], 'equipment')
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.query_list')
|
||||
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 6))
|
||||
|
||||
@@ -9,7 +9,7 @@ import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
from mes_dashboard.core.cache import NoOpCache
|
||||
from mes_dashboard.core.rate_limit import reset_rate_limits_for_tests
|
||||
from mes_dashboard.routes.trace_routes import _lineage_cache_key
|
||||
from mes_dashboard.routes.trace_routes import _lineage_cache_key, _seed_cache_key
|
||||
|
||||
|
||||
def _client():
|
||||
@@ -380,3 +380,105 @@ def test_events_rate_limited_returns_429(_mock_rate_limit):
|
||||
assert response.headers.get('Retry-After') == '5'
|
||||
payload = response.get_json()
|
||||
assert payload['error']['code'] == 'TOO_MANY_REQUESTS'
|
||||
|
||||
|
||||
# ---- MSD cache isolation tests ----
|
||||
|
||||
|
||||
def test_msd_seed_cache_key_ignores_loss_reasons():
    """Changing loss_reasons should not change the seed cache key for MSD."""
    base_params = {
        'start_date': '2025-01-01',
        'end_date': '2025-01-31',
        'station': '測試',
        'direction': 'backward',
    }
    variants = [
        dict(base_params, loss_reasons=['A', 'B', 'C']),
        dict(base_params, loss_reasons=['A']),
        base_params,  # no loss_reasons at all
    ]

    keys = {_seed_cache_key('mid_section_defect', params) for params in variants}

    # All three parameter sets must hash to the same seed cache key.
    assert len(keys) == 1
|
||||
|
||||
|
||||
def test_non_msd_seed_cache_key_includes_all_params():
    """For non-MSD profiles the seed cache key should still hash all params."""
    common = {'resolve_type': 'lot_id', 'values': ['LOT-001']}

    key_a = _seed_cache_key('query_tool', dict(common, extra='x'))
    key_b = _seed_cache_key('query_tool', dict(common, extra='y'))

    # A difference in any param ('extra' here) must produce a different key.
    assert key_a != key_b
|
||||
|
||||
|
||||
@patch('mes_dashboard.routes.trace_routes.build_trace_aggregation_from_events')
@patch('mes_dashboard.routes.trace_routes.EventFetcher.fetch_events')
def test_msd_events_recomputes_aggregation_on_each_call(
    mock_fetch_events,
    mock_build_aggregation,
):
    """MSD events should NOT use events-level cache, so aggregation is always fresh."""
    mock_fetch_events.return_value = {
        'CID-001': [{'CONTAINERID': 'CID-001', 'WORKCENTER_GROUP': '測試'}]
    }
    mock_build_aggregation.return_value = {
        'kpi': {'total_input': 100},
        'charts': {},
        'daily_trend': [],
        'available_loss_reasons': [],
        'genealogy_status': 'ready',
        'detail_total_count': 0,
    }

    client = _client()
    request_body = {
        'profile': 'mid_section_defect',
        'container_ids': ['CID-001'],
        'domains': ['upstream_history'],
        'params': {
            'start_date': '2025-01-01',
            'end_date': '2025-01-31',
            'loss_reasons': ['Reason-A'],
        },
        'lineage': {'ancestors': {'CID-001': ['CID-A']}},
        'seed_container_ids': ['CID-001'],
    }

    # First call primes whatever caching is in play.
    first = client.post('/api/trace/events', json=request_body)
    assert first.status_code == 200

    # Second call with different loss_reasons — aggregation must be re-invoked,
    # not served from a stale events-level cache entry.
    request_body['params']['loss_reasons'] = ['Reason-B']
    second = client.post('/api/trace/events', json=request_body)
    assert second.status_code == 200

    assert mock_build_aggregation.call_count == 2
|
||||
|
||||
|
||||
@patch('mes_dashboard.routes.trace_routes.EventFetcher.fetch_events')
@patch('mes_dashboard.routes.trace_routes.cache_get')
@patch('mes_dashboard.routes.trace_routes.cache_set')
def test_non_msd_events_cache_unchanged(mock_cache_set, mock_cache_get, mock_fetch_events):
    """Non-MSD profiles should still use events-level cache as before."""
    mock_cache_get.return_value = {
        'stage': 'events',
        'results': {'history': {'data': [], 'count': 0}},
        'aggregation': None,
    }

    response = _client().post(
        '/api/trace/events',
        json={
            'profile': 'query_tool',
            'container_ids': ['CID-001'],
            'domains': ['history'],
        },
    )

    assert response.status_code == 200
    payload = response.get_json()
    assert payload['stage'] == 'events'
    # Served straight from the events-level cache, so EventFetcher never runs.
    mock_fetch_events.assert_not_called()
|
||||
|
||||
Reference in New Issue
Block a user