feat(reject-history): fix Pareto datasources, multi-select filtering, and UX enhancements
- Fix dimension Pareto datasources: PJ_TYPE/PRODUCTLINENAME from DW_MES_CONTAINER, WORKFLOWNAME from DW_MES_LOTWIPHISTORY via WIPTRACKINGGROUPKEYID, EQUIPMENTNAME from LOTREJECTHISTORY only (no WIP fallback), workcenter dimension uses WORKCENTER_GROUP - Add multi-select Pareto click filtering with chip display and detail list integration - Add TOP 20 display scope selector for TYPE/WORKFLOW/機台 dimensions - Pass Pareto selection (dimension + values) through to list/export endpoints - Enable TRACE_WORKER_ENABLED=true by default in start_server.sh and .env.example - Archive reject-history-pareto-datasource-fix and reject-history-pareto-ux-enhancements - Update reject-history-api and reject-history-page specs with new Pareto behaviors Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
188
tests/test_reject_dataset_cache.py
Normal file
188
tests/test_reject_dataset_cache.py
Normal file
@@ -0,0 +1,188 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for reject_dataset_cache helpers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pandas as pd
|
||||
import pytest
|
||||
|
||||
from mes_dashboard.services import reject_dataset_cache as cache_svc
|
||||
|
||||
|
||||
def test_compute_dimension_pareto_applies_policy_filters_before_grouping(monkeypatch):
    """Cached pareto should honor the same policy toggles as view/query paths."""
    # Two containers differing only in scrap type / package / quantities.
    rows = [
        {
            "CONTAINERID": container,
            "LOSSREASONNAME": "001_A",
            "LOSSREASON_CODE": "001_A",
            "SCRAP_OBJECTTYPE": scrap_type,
            "PRODUCTLINENAME": package,
            "WORKCENTER_GROUP": "WB",
            "REJECT_TOTAL_QTY": reject_qty,
            "DEFECT_QTY": 0,
            "MOVEIN_QTY": movein_qty,
        }
        for container, scrap_type, package, reject_qty, movein_qty in (
            ("C1", "MATERIAL", "(NA)", 100, 1000),
            ("C2", "LOT", "PKG-A", 50, 900),
        )
    ]
    frame = pd.DataFrame(rows)

    monkeypatch.setattr(cache_svc, "_get_cached_df", lambda _query_id: frame)
    monkeypatch.setattr(
        "mes_dashboard.services.scrap_reason_exclusion_cache.get_excluded_reasons",
        lambda: [],
    )

    def run_pareto(drop_material_scrap):
        # Only the material-scrap toggle varies between the two invocations.
        return cache_svc.compute_dimension_pareto(
            query_id="qid-1",
            dimension="package",
            pareto_scope="all",
            include_excluded_scrap=False,
            exclude_material_scrap=drop_material_scrap,
            exclude_pb_diode=True,
        )

    excluded_material = run_pareto(True)
    kept_all = run_pareto(False)

    labels_when_excluded = {item.get("reason") for item in excluded_material.get("items", [])}
    labels_when_kept = {item.get("reason") for item in kept_all.get("items", [])}

    # The MATERIAL row ("(NA)" package) must vanish only when the toggle is on.
    assert "PKG-A" in labels_when_excluded
    assert "(NA)" not in labels_when_excluded
    assert "(NA)" in labels_when_kept
|
||||
|
||||
|
||||
def _build_detail_filter_df():
|
||||
return pd.DataFrame(
|
||||
[
|
||||
{
|
||||
"CONTAINERID": "C1",
|
||||
"CONTAINERNAME": "LOT-001",
|
||||
"TXN_DAY": pd.Timestamp("2026-02-01"),
|
||||
"TXN_TIME": pd.Timestamp("2026-02-01 08:00:00"),
|
||||
"WORKCENTERSEQUENCE_GROUP": 1,
|
||||
"WORKCENTER_GROUP": "WB",
|
||||
"WORKCENTERNAME": "WB-A",
|
||||
"SPECNAME": "SPEC-A",
|
||||
"WORKFLOWNAME": "WF-A",
|
||||
"PRIMARY_EQUIPMENTNAME": "EQ-1",
|
||||
"EQUIPMENTNAME": "EQ-1",
|
||||
"PRODUCTLINENAME": "PKG-A",
|
||||
"PJ_TYPE": "TYPE-A",
|
||||
"LOSSREASONNAME": "001_A",
|
||||
"LOSSREASON_CODE": "001_A",
|
||||
"SCRAP_OBJECTTYPE": "LOT",
|
||||
"MOVEIN_QTY": 100,
|
||||
"REJECT_TOTAL_QTY": 30,
|
||||
"DEFECT_QTY": 0,
|
||||
},
|
||||
{
|
||||
"CONTAINERID": "C2",
|
||||
"CONTAINERNAME": "LOT-002",
|
||||
"TXN_DAY": pd.Timestamp("2026-02-01"),
|
||||
"TXN_TIME": pd.Timestamp("2026-02-01 09:00:00"),
|
||||
"WORKCENTERSEQUENCE_GROUP": 1,
|
||||
"WORKCENTER_GROUP": "WB",
|
||||
"WORKCENTERNAME": "WB-B",
|
||||
"SPECNAME": "SPEC-B",
|
||||
"WORKFLOWNAME": "WF-B",
|
||||
"PRIMARY_EQUIPMENTNAME": "EQ-2",
|
||||
"EQUIPMENTNAME": "EQ-2",
|
||||
"PRODUCTLINENAME": "PKG-B",
|
||||
"PJ_TYPE": "TYPE-B",
|
||||
"LOSSREASONNAME": "001_A",
|
||||
"LOSSREASON_CODE": "001_A",
|
||||
"SCRAP_OBJECTTYPE": "LOT",
|
||||
"MOVEIN_QTY": 100,
|
||||
"REJECT_TOTAL_QTY": 20,
|
||||
"DEFECT_QTY": 0,
|
||||
},
|
||||
{
|
||||
"CONTAINERID": "C3",
|
||||
"CONTAINERNAME": "LOT-003",
|
||||
"TXN_DAY": pd.Timestamp("2026-02-01"),
|
||||
"TXN_TIME": pd.Timestamp("2026-02-01 10:00:00"),
|
||||
"WORKCENTERSEQUENCE_GROUP": 1,
|
||||
"WORKCENTER_GROUP": "WB",
|
||||
"WORKCENTERNAME": "WB-C",
|
||||
"SPECNAME": "SPEC-C",
|
||||
"WORKFLOWNAME": "WF-C",
|
||||
"PRIMARY_EQUIPMENTNAME": "EQ-3",
|
||||
"EQUIPMENTNAME": "EQ-3",
|
||||
"PRODUCTLINENAME": "PKG-C",
|
||||
"PJ_TYPE": "TYPE-C",
|
||||
"LOSSREASONNAME": "002_B",
|
||||
"LOSSREASON_CODE": "002_B",
|
||||
"SCRAP_OBJECTTYPE": "LOT",
|
||||
"MOVEIN_QTY": 100,
|
||||
"REJECT_TOTAL_QTY": 10,
|
||||
"DEFECT_QTY": 0,
|
||||
},
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_apply_view_and_export_share_same_pareto_multi_select_filter(monkeypatch):
    """View and CSV export must apply the same pareto multi-select filtering."""
    frame = _build_detail_filter_df()

    monkeypatch.setattr(cache_svc, "_get_cached_df", lambda _query_id: frame)
    monkeypatch.setattr(
        "mes_dashboard.services.scrap_reason_exclusion_cache.get_excluded_reasons",
        lambda: [],
    )

    view_result = cache_svc.apply_view(
        query_id="qid-2",
        pareto_dimension="type",
        pareto_values=["TYPE-A", "TYPE-C"],
    )
    export_rows = cache_svc.export_csv_from_cache(
        query_id="qid-2",
        pareto_dimension="type",
        pareto_values=["TYPE-A", "TYPE-C"],
    )

    detail = view_result["detail"]
    types_in_detail = {entry["PJ_TYPE"] for entry in detail["items"]}
    types_in_export = {row["TYPE"] for row in export_rows}

    # TYPE-B is filtered out everywhere; both paths keep exactly two rows.
    assert detail["pagination"]["total"] == 2
    assert types_in_detail == {"TYPE-A", "TYPE-C"}
    assert types_in_export == {"TYPE-A", "TYPE-C"}
    assert len(export_rows) == 2
|
||||
|
||||
|
||||
def test_apply_view_rejects_invalid_pareto_dimension(monkeypatch):
    """Both cached entry points must raise on an unsupported pareto dimension."""
    frame = _build_detail_filter_df()
    monkeypatch.setattr(cache_svc, "_get_cached_df", lambda _query_id: frame)

    # apply_view first, then the CSV export path -- same order as the original.
    for entry_point in (cache_svc.apply_view, cache_svc.export_csv_from_cache):
        with pytest.raises(ValueError, match="不支援的 pareto_dimension"):
            entry_point(
                query_id="qid-3",
                pareto_dimension="invalid-dimension",
                pareto_values=["X"],
            )
|
||||
@@ -218,6 +218,121 @@ class TestRejectHistoryApiRoutes(TestRejectHistoryRoutesBase):
|
||||
_, kwargs = mock_pareto.call_args
|
||||
self.assertEqual(kwargs['dimension'], 'equipment')
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.query_dimension_pareto')
@patch('mes_dashboard.routes.reject_history_routes.compute_dimension_pareto')
def test_dimension_pareto_with_query_id_passes_policy_flags_to_cached_path(
    self,
    mock_cached_pareto,
    mock_sql_pareto,
):
    """With a query_id the route must hit the cached path and forward policy flags."""
    mock_cached_pareto.return_value = {
        'items': [{'reason': 'PKG-A', 'metric_value': 100, 'pct': 100, 'cumPct': 100}],
        'dimension': 'package',
        'metric_mode': 'reject_total',
        'pareto_scope': 'all',
    }

    params = (
        'start_date=2026-02-01',
        'end_date=2026-02-07',
        'query_id=qid-001',
        'dimension=package',
        'pareto_scope=all',
        'include_excluded_scrap=true',
        'exclude_material_scrap=false',
        'exclude_pb_diode=false',
    )
    response = self.client.get('/api/reject-history/reason-pareto?' + '&'.join(params))
    payload = json.loads(response.data)

    self.assertEqual(response.status_code, 200)
    self.assertTrue(payload['success'])

    _, kwargs = mock_cached_pareto.call_args
    for key, expected in (
        ('query_id', 'qid-001'),
        ('dimension', 'package'),
        ('pareto_scope', 'all'),
    ):
        self.assertEqual(kwargs[key], expected)
    # Identity checks: policy toggles must arrive as real booleans, not strings.
    self.assertIs(kwargs['include_excluded_scrap'], True)
    self.assertIs(kwargs['exclude_material_scrap'], False)
    self.assertIs(kwargs['exclude_pb_diode'], False)
    mock_sql_pareto.assert_not_called()
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.apply_view')
def test_view_passes_pareto_multi_select_filters(self, mock_apply_view):
    """The /view route must forward the pareto dimension and all selected values."""
    mock_apply_view.return_value = {
        'analytics_raw': [],
        'summary': {},
        'detail': {
            'items': [],
            'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
        },
    }

    url = (
        '/api/reject-history/view'
        '?query_id=qid-001'
        '&pareto_dimension=workflow'
        '&pareto_values=WF-A'
        '&pareto_values=WF-B'
    )
    response = self.client.get(url)
    payload = json.loads(response.data)

    self.assertEqual(response.status_code, 200)
    self.assertTrue(payload['success'])
    # Repeated query params must be collected into a list, in request order.
    _, kwargs = mock_apply_view.call_args
    self.assertEqual(kwargs['pareto_dimension'], 'workflow')
    self.assertEqual(kwargs['pareto_values'], ['WF-A', 'WF-B'])
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.apply_view')
def test_view_invalid_pareto_dimension_returns_400(self, mock_apply_view):
    """An unknown pareto_dimension is rejected before apply_view ever runs."""
    url = (
        '/api/reject-history/view'
        '?query_id=qid-001'
        '&pareto_dimension=invalid'
        '&pareto_values=X'
    )
    response = self.client.get(url)
    payload = json.loads(response.data)

    self.assertEqual(response.status_code, 400)
    self.assertFalse(payload['success'])
    mock_apply_view.assert_not_called()
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes._list_to_csv')
@patch('mes_dashboard.routes.reject_history_routes.export_csv_from_cache')
def test_export_cached_passes_pareto_multi_select_filters(
    self,
    mock_export_cached,
    mock_list_to_csv,
):
    """The cached-export route must forward the pareto selection unchanged."""
    mock_export_cached.return_value = [{'LOT': 'LOT-001'}]
    mock_list_to_csv.return_value = iter(['A,B\n', '1,2\n'])

    url = (
        '/api/reject-history/export-cached'
        '?query_id=qid-001'
        '&pareto_dimension=type'
        '&pareto_values=TYPE-A'
        '&pareto_values=TYPE-C'
    )
    response = self.client.get(url)

    self.assertEqual(response.status_code, 200)
    # Both repeated values must reach the export service as one list.
    _, kwargs = mock_export_cached.call_args
    self.assertEqual(kwargs['pareto_dimension'], 'type')
    self.assertEqual(kwargs['pareto_values'], ['TYPE-A', 'TYPE-C'])
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.export_csv_from_cache')
def test_export_cached_invalid_pareto_dimension_returns_400(self, mock_export_cached):
    """Invalid pareto_dimension on export is rejected before touching the cache."""
    url = (
        '/api/reject-history/export-cached'
        '?query_id=qid-001'
        '&pareto_dimension=invalid'
        '&pareto_values=TYPE-A'
    )
    response = self.client.get(url)
    payload = json.loads(response.data)

    self.assertEqual(response.status_code, 400)
    self.assertFalse(payload['success'])
    mock_export_cached.assert_not_called()
|
||||
|
||||
@patch('mes_dashboard.routes.reject_history_routes.query_list')
|
||||
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 6))
|
||||
def test_list_rate_limited_returns_429(self, _mock_limit, mock_list):
|
||||
|
||||
Reference in New Issue
Block a user