feat(reject-history): ship report page and archive openspec change

This commit is contained in:
egg
2026-02-13 20:35:52 +08:00
parent 7cbb155619
commit 05d907ac72
47 changed files with 4419 additions and 73 deletions

View File

@@ -55,6 +55,7 @@ class AppFactoryTests(unittest.TestCase):
"/wip-overview",
"/wip-detail",
"/hold-overview",
"/reject-history",
"/excel-query",
"/query-tool",
"/tmtt-defect",
@@ -74,6 +75,7 @@ class AppFactoryTests(unittest.TestCase):
"/api/excel-query/upload",
"/api/query-tool/resolve",
"/api/tmtt-defect/analysis",
"/api/reject-history/summary",
}
missing = expected - rules
self.assertFalse(missing, f"Missing routes: {sorted(missing)}")

View File

@@ -260,3 +260,133 @@ def test_tmtt_defect_native_smoke_range_query_and_csv_export(client):
export = client.get("/api/tmtt-defect/export?start_date=2026-02-01&end_date=2026-02-11")
assert export.status_code == 200
assert "text/csv" in export.content_type
def test_reject_history_native_smoke_query_sections_and_export(client):
    """Smoke-test the reject-history shell route plus every native API section.

    The service layer is patched out, so this validates routing, the response
    envelope shape, and CSV export wiring rather than any query logic.
    """
    _login_as_admin(client)
    shell = client.get("/portal-shell/reject-history?start_date=2026-02-01&end_date=2026-02-11")
    assert shell.status_code == 200
    # The bare route either renders the page directly (200) or redirects to
    # the portal shell (302); anything else is a wiring regression.
    page = client.get("/reject-history", follow_redirects=False)
    assert page.status_code in (200, 302), (
        f"unexpected status for /reject-history: {page.status_code}"
    )
    if page.status_code == 302:
        assert page.location.endswith("/portal-shell/reject-history")
    with (
        patch(
            "mes_dashboard.routes.reject_history_routes.get_filter_options",
            return_value={
                "workcenter_groups": [{"name": "WB", "sequence": 1}],
                "reasons": ["R1"],
                "meta": {"include_excluded_scrap": False},
            },
        ),
        patch(
            "mes_dashboard.routes.reject_history_routes.query_summary",
            return_value={
                "MOVEIN_QTY": 100,
                "REJECT_TOTAL_QTY": 10,
                "DEFECT_QTY": 2,
                "REJECT_RATE_PCT": 10.0,
                "DEFECT_RATE_PCT": 2.0,
                "REJECT_SHARE_PCT": 83.3333,
                "AFFECTED_LOT_COUNT": 5,
                "AFFECTED_WORKORDER_COUNT": 3,
                "meta": {"include_excluded_scrap": False},
            },
        ),
        patch(
            "mes_dashboard.routes.reject_history_routes.query_trend",
            return_value={
                "items": [
                    {
                        "bucket_date": "2026-02-01",
                        "MOVEIN_QTY": 100,
                        "REJECT_TOTAL_QTY": 10,
                        "DEFECT_QTY": 2,
                        "REJECT_RATE_PCT": 10.0,
                        "DEFECT_RATE_PCT": 2.0,
                    }
                ],
                "granularity": "day",
                "meta": {"include_excluded_scrap": False},
            },
        ),
        patch(
            "mes_dashboard.routes.reject_history_routes.query_reason_pareto",
            return_value={
                "items": [
                    {
                        "reason": "R1",
                        "category": "CAT1",
                        "metric_value": 10,
                        "pct": 100.0,
                        "cumPct": 100.0,
                    }
                ],
                "metric_mode": "reject_total",
                "pareto_scope": "top80",
                "meta": {"include_excluded_scrap": False},
            },
        ),
        patch(
            "mes_dashboard.routes.reject_history_routes.query_list",
            return_value={
                "items": [
                    {
                        "TXN_DAY": "2026-02-01",
                        "WORKCENTER_GROUP": "WB",
                        "WORKCENTERNAME": "WB01",
                        "LOSSREASONNAME": "R1",
                        "REJECT_TOTAL_QTY": 10,
                        "DEFECT_QTY": 2,
                    }
                ],
                "pagination": {"page": 1, "perPage": 50, "total": 1, "totalPages": 1},
                "meta": {"include_excluded_scrap": False},
            },
        ),
        patch(
            "mes_dashboard.routes.reject_history_routes.export_csv",
            return_value=iter(
                [
                    "TXN_DAY,REJECT_TOTAL_QTY,DEFECT_QTY\n",
                    "2026-02-01,10,2\n",
                ]
            ),
        ),
    ):
        # Each section endpoint should wrap the patched payload in the
        # standard {success, data} envelope. Decode each response once.
        options = client.get("/api/reject-history/options?start_date=2026-02-01&end_date=2026-02-11")
        assert options.status_code == 200
        options_payload = options.get_json()
        assert options_payload["success"] is True
        assert options_payload["data"]["reasons"] == ["R1"]
        summary = client.get("/api/reject-history/summary?start_date=2026-02-01&end_date=2026-02-11")
        assert summary.status_code == 200
        summary_payload = summary.get_json()
        assert summary_payload["success"] is True
        assert summary_payload["data"]["REJECT_TOTAL_QTY"] == 10
        trend = client.get("/api/reject-history/trend?start_date=2026-02-01&end_date=2026-02-11")
        assert trend.status_code == 200
        trend_payload = trend.get_json()
        assert trend_payload["success"] is True
        assert trend_payload["data"]["items"][0]["bucket_date"] == "2026-02-01"
        pareto = client.get("/api/reject-history/reason-pareto?start_date=2026-02-01&end_date=2026-02-11")
        assert pareto.status_code == 200
        pareto_payload = pareto.get_json()
        assert pareto_payload["success"] is True
        assert pareto_payload["data"]["items"][0]["reason"] == "R1"
        detail = client.get("/api/reject-history/list?start_date=2026-02-01&end_date=2026-02-11")
        assert detail.status_code == 200
        detail_payload = detail.get_json()
        assert detail_payload["success"] is True
        assert detail_payload["data"]["pagination"]["total"] == 1
        export = client.get("/api/reject-history/export?start_date=2026-02-01&end_date=2026-02-11")
        assert export.status_code == 200
        assert "text/csv" in export.content_type

View File

@@ -0,0 +1,150 @@
# -*- coding: utf-8 -*-
"""Unit tests for reject-history routes."""
import json
import os
import unittest
from unittest.mock import patch
from mes_dashboard.app import create_app
import mes_dashboard.core.database as db
def _login_as_admin(client):
with client.session_transaction() as sess:
sess['admin'] = {'displayName': 'Admin', 'employeeNo': 'A001'}
class TestRejectHistoryRoutesBase(unittest.TestCase):
    """Shared fixture: a fresh Flask app and test client for every test."""

    def setUp(self):
        # Drop any cached engine so each test starts with a clean DB layer.
        db._ENGINE = None
        test_app = create_app('testing')
        test_app.config['TESTING'] = True
        self.app = test_app
        self.client = test_app.test_client()
class TestRejectHistoryPageRoute(unittest.TestCase):
    """Legacy (non-SPA) rendering path for the /reject-history page."""

    @patch.dict(os.environ, {'PORTAL_SPA_ENABLED': 'false'})
    @patch('mes_dashboard.app.os.path.exists', return_value=False)
    def test_reject_history_page_fallback_contains_vite_entry(self, _mock_exists):
        # Build the app after the environment/patches are in effect so the
        # SPA toggle and missing-dist fallback are picked up.
        db._ENGINE = None
        fallback_app = create_app('testing')
        fallback_app.config['TESTING'] = True
        fallback_client = fallback_app.test_client()
        _login_as_admin(fallback_client)
        resp = fallback_client.get('/reject-history', follow_redirects=False)
        self.assertEqual(resp.status_code, 200)
        page_html = resp.data.decode('utf-8')
        # The fallback template must still reference the Vite bundle entry.
        self.assertIn('/static/dist/reject-history.js', page_html)
class TestRejectHistoryApiRoutes(TestRejectHistoryRoutesBase):
    """Validation, filter pass-through, rate-limit, and export behavior of the API."""

    def test_summary_missing_dates_returns_400(self):
        # start_date/end_date are mandatory query parameters.
        response = self.client.get('/api/reject-history/summary')
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])

    def test_summary_invalid_include_excluded_scrap_returns_400(self):
        # Boolean flags must be recognized literals; junk values are rejected.
        response = self.client.get(
            '/api/reject-history/summary?start_date=2026-02-01&end_date=2026-02-07'
            '&include_excluded_scrap=invalid'
        )
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])

    def test_summary_invalid_exclude_material_scrap_returns_400(self):
        response = self.client.get(
            '/api/reject-history/summary?start_date=2026-02-01&end_date=2026-02-07'
            '&exclude_material_scrap=invalid'
        )
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])

    @patch('mes_dashboard.routes.reject_history_routes.query_summary')
    def test_summary_passes_filters_and_meta(self, mock_summary):
        # The route should forward parsed filters as keyword args and surface
        # the service-provided meta block in the response envelope.
        mock_summary.return_value = {
            'MOVEIN_QTY': 100,
            'REJECT_TOTAL_QTY': 10,
            'DEFECT_QTY': 5,
            'REJECT_RATE_PCT': 10,
            'DEFECT_RATE_PCT': 5,
            'REJECT_SHARE_PCT': 66.7,
            'AFFECTED_LOT_COUNT': 8,
            'AFFECTED_WORKORDER_COUNT': 4,
            'meta': {
                'include_excluded_scrap': False,
                'exclusion_applied': True,
                'excluded_reason_count': 2,
            },
        }
        response = self.client.get(
            '/api/reject-history/summary?start_date=2026-02-01&end_date=2026-02-07'
            '&workcenter_groups=WB&packages=PKG-A&reasons=R1&reasons=R2'
        )
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(payload['success'])
        self.assertEqual(payload['meta']['include_excluded_scrap'], False)
        _, kwargs = mock_summary.call_args
        self.assertEqual(kwargs['workcenter_groups'], ['WB'])
        self.assertEqual(kwargs['packages'], ['PKG-A'])
        # Repeated query parameters accumulate into a list.
        self.assertEqual(kwargs['reasons'], ['R1', 'R2'])
        self.assertIs(kwargs['include_excluded_scrap'], False)
        self.assertIs(kwargs['exclude_material_scrap'], True)

    @patch('mes_dashboard.routes.reject_history_routes.query_trend')
    def test_trend_invalid_granularity_returns_400(self, mock_trend):
        # A service-level ValueError is translated into a 400 response.
        mock_trend.side_effect = ValueError('Invalid granularity. Use day, week, or month')
        response = self.client.get(
            '/api/reject-history/trend?start_date=2026-02-01&end_date=2026-02-07&granularity=hour'
        )
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])

    @patch('mes_dashboard.routes.reject_history_routes.query_reason_pareto')
    def test_reason_pareto_defaults_top80(self, mock_pareto):
        mock_pareto.return_value = {'items': [], 'metric_mode': 'reject_total', 'pareto_scope': 'top80', 'meta': {}}
        response = self.client.get('/api/reject-history/reason-pareto?start_date=2026-02-01&end_date=2026-02-07')
        self.assertEqual(response.status_code, 200)
        _, kwargs = mock_pareto.call_args
        # Defaults applied when the caller omits the tuning parameters.
        self.assertEqual(kwargs['pareto_scope'], 'top80')
        self.assertEqual(kwargs['metric_mode'], 'reject_total')

    @patch('mes_dashboard.routes.reject_history_routes.query_list')
    @patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 6))
    def test_list_rate_limited_returns_429(self, _mock_limit, mock_list):
        # With the limiter reporting a hit, the service must not be invoked
        # and the retry hint must be propagated in the Retry-After header.
        response = self.client.get('/api/reject-history/list?start_date=2026-02-01&end_date=2026-02-07')
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 429)
        self.assertEqual(payload['error']['code'], 'TOO_MANY_REQUESTS')
        self.assertEqual(response.headers.get('Retry-After'), '6')
        mock_list.assert_not_called()

    @patch('mes_dashboard.routes.reject_history_routes.export_csv')
    def test_export_returns_csv_response(self, mock_export):
        mock_export.return_value = iter(['A,B\n', '1,2\n'])
        response = self.client.get('/api/reject-history/export?start_date=2026-02-01&end_date=2026-02-07')
        self.assertEqual(response.status_code, 200)
        # The attachment filename embeds the requested date range.
        self.assertIn('attachment; filename=reject_history_2026-02-01_to_2026-02-07.csv', response.headers.get('Content-Disposition', ''))
        self.assertIn('text/csv', response.headers.get('Content-Type', ''))
if __name__ == '__main__':
unittest.main()

View File

@@ -0,0 +1,360 @@
# -*- coding: utf-8 -*-
"""Unit tests for reject_history_service."""
from __future__ import annotations
import pandas as pd
import pytest
from mes_dashboard.services import reject_history_service as svc
def test_query_summary_returns_metrics_and_policy_meta(monkeypatch):
    """Summary query surfaces aggregate metrics plus exclusion-policy metadata."""
    # One enabled excluded reason ("358") should flow into the bind params.
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: {"358"})
    captured = {}

    def _fake_read_sql_df(_sql, params=None):
        # Capture bind parameters so the date range and exclusion binding
        # can be asserted after the call.
        captured["params"] = dict(params or {})
        return pd.DataFrame(
            [
                {
                    "MOVEIN_QTY": 1000,
                    "REJECT_TOTAL_QTY": 25,
                    "DEFECT_QTY": 10,
                    "REJECT_RATE_PCT": 2.5,
                    "DEFECT_RATE_PCT": 1.0,
                    "REJECT_SHARE_PCT": 71.4286,
                    "AFFECTED_LOT_COUNT": 12,
                    "AFFECTED_WORKORDER_COUNT": 7,
                }
            ]
        )

    monkeypatch.setattr(svc, "read_sql_df", _fake_read_sql_df)
    result = svc.query_summary(
        start_date="2026-02-01",
        end_date="2026-02-07",
        include_excluded_scrap=False,
    )
    assert result["MOVEIN_QTY"] == 1000
    assert result["REJECT_TOTAL_QTY"] == 25
    assert result["DEFECT_QTY"] == 10
    assert result["AFFECTED_LOT_COUNT"] == 12
    assert result["meta"]["include_excluded_scrap"] is False
    assert result["meta"]["exclusion_applied"] is True
    assert result["meta"]["excluded_reason_count"] == 1
    assert captured["params"]["start_date"] == "2026-02-01"
    assert captured["params"]["end_date"] == "2026-02-07"
    # The excluded reason must be bound as a parameter, not inlined into SQL.
    assert "358" in captured["params"].values()
def test_query_summary_include_override_skips_exclusion_filter(monkeypatch):
    """include_excluded_scrap=True bypasses the excluded-reason filter entirely."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: {"358", "REASON_X"})
    captured = {}

    def _fake_read_sql_df(_sql, params=None):
        captured["params"] = dict(params or {})
        return pd.DataFrame(
            [
                {
                    "MOVEIN_QTY": 1000,
                    "REJECT_TOTAL_QTY": 25,
                    "DEFECT_QTY": 10,
                    "REJECT_RATE_PCT": 2.5,
                    "DEFECT_RATE_PCT": 1.0,
                    "REJECT_SHARE_PCT": 71.4286,
                    "AFFECTED_LOT_COUNT": 12,
                    "AFFECTED_WORKORDER_COUNT": 7,
                }
            ]
        )

    monkeypatch.setattr(svc, "read_sql_df", _fake_read_sql_df)
    result = svc.query_summary(
        start_date="2026-02-01",
        end_date="2026-02-07",
        include_excluded_scrap=True,
    )
    assert result["meta"]["include_excluded_scrap"] is True
    assert result["meta"]["exclusion_applied"] is False
    assert result["meta"]["excluded_reason_count"] == 0
    # Neither excluded reason should leak into the bind parameters.
    assert "358" not in captured["params"].values()
    assert "REASON_X" not in captured["params"].values()
def test_build_where_clause_applies_reason_prefix_policy_by_default(monkeypatch):
    """Default filters enforce the numeric reason-name prefix policy."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    clause, _bind_params, clause_meta = svc._build_where_clause(include_excluded_scrap=False)
    for fragment in (
        "REGEXP_LIKE(UPPER(NVL(TRIM(b.LOSSREASONNAME), '')), '^[0-9]{3}_')",
        "NOT REGEXP_LIKE(UPPER(NVL(TRIM(b.LOSSREASONNAME), '')), '^(XXX|ZZZ)_')",
    ):
        assert fragment in clause
    assert clause_meta["reason_name_prefix_policy_applied"] is True
    assert clause_meta["exclusion_applied"] is True
def test_build_where_clause_include_override_skips_reason_prefix_policy(monkeypatch):
    """The include override disables both the prefix policy and reason exclusion."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: {"358"})
    clause, bind_params, clause_meta = svc._build_where_clause(
        include_excluded_scrap=True,
        packages=["PKG-A"],
    )
    for fragment in (
        "REGEXP_LIKE(UPPER(NVL(TRIM(b.LOSSREASONNAME), '')), '^[0-9]{3}_')",
        "NOT REGEXP_LIKE(UPPER(NVL(TRIM(b.LOSSREASONNAME), '')), '^(XXX|ZZZ)_')",
    ):
        assert fragment not in clause
    assert clause_meta["reason_name_prefix_policy_applied"] is False
    assert clause_meta["exclusion_applied"] is False
    assert clause_meta["package_filter_count"] == 1
    assert "358" not in bind_params.values()
def test_get_filter_options_includes_packages(monkeypatch):
    """Filter options expose reasons, packages, and ordered workcenter groups."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    monkeypatch.setattr(
        svc,
        "get_workcenter_groups",
        lambda: [
            {"name": "WB", "sequence": 1},
            {"name": "FA", "sequence": 2},
        ],
    )

    def _fake_read_sql_df(sql, _params=None):
        # Dispatch on the SELECT alias to fake each lookup query separately.
        if "AS REASON" in sql:
            return pd.DataFrame([{"REASON": "R1"}, {"REASON": "R2"}])
        if "AS PACKAGE" in sql:
            return pd.DataFrame([{"PACKAGE": "PKG-A"}, {"PACKAGE": "PKG-B"}])
        return pd.DataFrame()

    monkeypatch.setattr(svc, "read_sql_df", _fake_read_sql_df)
    result = svc.get_filter_options(
        start_date="2026-02-01",
        end_date="2026-02-07",
        include_excluded_scrap=False,
    )
    assert result["reasons"] == ["R1", "R2"]
    assert result["packages"] == ["PKG-A", "PKG-B"]
    assert result["workcenter_groups"][0]["name"] == "WB"
def test_get_filter_options_appends_material_reason_option(monkeypatch):
    """The synthetic material-scrap reason appears when material scrap exists in range."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    monkeypatch.setattr(svc, "get_workcenter_groups", lambda: [])

    def _fake_read_sql_df(sql, _params=None):
        # Dispatch on the SELECT alias to fake each lookup query separately;
        # HAS_MATERIAL = 1 signals material scrap present in the window.
        if "AS REASON" in sql:
            return pd.DataFrame([{"REASON": "001_TEST"}])
        if "AS PACKAGE" in sql:
            return pd.DataFrame([{"PACKAGE": "PKG-A"}])
        if "AS HAS_MATERIAL" in sql:
            return pd.DataFrame([{"HAS_MATERIAL": 1}])
        return pd.DataFrame()

    monkeypatch.setattr(svc, "read_sql_df", _fake_read_sql_df)
    result = svc.get_filter_options(start_date="2026-02-01", end_date="2026-02-07")
    assert svc.MATERIAL_REASON_OPTION in result["reasons"]
def test_build_where_clause_with_material_reason_adds_objecttype_condition(monkeypatch):
    """Selecting the synthetic material reason filters on SCRAP_OBJECTTYPE."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    clause, _bind_params, clause_meta = svc._build_where_clause(reasons=[svc.MATERIAL_REASON_OPTION])
    assert "UPPER(NVL(TRIM(b.SCRAP_OBJECTTYPE), '-')) = 'MATERIAL'" in clause
    assert clause_meta["material_reason_selected"] is True
def test_build_where_clause_exclude_material_scrap_adds_not_material_condition(monkeypatch):
    """exclude_material_scrap adds a negated SCRAP_OBJECTTYPE condition."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    clause, _bind_params, clause_meta = svc._build_where_clause(exclude_material_scrap=True)
    assert "UPPER(NVL(TRIM(b.SCRAP_OBJECTTYPE), '-')) <> 'MATERIAL'" in clause
    assert clause_meta["exclude_material_scrap"] is True
    assert clause_meta["material_exclusion_applied"] is True
def test_sql_template_replacement_does_not_introduce_fake_bind_placeholders():
    """Template markers are substituted and never survive as pseudo bind variables."""
    rendered = svc._prepare_sql(
        "summary",
        where_clause="WHERE 1=1",
        bucket_expr="TRUNC(b.TXN_DAY)",
        metric_column="b.REJECT_TOTAL_QTY",
    )
    for bogus_bind in (":BASE", ":WHERE", ":BUCKET", ":METRIC"):
        assert bogus_bind not in rendered
def test_base_with_cte_sql_flattens_nested_with(monkeypatch):
    """Wrapping a CTE-bearing base query must not produce nested WITH clauses."""
    # Only the "performance_daily" template carries the CTE fixture; any
    # other template name resolves to an empty string.
    monkeypatch.setattr(
        svc,
        "_load_sql",
        lambda name: (
            "-- comment line\n"
            "WITH c1 AS (SELECT 1 AS X FROM DUAL),\n"
            "c2 AS (SELECT X FROM c1)\n"
            "SELECT X FROM c2"
        )
        if name == "performance_daily"
        else "",
    )
    rendered = svc._base_with_cte_sql("base")
    # Existing CTEs are hoisted in front and the final SELECT becomes the
    # "base" CTE body — a nested "WITH base AS (WITH ..." is invalid SQL.
    assert rendered.startswith("WITH c1 AS")
    assert "base AS (\nSELECT X FROM c2\n)" in rendered
    assert "WITH base AS (\nWITH c1" not in rendered
def test_query_trend_invalid_granularity_raises():
    """An unsupported granularity is rejected with a ValueError."""
    query_args = dict(start_date="2026-02-01", end_date="2026-02-07", granularity="hour")
    with pytest.raises(ValueError, match="Invalid granularity"):
        svc.query_trend(**query_args)
def test_query_reason_pareto_top80_scope(monkeypatch):
    """top80 scope truncates the Pareto list at the 80% cumulative threshold."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    monkeypatch.setattr(
        svc,
        "read_sql_df",
        lambda _sql, _params=None: pd.DataFrame(
            [
                {"REASON": "R1", "CATEGORY": "C1", "METRIC_VALUE": 50, "MOVEIN_QTY": 100, "REJECT_TOTAL_QTY": 50, "DEFECT_QTY": 0, "AFFECTED_LOT_COUNT": 10, "PCT": 50, "CUM_PCT": 50},
                {"REASON": "R2", "CATEGORY": "C1", "METRIC_VALUE": 29, "MOVEIN_QTY": 100, "REJECT_TOTAL_QTY": 29, "DEFECT_QTY": 0, "AFFECTED_LOT_COUNT": 8, "PCT": 29, "CUM_PCT": 79},
                {"REASON": "R3", "CATEGORY": "C2", "METRIC_VALUE": 13, "MOVEIN_QTY": 100, "REJECT_TOTAL_QTY": 13, "DEFECT_QTY": 0, "AFFECTED_LOT_COUNT": 6, "PCT": 13, "CUM_PCT": 92},
                {"REASON": "R4", "CATEGORY": "C3", "METRIC_VALUE": 8, "MOVEIN_QTY": 100, "REJECT_TOTAL_QTY": 8, "DEFECT_QTY": 0, "AFFECTED_LOT_COUNT": 5, "PCT": 8, "CUM_PCT": 100},
            ]
        ),
    )
    top80 = svc.query_reason_pareto(
        start_date="2026-02-01",
        end_date="2026-02-07",
        metric_mode="reject_total",
        pareto_scope="top80",
    )
    # R1 (cum 50%) and R2 (cum 79%) are kept; R3 (cum 92%) falls outside.
    assert len(top80["items"]) == 2
    assert top80["items"][-1]["reason"] == "R2"
    # top80 rows omit the category field.
    assert "category" not in top80["items"][0]
    all_items = svc.query_reason_pareto(
        start_date="2026-02-01",
        end_date="2026-02-07",
        metric_mode="reject_total",
        pareto_scope="all",
    )
    assert len(all_items["items"]) == 4
def test_query_list_pagination_and_caps(monkeypatch):
    """per_page is capped at 200 and the offset is derived from the capped size."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    captured = {}

    def _fake_read_sql_df(_sql, params=None):
        # Record bind params to verify limit/offset and the package filter value.
        captured["params"] = dict(params or {})
        return pd.DataFrame(
            [
                {
                    "TXN_DAY": "2026-02-03",
                    "TXN_MONTH": "2026-02",
                    "WORKCENTER_GROUP": "WB",
                    "WORKCENTERNAME": "WB01",
                    "SPECNAME": "S1",
                    "PRODUCTLINENAME": "P1",
                    "PJ_TYPE": "TYPE1",
                    "LOSSREASONNAME": "R1",
                    "LOSSREASON_CODE": "001",
                    "REJECTCATEGORYNAME": "CAT",
                    "MOVEIN_QTY": 100,
                    "REJECT_QTY": 3,
                    "STANDBY_QTY": 1,
                    "QTYTOPROCESS_QTY": 1,
                    "INPROCESS_QTY": 1,
                    "PROCESSED_QTY": 1,
                    "REJECT_TOTAL_QTY": 7,
                    "DEFECT_QTY": 2,
                    "REJECT_RATE_PCT": 7,
                    "DEFECT_RATE_PCT": 2,
                    "REJECT_SHARE_PCT": 77.777,
                    "AFFECTED_LOT_COUNT": 3,
                    "AFFECTED_WORKORDER_COUNT": 2,
                    "TOTAL_COUNT": 12,
                }
            ]
        )

    monkeypatch.setattr(svc, "read_sql_df", _fake_read_sql_df)
    result = svc.query_list(
        start_date="2026-02-01",
        end_date="2026-02-07",
        page=2,
        per_page=500,
        packages=["PKG1"],
    )
    assert result["pagination"]["page"] == 2
    # Requested 500 rows per page, but the service clamps to its 200-row cap.
    assert result["pagination"]["perPage"] == 200
    assert result["pagination"]["total"] == 12
    assert result["pagination"]["totalPages"] == 1
    # Page 2 with a capped per_page of 200 starts at offset 200.
    assert captured["params"]["offset"] == 200
    assert captured["params"]["limit"] == 200
    assert "PKG1" in captured["params"].values()
def test_export_csv_contains_semantic_headers(monkeypatch):
    """CSV export streams chunks containing the semantic metric headers and row data."""
    monkeypatch.setattr(svc, "get_excluded_reasons", lambda force_refresh=False: set())
    monkeypatch.setattr(
        svc,
        "read_sql_df",
        lambda _sql, _params=None: pd.DataFrame(
            [
                {
                    "TXN_DAY": "2026-02-03",
                    "TXN_MONTH": "2026-02",
                    "WORKCENTER_GROUP": "WB",
                    "WORKCENTERNAME": "WB01",
                    "SPECNAME": "S1",
                    "PRODUCTLINENAME": "P1",
                    "PJ_TYPE": "TYPE1",
                    "LOSSREASONNAME": "R1",
                    "LOSSREASON_CODE": "001",
                    "REJECTCATEGORYNAME": "CAT",
                    "MOVEIN_QTY": 100,
                    "REJECT_QTY": 3,
                    "STANDBY_QTY": 1,
                    "QTYTOPROCESS_QTY": 1,
                    "INPROCESS_QTY": 1,
                    "PROCESSED_QTY": 1,
                    "REJECT_TOTAL_QTY": 7,
                    "DEFECT_QTY": 2,
                    "REJECT_RATE_PCT": 7,
                    "DEFECT_RATE_PCT": 2,
                    "REJECT_SHARE_PCT": 77.777,
                    "AFFECTED_LOT_COUNT": 3,
                    "AFFECTED_WORKORDER_COUNT": 2,
                }
            ]
        ),
    )
    # export_csv is a generator; materialize it to inspect the full payload.
    chunks = list(
        svc.export_csv(
            start_date="2026-02-01",
            end_date="2026-02-07",
        )
    )
    payload = "".join(chunks)
    assert "REJECT_TOTAL_QTY" in payload
    assert "DEFECT_QTY" in payload
    assert "2026-02-03" in payload

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
"""Governance coverage tests for reject-history shell integration."""
from __future__ import annotations
import json
from pathlib import Path
# Repository root — presumably this test file lives one directory below it (TODO confirm).
ROOT = Path(__file__).resolve().parents[1]
# Portal-shell registries that must declare the reject-history page.
ROUTE_CONTRACTS_FILE = ROOT / 'frontend' / 'src' / 'portal-shell' / 'routeContracts.js'
NATIVE_REGISTRY_FILE = ROOT / 'frontend' / 'src' / 'portal-shell' / 'nativeModuleRegistry.js'
# Governance manifest listing each page's drawer placement and ordering.
PAGE_STATUS_FILE = ROOT / 'data' / 'page_status.json'
def test_reject_history_route_contract_entry_exists():
    """routeContracts.js must register /reject-history with its id and title."""
    contract_source = ROUTE_CONTRACTS_FILE.read_text(encoding='utf-8')
    required_fragments = (
        "'/reject-history'",
        "routeId: 'reject-history'",
        "title: '報廢歷史查詢'",
    )
    for fragment in required_fragments:
        assert fragment in contract_source
def test_reject_history_native_loader_entry_exists():
    """nativeModuleRegistry.js must lazily import the reject-history Vue app."""
    registry_source = NATIVE_REGISTRY_FILE.read_text(encoding='utf-8')
    for fragment in ("'/reject-history'", "import('../reject-history/App.vue')"):
        assert fragment in registry_source
def test_reject_history_page_status_entry_exists():
    """page_status.json must track /reject-history with a drawer id and integer order."""
    payload = json.loads(PAGE_STATUS_FILE.read_text(encoding='utf-8'))
    matching = [
        item for item in payload.get('pages', [])
        if item.get('route') == '/reject-history'
    ]
    assert matching
    entry = matching[0]
    assert entry.get('drawer_id')
    assert isinstance(entry.get('order'), int)

View File

@@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
"""Tests for scrap_reason_exclusion_cache service."""
from __future__ import annotations
import json
from unittest.mock import MagicMock
import pandas as pd
from mes_dashboard.services import scrap_reason_exclusion_cache as cache
def _reset_cache_state():
    """Return the module-level cache to its pristine, unloaded state."""
    with cache._CACHE_LOCK:
        cache._CACHE.update({
            "reasons": set(),
            "updated_at": None,
            "loaded": False,
            "source": None,
        })
def test_refresh_cache_loads_enabled_reason_codes(monkeypatch):
    """A successful refresh loads, trims, and upper-cases the reason codes."""
    _reset_cache_state()
    stubs = (
        ("try_acquire_lock", lambda *_a, **_k: True),
        ("release_lock", lambda *_a, **_k: None),
        ("get_redis_client", lambda: None),
        ("read_sql_df", lambda _sql: pd.DataFrame({"REASON_NAME": ["358", " 160 ", "bonus_adjust"]})),
    )
    for attr_name, replacement in stubs:
        monkeypatch.setattr(cache, attr_name, replacement)
    assert cache.refresh_cache(force=True) is True
    # " 160 " is trimmed and "bonus_adjust" is normalized to upper case.
    assert cache.get_excluded_reasons() == {"358", "160", "BONUS_ADJUST"}
def test_refresh_cache_falls_back_to_redis_when_oracle_fails(monkeypatch):
    """When Oracle errors out, the refresh falls back to the Redis snapshot."""
    _reset_cache_state()
    fake_redis = MagicMock()
    # First get() -> persisted reason list, second get() -> snapshot timestamp.
    fake_redis.get.side_effect = [json.dumps(["A01", "b02"]), "2026-02-13T00:00:00"]

    def _oracle_down(_sql):
        raise RuntimeError("oracle unavailable")

    monkeypatch.setattr(cache, "try_acquire_lock", lambda *_a, **_k: True)
    monkeypatch.setattr(cache, "release_lock", lambda *_a, **_k: None)
    monkeypatch.setattr(cache, "get_redis_client", lambda: fake_redis)
    monkeypatch.setattr(cache, "read_sql_df", _oracle_down)
    assert cache.refresh_cache(force=True) is True
    # Codes are normalized to upper case on load.
    assert cache.get_excluded_reasons() == {"A01", "B02"}
def test_get_excluded_reasons_uses_redis_for_lazy_bootstrap(monkeypatch):
    """A cold cache bootstraps its reason set from Redis on first read."""
    _reset_cache_state()
    fake_redis = MagicMock()
    fake_redis.get.side_effect = [json.dumps(["X1", "x2"]), "2026-02-13T12:00:00"]
    monkeypatch.setattr(cache, "get_redis_client", lambda: fake_redis)
    # Neutralize refresh_cache so only the Redis bootstrap path can supply data.
    monkeypatch.setattr(cache, "refresh_cache", lambda force=False: True)
    assert cache.get_excluded_reasons(force_refresh=False) == {"X1", "X2"}

View File

@@ -373,6 +373,7 @@ class TestViteModuleIntegration(unittest.TestCase):
('/tables', 'tables.js'),
('/resource', 'resource-status.js'),
('/resource-history', 'resource-history.js'),
('/reject-history', 'reject-history.js'),
('/job-query', 'job-query.js'),
('/excel-query', 'excel-query.js'),
('/query-tool', 'query-tool.js'),
@@ -396,13 +397,18 @@ class TestViteModuleIntegration(unittest.TestCase):
response = self.client.get(endpoint, follow_redirects=False)
if endpoint in canonical_routes:
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith(canonical_routes[endpoint]))
follow = self.client.get(response.location)
self.assertEqual(follow.status_code, 200)
html = follow.data.decode('utf-8')
self.assertIn('/static/dist/portal-shell.js', html)
self.assertIn('type="module"', html)
if response.status_code == 302:
self.assertTrue(response.location.endswith(canonical_routes[endpoint]))
follow = self.client.get(response.location)
self.assertEqual(follow.status_code, 200)
html = follow.data.decode('utf-8')
self.assertIn('/static/dist/portal-shell.js', html)
self.assertIn('type="module"', html)
else:
self.assertEqual(response.status_code, 200)
html = response.data.decode('utf-8')
self.assertIn(f'/static/dist/{asset}', html)
self.assertIn('type="module"', html)
else:
self.assertEqual(response.status_code, 200)
html = response.data.decode('utf-8')