Files
DashBoard/tests/test_mid_section_defect_engine.py
egg a275c30c0e feat(reject-history): fix silent data loss by propagating partial failure metadata to frontend
Chunk failures in BatchQueryEngine were silently discarded — `has_partial_failure` was tracked
in Redis but never surfaced to the API response or frontend. Users could see incomplete data
without any warning. This commit closes the gap end-to-end:

Backend:
- Track failed chunk time ranges (`failed_ranges`) in batch engine progress metadata
- Add single retry for transient Oracle errors (timeout, connection) in `_execute_single_chunk`
- Read `get_batch_progress()` after merge but before `redis_clear_batch()` cleanup
- Inject `has_partial_failure`, `failed_chunk_count`, `failed_ranges` into API response meta
- Persist partial failure flag to independent Redis key with TTL aligned to data storage layer
- Add shared container-resolution policy module with wildcard/expansion guardrails
- Refactor reason filter from single-value to multi-select (`reason` → `reasons`)

Frontend:
- Add client-side date range validation (730-day limit) before API submission
- Display amber warning banner on partial failure with specific failed date ranges
- Support generic fallback message for container-mode queries without date ranges
- Update FilterPanel to support multi-select reason chips

Specs & tests:
- Create batch-query-resilience spec; update reject-history-api and reject-history-page specs
- Add 7 new tests for retry, memory guard, failed ranges, partial failure propagation, TTL
- Cross-service regression verified (hold, resource, job, msd — 411 tests pass)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-03 14:00:07 +08:00

95 lines
3.3 KiB
Python

# -*- coding: utf-8 -*-
"""Unit tests for mid_section_defect_service — engine integration (task 8.4)."""
from __future__ import annotations
import pandas as pd
from mes_dashboard.services import mid_section_defect_service as msd_svc
class TestDetectionEngineDecomposition:
    """8.4 — large date range + high-volume station → engine decomposition.

    Verifies that ``_fetch_station_detection_data`` routes long date ranges
    through the batch query engine (chunked ``execute_plan`` + ``merge_chunks``)
    while short ranges stay on the direct ``read_sql_df`` path.
    """

    def test_long_range_triggers_engine(self, monkeypatch):
        """90-day range → engine decomposition for detection query."""
        import mes_dashboard.services.batch_query_engine as engine_mod

        engine_calls = {"execute": 0, "merge": 0}

        def fake_execute_plan(chunks, query_fn, **kwargs):
            engine_calls["execute"] += 1
            # 90 days at a 31-day chunk size → 3 chunks.
            assert len(chunks) == 3
            assert kwargs.get("cache_prefix") == "msd_detect"
            return kwargs.get("query_hash", "fake_hash")

        result_df = pd.DataFrame({
            "CONTAINERID": ["C1", "C2"],
            "WORKCENTERNAME": ["TEST-WC-A", "TEST-WC-B"],
        })

        def fake_merge_chunks(prefix, qhash, **kwargs):
            engine_calls["merge"] += 1
            return result_df

        monkeypatch.setattr(engine_mod, "execute_plan", fake_execute_plan)
        monkeypatch.setattr(engine_mod, "merge_chunks", fake_merge_chunks)
        # Disable the service-level cache so the fetch always reaches the engine.
        monkeypatch.setattr(
            "mes_dashboard.services.mid_section_defect_service.cache_get",
            lambda key: None,
        )
        monkeypatch.setattr(
            "mes_dashboard.services.mid_section_defect_service.cache_set",
            lambda key, val, ttl=None: None,
        )
        # Stub the SQL loader so no template files are read from disk.
        monkeypatch.setattr(
            "mes_dashboard.services.mid_section_defect_service.SQLLoader",
            type("FakeLoader", (), {
                "load_with_params": staticmethod(lambda name, **kw: "SELECT 1 FROM dual"),
            }),
        )

        df = msd_svc._fetch_station_detection_data(
            start_date="2025-01-01",
            end_date="2025-03-31",
            station="測試",
        )

        assert engine_calls["execute"] == 1
        assert engine_calls["merge"] == 1
        assert df is not None
        assert len(df) == 2

    def test_short_range_skips_engine(self, monkeypatch):
        """Short (5-day) range → direct path, no engine."""
        import mes_dashboard.services.batch_query_engine as engine_mod

        engine_calls = {"execute": 0}

        # BUG FIX: the original test never patched execute_plan, so nothing
        # could ever increment this counter and the final assertion was
        # vacuous. Patch it with a spy that records (and loudly rejects) any
        # accidental engine usage on the short-range path.
        def unexpected_execute_plan(*args, **kwargs):
            engine_calls["execute"] += 1
            raise AssertionError("engine must not be used for a short date range")

        monkeypatch.setattr(engine_mod, "execute_plan", unexpected_execute_plan)
        # Disable the service-level cache so the fetch always hits the DB path.
        monkeypatch.setattr(
            "mes_dashboard.services.mid_section_defect_service.cache_get",
            lambda key: None,
        )
        monkeypatch.setattr(
            "mes_dashboard.services.mid_section_defect_service.cache_set",
            lambda key, val, ttl=None: None,
        )
        # Stub the SQL loader so no template files are read from disk.
        monkeypatch.setattr(
            "mes_dashboard.services.mid_section_defect_service.SQLLoader",
            type("FakeLoader", (), {
                "load_with_params": staticmethod(lambda name, **kw: "SELECT 1 FROM dual"),
            }),
        )
        # Direct path executes a single SQL query via read_sql_df.
        monkeypatch.setattr(
            "mes_dashboard.services.mid_section_defect_service.read_sql_df",
            lambda sql, params: pd.DataFrame({"CONTAINERID": ["C1"]}),
        )

        df = msd_svc._fetch_station_detection_data(
            start_date="2025-06-01",
            end_date="2025-06-05",
            station="測試",
        )

        assert engine_calls["execute"] == 0  # Engine NOT used
        assert df is not None
        assert len(df) == 1