feat(wip): preserve filters between Overview and Detail with thundering-herd fix
URL is now single source of truth for filter state (workorder, lotid, package, type, status) across WIP Overview and Detail pages. Drill-down carries all filters + status; back button dynamically reflects Detail changes. Backend Detail API now supports pj_type filter parameter. Harden concurrency: add pagehide abort for MPA navigation, double-check locking on Redis JSON parse and snapshot build to prevent thread pool saturation during rapid page switching. Fix watchdog setsid and PID discovery. Fix test_realtime_equipment_cache RUNCARDLOTID field mismatch. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
168
tests/e2e/test_wip_hold_pages_e2e.py
Normal file
168
tests/e2e/test_wip_hold_pages_e2e.py
Normal file
@@ -0,0 +1,168 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""E2E coverage for WIP Overview / WIP Detail / Hold Detail pages."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from urllib.parse import parse_qs, quote, urlparse
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
def _pick_workcenter(app_server: str) -> str:
    """Resolve a workcenter that actually exists on the target server.

    Queries the workcenter metadata endpoint so the E2E tests exercise real
    data; falls back to "TMTT" on any network/parse problem or empty result.
    """
    fallback = "TMTT"
    try:
        resp = requests.get(f"{app_server}/api/wip/meta/workcenters", timeout=10)
        payload = resp.json() if resp.ok else {}
        candidates = payload.get("data") or []
        if candidates:
            return candidates[0].get("name") or fallback
    except Exception:
        # Best-effort discovery: any failure just means we use the fallback.
        pass
    return fallback
|
||||
|
||||
|
||||
def _pick_hold_reason(app_server: str) -> str:
    """Resolve a hold reason that actually exists on the target server.

    Reads the hold overview endpoint so tests target live data; falls back
    to "YieldLimit" on any network/parse problem or empty result.
    """
    fallback = "YieldLimit"
    try:
        resp = requests.get(f"{app_server}/api/wip/overview/hold", timeout=10)
        payload = resp.json() if resp.ok else {}
        entries = (payload.get("data") or {}).get("items") or []
        if entries:
            return entries[0].get("reason") or fallback
    except Exception:
        # Best-effort discovery: any failure just means we use the fallback.
        pass
    return fallback
|
||||
|
||||
|
||||
def _get_with_retry(url: str, attempts: int = 3, timeout: float = 10.0):
    """Issue a GET with retries to smooth over transient test flakiness.

    Redirects are NOT followed so callers can assert directly on 3xx
    responses. Re-raises the last transport error if every attempt fails.
    """
    last_error = None
    for _attempt in range(max(attempts, 1)):
        try:
            return requests.get(url, timeout=timeout, allow_redirects=False)
        except requests.RequestException as exc:
            last_error = exc
            time.sleep(0.5)  # brief back-off before retrying
    if last_error is not None:
        raise last_error
    # Defensive: the loop always runs at least once, so this is unreachable
    # unless the retry logic above is changed.
    raise RuntimeError("request retry exhausted without exception")
|
||||
|
||||
|
||||
def _wait_for_response_url_tokens(page: Page, tokens: list[str], timeout_seconds: float = 30.0):
    """Wait until some network response URL contains all *tokens*.

    Args:
        page: Playwright page whose network traffic is observed.
        tokens: substrings that must ALL appear in a response URL.
        timeout_seconds: polling deadline.

    Returns:
        The first matching response, or None if the deadline passes.
    """
    matched = []

    def handle_response(resp):
        if all(token in resp.url for token in tokens):
            matched.append(resp)

    page.on("response", handle_response)
    try:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline and not matched:
            page.wait_for_timeout(200)
    finally:
        # Fix: detach the handler so it does not keep firing (and keep
        # `matched` alive) for the remainder of the test after this helper
        # returns — previously the listener leaked on the shared page.
        page.remove_listener("response", handle_response)
    return matched[0] if matched else None
|
||||
|
||||
|
||||
def _wait_for_response(page: Page, predicate, timeout_seconds: float = 30.0):
    """Wait until a network response satisfies *predicate*.

    Args:
        page: Playwright page whose network traffic is observed.
        predicate: callable(response) -> bool; exceptions it raises on
            unrelated responses are deliberately swallowed.
        timeout_seconds: polling deadline.

    Returns:
        The first matching response, or None if the deadline passes.
    """
    matched = []

    def handle_response(resp):
        try:
            if predicate(resp):
                matched.append(resp)
        except Exception:
            # Predicates may choke on responses they were never meant to
            # inspect; ignore those rather than failing the wait.
            return

    page.on("response", handle_response)
    try:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline and not matched:
            page.wait_for_timeout(200)
    finally:
        # Fix: detach the handler so it stops firing after this helper
        # returns — previously the listener leaked on the shared page.
        page.remove_listener("response", handle_response)
    return matched[0] if matched else None
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestWipAndHoldPagesE2E:
    """E2E tests for WIP/Hold page URL + API behavior."""

    def test_wip_overview_restores_status_from_url(self, page: Page, app_server: str):
        # Landing on the overview with filters in the URL must replay them
        # into the matrix API call (status is upper-cased by the frontend).
        overview_url = f"{app_server}/wip-overview?type=PJA3460&status=queue"
        page.goto(overview_url, wait_until="commit", timeout=60000)
        matrix_response = _wait_for_response_url_tokens(
            page,
            ["/api/wip/overview/matrix", "type=PJA3460", "status=QUEUE"],
            timeout_seconds=30.0,
        )
        assert matrix_response is not None, "Did not observe expected matrix request with URL filters"
        assert matrix_response.ok
        expect(page.locator("body")).to_be_visible()

    def test_wip_detail_reads_status_and_back_link_keeps_filters(self, page: Page, app_server: str):
        workcenter = _pick_workcenter(app_server)
        detail_url = f"{app_server}/wip-detail?workcenter={quote(workcenter)}&type=PJA3460&status=queue"
        page.goto(detail_url, wait_until="commit", timeout=60000)

        def is_filtered_detail_call(resp):
            # The backend accepts the type filter as either `type` or `pj_type`.
            if "/api/wip/detail/" not in resp.url:
                return False
            query = parse_qs(urlparse(resp.url).query)
            type_ok = (
                query.get("type", [None])[0] == "PJA3460"
                or query.get("pj_type", [None])[0] == "PJA3460"
            )
            status_ok = query.get("status", [None])[0] in {"QUEUE", "queue"}
            return type_ok and status_ok

        detail_response = _wait_for_response(page, is_filtered_detail_call, timeout_seconds=30.0)
        assert detail_response is not None, "Did not observe expected detail request with URL filters"
        assert detail_response.ok

        # The back button must carry the same filters back to the overview.
        back_href = page.locator("a.btn-back").get_attribute("href") or ""
        back_parts = urlparse(back_href)
        back_params = parse_qs(back_parts.query)
        assert back_parts.path == "/wip-overview"
        assert back_params.get("type", [None])[0] == "PJA3460"
        assert back_params.get("status", [None])[0] in {"queue", "QUEUE"}

    def test_hold_detail_without_reason_redirects_to_overview(self, page: Page, app_server: str):
        # No `reason` query parameter -> server-side redirect to the overview.
        response = _get_with_retry(f"{app_server}/hold-detail", attempts=3, timeout=10.0)
        assert response.status_code == 302
        assert response.headers.get("Location") == "/wip-overview"

    def test_hold_detail_calls_summary_distribution_and_lots(self, page: Page, app_server: str):
        reason = _pick_hold_reason(app_server)
        seen = set()

        def handle_response(resp):
            parts = urlparse(resp.url)
            if parse_qs(parts.query).get("reason", [None])[0] != reason:
                return
            for marker in ("summary", "distribution", "lots"):
                if parts.path.endswith(f"/api/wip/hold-detail/{marker}"):
                    seen.add(marker)

        page.on("response", handle_response)
        page.goto(
            f"{app_server}/hold-detail?reason={quote(reason)}",
            wait_until="commit",
            timeout=60000,
        )

        # Poll until all three hold-detail APIs have been observed.
        deadline = time.time() + 30
        while time.time() < deadline and len(seen) < 3:
            page.wait_for_timeout(200)

        assert seen == {"summary", "distribution", "lots"}
|
||||
@@ -11,19 +11,20 @@ Run with: pytest tests/stress/test_api_load.py -v -s
|
||||
|
||||
import pytest
|
||||
import time
|
||||
import requests
|
||||
import concurrent.futures
|
||||
from typing import List, Tuple
|
||||
import requests
|
||||
import concurrent.futures
|
||||
from typing import List, Tuple
|
||||
from urllib.parse import quote
|
||||
|
||||
# Import from local conftest via pytest fixtures
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
@pytest.mark.load
|
||||
class TestAPILoadConcurrent:
|
||||
"""Load tests with concurrent requests."""
|
||||
class TestAPILoadConcurrent:
|
||||
"""Load tests with concurrent requests."""
|
||||
|
||||
def _make_request(self, url: str, timeout: float) -> Tuple[bool, float, str]:
|
||||
def _make_request(self, url: str, timeout: float) -> Tuple[bool, float, str]:
|
||||
"""Make a single request and return (success, duration, error)."""
|
||||
start = time.time()
|
||||
try:
|
||||
@@ -41,9 +42,37 @@ class TestAPILoadConcurrent:
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
duration = time.time() - start
|
||||
return (False, duration, f"Connection error: {str(e)[:50]}")
|
||||
except Exception as e:
|
||||
duration = time.time() - start
|
||||
return (False, duration, f"Error: {str(e)[:50]}")
|
||||
except Exception as e:
|
||||
duration = time.time() - start
|
||||
return (False, duration, f"Error: {str(e)[:50]}")
|
||||
|
||||
def _discover_workcenter(self, base_url: str, timeout: float) -> str:
    """Look up one live workcenter name for detail load tests.

    Falls back to "TMTT" on non-200 responses, empty payloads, or any
    exception — load tests should run even against a sparse environment.
    """
    default = "TMTT"
    try:
        resp = requests.get(f"{base_url}/api/wip/meta/workcenters", timeout=timeout)
        if resp.status_code == 200:
            entries = resp.json().get("data") or []
            if entries:
                return str(entries[0].get("name") or default)
    except Exception:
        # Discovery is best-effort only.
        pass
    return default
|
||||
|
||||
def _discover_hold_reason(self, base_url: str, timeout: float) -> str:
    """Look up one live hold reason for hold-detail load tests.

    Falls back to "YieldLimit" on non-200 responses, empty payloads, or any
    exception — load tests should run even against a sparse environment.
    """
    default = "YieldLimit"
    try:
        resp = requests.get(f"{base_url}/api/wip/overview/hold", timeout=timeout)
        if resp.status_code == 200:
            entries = (resp.json().get("data") or {}).get("items") or []
            if entries:
                return str(entries[0].get("reason") or default)
    except Exception:
        # Discovery is best-effort only.
        pass
    return default
|
||||
|
||||
def test_wip_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
"""Test WIP summary API under concurrent load."""
|
||||
@@ -77,7 +106,7 @@ class TestAPILoadConcurrent:
|
||||
assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%"
|
||||
assert result.avg_response_time < 10.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 10s"
|
||||
|
||||
def test_wip_matrix_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
def test_wip_matrix_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
"""Test WIP matrix API under concurrent load."""
|
||||
result = stress_result("WIP Matrix Concurrent Load")
|
||||
url = f"{base_url}/api/wip/overview/matrix"
|
||||
@@ -105,8 +134,70 @@ class TestAPILoadConcurrent:
|
||||
|
||||
print(result.report())
|
||||
|
||||
assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%"
|
||||
assert result.avg_response_time < 15.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 15s"
|
||||
assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%"
|
||||
assert result.avg_response_time < 15.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 15s"
|
||||
|
||||
def test_wip_detail_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
    """Hammer the WIP detail API concurrently and check success/latency SLOs."""
    result = stress_result("WIP Detail Concurrent Load")
    users = stress_config['concurrent_users']
    per_user = stress_config['requests_per_user']
    timeout = stress_config['timeout']

    # Target a real workcenter so the endpoint returns representative data.
    workcenter = self._discover_workcenter(base_url, timeout)
    url = f"{base_url}/api/wip/detail/{quote(workcenter)}?page=1&page_size=100"

    started = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=users) as pool:
        pending = [
            pool.submit(self._make_request, url, timeout)
            for _ in range(users * per_user)
        ]
        for done in concurrent.futures.as_completed(pending):
            ok, elapsed, error = done.result()
            if ok:
                result.add_success(elapsed)
            else:
                result.add_failure(error, elapsed)

    result.total_duration = time.time() - started
    print(result.report())

    assert result.success_rate >= 85.0, f"Success rate {result.success_rate:.1f}% is below 85%"
    assert result.avg_response_time < 20.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 20s"
|
||||
|
||||
def test_hold_detail_lots_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
    """Hammer the hold-detail lots API concurrently and check success/latency SLOs."""
    result = stress_result("Hold Detail Lots Concurrent Load")
    users = stress_config['concurrent_users']
    per_user = stress_config['requests_per_user']
    timeout = stress_config['timeout']

    # Target a real hold reason so the endpoint returns representative data.
    reason = self._discover_hold_reason(base_url, timeout)
    url = f"{base_url}/api/wip/hold-detail/lots?reason={quote(reason)}&page=1&per_page=50"

    started = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=users) as pool:
        pending = [
            pool.submit(self._make_request, url, timeout)
            for _ in range(users * per_user)
        ]
        for done in concurrent.futures.as_completed(pending):
            ok, elapsed, error = done.result()
            if ok:
                result.add_success(elapsed)
            else:
                result.add_failure(error, elapsed)

    result.total_duration = time.time() - started
    print(result.report())

    assert result.success_rate >= 85.0, f"Success rate {result.success_rate:.1f}% is below 85%"
    assert result.avg_response_time < 20.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 20s"
|
||||
|
||||
def test_resource_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
"""Test resource status summary API under concurrent load."""
|
||||
@@ -234,12 +325,37 @@ class TestAPILoadRampUp:
|
||||
assert result.success_rate >= 80.0, f"Success rate {result.success_rate:.1f}% is below 80%"
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
class TestAPITimeoutHandling:
|
||||
"""Tests for timeout handling under load."""
|
||||
|
||||
def test_connection_recovery_after_timeout(self, base_url: str, stress_result):
|
||||
"""Test that API recovers after timeout scenarios."""
|
||||
@pytest.mark.stress
|
||||
class TestAPITimeoutHandling:
|
||||
"""Tests for timeout handling under load."""
|
||||
|
||||
@staticmethod
def _make_request(url: str, timeout: float) -> Tuple[bool, float, str]:
    """Issue one GET and classify the outcome.

    Returns (success, duration_seconds, error_message). A 200 JSON response
    additionally requires the body's `success` flag (default True when
    absent) to be truthy; a 200 non-JSON response counts as success.
    """
    start = time.time()
    try:
        response = requests.get(url, timeout=timeout)
        elapsed = time.time() - start
        if response.status_code != 200:
            return (False, elapsed, f"HTTP {response.status_code}")
        if "application/json" not in response.headers.get("Content-Type", ""):
            return (True, elapsed, "")
        payload = response.json()
        if payload.get("success", True):
            return (True, elapsed, "")
        return (False, elapsed, f"API returned success=false: {payload.get('error', 'unknown')}")
    except requests.exceptions.Timeout:
        return (False, time.time() - start, "Request timeout")
    except requests.exceptions.ConnectionError as exc:
        return (False, time.time() - start, f"Connection error: {str(exc)[:50]}")
    except Exception as exc:
        return (False, time.time() - start, f"Error: {str(exc)[:50]}")
|
||||
|
||||
def test_connection_recovery_after_timeout(self, base_url: str, stress_result):
|
||||
"""Test that API recovers after timeout scenarios."""
|
||||
result = stress_result("Connection Recovery After Timeout")
|
||||
|
||||
# First, make requests with very short timeout to trigger timeouts
|
||||
@@ -276,9 +392,58 @@ class TestAPITimeoutHandling:
|
||||
|
||||
result.total_duration = sum(result.response_times)
|
||||
|
||||
print(result.report())
|
||||
|
||||
assert recovered, "System did not recover after timeout scenarios"
|
||||
print(result.report())
|
||||
|
||||
assert recovered, "System did not recover after timeout scenarios"
|
||||
|
||||
def test_wip_pages_recoverability_after_burst(self, base_url: str, stress_result):
    """After a request burst, health and critical WIP APIs should still respond."""
    result = stress_result("WIP Pages Recoverability After Burst")
    timeout = 30.0
    probe_endpoints = [
        f"{base_url}/api/wip/overview/summary",
        f"{base_url}/api/wip/overview/matrix",
        f"{base_url}/api/wip/overview/hold",
        f"{base_url}/health",
    ]

    # Phase 1: burst every WIP endpoint (health excluded) 40 times each.
    burst_count = 40
    started = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
        pending = [
            pool.submit(self._make_request, endpoint, timeout)
            for _ in range(burst_count)
            for endpoint in probe_endpoints[:-1]
        ]
        for done in concurrent.futures.as_completed(pending):
            ok, elapsed, error = done.result()
            if ok:
                result.add_success(elapsed)
            else:
                result.add_failure(error, elapsed)

    # Phase 2: probe /health repeatedly; 503 with a recognized status body
    # still counts as "responding" (degraded is acceptable post-burst).
    healthy_probes = 0
    for _ in range(5):
        probe_start = time.time()
        try:
            response = requests.get(f"{base_url}/health", timeout=5)
            elapsed = time.time() - probe_start
            if response.status_code in (200, 503):
                payload = response.json()
                if payload.get("status") in {"healthy", "degraded", "unhealthy"}:
                    healthy_probes += 1
                    result.add_success(elapsed)
                    continue
            result.add_failure(f"Unexpected health response: {response.status_code}", elapsed)
        except Exception as exc:
            result.add_failure(str(exc)[:80], time.time() - probe_start)
        time.sleep(0.2)

    result.total_duration = time.time() - started
    print(result.report())
    assert healthy_probes >= 3, f"Health endpoint recoverability too low: {healthy_probes}/5"
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
|
||||
@@ -10,10 +10,12 @@ Tests frontend stability under high-frequency operations:
|
||||
Run with: pytest tests/stress/test_frontend_stress.py -v -s
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import time
|
||||
import re
|
||||
from playwright.sync_api import Page, expect
|
||||
import pytest
|
||||
import time
|
||||
import re
|
||||
import requests
|
||||
from urllib.parse import quote
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
@@ -257,7 +259,7 @@ class TestMesApiStress:
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
class TestPageNavigationStress:
|
||||
class TestPageNavigationStress:
|
||||
"""Stress tests for rapid page navigation."""
|
||||
|
||||
def test_rapid_tab_switching(self, page: Page, app_server: str):
|
||||
@@ -309,7 +311,121 @@ class TestPageNavigationStress:
|
||||
tab = page.locator(f'.tab:has-text("{tab_name}")')
|
||||
expect(tab).to_have_class(re.compile(r'active'))
|
||||
|
||||
print(f"\n All {len(tabs)} tabs clickable and responsive")
|
||||
print(f"\n All {len(tabs)} tabs clickable and responsive")
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
class TestWipHoldPageStress:
|
||||
"""Stress tests focused on WIP Overview / WIP Detail / Hold Detail pages."""
|
||||
|
||||
def _pick_workcenter(self, app_server: str) -> str:
    """Resolve one live workcenter for WIP detail stress tests.

    Falls back to "TMTT" on non-200 responses, empty payloads, or any
    exception so the stress run never aborts on discovery.
    """
    default = "TMTT"
    try:
        resp = requests.get(f"{app_server}/api/wip/meta/workcenters", timeout=10)
        if resp.status_code == 200:
            entries = resp.json().get("data") or []
            if entries:
                return str(entries[0].get("name") or default)
    except Exception:
        # Best-effort discovery only.
        pass
    return default
|
||||
|
||||
def _pick_reason(self, app_server: str) -> str:
    """Resolve one live hold reason for hold-detail stress tests.

    Falls back to "YieldLimit" on non-200 responses, empty payloads, or any
    exception so the stress run never aborts on discovery.
    """
    default = "YieldLimit"
    try:
        resp = requests.get(f"{app_server}/api/wip/overview/hold", timeout=10)
        if resp.status_code == 200:
            entries = (resp.json().get("data") or {}).get("items") or []
            if entries:
                return str(entries[0].get("reason") or default)
    except Exception:
        # Best-effort discovery only.
        pass
    return default
|
||||
|
||||
def test_rapid_navigation_across_wip_and_hold_pages(self, page: Page, app_server: str):
    """Rapid page switching should keep pages responsive and error-free."""
    workcenter = self._pick_workcenter(app_server)
    reason = self._pick_reason(app_server)

    # Cycle overview (plain + filtered), detail, and hold-detail pages.
    urls = [
        f"{app_server}/wip-overview",
        f"{app_server}/wip-overview?type=PJA3460&status=queue",
        f"{app_server}/wip-detail?workcenter={quote(workcenter)}&type=PJA3460&status=queue",
        f"{app_server}/hold-detail?reason={quote(reason)}",
    ]

    js_errors = []
    page.on("pageerror", lambda error: js_errors.append(str(error)))

    started = time.time()
    for step in range(16):
        target = urls[step % len(urls)]
        page.goto(target, wait_until='domcontentloaded', timeout=60000)
        expect(page.locator("body")).to_be_visible()
        page.wait_for_timeout(150)

    elapsed = time.time() - started
    print(f"\n Rapid navigation across 3 pages completed in {elapsed:.2f}s")

    assert len(js_errors) == 0, f"JavaScript errors detected: {js_errors[:3]}"
|
||||
|
||||
def test_wip_and_hold_api_burst_from_browser(self, page: Page, app_server: str):
    """Browser-side API burst should still return mostly successful responses."""
    load_page_with_js(page, f"{app_server}/wip-overview")

    # The burst runs entirely in the browser so requests share the page's
    # connection pool, mimicking real dashboard traffic. Any status < 500
    # counts as "ok" — 4xx here means the server is still serving.
    burst_stats = page.evaluate("""
        async () => {
            const safeJson = async (resp) => {
                try {
                    return await resp.json();
                } catch (_) {
                    return null;
                }
            };

            const wcResp = await fetch('/api/wip/meta/workcenters');
            const wcPayload = await safeJson(wcResp) || {};
            const workcenter = (wcPayload.data && wcPayload.data[0] && wcPayload.data[0].name) || 'TMTT';

            const holdResp = await fetch('/api/wip/overview/hold');
            const holdPayload = await safeJson(holdResp) || {};
            const holdItems = (holdPayload.data && holdPayload.data.items) || [];
            const reason = (holdItems[0] && holdItems[0].reason) || 'YieldLimit';

            const endpoints = [
                '/api/wip/overview/summary',
                '/api/wip/overview/matrix',
                '/api/wip/overview/hold',
                `/api/wip/detail/${encodeURIComponent(workcenter)}?page=1&page_size=100`,
                `/api/wip/hold-detail/lots?reason=${encodeURIComponent(reason)}&page=1&per_page=50`,
            ];

            let total = 0;
            let success = 0;
            let failures = 0;

            for (let round = 0; round < 5; round++) {
                const responses = await Promise.all(
                    endpoints.map((endpoint) =>
                        fetch(endpoint)
                            .then((r) => ({ ok: r.status < 500 }))
                            .catch(() => ({ ok: false }))
                    )
                );
                total += responses.length;
                success += responses.filter((r) => r.ok).length;
                failures += responses.filter((r) => !r.ok).length;
            }

            return { total, success, failures };
        }
    """)

    print(f"\n Browser burst total={burst_stats['total']}, success={burst_stats['success']}, failures={burst_stats['failures']}")
    assert burst_stats['success'] >= 20, f"Too many failed API requests: {burst_stats}"
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
|
||||
@@ -4,13 +4,16 @@
|
||||
Tests cache read/write functionality and fallback mechanism.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
import json
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
import json
|
||||
import threading
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
|
||||
class TestGetCachedWipData:
|
||||
class TestGetCachedWipData:
|
||||
"""Test get_cached_wip_data function."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
@@ -78,18 +81,61 @@ class TestGetCachedWipData:
|
||||
assert len(result) == 2
|
||||
assert 'LOTID' in result.columns
|
||||
|
||||
def test_handles_invalid_json(self, reset_redis):
    """get_cached_wip_data should return None when Redis holds broken JSON."""
    import mes_dashboard.core.cache as cache

    redis_stub = MagicMock()
    redis_stub.get.return_value = 'invalid json {'

    with patch.object(cache, 'REDIS_ENABLED', True), \
            patch.object(cache, 'get_redis_client', return_value=redis_stub), \
            patch.object(cache, 'get_key', return_value='mes_wip:data'):
        result = cache.get_cached_wip_data()

    assert result is None
||||
|
||||
def test_concurrent_requests_parse_redis_once(self, reset_redis):
    """Concurrent cache misses should trigger exactly one Redis read + parse.

    Six workers are released simultaneously via an Event; a deliberately
    slow read_json stub widens the race window so a missing lock would show
    up as multiple parses.
    """
    import mes_dashboard.core.cache as cache

    rows = [
        {'LOTID': 'LOT001', 'QTY': 100, 'WORKORDER': 'WO001'},
        {'LOTID': 'LOT002', 'QTY': 200, 'WORKORDER': 'WO002'}
    ]
    redis_stub = MagicMock()
    redis_stub.get.return_value = json.dumps(rows)

    counter_lock = threading.Lock()
    parse_calls = 0

    def slow_read_json(*args, **kwargs):
        # Count every parse, then stall so concurrent callers overlap.
        nonlocal parse_calls
        with counter_lock:
            parse_calls += 1
        time.sleep(0.05)
        return pd.DataFrame(rows)

    gate = threading.Event()

    def call_cache():
        gate.wait(timeout=1)
        return cache.get_cached_wip_data()

    with patch.object(cache, 'REDIS_ENABLED', True), \
            patch.object(cache, 'get_redis_client', return_value=redis_stub), \
            patch.object(cache, 'get_key', return_value='mes_wip:data'), \
            patch.object(cache.pd, 'read_json', side_effect=slow_read_json):
        with ThreadPoolExecutor(max_workers=6) as pool:
            workers = [pool.submit(call_cache) for _ in range(6)]
            gate.set()  # release all workers at once
            outcomes = [worker.result(timeout=3) for worker in workers]

    assert parse_calls == 1
    assert redis_stub.get.call_count == 1
    assert all(outcome is not None for outcome in outcomes)
    assert all(len(outcome) == 2 for outcome in outcomes)
|
||||
|
||||
|
||||
class TestGetCachedSysDate:
|
||||
|
||||
@@ -44,7 +44,7 @@ class TestHoldDetailPageRoute(TestHoldRoutesBase):
|
||||
self.assertIn(b'/static/dist/hold-detail.js', response.data)
|
||||
|
||||
|
||||
class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
|
||||
class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
|
||||
"""Test GET /api/wip/hold-detail/summary endpoint."""
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
|
||||
@@ -78,20 +78,38 @@ class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('reason', data['error'])
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
|
||||
def test_returns_error_on_failure(self, mock_get_summary):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_summary.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('error', data)
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
|
||||
def test_returns_error_on_failure(self, mock_get_summary):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_summary.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('error', data)
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
def test_passes_include_dummy(self, mock_get_summary):
    """The include_dummy query flag must be forwarded to the summary service."""
    # Empty-but-valid summary; the payload content is irrelevant here.
    mock_get_summary.return_value = dict(
        totalLots=0, totalQty=0, avgAge=0, maxAge=0, workcenterCount=0,
    )

    self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit&include_dummy=true')

    mock_get_summary.assert_called_once_with(reason='YieldLimit', include_dummy=True)
|
||||
|
||||
|
||||
class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
|
||||
class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
|
||||
"""Test GET /api/wip/hold-detail/distribution endpoint."""
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
|
||||
@@ -133,19 +151,35 @@ class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertFalse(data['success'])
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
|
||||
def test_returns_error_on_failure(self, mock_get_dist):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_dist.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
|
||||
def test_returns_error_on_failure(self, mock_get_dist):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_dist.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
def test_passes_include_dummy(self, mock_get_dist):
    """The include_dummy query flag ("1" form) must be forwarded to the distribution service."""
    # Empty-but-valid distribution; the payload content is irrelevant here.
    mock_get_dist.return_value = dict(byWorkcenter=[], byPackage=[], byAge=[])

    self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit&include_dummy=1')

    mock_get_dist.assert_called_once_with(reason='YieldLimit', include_dummy=True)
|
||||
|
||||
|
||||
class TestHoldDetailLotsRoute(TestHoldRoutesBase):
|
||||
class TestHoldDetailLotsRoute(TestHoldRoutesBase):
|
||||
"""Test GET /api/wip/hold-detail/lots endpoint."""
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
|
||||
@@ -258,16 +292,28 @@ class TestHoldDetailLotsRoute(TestHoldRoutesBase):
|
||||
call_args = mock_get_lots.call_args
|
||||
self.assertEqual(call_args.kwargs['page'], 1)
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
|
||||
def test_returns_error_on_failure(self, mock_get_lots):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_lots.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
|
||||
def test_returns_error_on_failure(self, mock_get_lots):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_lots.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
|
||||
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
|
||||
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 4))
|
||||
def test_lots_rate_limited_returns_429(self, _mock_limit, mock_get_lots):
|
||||
"""Rate-limited lots requests should return 429."""
|
||||
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 429)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertEqual(data['error']['code'], 'TOO_MANY_REQUESTS')
|
||||
mock_get_lots.assert_not_called()
|
||||
|
||||
|
||||
class TestHoldDetailAgeRangeFilters(TestHoldRoutesBase):
|
||||
|
||||
@@ -4,10 +4,10 @@
|
||||
Tests aggregation, status classification, and cache query functionality.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import json
|
||||
import pandas as pd
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import json
|
||||
import pandas as pd
|
||||
|
||||
|
||||
class TestClassifyStatus:
|
||||
@@ -98,6 +98,7 @@ class TestAggregateByResourceid:
|
||||
'OBJECTCATEGORY': 'ASSEMBLY',
|
||||
'EQUIPMENTASSETSSTATUS': 'PRD',
|
||||
'EQUIPMENTASSETSSTATUSREASON': None,
|
||||
'RUNCARDLOTID': 'LOT001',
|
||||
'JOBORDER': 'JO001',
|
||||
'JOBSTATUS': 'RUN',
|
||||
'SYMPTOMCODE': None,
|
||||
@@ -127,6 +128,7 @@ class TestAggregateByResourceid:
|
||||
'OBJECTCATEGORY': 'ASSEMBLY',
|
||||
'EQUIPMENTASSETSSTATUS': 'PRD',
|
||||
'EQUIPMENTASSETSSTATUSREASON': None,
|
||||
'RUNCARDLOTID': 'LOT001',
|
||||
'JOBORDER': 'JO001',
|
||||
'JOBSTATUS': 'RUN',
|
||||
'SYMPTOMCODE': None,
|
||||
@@ -141,6 +143,7 @@ class TestAggregateByResourceid:
|
||||
'OBJECTCATEGORY': 'ASSEMBLY',
|
||||
'EQUIPMENTASSETSSTATUS': 'PRD',
|
||||
'EQUIPMENTASSETSSTATUSREASON': None,
|
||||
'RUNCARDLOTID': 'LOT002',
|
||||
'JOBORDER': 'JO002',
|
||||
'JOBSTATUS': 'RUN',
|
||||
'SYMPTOMCODE': None,
|
||||
@@ -155,6 +158,7 @@ class TestAggregateByResourceid:
|
||||
'OBJECTCATEGORY': 'ASSEMBLY',
|
||||
'EQUIPMENTASSETSSTATUS': 'PRD',
|
||||
'EQUIPMENTASSETSSTATUSREASON': None,
|
||||
'RUNCARDLOTID': 'LOT003',
|
||||
'JOBORDER': 'JO003',
|
||||
'JOBSTATUS': 'RUN',
|
||||
'SYMPTOMCODE': None,
|
||||
@@ -184,6 +188,7 @@ class TestAggregateByResourceid:
|
||||
'OBJECTCATEGORY': 'ASSEMBLY',
|
||||
'EQUIPMENTASSETSSTATUS': 'PRD',
|
||||
'EQUIPMENTASSETSSTATUSREASON': None,
|
||||
'RUNCARDLOTID': 'LOT001',
|
||||
'JOBORDER': 'JO001',
|
||||
'JOBSTATUS': 'RUN',
|
||||
'SYMPTOMCODE': None,
|
||||
@@ -198,6 +203,7 @@ class TestAggregateByResourceid:
|
||||
'OBJECTCATEGORY': 'WAFERSORT',
|
||||
'EQUIPMENTASSETSSTATUS': 'SBY',
|
||||
'EQUIPMENTASSETSSTATUSREASON': 'Waiting',
|
||||
'RUNCARDLOTID': None,
|
||||
'JOBORDER': None,
|
||||
'JOBSTATUS': None,
|
||||
'SYMPTOMCODE': None,
|
||||
@@ -216,7 +222,7 @@ class TestAggregateByResourceid:
|
||||
|
||||
assert r1['LOT_COUNT'] == 1
|
||||
assert r1['STATUS_CATEGORY'] == 'PRODUCTIVE'
|
||||
assert r2['LOT_COUNT'] == 1
|
||||
assert r2['LOT_COUNT'] == 0
|
||||
assert r2['STATUS_CATEGORY'] == 'STANDBY'
|
||||
|
||||
def test_handles_empty_records(self):
|
||||
@@ -298,17 +304,17 @@ class TestGetEquipmentStatusById:
|
||||
"""Test get_equipment_status_by_id function."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def reset_modules(self):
|
||||
"""Reset module state before each test."""
|
||||
import mes_dashboard.core.redis_client as rc
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
yield
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
def reset_modules(self):
|
||||
"""Reset module state before each test."""
|
||||
import mes_dashboard.core.redis_client as rc
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
yield
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
|
||||
def test_returns_none_when_redis_unavailable(self):
|
||||
"""Test returns None when Redis client unavailable."""
|
||||
@@ -356,17 +362,17 @@ class TestGetEquipmentStatusByIds:
|
||||
"""Test get_equipment_status_by_ids function."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def reset_modules(self):
|
||||
"""Reset module state before each test."""
|
||||
import mes_dashboard.core.redis_client as rc
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
yield
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
def reset_modules(self):
|
||||
"""Reset module state before each test."""
|
||||
import mes_dashboard.core.redis_client as rc
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
yield
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
|
||||
def test_returns_empty_for_empty_input(self):
|
||||
"""Test returns empty list for empty input."""
|
||||
@@ -412,17 +418,17 @@ class TestGetAllEquipmentStatus:
|
||||
"""Test get_all_equipment_status function."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def reset_modules(self):
|
||||
"""Reset module state before each test."""
|
||||
import mes_dashboard.core.redis_client as rc
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
yield
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
def reset_modules(self):
|
||||
"""Reset module state before each test."""
|
||||
import mes_dashboard.core.redis_client as rc
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
yield
|
||||
rc._REDIS_CLIENT = None
|
||||
eq._equipment_status_cache.invalidate("equipment_status_all")
|
||||
eq._invalidate_equipment_status_lookup()
|
||||
|
||||
def test_returns_empty_when_redis_unavailable(self):
|
||||
"""Test returns empty list when Redis unavailable."""
|
||||
@@ -465,7 +471,7 @@ class TestGetAllEquipmentStatus:
|
||||
assert result[1]['RESOURCEID'] == 'R002'
|
||||
|
||||
|
||||
class TestGetEquipmentStatusCacheStatus:
|
||||
class TestGetEquipmentStatusCacheStatus:
|
||||
"""Test get_equipment_status_cache_status function."""
|
||||
|
||||
@pytest.fixture
|
||||
@@ -505,44 +511,44 @@ class TestGetEquipmentStatusCacheStatus:
|
||||
from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_cache_status
|
||||
result = get_equipment_status_cache_status()
|
||||
|
||||
assert result['enabled'] is True
|
||||
assert result['loaded'] is True
|
||||
assert result['count'] == 1000
|
||||
|
||||
|
||||
class TestEquipmentProcessLevelCache:
|
||||
"""Test bounded process-level cache behavior for equipment status."""
|
||||
|
||||
def test_lru_eviction_prefers_recent_keys(self):
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
|
||||
cache = eq._ProcessLevelCache(ttl_seconds=60, max_size=2)
|
||||
cache.set("a", [{"RESOURCEID": "R001"}])
|
||||
cache.set("b", [{"RESOURCEID": "R002"}])
|
||||
assert cache.get("a") is not None # refresh recency
|
||||
cache.set("c", [{"RESOURCEID": "R003"}]) # should evict "b"
|
||||
|
||||
assert cache.get("b") is None
|
||||
assert cache.get("a") is not None
|
||||
assert cache.get("c") is not None
|
||||
|
||||
def test_global_equipment_cache_uses_bounded_config(self):
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
|
||||
assert eq.EQUIPMENT_PROCESS_CACHE_MAX_SIZE >= 1
|
||||
assert eq._equipment_status_cache.max_size == eq.EQUIPMENT_PROCESS_CACHE_MAX_SIZE
|
||||
|
||||
|
||||
class TestSharedQueryFragments:
|
||||
"""Test shared SQL fragment governance for equipment cache."""
|
||||
|
||||
def test_equipment_load_uses_shared_sql_fragment(self):
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
from mes_dashboard.services.sql_fragments import EQUIPMENT_STATUS_SELECT_SQL
|
||||
|
||||
mock_df = pd.DataFrame([{"RESOURCEID": "R001", "EQUIPMENTID": "EQ-01"}])
|
||||
with patch.object(eq, "read_sql_df", return_value=mock_df) as mock_read:
|
||||
eq._load_equipment_status_from_oracle()
|
||||
|
||||
sql = mock_read.call_args[0][0]
|
||||
assert sql.strip() == EQUIPMENT_STATUS_SELECT_SQL.strip()
|
||||
assert result['enabled'] is True
|
||||
assert result['loaded'] is True
|
||||
assert result['count'] == 1000
|
||||
|
||||
|
||||
class TestEquipmentProcessLevelCache:
|
||||
"""Test bounded process-level cache behavior for equipment status."""
|
||||
|
||||
def test_lru_eviction_prefers_recent_keys(self):
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
|
||||
cache = eq._ProcessLevelCache(ttl_seconds=60, max_size=2)
|
||||
cache.set("a", [{"RESOURCEID": "R001"}])
|
||||
cache.set("b", [{"RESOURCEID": "R002"}])
|
||||
assert cache.get("a") is not None # refresh recency
|
||||
cache.set("c", [{"RESOURCEID": "R003"}]) # should evict "b"
|
||||
|
||||
assert cache.get("b") is None
|
||||
assert cache.get("a") is not None
|
||||
assert cache.get("c") is not None
|
||||
|
||||
def test_global_equipment_cache_uses_bounded_config(self):
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
|
||||
assert eq.EQUIPMENT_PROCESS_CACHE_MAX_SIZE >= 1
|
||||
assert eq._equipment_status_cache.max_size == eq.EQUIPMENT_PROCESS_CACHE_MAX_SIZE
|
||||
|
||||
|
||||
class TestSharedQueryFragments:
|
||||
"""Test shared SQL fragment governance for equipment cache."""
|
||||
|
||||
def test_equipment_load_uses_shared_sql_fragment(self):
|
||||
import mes_dashboard.services.realtime_equipment_cache as eq
|
||||
from mes_dashboard.services.sql_fragments import EQUIPMENT_STATUS_SELECT_SQL
|
||||
|
||||
mock_df = pd.DataFrame([{"RESOURCEID": "R001", "EQUIPMENTID": "EQ-01"}])
|
||||
with patch.object(eq, "read_sql_df", return_value=mock_df) as mock_read:
|
||||
eq._load_equipment_status_from_oracle()
|
||||
|
||||
sql = mock_read.call_args[0][0]
|
||||
assert sql.strip() == EQUIPMENT_STATUS_SELECT_SQL.strip()
|
||||
|
||||
152
tests/test_wip_hold_pages_integration.py
Normal file
152
tests/test_wip_hold_pages_integration.py
Normal file
@@ -0,0 +1,152 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for WIP Overview / WIP Detail / Hold Detail page contracts."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client():
|
||||
"""Create a test client with isolated DB engine state."""
|
||||
db._ENGINE = None
|
||||
app = create_app("testing")
|
||||
app.config["TESTING"] = True
|
||||
return app.test_client()
|
||||
|
||||
|
||||
def test_wip_pages_render_vite_assets(client):
|
||||
"""Core WIP/Hold pages should render Vite bundles."""
|
||||
overview = client.get("/wip-overview")
|
||||
detail = client.get("/wip-detail")
|
||||
hold = client.get("/hold-detail?reason=YieldLimit")
|
||||
|
||||
assert overview.status_code == 200
|
||||
assert detail.status_code == 200
|
||||
assert hold.status_code == 200
|
||||
|
||||
overview_html = overview.data.decode("utf-8")
|
||||
detail_html = detail.data.decode("utf-8")
|
||||
hold_html = hold.data.decode("utf-8")
|
||||
|
||||
assert "/static/dist/wip-overview.js" in overview_html
|
||||
assert "/static/dist/wip-detail.js" in detail_html
|
||||
assert "/static/dist/hold-detail.js" in hold_html
|
||||
|
||||
|
||||
def test_wip_overview_and_detail_status_parameter_contract(client):
|
||||
"""Status/type params should be accepted across overview and detail APIs."""
|
||||
with (
|
||||
patch("mes_dashboard.routes.wip_routes.get_wip_matrix") as mock_matrix,
|
||||
patch("mes_dashboard.routes.wip_routes.get_wip_detail") as mock_detail,
|
||||
):
|
||||
mock_matrix.return_value = {
|
||||
"workcenters": [],
|
||||
"packages": [],
|
||||
"matrix": {},
|
||||
"workcenter_totals": {},
|
||||
"package_totals": {},
|
||||
"grand_total": 0,
|
||||
}
|
||||
mock_detail.return_value = {
|
||||
"workcenter": "TMTT",
|
||||
"summary": {
|
||||
"total_lots": 0,
|
||||
"on_equipment_lots": 0,
|
||||
"waiting_lots": 0,
|
||||
"hold_lots": 0,
|
||||
},
|
||||
"specs": [],
|
||||
"lots": [],
|
||||
"pagination": {"page": 1, "page_size": 100, "total_count": 0, "total_pages": 1},
|
||||
"sys_date": None,
|
||||
}
|
||||
|
||||
matrix_resp = client.get("/api/wip/overview/matrix?type=PJA3460&status=queue")
|
||||
detail_resp = client.get("/api/wip/detail/TMTT?type=PJA3460&status=queue&page=1&page_size=100")
|
||||
|
||||
assert matrix_resp.status_code == 200
|
||||
assert detail_resp.status_code == 200
|
||||
assert json.loads(matrix_resp.data)["success"] is True
|
||||
assert json.loads(detail_resp.data)["success"] is True
|
||||
|
||||
mock_matrix.assert_called_once_with(
|
||||
include_dummy=False,
|
||||
workorder=None,
|
||||
lotid=None,
|
||||
status="QUEUE",
|
||||
hold_type=None,
|
||||
package=None,
|
||||
pj_type="PJA3460",
|
||||
)
|
||||
mock_detail.assert_called_once_with(
|
||||
workcenter="TMTT",
|
||||
package=None,
|
||||
pj_type="PJA3460",
|
||||
status="QUEUE",
|
||||
hold_type=None,
|
||||
workorder=None,
|
||||
lotid=None,
|
||||
include_dummy=False,
|
||||
page=1,
|
||||
page_size=100,
|
||||
)
|
||||
|
||||
|
||||
def test_hold_detail_api_contract_flow(client):
|
||||
"""Hold detail summary/distribution/lots should all accept the same reason."""
|
||||
with (
|
||||
patch("mes_dashboard.routes.hold_routes.get_hold_detail_summary") as mock_summary,
|
||||
patch("mes_dashboard.routes.hold_routes.get_hold_detail_distribution") as mock_distribution,
|
||||
patch("mes_dashboard.routes.hold_routes.get_hold_detail_lots") as mock_lots,
|
||||
):
|
||||
mock_summary.return_value = {
|
||||
"totalLots": 10,
|
||||
"totalQty": 1000,
|
||||
"avgAge": 1.2,
|
||||
"maxAge": 5.0,
|
||||
"workcenterCount": 2,
|
||||
}
|
||||
mock_distribution.return_value = {
|
||||
"byWorkcenter": [],
|
||||
"byPackage": [],
|
||||
"byAge": [],
|
||||
}
|
||||
mock_lots.return_value = {
|
||||
"lots": [],
|
||||
"pagination": {"page": 1, "perPage": 50, "total": 0, "totalPages": 1},
|
||||
"filters": {"workcenter": None, "package": None, "ageRange": None},
|
||||
}
|
||||
|
||||
reason = "YieldLimit"
|
||||
summary_resp = client.get(f"/api/wip/hold-detail/summary?reason={reason}")
|
||||
dist_resp = client.get(f"/api/wip/hold-detail/distribution?reason={reason}")
|
||||
lots_resp = client.get(
|
||||
f"/api/wip/hold-detail/lots?reason={reason}&workcenter=DA&package=DIP-B&age_range=1-3&page=2&per_page=80"
|
||||
)
|
||||
|
||||
assert summary_resp.status_code == 200
|
||||
assert dist_resp.status_code == 200
|
||||
assert lots_resp.status_code == 200
|
||||
|
||||
assert json.loads(summary_resp.data)["success"] is True
|
||||
assert json.loads(dist_resp.data)["success"] is True
|
||||
assert json.loads(lots_resp.data)["success"] is True
|
||||
|
||||
mock_summary.assert_called_once_with(reason=reason, include_dummy=False)
|
||||
mock_distribution.assert_called_once_with(reason=reason, include_dummy=False)
|
||||
mock_lots.assert_called_once_with(
|
||||
reason=reason,
|
||||
workcenter="DA",
|
||||
package="DIP-B",
|
||||
age_range="1-3",
|
||||
include_dummy=False,
|
||||
page=2,
|
||||
page_size=80,
|
||||
)
|
||||
@@ -23,7 +23,7 @@ class TestWipRoutesBase(unittest.TestCase):
|
||||
self.client = self.app.test_client()
|
||||
|
||||
|
||||
class TestOverviewSummaryRoute(TestWipRoutesBase):
|
||||
class TestOverviewSummaryRoute(TestWipRoutesBase):
|
||||
"""Test GET /api/wip/overview/summary endpoint."""
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_summary')
|
||||
@@ -48,20 +48,42 @@ class TestOverviewSummaryRoute(TestWipRoutesBase):
|
||||
self.assertEqual(data['data']['totalLots'], 9073)
|
||||
self.assertEqual(data['data']['byWipStatus']['hold']['lots'], 120)
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_summary')
|
||||
def test_returns_error_on_failure(self, mock_get_summary):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_summary.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/overview/summary')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('error', data)
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_summary')
|
||||
def test_returns_error_on_failure(self, mock_get_summary):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_summary.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/overview/summary')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('error', data)
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_summary')
|
||||
def test_passes_filters_and_include_dummy(self, mock_get_summary):
|
||||
"""Should pass overview filter params to service layer."""
|
||||
mock_get_summary.return_value = {
|
||||
'totalLots': 0,
|
||||
'totalQtyPcs': 0,
|
||||
'byWipStatus': {},
|
||||
'dataUpdateDate': None,
|
||||
}
|
||||
|
||||
self.client.get(
|
||||
'/api/wip/overview/summary?workorder=WO1&lotid=L1&package=SOT-23&type=PJA&include_dummy=true'
|
||||
)
|
||||
|
||||
mock_get_summary.assert_called_once_with(
|
||||
include_dummy=True,
|
||||
workorder='WO1',
|
||||
lotid='L1',
|
||||
package='SOT-23',
|
||||
pj_type='PJA'
|
||||
)
|
||||
|
||||
|
||||
class TestOverviewMatrixRoute(TestWipRoutesBase):
|
||||
class TestOverviewMatrixRoute(TestWipRoutesBase):
|
||||
"""Test GET /api/wip/overview/matrix endpoint."""
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_matrix')
|
||||
@@ -85,19 +107,37 @@ class TestOverviewMatrixRoute(TestWipRoutesBase):
|
||||
self.assertIn('packages', data['data'])
|
||||
self.assertIn('matrix', data['data'])
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_matrix')
|
||||
def test_returns_error_on_failure(self, mock_get_matrix):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_matrix.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/overview/matrix')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_matrix')
|
||||
def test_returns_error_on_failure(self, mock_get_matrix):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_matrix.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/overview/matrix')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
|
||||
def test_rejects_invalid_status(self):
|
||||
"""Invalid status should return 400."""
|
||||
response = self.client.get('/api/wip/overview/matrix?status=INVALID')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('Invalid status', data['error'])
|
||||
|
||||
def test_rejects_invalid_hold_type(self):
|
||||
"""Invalid hold_type should return 400."""
|
||||
response = self.client.get('/api/wip/overview/matrix?status=HOLD&hold_type=oops')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('Invalid hold_type', data['error'])
|
||||
|
||||
|
||||
class TestOverviewHoldRoute(TestWipRoutesBase):
|
||||
class TestOverviewHoldRoute(TestWipRoutesBase):
|
||||
"""Test GET /api/wip/overview/hold endpoint."""
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary')
|
||||
@@ -117,16 +157,29 @@ class TestOverviewHoldRoute(TestWipRoutesBase):
|
||||
self.assertTrue(data['success'])
|
||||
self.assertEqual(len(data['data']['items']), 2)
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary')
|
||||
def test_returns_error_on_failure(self, mock_get_hold):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_hold.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/overview/hold')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary')
|
||||
def test_returns_error_on_failure(self, mock_get_hold):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
mock_get_hold.return_value = None
|
||||
|
||||
response = self.client.get('/api/wip/overview/hold')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary')
|
||||
def test_passes_filters_and_include_dummy(self, mock_get_hold):
|
||||
"""Should pass hold filter params to service layer."""
|
||||
mock_get_hold.return_value = {'items': []}
|
||||
|
||||
self.client.get('/api/wip/overview/hold?workorder=WO1&lotid=L1&include_dummy=1')
|
||||
|
||||
mock_get_hold.assert_called_once_with(
|
||||
include_dummy=True,
|
||||
workorder='WO1',
|
||||
lotid='L1'
|
||||
)
|
||||
|
||||
|
||||
class TestDetailRoute(TestWipRoutesBase):
|
||||
@@ -187,6 +240,7 @@ class TestDetailRoute(TestWipRoutesBase):
|
||||
mock_get_detail.assert_called_once_with(
|
||||
workcenter='焊接_DB',
|
||||
package='SOT-23',
|
||||
pj_type=None,
|
||||
status='RUN',
|
||||
hold_type=None,
|
||||
workorder=None,
|
||||
@@ -217,10 +271,10 @@ class TestDetailRoute(TestWipRoutesBase):
|
||||
self.assertEqual(call_args.kwargs['page_size'], 500)
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_detail')
|
||||
def test_handles_page_less_than_one(self, mock_get_detail):
|
||||
"""Page number less than 1 should be set to 1."""
|
||||
mock_get_detail.return_value = {
|
||||
'workcenter': '切割',
|
||||
def test_handles_page_less_than_one(self, mock_get_detail):
|
||||
"""Page number less than 1 should be set to 1."""
|
||||
mock_get_detail.return_value = {
|
||||
'workcenter': '切割',
|
||||
'summary': {'total_lots': 0, 'on_equipment_lots': 0,
|
||||
'waiting_lots': 0, 'hold_lots': 0},
|
||||
'specs': [],
|
||||
@@ -232,28 +286,28 @@ class TestDetailRoute(TestWipRoutesBase):
|
||||
|
||||
response = self.client.get('/api/wip/detail/切割?page=0')
|
||||
|
||||
call_args = mock_get_detail.call_args
|
||||
self.assertEqual(call_args.kwargs['page'], 1)
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_detail')
|
||||
def test_handles_page_size_less_than_one(self, mock_get_detail):
|
||||
"""Page size less than 1 should be set to 1."""
|
||||
mock_get_detail.return_value = {
|
||||
'workcenter': '切割',
|
||||
'summary': {'total_lots': 0, 'on_equipment_lots': 0,
|
||||
'waiting_lots': 0, 'hold_lots': 0},
|
||||
'specs': [],
|
||||
'lots': [],
|
||||
'pagination': {'page': 1, 'page_size': 1,
|
||||
'total_count': 0, 'total_pages': 1},
|
||||
'sys_date': None
|
||||
}
|
||||
|
||||
self.client.get('/api/wip/detail/切割?page_size=0')
|
||||
|
||||
call_args = mock_get_detail.call_args
|
||||
self.assertEqual(call_args.kwargs['page_size'], 1)
|
||||
|
||||
call_args = mock_get_detail.call_args
|
||||
self.assertEqual(call_args.kwargs['page'], 1)
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_detail')
|
||||
def test_handles_page_size_less_than_one(self, mock_get_detail):
|
||||
"""Page size less than 1 should be set to 1."""
|
||||
mock_get_detail.return_value = {
|
||||
'workcenter': '切割',
|
||||
'summary': {'total_lots': 0, 'on_equipment_lots': 0,
|
||||
'waiting_lots': 0, 'hold_lots': 0},
|
||||
'specs': [],
|
||||
'lots': [],
|
||||
'pagination': {'page': 1, 'page_size': 1,
|
||||
'total_count': 0, 'total_pages': 1},
|
||||
'sys_date': None
|
||||
}
|
||||
|
||||
self.client.get('/api/wip/detail/切割?page_size=0')
|
||||
|
||||
call_args = mock_get_detail.call_args
|
||||
self.assertEqual(call_args.kwargs['page_size'], 1)
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_detail')
|
||||
def test_returns_error_on_failure(self, mock_get_detail):
|
||||
"""Should return success=False and 500 on failure."""
|
||||
@@ -265,17 +319,35 @@ class TestDetailRoute(TestWipRoutesBase):
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertFalse(data['success'])
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_detail')
|
||||
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 7))
|
||||
def test_detail_rate_limited_returns_429(self, _mock_limit, mock_get_detail):
|
||||
"""Rate-limited detail requests should return 429."""
|
||||
response = self.client.get('/api/wip/detail/焊接_DB')
|
||||
def test_rejects_invalid_status(self):
|
||||
"""Invalid status should return 400."""
|
||||
response = self.client.get('/api/wip/detail/焊接_DB?status=INVALID')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 429)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertEqual(data['error']['code'], 'TOO_MANY_REQUESTS')
|
||||
mock_get_detail.assert_not_called()
|
||||
self.assertIn('Invalid status', data['error'])
|
||||
|
||||
def test_rejects_invalid_hold_type(self):
|
||||
"""Invalid hold_type should return 400."""
|
||||
response = self.client.get('/api/wip/detail/焊接_DB?status=HOLD&hold_type=oops')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertIn('Invalid hold_type', data['error'])
|
||||
|
||||
@patch('mes_dashboard.routes.wip_routes.get_wip_detail')
|
||||
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 7))
|
||||
def test_detail_rate_limited_returns_429(self, _mock_limit, mock_get_detail):
|
||||
"""Rate-limited detail requests should return 429."""
|
||||
response = self.client.get('/api/wip/detail/焊接_DB')
|
||||
data = json.loads(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, 429)
|
||||
self.assertFalse(data['success'])
|
||||
self.assertEqual(data['error']['code'], 'TOO_MANY_REQUESTS')
|
||||
mock_get_detail.assert_not_called()
|
||||
|
||||
|
||||
class TestMetaWorkcentersRoute(TestWipRoutesBase):
|
||||
|
||||
@@ -4,10 +4,13 @@
|
||||
Tests the WIP query functions that use DW_MES_LOT_V view.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
from functools import wraps
|
||||
import pandas as pd
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
from functools import wraps
|
||||
import pandas as pd
|
||||
import threading
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from mes_dashboard.services.wip_service import (
|
||||
WIP_VIEW,
|
||||
@@ -452,7 +455,7 @@ class TestSearchLotIds(unittest.TestCase):
|
||||
self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args)
|
||||
|
||||
|
||||
class TestWipSearchIndexShortcut(unittest.TestCase):
|
||||
class TestWipSearchIndexShortcut(unittest.TestCase):
|
||||
"""Test derived search index fast-path behavior."""
|
||||
|
||||
@patch('mes_dashboard.services.wip_service._search_workorders_from_oracle')
|
||||
@@ -477,8 +480,72 @@ class TestWipSearchIndexShortcut(unittest.TestCase):
|
||||
|
||||
result = search_workorders("GA26", package="SOT-23")
|
||||
|
||||
self.assertEqual(result, ["GA26012001"])
|
||||
mock_oracle.assert_called_once()
|
||||
self.assertEqual(result, ["GA26012001"])
|
||||
mock_oracle.assert_called_once()
|
||||
|
||||
|
||||
class TestWipSnapshotLocking(unittest.TestCase):
|
||||
"""Concurrency behavior for snapshot cache build path."""
|
||||
|
||||
def setUp(self):
|
||||
import mes_dashboard.services.wip_service as wip_service
|
||||
with wip_service._wip_snapshot_lock:
|
||||
wip_service._wip_snapshot_cache.clear()
|
||||
|
||||
@staticmethod
|
||||
def _sample_df() -> pd.DataFrame:
|
||||
return pd.DataFrame({
|
||||
"WORKORDER": ["WO1", "WO2"],
|
||||
"LOTID": ["LOT1", "LOT2"],
|
||||
"QTY": [100, 200],
|
||||
"EQUIPMENTCOUNT": [1, 0],
|
||||
"CURRENTHOLDCOUNT": [0, 1],
|
||||
"HOLDREASONNAME": [None, "品質確認"],
|
||||
"WORKCENTER_GROUP": ["WC-A", "WC-B"],
|
||||
"PACKAGE_LEF": ["PKG-A", "PKG-B"],
|
||||
"PJ_TYPE": ["T1", "T2"],
|
||||
})
|
||||
|
||||
def test_concurrent_snapshot_miss_builds_once(self):
|
||||
import mes_dashboard.services.wip_service as wip_service
|
||||
|
||||
df = self._sample_df()
|
||||
build_count_lock = threading.Lock()
|
||||
build_count = 0
|
||||
|
||||
def slow_build(snapshot_df, include_dummy, version):
|
||||
nonlocal build_count
|
||||
with build_count_lock:
|
||||
build_count += 1
|
||||
time.sleep(0.05)
|
||||
return {
|
||||
"version": version,
|
||||
"built_at": "2026-02-10T00:00:00",
|
||||
"row_count": int(len(snapshot_df)),
|
||||
"frame": snapshot_df,
|
||||
"indexes": {},
|
||||
"frame_bytes": 0,
|
||||
"index_bucket_count": 0,
|
||||
}
|
||||
|
||||
start_event = threading.Event()
|
||||
|
||||
def call_snapshot():
|
||||
start_event.wait(timeout=1)
|
||||
return wip_service._get_wip_snapshot(include_dummy=False)
|
||||
|
||||
with patch.object(wip_service, "_get_wip_cache_version", return_value="version-1"):
|
||||
with patch.object(wip_service, "_get_wip_dataframe", return_value=df) as mock_get_df:
|
||||
with patch.object(wip_service, "_build_wip_snapshot", side_effect=slow_build):
|
||||
with ThreadPoolExecutor(max_workers=6) as pool:
|
||||
futures = [pool.submit(call_snapshot) for _ in range(6)]
|
||||
start_event.set()
|
||||
results = [future.result(timeout=3) for future in futures]
|
||||
|
||||
self.assertEqual(build_count, 1)
|
||||
self.assertEqual(mock_get_df.call_count, 1)
|
||||
self.assertTrue(all(result is not None for result in results))
|
||||
self.assertTrue(all(result.get("version") == "version-1" for result in results))
|
||||
|
||||
|
||||
class TestDummyExclusionInAllFunctions(unittest.TestCase):
|
||||
|
||||
Reference in New Issue
Block a user