feat(wip): preserve filters between Overview and Detail with thundering-herd fix

URL is now single source of truth for filter state (workorder, lotid,
package, type, status) across WIP Overview and Detail pages. Drill-down
carries all filters + status; back button dynamically reflects Detail
changes. Backend Detail API now supports pj_type filter parameter.

Harden concurrency: add pagehide abort for MPA navigation, double-check
locking on Redis JSON parse and snapshot build to prevent thread pool
saturation during rapid page switching. Fix watchdog setsid and PID
discovery. Fix test_realtime_equipment_cache RUNCARDLOTID field mismatch.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
egg
2026-02-11 07:22:48 +08:00
parent 3a15b0abaf
commit be22571421
24 changed files with 1522 additions and 266 deletions

View File

@@ -69,6 +69,9 @@ function updateUrlState() {
if (filters.type) {
params.set('type', filters.type);
}
if (activeStatusFilter.value) {
params.set('status', activeStatusFilter.value);
}
window.history.replaceState({}, '', `/wip-detail?${params.toString()}`);
}
@@ -183,6 +186,28 @@ const tableData = computed(() => ({
specs: detailData.value?.specs || [],
pagination: detailData.value?.pagination || { page: 1, page_size: PAGE_SIZE, total_count: 0, total_pages: 1 },
}));
// Back-navigation target for the header "← Overview" link. Built from the
// *current* Detail filter + status state so that Overview restores exactly
// what the user last saw here; only non-empty values become URL params.
const backUrl = computed(() => {
  const params = new URLSearchParams();
  const stateEntries = [
    ['workorder', filters.workorder],
    ['lotid', filters.lotid],
    ['package', filters.package],
    ['type', filters.type],
    ['status', activeStatusFilter.value],
  ];
  for (const [key, value] of stateEntries) {
    if (value) {
      params.set(key, value);
    }
  }
  const query = params.toString();
  return query === '' ? '/wip-overview' : `/wip-overview?${query}`;
});
function updateFilters(nextFilters) {
filters.workorder = nextFilters.workorder || '';
@@ -210,6 +235,7 @@ function toggleStatusFilter(status) {
activeStatusFilter.value = activeStatusFilter.value === status ? null : status;
page.value = 1;
selectedLotId.value = '';
updateUrlState();
void loadTableOnly();
}
@@ -251,6 +277,7 @@ async function initializePage() {
filters.lotid = getUrlParam('lotid');
filters.package = getUrlParam('package');
filters.type = getUrlParam('type');
activeStatusFilter.value = getUrlParam('status') || null;
if (!workcenter.value) {
const signal = createAbortSignal('wip-detail-init');
@@ -284,7 +311,7 @@ void initializePage();
<div class="dashboard wip-detail-page">
<header class="header">
<div class="header-left">
<a href="/wip-overview" class="btn btn-back">&larr; Overview</a>
<a :href="backUrl" class="btn btn-back">&larr; Overview</a>
<h1>{{ pageTitle }}</h1>
</div>
<div class="header-right">

View File

@@ -46,6 +46,10 @@ function unwrapApiResult(result, fallbackMessage) {
return result;
}
// Read one query parameter from the current page URL.
// Returns the trimmed value, or '' when the param is absent or blank.
function getUrlParam(name) {
  const raw = new URLSearchParams(window.location.search).get(name);
  if (raw === null) {
    return '';
  }
  return raw.trim() || '';
}
// Translate the reactive filter state (plus an optional status override)
// into the query-param object shared by the Overview API calls.
function buildFilters(status = null) {
  const queryParams = buildWipOverviewQueryParams(filters, status);
  return queryParams;
}
@@ -165,6 +169,7 @@ async function loadMatrixOnly() {
// Toggle a status-card filter: clicking the active card clears it, clicking
// a different card switches to it. The URL is synced before the matrix
// reloads so the address bar always reflects the active state.
function toggleStatusFilter(status) {
  const isAlreadyActive = activeStatusFilter.value === status;
  activeStatusFilter.value = isAlreadyActive ? null : status;
  updateUrlState();
  void loadMatrixOnly();
}
@@ -175,18 +180,46 @@ function updateFilters(nextFilters) {
filters.type = nextFilters.type || '';
}
// Mirror the current filter + status state into the URL (the URL is the
// single source of truth across Overview/Detail). Only non-empty values are
// written; replaceState is used so filter changes do not add history entries.
function updateUrlState() {
  const params = new URLSearchParams();
  for (const field of ['workorder', 'lotid', 'package', 'type']) {
    const value = filters[field];
    if (value) {
      params.set(field, value);
    }
  }
  if (activeStatusFilter.value) {
    params.set('status', activeStatusFilter.value);
  }
  const query = params.toString();
  const nextUrl = query === '' ? '/wip-overview' : `/wip-overview?${query}`;
  window.history.replaceState({}, '', nextUrl);
}
// Commit user-submitted filter values: copy them into the reactive filter
// state, mirror the new state into the URL, then reload all Overview data.
// nextFilters: plain object with workorder/lotid/package/type string fields.
function applyFilters(nextFilters) {
  updateFilters(nextFilters);
  updateUrlState();
  void loadAllData(false);
}
// Reset every filter field and the active status card, strip all params from
// the URL, then reload Overview data without filters.
function clearFilters() {
  const emptyFilters = Object.fromEntries(
    ['workorder', 'lotid', 'package', 'type'].map((field) => [field, '']),
  );
  updateFilters(emptyFilters);
  activeStatusFilter.value = null;
  updateUrlState();
  void loadAllData(false);
}
// Clear a single filter field (triggered by a filter tag's × button), sync
// the URL, and reload all Overview data with the remaining filters.
function removeFilter(field) {
  filters[field] = '';
  updateUrlState();
  void loadAllData(false);
}
@@ -206,6 +239,9 @@ function navigateToDetail(workcenter) {
if (filters.type) {
params.append('type', filters.type);
}
if (activeStatusFilter.value) {
params.append('status', activeStatusFilter.value);
}
window.location.href = `/wip-detail?${params.toString()}`;
}
@@ -221,7 +257,17 @@ async function manualRefresh() {
await triggerRefresh({ resetTimer: true, force: true });
}
void loadAllData(true);
// Page entry point: restore filter + status state from the URL query params
// (empty URL leaves everything at defaults), then perform the initial load.
async function initializePage() {
  for (const field of ['workorder', 'lotid', 'package', 'type']) {
    filters[field] = getUrlParam(field);
  }
  activeStatusFilter.value = getUrlParam('status') || null;
  await loadAllData(true);
}
void initializePage();
</script>
<template>

View File

@@ -10,6 +10,7 @@ export function useAutoRefresh({
} = {}) {
let refreshTimer = null;
const controllers = new Map();
let pageHideHandler = null;
function stopAutoRefresh() {
if (refreshTimer) {
@@ -75,16 +76,26 @@ export function useAutoRefresh({
}
onMounted(() => {
pageHideHandler = () => {
stopAutoRefresh();
abortAllRequests();
};
if (autoStart) {
startAutoRefresh();
}
document.addEventListener('visibilitychange', handleVisibilityChange);
window.addEventListener('pagehide', pageHideHandler);
});
onBeforeUnmount(() => {
stopAutoRefresh();
abortAllRequests();
document.removeEventListener('visibilitychange', handleVisibilityChange);
if (pageHideHandler) {
window.removeEventListener('pagehide', pageHideHandler);
pageHideHandler = null;
}
});
return {

View File

@@ -0,0 +1,2 @@
schema: spec-driven
created: 2026-02-10

View File

@@ -0,0 +1,56 @@
## Context
WIP Overview 和 Detail 是兩個獨立的 Vite multi-page 應用,透過 `window.location.href` 導航。目前 Overview 的 filter 狀態只存在 `reactive()` 物件中,不反映到 URL。Detail 已有 URL 狀態管理(`getUrlParam` / `updateUrlState`),但不包含 status filter。Back button 是 hard-coded `<a href="/wip-overview">`,導致返回時所有狀態丟失。
兩個頁面都不使用 Vue Router,各自是獨立 Vite entry,所以導航都是 full-page navigation,狀態只能透過 URL params 傳遞。
## Goals / Non-Goals
**Goals:**
- URL 作為 filter 狀態的 single source of truth,兩頁面一致
- Overview → Detail drill-down 傳遞所有 filters + status
- Detail → Overview back navigation 還原所有 filters + status(含 Detail 中的變更)
- 無參數時行為與現行完全相同(backwards compatible)
**Non-Goals:**
- 不引入 sessionStorage / localStorage / Pinia 全域狀態管理
- 不修改 API endpoints 或 backend 邏輯
- 不改變 pagination 狀態的傳遞pagination 是 Detail 內部狀態,不帶回 Overview
- 不改變 Hold Detail 頁的 back link 行為
## Decisions
### D1: URL params 作為唯一狀態傳遞機制
**選擇**: 透過 URL query params 在頁面間傳遞 filter 和 status 狀態
**替代方案**:
- sessionStorageURL 乾淨但引入隱藏狀態debug 困難tab 生命週期不可控
- localStorage跨 tab 污染,多開情境容易混亂
**理由**: Detail 已經用 URL params 管理 filter 狀態Overview 採相同模式保持一致性。URL 可 bookmark、可分享、可 debug。
### D2: Overview 用 `history.replaceState` 同步 URL不產生 history entry
**選擇**: 每次 filter/status 變更後用 `replaceState` 更新 URL不用 `pushState`
**理由**: filter 切換不應產生 browser back history避免用戶按 back 時陷入 filter 歷史中。Detail 已是相同做法。
### D3: Detail back button 用 computed URL 組合當前所有 filter 狀態
**選擇**: `<a :href="backUrl">` 其中 `backUrl` 是 computed property從當前 Detail 的 filters + status 動態組出 `/wip-overview?...`
**理由**: 如果用戶在 Detail 中變更了 filter 或 status返回 Overview 應反映這些變更。computed 確保 backUrl 永遠是最新狀態。
### D4: Status filter 使用字串值作為 URL param
**選擇**: `status` 參數值直接使用 `activeStatusFilter` 的值(`RUN`, `QUEUE`, `quality-hold`, `non-quality-hold`)
**理由**: 這些值已在 API 呼叫的 query params 中使用(`buildWipOverviewQueryParams` / `buildWipDetailQueryParams`),直接複用保持一致。
## Risks / Trade-offs
- **[Risk] URL 長度** → 4 個 filter fields + status + workcenter 不會超過 URL 長度限制,風險極低
- **[Risk] 空值造成冗長 URL** → 只 append 非空值的 params空 filter 不出現在 URL 中
- **[Trade-off] Overview 載入時多一步 URL parsing** → 極輕量操作,無性能影響
- **[Trade-off] Back button 從 static `<a>` 變成 dynamic `:href`** → Vue reactive 計算,無感知差異

View File

@@ -0,0 +1,30 @@
## Why
WIP Overview 和 WIP Detail 之間的篩選條件無法雙向保留。用戶在 Overview 設定的 filters(workorder, lotid, package, type)和 status filter(RUN/QUEUE/品質異常/非品質異常)在 drill down 到 Detail 時只部分傳遞(缺 status),而從 Detail 返回 Overview 時所有篩選狀態完全丟失。這迫使用戶反覆重新輸入篩選條件,破壞了 drill-down 的分析流程。
## What Changes
- Overview 頁面新增 URL 狀態管理:所有 filters 和 status filter 同步到 URL query params,頁面載入時從 URL 還原狀態
- Overview drill-down 導航額外傳遞 `status` 參數到 Detail
- Detail 頁面初始化時額外讀取 `status` URL 參數並還原 status filter 狀態
- Detail 頁面的 `updateUrlState()` 額外同步 `status` 參數
- Detail 的 Back button 改為動態 computed URL攜帶當前所有 filter + status 回 Overview
- Detail 中 `toggleStatusFilter()` 操作後同步 URL 狀態
## Capabilities
### New Capabilities
_None — this change enhances existing capabilities._
### Modified Capabilities
- `wip-overview-page`: Overview 新增 URL 狀態管理filters + status 雙向同步到 URLdrill-down 導航額外傳遞 status 參數
- `wip-detail-page`: Detail 新增 status URL 參數讀寫Back button 改為動態 URL 攜帶所有 filter 狀態回 Overview
## Impact
- **Frontend**: `frontend/src/wip-overview/App.vue` — 新增 `initializePage()``updateUrlState()`,修改 `navigateToDetail()``applyFilters()``clearFilters()``removeFilter()``toggleStatusFilter()`
- **Frontend**: `frontend/src/wip-detail/App.vue` — 修改 `initializePage()` 加讀 status、`updateUrlState()` 加寫 status、`toggleStatusFilter()` 加呼叫 `updateUrlState()`、back button 改為 computed `backUrl`
- **No backend changes** — 所有 API endpoints 和 SQL 不需修改
- **No breaking changes** — URL params 為 additive無參數時行為與現行相同

View File

@@ -0,0 +1,63 @@
## MODIFIED Requirements
### Requirement: Detail page SHALL receive drill-down parameters from Overview
The page SHALL read URL query parameters to initialize its state from the Overview page drill-down.
#### Scenario: URL parameter initialization
- **WHEN** the page loads with `?workcenter={name}` in the URL
- **THEN** the page SHALL use the specified workcenter for data loading
- **THEN** the page title SHALL display "WIP Detail - {workcenter}"
#### Scenario: Filter passthrough from Overview
- **WHEN** the URL contains additional filter parameters (workorder, lotid, package, type)
- **THEN** filter inputs SHALL be pre-filled with those values
- **THEN** data SHALL be loaded with those filters applied
#### Scenario: Status passthrough from Overview
- **WHEN** the URL contains a `status` parameter (e.g., `?workcenter=焊接_DW&status=RUN`)
- **THEN** the status card corresponding to the `status` value SHALL be activated
- **THEN** data SHALL be loaded with the status filter applied
#### Scenario: Missing workcenter fallback
- **WHEN** the page loads without a `workcenter` parameter
- **THEN** the page SHALL fetch available workcenters from `GET /api/wip/meta/workcenters`
- **THEN** the first workcenter SHALL be used and the URL SHALL be updated via `replaceState`
### Requirement: Detail page SHALL display WIP summary cards
The page SHALL display five summary cards with status counts for the current workcenter.
#### Scenario: Summary cards rendering
- **WHEN** detail data is loaded
- **THEN** five cards SHALL display: Total Lots, RUN, QUEUE, 品質異常, 非品質異常
#### Scenario: Status card click filters table
- **WHEN** user clicks a status card (RUN, QUEUE, 品質異常, 非品質異常)
- **THEN** the lot table SHALL reload filtered to that status
- **THEN** the active card SHALL show a visual active state
- **THEN** non-active status cards SHALL dim
- **THEN** clicking the same card again SHALL remove the filter
- **THEN** the URL SHALL be updated to reflect the active status filter
### Requirement: Detail page SHALL have back navigation to Overview with filter preservation
The page SHALL provide a way to return to the Overview page while preserving all current filter state.
#### Scenario: Back button with filter state
- **WHEN** user clicks the "← Overview" button in the header
- **THEN** the page SHALL navigate to `/wip-overview` with current filter values (workorder, lotid, package, type) and status as URL parameters
- **THEN** only non-empty filter values SHALL appear as URL parameters
#### Scenario: Back button reflects Detail changes
- **WHEN** the user modifies filters or status in Detail (e.g., changes status from RUN to QUEUE)
- **THEN** the back button URL SHALL dynamically update to reflect the current Detail filter state
- **THEN** navigating back SHALL cause Overview to load with the updated filter state
## ADDED Requirements
### Requirement: Detail page SHALL synchronize status filter to URL
The page SHALL include the active status filter in URL state management.
#### Scenario: Status included in URL state
- **WHEN** the status filter is active
- **THEN** `updateUrlState()` SHALL include `status={value}` in the URL parameters
- **WHEN** the status filter is cleared
- **THEN** the `status` parameter SHALL be removed from the URL

View File

@@ -0,0 +1,79 @@
## MODIFIED Requirements
### Requirement: Overview page SHALL display WIP status breakdown cards
The page SHALL display four clickable status cards (RUN, QUEUE, 品質異常, 非品質異常) with lot and quantity counts.
#### Scenario: Status cards rendering
- **WHEN** summary data is loaded
- **THEN** four status cards SHALL be displayed with color coding (green=RUN, yellow=QUEUE, red=品質異常, orange=非品質異常)
- **THEN** each card SHALL show lot count and quantity
#### Scenario: Status card click filters matrix
- **WHEN** user clicks a status card
- **THEN** the matrix table SHALL reload with the selected status filter
- **THEN** the clicked card SHALL show an active visual state
- **THEN** non-active cards SHALL dim to 50% opacity
- **THEN** clicking the same card again SHALL deactivate the filter and restore all cards
- **THEN** the URL SHALL be updated to reflect the active status filter
### Requirement: Overview page SHALL display Workcenter × Package matrix
The page SHALL display a cross-tabulation table of workcenters vs packages.
#### Scenario: Matrix table rendering
- **WHEN** matrix data is loaded from `GET /api/wip/overview/matrix`
- **THEN** the table SHALL display workcenters as rows and packages as columns (limited to top 15)
- **THEN** the first column (Workcenter) SHALL be sticky on horizontal scroll
- **THEN** a Total row and Total column SHALL be displayed
#### Scenario: Matrix workcenter drill-down
- **WHEN** user clicks a workcenter name in the matrix
- **THEN** the page SHALL navigate to `/wip-detail?workcenter={name}`
- **THEN** active filter values (workorder, lotid, package, type) SHALL be passed as URL parameters
- **THEN** the active status filter SHALL be passed as the `status` URL parameter if set
### Requirement: Overview page SHALL support autocomplete filtering
The page SHALL provide autocomplete-enabled filter inputs for WORKORDER, LOT ID, PACKAGE, and TYPE.
#### Scenario: Autocomplete search
- **WHEN** user types 2+ characters in a filter input
- **THEN** the page SHALL call `GET /api/wip/meta/search` with debounce (300ms)
- **THEN** suggestions SHALL appear in a dropdown below the input
- **THEN** cross-filter parameters SHALL be included (other active filter values)
#### Scenario: Apply and clear filters
- **WHEN** user clicks "套用篩選" or presses Enter in a filter input
- **THEN** all three API calls (summary, matrix, hold) SHALL reload with the filter values
- **THEN** the URL SHALL be updated to reflect the applied filter values
- **WHEN** user clicks "清除篩選"
- **THEN** all filter inputs SHALL be cleared and data SHALL reload without filters
- **THEN** the URL SHALL be cleared of all filter and status parameters
#### Scenario: Active filter display
- **WHEN** filters are applied
- **THEN** active filters SHALL be displayed as removable tags (e.g., "WO: {value} ×")
- **THEN** clicking a tag's remove button SHALL clear that filter, reload data, and update the URL
## ADDED Requirements
### Requirement: Overview page SHALL persist filter state in URL
The page SHALL synchronize all filter state (workorder, lotid, package, type, status) to URL query parameters as the single source of truth.
#### Scenario: URL state initialization on page load
- **WHEN** the page loads with filter query parameters in the URL (e.g., `?package=SOD-323&status=RUN`)
- **THEN** the filter inputs SHALL be pre-filled with the URL parameter values
- **THEN** the status card corresponding to the `status` parameter SHALL be activated
- **THEN** data SHALL be loaded with all restored filters and status applied
#### Scenario: URL state initialization without parameters
- **WHEN** the page loads without any filter query parameters
- **THEN** all filters SHALL be empty and no status card SHALL be active
- **THEN** data SHALL load without filters (current default behavior)
#### Scenario: URL update on filter change
- **WHEN** filters are applied, cleared, or a single filter is removed
- **THEN** the URL SHALL be updated via `history.replaceState` to reflect the current filter state
- **THEN** only non-empty filter values SHALL appear as URL parameters
#### Scenario: URL update on status toggle
- **WHEN** a status card is clicked to activate or deactivate
- **THEN** the URL SHALL be updated via `history.replaceState` to include or remove the `status` parameter

View File

@@ -0,0 +1,28 @@
## 1. WIP Overview — URL 狀態管理
- [x] 1.1 新增 `updateUrlState()` 函式:將 filters (workorder, lotid, package, type) 和 activeStatusFilter 同步到 URL via `history.replaceState`,只 append 非空值
- [x] 1.2 新增 `initializePage()` 函式:從 URL params 讀取 filters + status還原到 `filters` reactive 和 `activeStatusFilter` ref然後呼叫 `loadAllData(true)`;取代目前的 `void loadAllData(true)` 直接呼叫
- [x] 1.3 修改 `applyFilters()``clearFilters()``removeFilter()` 三個函式:每次操作後呼叫 `updateUrlState()`
- [x] 1.4 修改 `toggleStatusFilter()`:操作後呼叫 `updateUrlState()`
## 2. WIP Overview — Drill-Down 帶 Status
- [x] 2.1 修改 `navigateToDetail()`:在組建 URL params 時,若 `activeStatusFilter.value` 非 nullappend `status` 參數
## 3. WIP Detail — 讀取 Status URL 參數
- [x] 3.1 修改 `initializePage()`:新增 `activeStatusFilter.value = getUrlParam('status') || null`,在 filters 讀取之後、`loadAllData` 之前
- [x] 3.2 修改 `updateUrlState()`:若 `activeStatusFilter.value` 非 null`params.set('status', activeStatusFilter.value)`
- [x] 3.3 修改 `toggleStatusFilter()`:操作後呼叫 `updateUrlState()`
## 4. WIP Detail — Back Button 動態 URL
- [x] 4.1 新增 computed `backUrl`:從當前 filters + activeStatusFilter 組出 `/wip-overview?...`(只含非空值,不含 workcenter
- [x] 4.2 將 template 中 `<a href="/wip-overview">` 改為 `<a :href="backUrl">`
## 5. 驗證
- [x] 5.1 驗證Overview 設定 filter + status → drill down → Detail 正確還原所有狀態
- [x] 5.2 驗證Detail 中變更 filter/status → 點 Back → Overview 正確還原變更後的狀態
- [x] 5.3 驗證:無參數直接訪問 `/wip-overview``/wip-detail` 行為與現行相同
- [x] 5.4 驗證Overview 的 clearFilters 清除所有 filter + status 並更新 URL

View File

@@ -17,6 +17,11 @@ The page SHALL read URL query parameters to initialize its state from the Overvi
- **THEN** filter inputs SHALL be pre-filled with those values
- **THEN** data SHALL be loaded with those filters applied
#### Scenario: Status passthrough from Overview
- **WHEN** the URL contains a `status` parameter (e.g., `?workcenter=焊接_DW&status=RUN`)
- **THEN** the status card corresponding to the `status` value SHALL be activated
- **THEN** data SHALL be loaded with the status filter applied
#### Scenario: Missing workcenter fallback
- **WHEN** the page loads without a `workcenter` parameter
- **THEN** the page SHALL fetch available workcenters from `GET /api/wip/meta/workcenters`
@@ -35,6 +40,7 @@ The page SHALL display five summary cards with status counts for the current wor
- **THEN** the active card SHALL show a visual active state
- **THEN** non-active status cards SHALL dim
- **THEN** clicking the same card again SHALL remove the filter
- **THEN** the URL SHALL be updated to reflect the active status filter
### Requirement: Detail page SHALL display lot details table with sticky columns
The page SHALL display a scrollable table with fixed left columns and dynamic spec columns.
@@ -107,12 +113,18 @@ The page SHALL paginate lot data with server-side support.
- **WHEN** user clicks Next or Prev
- **THEN** data SHALL reload with the updated page number
### Requirement: Detail page SHALL have back navigation to Overview
The page SHALL provide a way to return to the Overview page.
### Requirement: Detail page SHALL have back navigation to Overview with filter preservation
The page SHALL provide a way to return to the Overview page while preserving all current filter state.
#### Scenario: Back button
#### Scenario: Back button with filter state
- **WHEN** user clicks the "← Overview" button in the header
- **THEN** the page SHALL navigate to `/wip-overview`
- **THEN** the page SHALL navigate to `/wip-overview` with current filter values (workorder, lotid, package, type) and status as URL parameters
- **THEN** only non-empty filter values SHALL appear as URL parameters
#### Scenario: Back button reflects Detail changes
- **WHEN** the user modifies filters or status in Detail (e.g., changes status from RUN to QUEUE)
- **THEN** the back button URL SHALL dynamically update to reflect the current Detail filter state
- **THEN** navigating back SHALL cause Overview to load with the updated filter state
### Requirement: Detail page SHALL auto-refresh and handle request cancellation
The page SHALL auto-refresh and cancel stale requests identically to Overview.
@@ -122,3 +134,12 @@ The page SHALL auto-refresh and cancel stale requests identically to Overview.
- **THEN** data SHALL auto-refresh every 10 minutes, skipping when tab is hidden
- **THEN** visibility change SHALL trigger immediate refresh
- **THEN** new requests SHALL cancel in-flight requests via AbortController
### Requirement: Detail page SHALL synchronize status filter to URL
The page SHALL include the active status filter in URL state management.
#### Scenario: Status included in URL state
- **WHEN** the status filter is active
- **THEN** `updateUrlState()` SHALL include `status={value}` in the URL parameters
- **WHEN** the status filter is cleared
- **THEN** the `status` parameter SHALL be removed from the URL

View File

@@ -31,6 +31,7 @@ The page SHALL display four clickable status cards (RUN, QUEUE, 品質異常,
- **THEN** the clicked card SHALL show an active visual state
- **THEN** non-active cards SHALL dim to 50% opacity
- **THEN** clicking the same card again SHALL deactivate the filter and restore all cards
- **THEN** the URL SHALL be updated to reflect the active status filter
### Requirement: Overview page SHALL display Workcenter × Package matrix
The page SHALL display a cross-tabulation table of workcenters vs packages.
@@ -45,6 +46,7 @@ The page SHALL display a cross-tabulation table of workcenters vs packages.
- **WHEN** user clicks a workcenter name in the matrix
- **THEN** the page SHALL navigate to `/wip-detail?workcenter={name}`
- **THEN** active filter values (workorder, lotid, package, type) SHALL be passed as URL parameters
- **THEN** the active status filter SHALL be passed as the `status` URL parameter if set
### Requirement: Overview page SHALL display Hold Pareto analysis
The page SHALL display Pareto charts and tables for quality and non-quality hold reasons.
@@ -81,13 +83,15 @@ The page SHALL provide autocomplete-enabled filter inputs for WORKORDER, LOT ID,
#### Scenario: Apply and clear filters
- **WHEN** user clicks "套用篩選" or presses Enter in a filter input
- **THEN** all three API calls (summary, matrix, hold) SHALL reload with the filter values
- **THEN** the URL SHALL be updated to reflect the applied filter values
- **WHEN** user clicks "清除篩選"
- **THEN** all filter inputs SHALL be cleared and data SHALL reload without filters
- **THEN** the URL SHALL be cleared of all filter and status parameters
#### Scenario: Active filter display
- **WHEN** filters are applied
- **THEN** active filters SHALL be displayed as removable tags (e.g., "WO: {value} ×")
- **THEN** clicking a tag's remove button SHALL clear that filter and reload data
- **THEN** clicking a tag's remove button SHALL clear that filter, reload data, and update the URL
### Requirement: Overview page SHALL auto-refresh and handle request cancellation
The page SHALL automatically refresh data and prevent stale request pile-up.
@@ -109,3 +113,26 @@ The page SHALL automatically refresh data and prevent stale request pile-up.
#### Scenario: Manual refresh
- **WHEN** user clicks the "重新整理" button
- **THEN** data SHALL reload and the auto-refresh timer SHALL reset
### Requirement: Overview page SHALL persist filter state in URL
The page SHALL synchronize all filter state (workorder, lotid, package, type, status) to URL query parameters as the single source of truth.
#### Scenario: URL state initialization on page load
- **WHEN** the page loads with filter query parameters in the URL (e.g., `?package=SOD-323&status=RUN`)
- **THEN** the filter inputs SHALL be pre-filled with the URL parameter values
- **THEN** the status card corresponding to the `status` parameter SHALL be activated
- **THEN** data SHALL be loaded with all restored filters and status applied
#### Scenario: URL state initialization without parameters
- **WHEN** the page loads without any filter query parameters
- **THEN** all filters SHALL be empty and no status card SHALL be active
- **THEN** data SHALL load without filters (current default behavior)
#### Scenario: URL update on filter change
- **WHEN** filters are applied, cleared, or a single filter is removed
- **THEN** the URL SHALL be updated via `history.replaceState` to reflect the current filter state
- **THEN** only non-empty filter values SHALL appear as URL parameters
#### Scenario: URL update on status toggle
- **WHEN** a status card is clicked to activate or deactivate
- **THEN** the URL SHALL be updated via `history.replaceState` to include or remove the `status` parameter

View File

@@ -460,6 +460,16 @@ get_watchdog_pid() {
fi
rm -f "$WATCHDOG_PROCESS_PID_FILE"
fi
# Fallback: discover watchdog process even if PID file is missing/stale
local discovered_pid
discovered_pid=$(pgrep -f "[p]ython .*scripts/worker_watchdog.py" 2>/dev/null | head -1 || true)
if [ -n "${discovered_pid}" ] && kill -0 "${discovered_pid}" 2>/dev/null; then
echo "${discovered_pid}" > "$WATCHDOG_PROCESS_PID_FILE"
echo "${discovered_pid}"
return 0
fi
return 1
}
@@ -481,7 +491,12 @@ start_watchdog() {
fi
log_info "Starting worker watchdog..."
nohup python scripts/worker_watchdog.py >> "$WATCHDOG_LOG" 2>&1 &
if command -v setsid >/dev/null 2>&1; then
# Start watchdog in its own session so it survives non-interactive shell teardown.
setsid python scripts/worker_watchdog.py >> "$WATCHDOG_LOG" 2>&1 < /dev/null &
else
nohup python scripts/worker_watchdog.py >> "$WATCHDOG_LOG" 2>&1 < /dev/null &
fi
local pid=$!
echo "$pid" > "$WATCHDOG_PROCESS_PID_FILE"

View File

@@ -363,38 +363,36 @@ def get_cached_wip_data() -> Optional[pd.DataFrame]:
logger.debug(f"Process cache hit: {len(cached_df)} rows")
return cached_df
# Tier 2: Parse from Redis (slow path - needs lock)
# Tier 2: Parse from Redis (slow path, double-check locking)
if not REDIS_ENABLED:
return None
client = get_redis_client()
if client is None:
return None
try:
start_time = time.time()
data_json = client.get(get_key("data"))
if data_json is None:
logger.debug("Cache miss: no data in Redis")
return None
# Parse outside lock to reduce contention on hot paths.
parsed_df = pd.read_json(io.StringIO(data_json), orient='records')
parse_time = time.time() - start_time
except Exception as e:
logger.warning(f"Failed to read cache: {e}")
return None
# Keep lock scope tight: consistency check + cache write only.
with _wip_parse_lock:
cached_df = _wip_df_cache.get(cache_key)
if cached_df is not None:
logger.debug(f"Process cache hit (after parse): {len(cached_df)} rows")
logger.debug(f"Process cache hit (after lock): {len(cached_df)} rows")
return cached_df
_wip_df_cache.set(cache_key, parsed_df)
logger.debug(f"Cache hit: loaded {len(parsed_df)} rows from Redis (parsed in {parse_time:.2f}s)")
return parsed_df
client = get_redis_client()
if client is None:
return None
try:
start_time = time.time()
data_json = client.get(get_key("data"))
if data_json is None:
logger.debug("Cache miss: no data in Redis")
return None
parsed_df = pd.read_json(io.StringIO(data_json), orient='records')
_wip_df_cache.set(cache_key, parsed_df)
parse_time = time.time() - start_time
except Exception as e:
logger.warning(f"Failed to read cache: {e}")
return None
logger.debug(f"Cache hit: loaded {len(parsed_df)} rows from Redis (parsed in {parse_time:.2f}s)")
return parsed_df
def get_cached_sys_date() -> Optional[str]:

View File

@@ -174,6 +174,7 @@ def api_detail(workcenter: str):
Query Parameters:
package: Optional PRODUCTLINENAME filter
type: Optional PJ_TYPE filter (exact match)
status: Optional WIP status filter ('RUN', 'QUEUE', 'HOLD')
hold_type: Optional hold type filter ('quality', 'non-quality')
Only effective when status='HOLD'
@@ -187,6 +188,7 @@ def api_detail(workcenter: str):
JSON with workcenter, summary, specs, lots, pagination, sys_date
"""
package = request.args.get('package', '').strip() or None
pj_type = request.args.get('type', '').strip() or None
status = request.args.get('status', '').strip().upper() or None
hold_type = request.args.get('hold_type', '').strip().lower() or None
workorder = request.args.get('workorder', '').strip() or None
@@ -220,6 +222,7 @@ def api_detail(workcenter: str):
result = get_wip_detail(
workcenter=workcenter,
package=package,
pj_type=pj_type,
status=status,
hold_type=hold_type,
workorder=workorder,

View File

@@ -522,18 +522,19 @@ def _get_wip_snapshot(include_dummy: bool) -> Optional[Dict[str, Any]]:
return cached
_increment_wip_metric("snapshot_misses")
df = _get_wip_dataframe()
if df is None:
return None
snapshot = _build_wip_snapshot(df, include_dummy=include_dummy, version=version)
with _wip_snapshot_lock:
existing = _wip_snapshot_cache.get(cache_key)
if existing and existing.get("version") == version:
_increment_wip_metric("snapshot_hits")
return existing
df = _get_wip_dataframe()
if df is None:
return None
snapshot = _build_wip_snapshot(df, include_dummy=include_dummy, version=version)
_wip_snapshot_cache[cache_key] = snapshot
return snapshot
return snapshot
def _get_wip_search_index(include_dummy: bool) -> Optional[Dict[str, Any]]:
@@ -1206,6 +1207,7 @@ def _get_wip_hold_summary_from_oracle(
def get_wip_detail(
workcenter: str,
package: Optional[str] = None,
pj_type: Optional[str] = None,
status: Optional[str] = None,
hold_type: Optional[str] = None,
workorder: Optional[str] = None,
@@ -1221,6 +1223,7 @@ def get_wip_detail(
Args:
workcenter: WORKCENTER_GROUP name
package: Optional PACKAGE_LEF filter
pj_type: Optional PJ_TYPE filter (exact match)
status: Optional WIP status filter ('RUN', 'QUEUE', 'HOLD')
hold_type: Optional hold type filter ('quality', 'non-quality')
Only effective when status='HOLD'
@@ -1248,12 +1251,14 @@ def get_wip_detail(
workorder=workorder,
lotid=lotid,
package=package,
pj_type=pj_type,
workcenter=workcenter,
)
if summary_df is None:
return _get_wip_detail_from_oracle(
workcenter,
package,
pj_type,
status,
hold_type,
workorder,
@@ -1302,6 +1307,7 @@ def get_wip_detail(
workorder=workorder,
lotid=lotid,
package=package,
pj_type=pj_type,
workcenter=workcenter,
status=status_upper,
hold_type=hold_type_filter,
@@ -1310,6 +1316,7 @@ def get_wip_detail(
return _get_wip_detail_from_oracle(
workcenter,
package,
pj_type,
status,
hold_type,
workorder,
@@ -1367,13 +1374,14 @@ def get_wip_detail(
# Fallback to Oracle direct query
return _get_wip_detail_from_oracle(
workcenter, package, status, hold_type, workorder, lotid, include_dummy, page, page_size
workcenter, package, pj_type, status, hold_type, workorder, lotid, include_dummy, page, page_size
)
def _get_wip_detail_from_oracle(
workcenter: str,
package: Optional[str] = None,
pj_type: Optional[str] = None,
status: Optional[str] = None,
hold_type: Optional[str] = None,
workorder: Optional[str] = None,
@@ -1390,6 +1398,8 @@ def _get_wip_detail_from_oracle(
if package:
builder.add_param_condition("PACKAGE_LEF", package)
if pj_type:
builder.add_param_condition("PJ_TYPE", pj_type)
# WIP status filter (RUN/QUEUE/HOLD based on EQUIPMENTCOUNT and CURRENTHOLDCOUNT)
if status:
@@ -1411,6 +1421,8 @@ def _get_wip_detail_from_oracle(
summary_builder.add_param_condition("WORKCENTER_GROUP", workcenter)
if package:
summary_builder.add_param_condition("PACKAGE_LEF", package)
if pj_type:
summary_builder.add_param_condition("PJ_TYPE", pj_type)
summary_where, summary_params = summary_builder.build_where_only()
non_quality_list = CommonFilters.get_non_quality_reasons_sql()

View File

@@ -0,0 +1,168 @@
# -*- coding: utf-8 -*-
"""E2E coverage for WIP Overview / WIP Detail / Hold Detail pages."""
from __future__ import annotations
import time
from urllib.parse import parse_qs, quote, urlparse
import pytest
import requests
from playwright.sync_api import Page, expect
def _pick_workcenter(app_server: str) -> str:
    """Return a real workcenter name from the app, falling back to 'TMTT'.

    Using live data keeps the E2E run from flaking on a hard-coded name.
    """
    fallback = "TMTT"
    try:
        resp = requests.get(f"{app_server}/api/wip/meta/workcenters", timeout=10)
        data = (resp.json() if resp.ok else {}).get("data") or []
        if data:
            return data[0].get("name") or fallback
    except Exception:
        pass
    return fallback
def _pick_hold_reason(app_server: str) -> str:
    """Return a real hold reason from the app, falling back to 'YieldLimit'.

    Using live data keeps the E2E run from flaking on a hard-coded reason.
    """
    fallback = "YieldLimit"
    try:
        resp = requests.get(f"{app_server}/api/wip/overview/hold", timeout=10)
        items = ((resp.json() if resp.ok else {}).get("data") or {}).get("items") or []
        if items:
            return items[0].get("reason") or fallback
    except Exception:
        pass
    return fallback
def _get_with_retry(url: str, attempts: int = 3, timeout: float = 10.0):
    """Best-effort GET helper to reduce transient test flakiness.

    Redirects are not followed so callers can assert on 3xx responses.
    Re-raises the last requests.RequestException when every attempt fails.
    """
    total = max(attempts, 1)
    last_exc = None
    for attempt in range(total):
        try:
            return requests.get(url, timeout=timeout, allow_redirects=False)
        except requests.RequestException as exc:
            last_exc = exc
            # Back off between attempts only; the old code also slept after
            # the final failure, wasting 0.5s per exhausted call.
            if attempt < total - 1:
                time.sleep(0.5)
    # The loop runs at least once and only falls through via the except
    # branch, so last_exc is guaranteed to be set here (the old trailing
    # RuntimeError path was unreachable).
    raise last_exc
def _wait_for_response_url_tokens(page: Page, tokens: list[str], timeout_seconds: float = 30.0):
    """Wait until a network response URL contains all *tokens*.

    Returns the first matching response, or None on timeout. The response
    listener is always detached before returning so repeated calls on the
    same page do not accumulate handlers (the old code leaked one listener
    per call).
    """
    matched = []

    def handle_response(resp):
        if all(token in resp.url for token in tokens):
            matched.append(resp)

    page.on("response", handle_response)
    try:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline and not matched:
            page.wait_for_timeout(200)
    finally:
        page.remove_listener("response", handle_response)
    return matched[0] if matched else None
def _wait_for_response(page: Page, predicate, timeout_seconds: float = 30.0):
    """Wait until a network response satisfies *predicate*.

    Exceptions raised by the predicate are swallowed (treated as non-match).
    Returns the first matching response, or None on timeout. The response
    listener is always detached before returning so repeated calls on the
    same page do not accumulate handlers (the old code leaked one listener
    per call).
    """
    matched = []

    def handle_response(resp):
        try:
            if predicate(resp):
                matched.append(resp)
        except Exception:
            return

    page.on("response", handle_response)
    try:
        deadline = time.time() + timeout_seconds
        while time.time() < deadline and not matched:
            page.wait_for_timeout(200)
    finally:
        page.remove_listener("response", handle_response)
    return matched[0] if matched else None
@pytest.mark.e2e
class TestWipAndHoldPagesE2E:
    """E2E tests for WIP/Hold page URL + API behavior."""

    def test_wip_overview_restores_status_from_url(self, page: Page, app_server: str):
        """Overview loaded with ?type&status must query the matrix API with them."""
        # wait_until="commit" returns as soon as navigation commits; the
        # matrix request is observed asynchronously below.
        page.goto(
            f"{app_server}/wip-overview?type=PJA3460&status=queue",
            wait_until="commit",
            timeout=60000,
        )
        # Lowercase "queue" in the page URL is expected to be normalized
        # to "QUEUE" in the API request.
        response = _wait_for_response_url_tokens(
            page,
            ["/api/wip/overview/matrix", "type=PJA3460", "status=QUEUE"],
            timeout_seconds=30.0,
        )
        assert response is not None, "Did not observe expected matrix request with URL filters"
        assert response.ok
        expect(page.locator("body")).to_be_visible()

    def test_wip_detail_reads_status_and_back_link_keeps_filters(self, page: Page, app_server: str):
        """Detail page must forward URL filters to its API and into the back link."""
        workcenter = _pick_workcenter(app_server)
        page.goto(
            f"{app_server}/wip-detail?workcenter={quote(workcenter)}&type=PJA3460&status=queue",
            wait_until="commit",
            timeout=60000,
        )
        # Accept either "type" or "pj_type" as the detail API parameter name,
        # and either status casing.
        response = _wait_for_response(
            page,
            lambda resp: (
                "/api/wip/detail/" in resp.url
                and (
                    parse_qs(urlparse(resp.url).query).get("type", [None])[0] == "PJA3460"
                    or parse_qs(urlparse(resp.url).query).get("pj_type", [None])[0] == "PJA3460"
                )
                and parse_qs(urlparse(resp.url).query).get("status", [None])[0] in {"QUEUE", "queue"}
            ),
            timeout_seconds=30.0,
        )
        assert response is not None, "Did not observe expected detail request with URL filters"
        assert response.ok
        # The back button must carry the active filters back to the overview.
        back_href = page.locator("a.btn-back").get_attribute("href") or ""
        parsed = urlparse(back_href)
        params = parse_qs(parsed.query)
        assert parsed.path == "/wip-overview"
        assert params.get("type", [None])[0] == "PJA3460"
        assert params.get("status", [None])[0] in {"queue", "QUEUE"}

    def test_hold_detail_without_reason_redirects_to_overview(self, page: Page, app_server: str):
        """/hold-detail without a reason should 302 back to the overview."""
        response = _get_with_retry(f"{app_server}/hold-detail", attempts=3, timeout=10.0)
        assert response.status_code == 302
        assert response.headers.get("Location") == "/wip-overview"

    def test_hold_detail_calls_summary_distribution_and_lots(self, page: Page, app_server: str):
        """Hold detail page should call all three APIs with the same reason."""
        reason = _pick_hold_reason(app_server)
        seen = set()

        # NOTE(review): this listener is never removed; harmless as long as
        # the page fixture is per-test — confirm.
        def handle_response(resp):
            parsed = urlparse(resp.url)
            query = parse_qs(parsed.query)
            # Only count requests that carry the reason under test.
            if query.get("reason", [None])[0] != reason:
                return
            if parsed.path.endswith("/api/wip/hold-detail/summary"):
                seen.add("summary")
            elif parsed.path.endswith("/api/wip/hold-detail/distribution"):
                seen.add("distribution")
            elif parsed.path.endswith("/api/wip/hold-detail/lots"):
                seen.add("lots")

        page.on("response", handle_response)
        page.goto(
            f"{app_server}/hold-detail?reason={quote(reason)}",
            wait_until="commit",
            timeout=60000,
        )
        # Poll until all three endpoints have been observed or we time out.
        deadline = time.time() + 30
        while time.time() < deadline and len(seen) < 3:
            page.wait_for_timeout(200)
        assert seen == {"summary", "distribution", "lots"}

View File

@@ -14,6 +14,7 @@ import time
import requests
import concurrent.futures
from typing import List, Tuple
from urllib.parse import quote
# Import from local conftest via pytest fixtures
@@ -45,6 +46,34 @@ class TestAPILoadConcurrent:
duration = time.time() - start
return (False, duration, f"Error: {str(e)[:50]}")
def _discover_workcenter(self, base_url: str, timeout: float) -> str:
    """Get one available workcenter for detail load tests."""
    fallback = "TMTT"
    try:
        resp = requests.get(f"{base_url}/api/wip/meta/workcenters", timeout=timeout)
        if resp.status_code != 200:
            return fallback
        entries = resp.json().get("data") or []
        if not entries:
            return fallback
        return str(entries[0].get("name") or fallback)
    except Exception:
        return fallback
def _discover_hold_reason(self, base_url: str, timeout: float) -> str:
    """Get one available hold reason for hold-detail load tests."""
    fallback = "YieldLimit"
    try:
        resp = requests.get(f"{base_url}/api/wip/overview/hold", timeout=timeout)
        if resp.status_code != 200:
            return fallback
        entries = (resp.json().get("data") or {}).get("items") or []
        if not entries:
            return fallback
        return str(entries[0].get("reason") or fallback)
    except Exception:
        return fallback
def test_wip_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
"""Test WIP summary API under concurrent load."""
result = stress_result("WIP Summary Concurrent Load")
@@ -108,6 +137,68 @@ class TestAPILoadConcurrent:
assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%"
assert result.avg_response_time < 15.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 15s"
def test_wip_detail_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
    """Test WIP detail API under concurrent load."""
    result = stress_result("WIP Detail Concurrent Load")
    users = stress_config['concurrent_users']
    per_user = stress_config['requests_per_user']
    timeout = stress_config['timeout']
    # Target a real workcenter so the load test exercises a non-empty path.
    target = self._discover_workcenter(base_url, timeout)
    url = f"{base_url}/api/wip/detail/{quote(target)}?page=1&page_size=100"
    started = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=users) as pool:
        pending = [
            pool.submit(self._make_request, url, timeout)
            for _ in range(users * per_user)
        ]
        for done in concurrent.futures.as_completed(pending):
            ok, elapsed, message = done.result()
            if ok:
                result.add_success(elapsed)
            else:
                result.add_failure(message, elapsed)
    result.total_duration = time.time() - started
    print(result.report())
    assert result.success_rate >= 85.0, f"Success rate {result.success_rate:.1f}% is below 85%"
    assert result.avg_response_time < 20.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 20s"
def test_hold_detail_lots_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
    """Test hold-detail lots API under concurrent load."""
    result = stress_result("Hold Detail Lots Concurrent Load")
    users = stress_config['concurrent_users']
    per_user = stress_config['requests_per_user']
    timeout = stress_config['timeout']
    # Target a real hold reason so the query returns actual rows.
    reason = self._discover_hold_reason(base_url, timeout)
    url = f"{base_url}/api/wip/hold-detail/lots?reason={quote(reason)}&page=1&per_page=50"
    started = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=users) as pool:
        pending = [
            pool.submit(self._make_request, url, timeout)
            for _ in range(users * per_user)
        ]
        for done in concurrent.futures.as_completed(pending):
            ok, elapsed, message = done.result()
            if ok:
                result.add_success(elapsed)
            else:
                result.add_failure(message, elapsed)
    result.total_duration = time.time() - started
    print(result.report())
    assert result.success_rate >= 85.0, f"Success rate {result.success_rate:.1f}% is below 85%"
    assert result.avg_response_time < 20.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 20s"
def test_resource_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
"""Test resource status summary API under concurrent load."""
result = stress_result("Resource Status Summary Concurrent Load")
@@ -238,6 +329,31 @@ class TestAPILoadRampUp:
class TestAPITimeoutHandling:
"""Tests for timeout handling under load."""
@staticmethod
def _make_request(url: str, timeout: float) -> Tuple[bool, float, str]:
    """Issue one GET and report (success, duration, error_message)."""
    started = time.time()
    try:
        resp = requests.get(url, timeout=timeout)
        elapsed = time.time() - started
        if resp.status_code != 200:
            return (False, elapsed, f"HTTP {resp.status_code}")
        # Non-JSON 200 responses (e.g. HTML pages) count as success.
        if "application/json" not in resp.headers.get("Content-Type", ""):
            return (True, elapsed, "")
        body = resp.json()
        if body.get("success", True):
            return (True, elapsed, "")
        return (False, elapsed, f"API returned success=false: {body.get('error', 'unknown')}")
    except requests.exceptions.Timeout:
        return (False, time.time() - started, "Request timeout")
    except requests.exceptions.ConnectionError as exc:
        return (False, time.time() - started, f"Connection error: {str(exc)[:50]}")
    except Exception as exc:
        return (False, time.time() - started, f"Error: {str(exc)[:50]}")
def test_connection_recovery_after_timeout(self, base_url: str, stress_result):
"""Test that API recovers after timeout scenarios."""
result = stress_result("Connection Recovery After Timeout")
@@ -280,6 +396,55 @@ class TestAPITimeoutHandling:
assert recovered, "System did not recover after timeout scenarios"
def test_wip_pages_recoverability_after_burst(self, base_url: str, stress_result):
    """After a burst, health and critical WIP APIs should still respond."""
    result = stress_result("WIP Pages Recoverability After Burst")
    timeout = 30.0
    # The last entry (/health) is excluded from the burst and used only as
    # the post-burst recovery probe.
    probe_endpoints = [
        f"{base_url}/api/wip/overview/summary",
        f"{base_url}/api/wip/overview/matrix",
        f"{base_url}/api/wip/overview/hold",
        f"{base_url}/health",
    ]
    # Burst phase
    burst_count = 40
    start_time = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        futures = []
        for _ in range(burst_count):
            for endpoint in probe_endpoints[:-1]:
                futures.append(executor.submit(self._make_request, endpoint, timeout))
        for future in concurrent.futures.as_completed(futures):
            success, duration, error = future.result()
            if success:
                result.add_success(duration)
            else:
                result.add_failure(error, duration)
    # Recoverability probes
    healthy_probes = 0
    for _ in range(5):
        probe_start = time.time()
        try:
            response = requests.get(f"{base_url}/health", timeout=5)
            duration = time.time() - probe_start
            # 503 is acceptable here: a degraded-but-responsive health
            # endpoint still proves the process survived the burst.
            if response.status_code in (200, 503):
                payload = response.json()
                if payload.get("status") in {"healthy", "degraded", "unhealthy"}:
                    healthy_probes += 1
                    result.add_success(duration)
                    continue
            result.add_failure(f"Unexpected health response: {response.status_code}", duration)
        except Exception as exc:
            result.add_failure(str(exc)[:80], time.time() - probe_start)
        time.sleep(0.2)
    result.total_duration = time.time() - start_time
    print(result.report())
    assert healthy_probes >= 3, f"Health endpoint recoverability too low: {healthy_probes}/5"
@pytest.mark.stress
class TestAPIResponseConsistency:

View File

@@ -13,6 +13,8 @@ Run with: pytest tests/stress/test_frontend_stress.py -v -s
import pytest
import time
import re
import requests
from urllib.parse import quote
from playwright.sync_api import Page, expect
@@ -312,6 +314,120 @@ class TestPageNavigationStress:
print(f"\n All {len(tabs)} tabs clickable and responsive")
@pytest.mark.stress
class TestWipHoldPageStress:
    """Stress tests focused on WIP Overview / WIP Detail / Hold Detail pages."""

    def _pick_workcenter(self, app_server: str) -> str:
        """Get one available workcenter for WIP detail tests."""
        try:
            response = requests.get(f"{app_server}/api/wip/meta/workcenters", timeout=10)
            if response.status_code != 200:
                return "TMTT"
            payload = response.json()
            items = payload.get("data") or []
            if not items:
                return "TMTT"
            return str(items[0].get("name") or "TMTT")
        except Exception:
            # Network/JSON failures fall back to a known default name.
            return "TMTT"

    def _pick_reason(self, app_server: str) -> str:
        """Get one hold reason for hold-detail tests."""
        try:
            response = requests.get(f"{app_server}/api/wip/overview/hold", timeout=10)
            if response.status_code != 200:
                return "YieldLimit"
            payload = response.json()
            items = (payload.get("data") or {}).get("items") or []
            if not items:
                return "YieldLimit"
            return str(items[0].get("reason") or "YieldLimit")
        except Exception:
            return "YieldLimit"

    def test_rapid_navigation_across_wip_and_hold_pages(self, page: Page, app_server: str):
        """Rapid page switching should keep pages responsive and error-free."""
        workcenter = self._pick_workcenter(app_server)
        reason = self._pick_reason(app_server)
        # Three page types across four URL variants (overview twice, with
        # and without filters).
        urls = [
            f"{app_server}/wip-overview",
            f"{app_server}/wip-overview?type=PJA3460&status=queue",
            f"{app_server}/wip-detail?workcenter={quote(workcenter)}&type=PJA3460&status=queue",
            f"{app_server}/hold-detail?reason={quote(reason)}",
        ]
        js_errors = []
        page.on("pageerror", lambda error: js_errors.append(str(error)))
        start_time = time.time()
        # Cycle the URLs 16 times; MPA navigation aborts in-flight requests.
        for i in range(16):
            page.goto(urls[i % len(urls)], wait_until='domcontentloaded', timeout=60000)
            expect(page.locator("body")).to_be_visible()
            page.wait_for_timeout(150)
        elapsed = time.time() - start_time
        print(f"\n Rapid navigation across 3 pages completed in {elapsed:.2f}s")
        assert len(js_errors) == 0, f"JavaScript errors detected: {js_errors[:3]}"

    def test_wip_and_hold_api_burst_from_browser(self, page: Page, app_server: str):
        """Browser-side API burst should still return mostly successful responses."""
        load_page_with_js(page, f"{app_server}/wip-overview")
        # 5 rounds x 5 endpoints = 25 requests; "ok" counts any non-5xx status.
        result = page.evaluate("""
            async () => {
                const safeJson = async (resp) => {
                    try {
                        return await resp.json();
                    } catch (_) {
                        return null;
                    }
                };
                const wcResp = await fetch('/api/wip/meta/workcenters');
                const wcPayload = await safeJson(wcResp) || {};
                const workcenter = (wcPayload.data && wcPayload.data[0] && wcPayload.data[0].name) || 'TMTT';
                const holdResp = await fetch('/api/wip/overview/hold');
                const holdPayload = await safeJson(holdResp) || {};
                const holdItems = (holdPayload.data && holdPayload.data.items) || [];
                const reason = (holdItems[0] && holdItems[0].reason) || 'YieldLimit';
                const endpoints = [
                    '/api/wip/overview/summary',
                    '/api/wip/overview/matrix',
                    '/api/wip/overview/hold',
                    `/api/wip/detail/${encodeURIComponent(workcenter)}?page=1&page_size=100`,
                    `/api/wip/hold-detail/lots?reason=${encodeURIComponent(reason)}&page=1&per_page=50`,
                ];
                let total = 0;
                let success = 0;
                let failures = 0;
                for (let round = 0; round < 5; round++) {
                    const responses = await Promise.all(
                        endpoints.map((endpoint) =>
                            fetch(endpoint)
                                .then((r) => ({ ok: r.status < 500 }))
                                .catch(() => ({ ok: false }))
                        )
                    );
                    total += responses.length;
                    success += responses.filter((r) => r.ok).length;
                    failures += responses.filter((r) => !r.ok).length;
                }
                return { total, success, failures };
            }
        """)
        print(f"\n Browser burst total={result['total']}, success={result['success']}, failures={result['failures']}")
        assert result['success'] >= 20, f"Too many failed API requests: {result}"
@pytest.mark.stress
class TestMemoryStress:
"""Tests for memory leak detection."""

View File

@@ -8,6 +8,9 @@ import pytest
from unittest.mock import patch, MagicMock
import pandas as pd
import json
import threading
import time
from concurrent.futures import ThreadPoolExecutor
class TestGetCachedWipData:
@@ -91,6 +94,49 @@ class TestGetCachedWipData:
result = cache.get_cached_wip_data()
assert result is None
def test_concurrent_requests_parse_redis_once(self, reset_redis):
    """Concurrent misses should trigger Redis parse exactly once."""
    import mes_dashboard.core.cache as cache
    test_data = [
        {'LOTID': 'LOT001', 'QTY': 100, 'WORKORDER': 'WO001'},
        {'LOTID': 'LOT002', 'QTY': 200, 'WORKORDER': 'WO002'}
    ]
    cached_json = json.dumps(test_data)
    mock_client = MagicMock()
    mock_client.get.return_value = cached_json
    parse_count_lock = threading.Lock()
    parse_count = 0

    # A deliberately slow parse widens the race window so every worker
    # reaches the cache-miss path before the first parse completes.
    def slow_read_json(*args, **kwargs):
        nonlocal parse_count
        with parse_count_lock:
            parse_count += 1
        time.sleep(0.05)
        return pd.DataFrame(test_data)

    start_event = threading.Event()

    def call_cache():
        # Barrier: all workers start together to maximize contention.
        start_event.wait(timeout=1)
        return cache.get_cached_wip_data()

    with patch.object(cache, 'REDIS_ENABLED', True):
        with patch.object(cache, 'get_redis_client', return_value=mock_client):
            with patch.object(cache, 'get_key', return_value='mes_wip:data'):
                with patch.object(cache.pd, 'read_json', side_effect=slow_read_json):
                    with ThreadPoolExecutor(max_workers=6) as pool:
                        futures = [pool.submit(call_cache) for _ in range(6)]
                        start_event.set()
                        results = [future.result(timeout=3) for future in futures]
    # Double-checked locking must collapse 6 concurrent misses into a
    # single Redis GET and a single JSON parse.
    assert parse_count == 1
    assert mock_client.get.call_count == 1
    assert all(result is not None for result in results)
    assert all(len(result) == 2 for result in results)
class TestGetCachedSysDate:
"""Test get_cached_sys_date function."""

View File

@@ -90,6 +90,24 @@ class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
self.assertFalse(data['success'])
self.assertIn('error', data)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
def test_passes_include_dummy(self, summary_mock):
    """Should pass include_dummy flag to summary service."""
    summary_mock.return_value = {
        'totalLots': 0,
        'totalQty': 0,
        'avgAge': 0,
        'maxAge': 0,
        'workcenterCount': 0,
    }
    # "true" in the query string must be coerced to boolean True.
    self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit&include_dummy=true')
    summary_mock.assert_called_once_with(reason='YieldLimit', include_dummy=True)
class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
"""Test GET /api/wip/hold-detail/distribution endpoint."""
@@ -144,6 +162,22 @@ class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
def test_passes_include_dummy(self, distribution_mock):
    """Should pass include_dummy flag to distribution service."""
    distribution_mock.return_value = {
        'byWorkcenter': [],
        'byPackage': [],
        'byAge': [],
    }
    # "1" in the query string must be coerced to boolean True.
    self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit&include_dummy=1')
    distribution_mock.assert_called_once_with(reason='YieldLimit', include_dummy=True)
class TestHoldDetailLotsRoute(TestHoldRoutesBase):
"""Test GET /api/wip/hold-detail/lots endpoint."""
@@ -269,6 +303,18 @@ class TestHoldDetailLotsRoute(TestHoldRoutesBase):
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 4))
def test_lots_rate_limited_returns_429(self, _rate_limit_mock, lots_mock):
    """Rate-limited lots requests should return 429."""
    response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
    payload = json.loads(response.data)
    self.assertEqual(response.status_code, 429)
    self.assertFalse(payload['success'])
    self.assertEqual(payload['error']['code'], 'TOO_MANY_REQUESTS')
    # The service layer must never be reached once the limiter trips.
    lots_mock.assert_not_called()
class TestHoldDetailAgeRangeFilters(TestHoldRoutesBase):
"""Test age range filter validation."""

View File

@@ -98,6 +98,7 @@ class TestAggregateByResourceid:
'OBJECTCATEGORY': 'ASSEMBLY',
'EQUIPMENTASSETSSTATUS': 'PRD',
'EQUIPMENTASSETSSTATUSREASON': None,
'RUNCARDLOTID': 'LOT001',
'JOBORDER': 'JO001',
'JOBSTATUS': 'RUN',
'SYMPTOMCODE': None,
@@ -127,6 +128,7 @@ class TestAggregateByResourceid:
'OBJECTCATEGORY': 'ASSEMBLY',
'EQUIPMENTASSETSSTATUS': 'PRD',
'EQUIPMENTASSETSSTATUSREASON': None,
'RUNCARDLOTID': 'LOT001',
'JOBORDER': 'JO001',
'JOBSTATUS': 'RUN',
'SYMPTOMCODE': None,
@@ -141,6 +143,7 @@ class TestAggregateByResourceid:
'OBJECTCATEGORY': 'ASSEMBLY',
'EQUIPMENTASSETSSTATUS': 'PRD',
'EQUIPMENTASSETSSTATUSREASON': None,
'RUNCARDLOTID': 'LOT002',
'JOBORDER': 'JO002',
'JOBSTATUS': 'RUN',
'SYMPTOMCODE': None,
@@ -155,6 +158,7 @@ class TestAggregateByResourceid:
'OBJECTCATEGORY': 'ASSEMBLY',
'EQUIPMENTASSETSSTATUS': 'PRD',
'EQUIPMENTASSETSSTATUSREASON': None,
'RUNCARDLOTID': 'LOT003',
'JOBORDER': 'JO003',
'JOBSTATUS': 'RUN',
'SYMPTOMCODE': None,
@@ -184,6 +188,7 @@ class TestAggregateByResourceid:
'OBJECTCATEGORY': 'ASSEMBLY',
'EQUIPMENTASSETSSTATUS': 'PRD',
'EQUIPMENTASSETSSTATUSREASON': None,
'RUNCARDLOTID': 'LOT001',
'JOBORDER': 'JO001',
'JOBSTATUS': 'RUN',
'SYMPTOMCODE': None,
@@ -198,6 +203,7 @@ class TestAggregateByResourceid:
'OBJECTCATEGORY': 'WAFERSORT',
'EQUIPMENTASSETSSTATUS': 'SBY',
'EQUIPMENTASSETSSTATUSREASON': 'Waiting',
'RUNCARDLOTID': None,
'JOBORDER': None,
'JOBSTATUS': None,
'SYMPTOMCODE': None,
@@ -216,7 +222,7 @@ class TestAggregateByResourceid:
assert r1['LOT_COUNT'] == 1
assert r1['STATUS_CATEGORY'] == 'PRODUCTIVE'
assert r2['LOT_COUNT'] == 1
assert r2['LOT_COUNT'] == 0
assert r2['STATUS_CATEGORY'] == 'STANDBY'
def test_handles_empty_records(self):

View File

@@ -0,0 +1,152 @@
# -*- coding: utf-8 -*-
"""Integration tests for WIP Overview / WIP Detail / Hold Detail page contracts."""
from __future__ import annotations
import json
from unittest.mock import patch
import pytest
import mes_dashboard.core.database as db
from mes_dashboard.app import create_app
@pytest.fixture
def client():
    """Create a test client with isolated DB engine state."""
    # Reset the module-level engine so every test builds a fresh one
    # against the "testing" configuration instead of reusing a stale
    # engine from a previous test.
    db._ENGINE = None
    app = create_app("testing")
    app.config["TESTING"] = True
    return app.test_client()
def test_wip_pages_render_vite_assets(client):
    """Core WIP/Hold pages should render Vite bundles."""
    # Map each page route to the bundle its HTML must reference.
    expected_bundles = {
        "/wip-overview": "/static/dist/wip-overview.js",
        "/wip-detail": "/static/dist/wip-detail.js",
        "/hold-detail?reason=YieldLimit": "/static/dist/hold-detail.js",
    }
    for route, bundle in expected_bundles.items():
        response = client.get(route)
        assert response.status_code == 200
        assert bundle in response.data.decode("utf-8")
def test_wip_overview_and_detail_status_parameter_contract(client):
    """Status/type params should be accepted across overview and detail APIs."""
    with (
        patch("mes_dashboard.routes.wip_routes.get_wip_matrix") as mock_matrix,
        patch("mes_dashboard.routes.wip_routes.get_wip_detail") as mock_detail,
    ):
        mock_matrix.return_value = {
            "workcenters": [],
            "packages": [],
            "matrix": {},
            "workcenter_totals": {},
            "package_totals": {},
            "grand_total": 0,
        }
        mock_detail.return_value = {
            "workcenter": "TMTT",
            "summary": {
                "total_lots": 0,
                "on_equipment_lots": 0,
                "waiting_lots": 0,
                "hold_lots": 0,
            },
            "specs": [],
            "lots": [],
            "pagination": {"page": 1, "page_size": 100, "total_count": 0, "total_pages": 1},
            "sys_date": None,
        }
        matrix_resp = client.get("/api/wip/overview/matrix?type=PJA3460&status=queue")
        detail_resp = client.get("/api/wip/detail/TMTT?type=PJA3460&status=queue&page=1&page_size=100")
        assert matrix_resp.status_code == 200
        assert detail_resp.status_code == 200
        assert json.loads(matrix_resp.data)["success"] is True
        assert json.loads(detail_resp.data)["success"] is True
        # Contract: the route layer renames "type" to pj_type and uppercases
        # the status before delegating to the service layer.
        mock_matrix.assert_called_once_with(
            include_dummy=False,
            workorder=None,
            lotid=None,
            status="QUEUE",
            hold_type=None,
            package=None,
            pj_type="PJA3460",
        )
        mock_detail.assert_called_once_with(
            workcenter="TMTT",
            package=None,
            pj_type="PJA3460",
            status="QUEUE",
            hold_type=None,
            workorder=None,
            lotid=None,
            include_dummy=False,
            page=1,
            page_size=100,
        )
def test_hold_detail_api_contract_flow(client):
    """Hold detail summary/distribution/lots should all accept the same reason."""
    with (
        patch("mes_dashboard.routes.hold_routes.get_hold_detail_summary") as mock_summary,
        patch("mes_dashboard.routes.hold_routes.get_hold_detail_distribution") as mock_distribution,
        patch("mes_dashboard.routes.hold_routes.get_hold_detail_lots") as mock_lots,
    ):
        mock_summary.return_value = {
            "totalLots": 10,
            "totalQty": 1000,
            "avgAge": 1.2,
            "maxAge": 5.0,
            "workcenterCount": 2,
        }
        mock_distribution.return_value = {
            "byWorkcenter": [],
            "byPackage": [],
            "byAge": [],
        }
        mock_lots.return_value = {
            "lots": [],
            "pagination": {"page": 1, "perPage": 50, "total": 0, "totalPages": 1},
            "filters": {"workcenter": None, "package": None, "ageRange": None},
        }
        reason = "YieldLimit"
        summary_resp = client.get(f"/api/wip/hold-detail/summary?reason={reason}")
        dist_resp = client.get(f"/api/wip/hold-detail/distribution?reason={reason}")
        lots_resp = client.get(
            f"/api/wip/hold-detail/lots?reason={reason}&workcenter=DA&package=DIP-B&age_range=1-3&page=2&per_page=80"
        )
        assert summary_resp.status_code == 200
        assert dist_resp.status_code == 200
        assert lots_resp.status_code == 200
        assert json.loads(summary_resp.data)["success"] is True
        assert json.loads(dist_resp.data)["success"] is True
        assert json.loads(lots_resp.data)["success"] is True
        # Contract: query params are normalized into these service kwargs
        # (note per_page -> page_size).
        mock_summary.assert_called_once_with(reason=reason, include_dummy=False)
        mock_distribution.assert_called_once_with(reason=reason, include_dummy=False)
        mock_lots.assert_called_once_with(
            reason=reason,
            workcenter="DA",
            package="DIP-B",
            age_range="1-3",
            include_dummy=False,
            page=2,
            page_size=80,
        )

View File

@@ -60,6 +60,28 @@ class TestOverviewSummaryRoute(TestWipRoutesBase):
self.assertFalse(data['success'])
self.assertIn('error', data)
@patch('mes_dashboard.routes.wip_routes.get_wip_summary')
def test_passes_filters_and_include_dummy(self, summary_mock):
    """Should pass overview filter params to service layer."""
    summary_mock.return_value = {
        'totalLots': 0,
        'totalQtyPcs': 0,
        'byWipStatus': {},
        'dataUpdateDate': None,
    }
    query = (
        '/api/wip/overview/summary'
        '?workorder=WO1&lotid=L1&package=SOT-23&type=PJA&include_dummy=true'
    )
    self.client.get(query)
    # The route layer renames "type" to pj_type before delegating.
    summary_mock.assert_called_once_with(
        include_dummy=True,
        workorder='WO1',
        lotid='L1',
        package='SOT-23',
        pj_type='PJA',
    )
class TestOverviewMatrixRoute(TestWipRoutesBase):
"""Test GET /api/wip/overview/matrix endpoint."""
@@ -96,6 +118,24 @@ class TestOverviewMatrixRoute(TestWipRoutesBase):
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
def test_rejects_invalid_status(self):
    """Invalid status should return 400."""
    response = self.client.get('/api/wip/overview/matrix?status=INVALID')
    payload = json.loads(response.data)
    self.assertEqual(response.status_code, 400)
    self.assertFalse(payload['success'])
    self.assertIn('Invalid status', payload['error'])
def test_rejects_invalid_hold_type(self):
    """Invalid hold_type should return 400."""
    response = self.client.get('/api/wip/overview/matrix?status=HOLD&hold_type=oops')
    payload = json.loads(response.data)
    self.assertEqual(response.status_code, 400)
    self.assertFalse(payload['success'])
    self.assertIn('Invalid hold_type', payload['error'])
class TestOverviewHoldRoute(TestWipRoutesBase):
"""Test GET /api/wip/overview/hold endpoint."""
@@ -128,6 +168,19 @@ class TestOverviewHoldRoute(TestWipRoutesBase):
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
@patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary')
def test_passes_filters_and_include_dummy(self, hold_mock):
    """Should pass hold filter params to service layer."""
    hold_mock.return_value = {'items': []}
    # include_dummy=1 must coerce to boolean True.
    self.client.get('/api/wip/overview/hold?workorder=WO1&lotid=L1&include_dummy=1')
    hold_mock.assert_called_once_with(
        include_dummy=True,
        workorder='WO1',
        lotid='L1',
    )
class TestDetailRoute(TestWipRoutesBase):
"""Test GET /api/wip/detail/<workcenter> endpoint."""
@@ -187,6 +240,7 @@ class TestDetailRoute(TestWipRoutesBase):
mock_get_detail.assert_called_once_with(
workcenter='焊接_DB',
package='SOT-23',
pj_type=None,
status='RUN',
hold_type=None,
workorder=None,
@@ -265,6 +319,24 @@ class TestDetailRoute(TestWipRoutesBase):
self.assertEqual(response.status_code, 500)
self.assertFalse(data['success'])
def test_rejects_invalid_status(self):
    """Invalid status should return 400."""
    response = self.client.get('/api/wip/detail/焊接_DB?status=INVALID')
    payload = json.loads(response.data)
    self.assertEqual(response.status_code, 400)
    self.assertFalse(payload['success'])
    self.assertIn('Invalid status', payload['error'])
def test_rejects_invalid_hold_type(self):
    """Invalid hold_type should return 400."""
    response = self.client.get('/api/wip/detail/焊接_DB?status=HOLD&hold_type=oops')
    payload = json.loads(response.data)
    self.assertEqual(response.status_code, 400)
    self.assertFalse(payload['success'])
    self.assertIn('Invalid hold_type', payload['error'])
@patch('mes_dashboard.routes.wip_routes.get_wip_detail')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 7))
def test_detail_rate_limited_returns_429(self, _mock_limit, mock_get_detail):

View File

@@ -8,6 +8,9 @@ import unittest
from unittest.mock import patch, MagicMock
from functools import wraps
import pandas as pd
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from mes_dashboard.services.wip_service import (
WIP_VIEW,
@@ -481,6 +484,70 @@ class TestWipSearchIndexShortcut(unittest.TestCase):
mock_oracle.assert_called_once()
class TestWipSnapshotLocking(unittest.TestCase):
"""Concurrency behavior for snapshot cache build path."""
def setUp(self):
import mes_dashboard.services.wip_service as wip_service
with wip_service._wip_snapshot_lock:
wip_service._wip_snapshot_cache.clear()
@staticmethod
def _sample_df() -> pd.DataFrame:
return pd.DataFrame({
"WORKORDER": ["WO1", "WO2"],
"LOTID": ["LOT1", "LOT2"],
"QTY": [100, 200],
"EQUIPMENTCOUNT": [1, 0],
"CURRENTHOLDCOUNT": [0, 1],
"HOLDREASONNAME": [None, "品質確認"],
"WORKCENTER_GROUP": ["WC-A", "WC-B"],
"PACKAGE_LEF": ["PKG-A", "PKG-B"],
"PJ_TYPE": ["T1", "T2"],
})
def test_concurrent_snapshot_miss_builds_once(self):
import mes_dashboard.services.wip_service as wip_service
df = self._sample_df()
build_count_lock = threading.Lock()
build_count = 0
def slow_build(snapshot_df, include_dummy, version):
nonlocal build_count
with build_count_lock:
build_count += 1
time.sleep(0.05)
return {
"version": version,
"built_at": "2026-02-10T00:00:00",
"row_count": int(len(snapshot_df)),
"frame": snapshot_df,
"indexes": {},
"frame_bytes": 0,
"index_bucket_count": 0,
}
start_event = threading.Event()
def call_snapshot():
start_event.wait(timeout=1)
return wip_service._get_wip_snapshot(include_dummy=False)
with patch.object(wip_service, "_get_wip_cache_version", return_value="version-1"):
with patch.object(wip_service, "_get_wip_dataframe", return_value=df) as mock_get_df:
with patch.object(wip_service, "_build_wip_snapshot", side_effect=slow_build):
with ThreadPoolExecutor(max_workers=6) as pool:
futures = [pool.submit(call_snapshot) for _ in range(6)]
start_event.set()
results = [future.result(timeout=3) for future in futures]
self.assertEqual(build_count, 1)
self.assertEqual(mock_get_df.call_count, 1)
self.assertTrue(all(result is not None for result in results))
self.assertTrue(all(result.get("version") == "version-1" for result in results))
class TestDummyExclusionInAllFunctions(unittest.TestCase):
"""Test DUMMY exclusion is applied in all WIP functions."""