feat(reject-history): fix Pareto datasources, multi-select filtering, and UX enhancements

- Fix dimension Pareto datasources: PJ_TYPE/PRODUCTLINENAME from DW_MES_CONTAINER,
  WORKFLOWNAME from DW_MES_LOTWIPHISTORY via WIPTRACKINGGROUPKEYID, EQUIPMENTNAME
  from LOTREJECTHISTORY only (no WIP fallback), workcenter dimension uses WORKCENTER_GROUP
- Add multi-select Pareto click filtering with chip display and detail list integration
- Add TOP 20 display scope selector for TYPE/WORKFLOW/機台 dimensions
- Pass Pareto selection (dimension + values) through to list/export endpoints
- Enable TRACE_WORKER_ENABLED=true by default in start_server.sh and .env.example
- Archive reject-history-pareto-datasource-fix and reject-history-pareto-ux-enhancements
- Update reject-history-api and reject-history-page specs with new Pareto behaviors

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
egg
2026-03-02 13:23:16 +08:00
parent ff37768a15
commit e83d8e1a36
31 changed files with 1251 additions and 286 deletions

View File

@@ -197,7 +197,7 @@ EVENT_FETCHER_CACHE_SKIP_CID_THRESHOLD=10000
# --- Async Job Queue (提案 2: trace-async-job-queue) ---
# Enable RQ trace worker for async large query processing
# Set to true and start the worker: rq worker trace-events
TRACE_WORKER_ENABLED=false
TRACE_WORKER_ENABLED=true
# CID threshold for automatic async job routing (requires RQ worker).
# Requests with CID count > threshold are queued instead of processed synchronously.

View File

@@ -173,6 +173,8 @@ export function buildViewParams(queryId, {
metricFilter = 'all',
trendDates = [],
detailReason = '',
paretoDimension = '',
paretoValues = [],
page = 1,
perPage = 50,
policyFilters = {},
@@ -196,6 +198,14 @@ export function buildViewParams(queryId, {
if (detailReason) {
params.detail_reason = detailReason;
}
const normalizedParetoDimension = normalizeText(paretoDimension).toLowerCase();
const normalizedParetoValues = normalizeArray(paretoValues);
if (normalizedParetoDimension) {
params.pareto_dimension = normalizedParetoDimension;
}
if (normalizedParetoValues.length > 0) {
params.pareto_values = normalizedParetoValues;
}
params.page = page || 1;
params.per_page = perPage || 50;

View File

@@ -5,7 +5,6 @@ import { apiGet, apiPost } from '../core/api.js';
import {
buildViewParams,
parseMultiLineInput,
toRejectFilterSnapshot,
} from '../core/reject-history-filters.js';
import { replaceRuntimeHistory } from '../core/shell-navigation.js';
@@ -17,6 +16,15 @@ import TrendChart from './components/TrendChart.vue';
const API_TIMEOUT = 360000;
const DEFAULT_PER_PAGE = 50;
const PARETO_TOP20_DIMENSIONS = new Set(['type', 'workflow', 'equipment']);
const PARETO_DIMENSION_LABELS = {
reason: '不良原因',
package: 'PACKAGE',
type: 'TYPE',
workflow: 'WORKFLOW',
workcenter: '站點',
equipment: '機台',
};
// ---- Primary query form state ----
const queryMode = ref('date_range');
@@ -59,10 +67,11 @@ const supplementaryFilters = reactive({
// ---- Interactive state ----
const page = ref(1);
const detailReason = ref('');
const selectedTrendDates = ref([]);
const trendLegendSelected = ref({ '扣帳報廢量': true, '不扣帳報廢量': true });
const paretoDimension = ref('reason');
const selectedParetoValues = ref([]);
const paretoDisplayScope = ref('all');
const dimensionParetoItems = ref([]);
const dimensionParetoLoading = ref(false);
@@ -198,8 +207,9 @@ async function executePrimaryQuery() {
supplementaryFilters.workcenterGroups = [];
supplementaryFilters.reason = '';
page.value = 1;
detailReason.value = '';
selectedTrendDates.value = [];
selectedParetoValues.value = [];
paretoDisplayScope.value = 'all';
paretoDimension.value = 'reason';
dimensionParetoItems.value = [];
@@ -239,7 +249,8 @@ async function refreshView() {
supplementaryFilters,
metricFilter: metricFilterParam(),
trendDates: selectedTrendDates.value,
detailReason: detailReason.value,
paretoDimension: paretoDimension.value,
paretoValues: selectedParetoValues.value,
page: page.value,
perPage: DEFAULT_PER_PAGE,
policyFilters: {
@@ -330,10 +341,18 @@ function onTrendLegendChange(selected) {
refreshDimensionParetoIfActive();
}
function onParetoClick(reason) {
if (!reason) return;
detailReason.value = detailReason.value === reason ? '' : reason;
// Toggle one Pareto item in/out of the multi-select set, then reset paging,
// sync the URL, and reload the detail view. Blank/whitespace values are ignored.
function onParetoItemToggle(itemValue) {
  const normalized = String(itemValue || '').trim();
  if (!normalized) return;
  const current = selectedParetoValues.value;
  selectedParetoValues.value = current.includes(normalized)
    ? current.filter((item) => item !== normalized)
    : [...current, normalized];
  page.value = 1;
  updateUrlState();
  void refreshView();
}
@@ -341,6 +360,7 @@ function handleParetoScopeToggle(checked) {
draftFilters.paretoTop80 = Boolean(checked);
committedPrimary.paretoTop80 = Boolean(checked);
updateUrlState();
refreshDimensionParetoIfActive();
}
let activeDimRequestId = 0;
@@ -391,20 +411,37 @@ function refreshDimensionParetoIfActive() {
// Switch the active Pareto dimension. Any existing multi-select, display scope,
// and paging state is reset; non-reason dimensions trigger a dedicated Pareto
// fetch, and the detail view is refreshed in all cases.
function onDimensionChange(dim) {
  paretoDimension.value = dim;
  selectedParetoValues.value = [];
  paretoDisplayScope.value = 'all';
  page.value = 1;
  if (dim === 'reason') {
    dimensionParetoItems.value = [];
  } else {
    void fetchDimensionPareto(dim);
  }
  void refreshView();
}
// Persist the Pareto display scope selection ('top20' or 'all'); any
// unrecognized value falls back to 'all'. Only URL state is updated — the
// TOP 20 cut is applied client-side by a computed, so no refetch is needed.
function onParetoDisplayScopeChange(scope) {
  const nextScope = scope === 'top20' ? 'top20' : 'all';
  paretoDisplayScope.value = nextScope;
  updateUrlState();
}
// Drop every selected Pareto value, return to the first page, then sync the
// URL and reload the detail list.
function clearParetoSelection() {
  page.value = 1;
  selectedParetoValues.value = [];
  updateUrlState();
  void refreshView();
}
function onSupplementaryChange(filters) {
supplementaryFilters.packages = filters.packages || [];
supplementaryFilters.workcenterGroups = filters.workcenterGroups || [];
supplementaryFilters.reason = filters.reason || '';
page.value = 1;
detailReason.value = '';
selectedTrendDates.value = [];
selectedParetoValues.value = [];
void refreshView();
refreshDimensionParetoIfActive();
}
@@ -412,9 +449,12 @@ function onSupplementaryChange(filters) {
function removeFilterChip(chip) {
if (!chip?.removable) return;
if (chip.type === 'detail-reason') {
detailReason.value = '';
if (chip.type === 'pareto-value') {
selectedParetoValues.value = selectedParetoValues.value.filter(
(value) => value !== chip.value,
);
page.value = 1;
updateUrlState();
void refreshView();
return;
}
@@ -471,7 +511,8 @@ async function exportCsv() {
if (supplementaryFilters.reason) params.set('reason', supplementaryFilters.reason);
params.set('metric_filter', metricFilterParam());
for (const date of selectedTrendDates.value) params.append('trend_dates', date);
if (detailReason.value) params.set('detail_reason', detailReason.value);
params.set('pareto_dimension', paretoDimension.value);
for (const value of selectedParetoValues.value) params.append('pareto_values', value);
// Policy filters (applied in-memory on cached data)
if (committedPrimary.includeExcludedScrap) params.set('include_excluded_scrap', 'true');
@@ -642,10 +683,24 @@ const filteredParetoItems = computed(() => {
});
const activeParetoItems = computed(() => {
if (paretoDimension.value !== 'reason') return dimensionParetoItems.value;
return filteredParetoItems.value;
const baseItems =
paretoDimension.value === 'reason'
? filteredParetoItems.value
: (dimensionParetoItems.value || []);
if (
PARETO_TOP20_DIMENSIONS.has(paretoDimension.value)
&& paretoDisplayScope.value === 'top20'
) {
return baseItems.slice(0, 20);
}
return baseItems;
});
// Human-readable label for the active Pareto dimension (used by chips and the
// detail-table badge); unknown dimensions fall back to the generic 'Pareto'.
const selectedParetoDimensionLabel = computed(() => {
  const label = PARETO_DIMENSION_LABELS[paretoDimension.value];
  return label || 'Pareto';
});
const activeFilterChips = computed(() => {
const chips = [];
@@ -742,15 +797,15 @@ const activeFilterChips = computed(() => {
});
}
if (detailReason.value) {
selectedParetoValues.value.forEach((value) => {
chips.push({
key: `detail-reason:${detailReason.value}`,
label: `明細原因: ${detailReason.value}`,
key: `pareto-value:${paretoDimension.value}:${value}`,
label: `${selectedParetoDimensionLabel.value}: ${value}`,
removable: true,
type: 'detail-reason',
value: detailReason.value,
type: 'pareto-value',
value,
});
});
}
return chips;
});
@@ -809,9 +864,9 @@ function updateUrlState() {
// Interactive
appendArrayParams(params, 'trend_dates', selectedTrendDates.value);
if (detailReason.value) {
params.set('detail_reason', detailReason.value);
}
params.set('pareto_dimension', paretoDimension.value);
appendArrayParams(params, 'pareto_values', selectedParetoValues.value);
if (paretoDisplayScope.value !== 'all') params.set('pareto_display_scope', paretoDisplayScope.value);
if (!committedPrimary.paretoTop80) {
params.set('pareto_scope_all', 'true');
}
@@ -886,15 +941,26 @@ function restoreFromUrl() {
// Interactive
const urlTrendDates = readArrayParam(params, 'trend_dates');
const urlDetailReason = String(params.get('detail_reason') || '').trim();
const rawParetoDimension = String(params.get('pareto_dimension') || '').trim().toLowerCase();
const urlParetoDimension = Object.hasOwn(PARETO_DIMENSION_LABELS, rawParetoDimension)
? rawParetoDimension
: 'reason';
const urlParetoValues = readArrayParam(params, 'pareto_values');
const urlParetoDisplayScope = String(params.get('pareto_display_scope') || '').trim().toLowerCase();
const parsedPage = Number(params.get('page') || '1');
paretoDimension.value = urlParetoDimension;
selectedParetoValues.value = urlParetoValues;
paretoDisplayScope.value = urlParetoDisplayScope === 'top20' ? 'top20' : 'all';
return {
packages: urlPackages,
workcenterGroups: urlWcGroups,
reason: urlReason,
trendDates: urlTrendDates,
detailReason: urlDetailReason,
paretoDimension: urlParetoDimension,
paretoValues: urlParetoValues,
paretoDisplayScope: paretoDisplayScope.value,
page: Number.isFinite(parsedPage) && parsedPage > 0 ? parsedPage : 1,
};
}
@@ -962,23 +1028,26 @@ onMounted(() => {
<ParetoSection
:items="activeParetoItems"
:detail-reason="detailReason"
:selected-values="selectedParetoValues"
:display-scope="paretoDisplayScope"
:selected-dates="selectedTrendDates"
:metric-label="paretoMetricLabel"
:loading="loading.querying || dimensionParetoLoading"
:dimension="paretoDimension"
:show-dimension-selector="committedPrimary.mode === 'date_range'"
@reason-click="onParetoClick"
@item-toggle="onParetoItemToggle"
@dimension-change="onDimensionChange"
@display-scope-change="onParetoDisplayScopeChange"
/>
<DetailTable
:items="detail.items"
:pagination="pagination"
:loading="loading.list"
:detail-reason="detailReason"
:selected-pareto-values="selectedParetoValues"
:selected-pareto-dimension-label="selectedParetoDimensionLabel"
@go-to-page="goToPage"
@clear-reason="onParetoClick(detailReason)"
@clear-pareto-selection="clearParetoSelection"
/>
</template>
</div>

View File

@@ -8,10 +8,11 @@ defineProps({
default: () => ({ page: 1, perPage: 50, total: 0, totalPages: 1 }),
},
loading: { type: Boolean, default: false },
detailReason: { type: String, default: '' },
selectedParetoValues: { type: Array, default: () => [] },
selectedParetoDimensionLabel: { type: String, default: '' },
});
defineEmits(['go-to-page', 'clear-reason']);
defineEmits(['go-to-page', 'clear-pareto-selection']);
const showRejectBreakdown = ref(false);
@@ -25,9 +26,14 @@ function formatNumber(value) {
<div class="card-header">
<div class="card-title">
明細列表
<span v-if="detailReason" class="detail-reason-badge">
原因: {{ detailReason }}
<button type="button" class="badge-clear" @click="$emit('clear-reason')">×</button>
<span v-if="selectedParetoValues.length > 0" class="detail-reason-badge">
{{ selectedParetoDimensionLabel || 'Pareto 篩選' }}:
{{
selectedParetoValues.length === 1
? selectedParetoValues[0]
: `${selectedParetoValues.length} `
}}
<button type="button" class="badge-clear" @click="$emit('clear-pareto-selection')">×</button>
</span>
</div>
</div>

View File

@@ -126,14 +126,6 @@ function emitSupplementary(patch) {
<input v-model="filters.excludePbDiode" type="checkbox" />
排除 PB_* 系列
</label>
<label class="checkbox-pill">
<input
:checked="filters.paretoTop80"
type="checkbox"
@change="$emit('pareto-scope-toggle', $event.target.checked)"
/>
Pareto 僅顯示累計前 80%
</label>
</div>
<div class="filter-actions">
<button
@@ -190,6 +182,16 @@ function emitSupplementary(patch) {
<!-- Supplementary filters (only after primary query) -->
<div v-if="queryId" class="supplementary-panel">
<div class="supplementary-header">補充篩選 (快取內篩選)</div>
<div class="supplementary-toolbar">
<label class="checkbox-pill">
<input
:checked="filters.paretoTop80"
type="checkbox"
@change="$emit('pareto-scope-toggle', $event.target.checked)"
/>
Pareto 僅顯示累計前 80%
</label>
</div>
<div class="supplementary-row">
<div class="filter-group">
<label class="filter-label">WORKCENTER GROUP</label>

View File

@@ -17,26 +17,36 @@ const DIMENSION_OPTIONS = [
{ value: 'workcenter', label: '站點' },
{ value: 'equipment', label: '機台' },
];
const DISPLAY_SCOPE_TOP20_DIMENSIONS = new Set(['type', 'workflow', 'equipment']);
const props = defineProps({
items: { type: Array, default: () => [] },
detailReason: { type: String, default: '' },
selectedValues: { type: Array, default: () => [] },
selectedDates: { type: Array, default: () => [] },
metricLabel: { type: String, default: '報廢量' },
loading: { type: Boolean, default: false },
dimension: { type: String, default: 'reason' },
showDimensionSelector: { type: Boolean, default: false },
displayScope: { type: String, default: 'all' },
});
const emit = defineEmits(['reason-click', 'dimension-change']);
const emit = defineEmits(['item-toggle', 'dimension-change', 'display-scope-change']);
const hasData = computed(() => Array.isArray(props.items) && props.items.length > 0);
const selectedValueSet = computed(() => new Set((props.selectedValues || []).map((item) => String(item || '').trim())));
const showDisplayScopeSelector = computed(
() => DISPLAY_SCOPE_TOP20_DIMENSIONS.has(props.dimension),
);
// Display label for the current dimension, looked up from DIMENSION_OPTIONS;
// defaults to '報廢原因' when the dimension value is not a known option.
const dimensionLabel = computed(() => {
  const match = DIMENSION_OPTIONS.find((option) => option.value === props.dimension);
  return match ? match.label : '報廢原因';
});
// True when the (trimmed, stringified) value is part of the current
// multi-select set; used for bar coloring and table-row highlighting.
function isSelected(value) {
  const key = String(value || '').trim();
  return selectedValueSet.value.has(key);
}
// Render a count with zh-TW thousands separators; null/undefined/'' become 0.
function formatNumber(value) {
  const numeric = Number(value || 0);
  return numeric.toLocaleString('zh-TW');
}
@@ -105,7 +115,7 @@ const chartOption = computed(() => {
itemStyle: {
color(params) {
const reason = items[params.dataIndex]?.reason || '';
return reason === props.detailReason ? '#b91c1c' : '#2563eb';
return isSelected(reason) ? '#b91c1c' : '#2563eb';
},
borderRadius: [4, 4, 0, 0],
},
@@ -124,12 +134,12 @@ const chartOption = computed(() => {
});
function handleChartClick(params) {
if (params?.seriesType !== 'bar' || props.dimension !== 'reason') {
if (params?.seriesType !== 'bar') {
return;
}
const reason = props.items?.[params.dataIndex]?.reason;
if (reason) {
emit('reason-click', reason);
const itemValue = props.items?.[params.dataIndex]?.reason;
if (itemValue) {
emit('item-toggle', itemValue);
}
}
</script>
@@ -141,6 +151,7 @@ function handleChartClick(params) {
{{ metricLabel }} vs {{ dimensionLabel }}Pareto
<span v-for="d in selectedDates" :key="d" class="pareto-date-badge">{{ d }}</span>
</div>
<div class="pareto-controls">
<select
v-if="showDimensionSelector"
class="dimension-select"
@@ -149,6 +160,16 @@ function handleChartClick(params) {
>
<option v-for="opt in DIMENSION_OPTIONS" :key="opt.value" :value="opt.value">{{ opt.label }}</option>
</select>
<select
v-if="showDisplayScopeSelector"
class="dimension-select pareto-scope-select"
:value="displayScope"
@change="emit('display-scope-change', $event.target.value)"
>
<option value="all">全部顯示</option>
<option value="top20">只顯示 TOP 20</option>
</select>
</div>
</div>
<div class="card-body pareto-layout">
<div class="pareto-chart-wrap">
@@ -169,13 +190,12 @@ function handleChartClick(params) {
<tr
v-for="item in items"
:key="item.reason"
:class="{ active: detailReason === item.reason }"
:class="{ active: isSelected(item.reason) }"
>
<td>
<button v-if="dimension === 'reason'" class="reason-link" type="button" @click="$emit('reason-click', item.reason)">
<button class="reason-link" type="button" @click="$emit('item-toggle', item.reason)">
{{ item.reason }}
</button>
<span v-else>{{ item.reason }}</span>
</td>
<td>{{ formatNumber(item.metric_value) }}</td>
<td>{{ formatPct(item.pct) }}</td>

View File

@@ -53,6 +53,10 @@
margin-bottom: 12px;
}
.supplementary-toolbar {
margin-bottom: 12px;
}
.supplementary-row {
display: grid;
grid-template-columns: repeat(3, minmax(0, 1fr));
@@ -312,6 +316,13 @@
gap: 12px;
}
.pareto-controls {
display: inline-flex;
align-items: center;
gap: 8px;
margin-left: auto;
}
.dimension-select {
font-size: 12px;
padding: 3px 8px;
@@ -320,7 +331,10 @@
background: var(--bg-primary, #fff);
color: var(--text-primary, #374151);
cursor: pointer;
margin-left: auto;
}
.pareto-scope-select {
min-width: 110px;
}
.pareto-date-badge {

View File

@@ -0,0 +1,2 @@
schema: spec-driven
created: 2026-03-02

View File

@@ -0,0 +1,59 @@
## Context
報廢歷史查詢的 SQL 基底查詢(`performance_daily.sql` / `performance_daily_lot.sql`)目前有一個 `wip_lookup` CTE(來自 `DW_MES_WIP`,約 8000 萬筆),以 `ROW_NUMBER()` + `CONTAINERID` 取最新一筆 WIP 記錄,用於 PJ_TYPE、PRODUCTLINENAME、EQUIPMENTS、WORKFLOWNAME 的 fallback。此做法存在多項問題
1. `DW_MES_WIP` 取的是「最新」WIP 步驟,不是報廢發生當下的步驟
2. PJ_TYPE 和 PRODUCTLINENAME 應直接從 `DW_MES_CONTAINER` 取得
3. EQUIPMENTNAME 若 reject history 沒有值就應留空,不需額外查找
4. WORKFLOWNAME 應從 `DW_MES_LOTWIPHISTORY` 透過 `WIPTRACKINGGROUPKEYID` 精確對應
## Goals / Non-Goals
**Goals:**
- 修正 5 個維度柏拉圖的資料來源,使 package/type/workflow/equipment 正確顯示
- workcenter 柏拉圖維度維持使用 WORKCENTER_GROUP
- WORKFLOW 精確對應到報廢發生當下的 WIP 步驟
**Non-Goals:**
- 不變更前端元件邏輯
- 不變更 API 介面或回應結構
- 不新增篩選維度
## Decisions
### D1: 移除 `wip_lookup` CTE改用直接 LEFT JOIN `DW_MES_LOTWIPHISTORY`
移除整個 `wip_lookup` CTE來自 `DW_MES_WIP`),在 `reject_raw` 的 FROM 區段新增:
```sql
LEFT JOIN DWH.DW_MES_LOTWIPHISTORY lwh
ON lwh.WIPTRACKINGGROUPKEYID = r.WIPTRACKINGGROUPKEYID
```
理由:`DW_MES_LOTREJECTHISTORY``DW_MES_LOTWIPHISTORY` 都有 `WIPTRACKINGGROUPKEYID`,兩邊都有索引,直接 JOIN 即可精確對應到報廢事件所在的 WIP 步驟。不需要 CTE、不需要 ROW_NUMBER、不需要子查詢。
### D2: 各欄位來源
| 欄位 | 來源表 | 寫法 |
|---|---|---|
| PJ_TYPE | DW_MES_CONTAINER | `NVL(TRIM(c.PJ_TYPE), '(NA)')` |
| PRODUCTLINENAME | DW_MES_CONTAINER | `NVL(TRIM(c.PRODUCTLINENAME), '(NA)')` |
| EQUIPMENTNAME | DW_MES_LOTREJECTHISTORY | `NVL(TRIM(r.EQUIPMENTNAME), '(NA)')` |
| PRIMARY_EQUIPMENTNAME | DW_MES_LOTREJECTHISTORY | `NVL(TRIM(REGEXP_SUBSTR(r.EQUIPMENTNAME, '[^,]+', 1, 1)), '(NA)')` |
| WORKFLOWNAME | DW_MES_LOTWIPHISTORY | `NVL(TRIM(lwh.WORKFLOWNAME), '(NA)')` |
| WORKCENTERNAME | spec_map (既有) | `NVL(TRIM(sm.WORK_CENTER), NVL(TRIM(r.WORKCENTERNAME), '(NA)'))` |
| WORKCENTER_GROUP | spec_map (既有) | `NVL(TRIM(sm.WORKCENTER_GROUP), NVL(TRIM(r.WORKCENTERNAME), '(NA)'))` |
### D3: Python 維度映射 workcenter 改回 WORKCENTER_GROUP
`reject_dataset_cache.py``_DIM_TO_DF_COLUMN``reject_history_service.py``_DIMENSION_COLUMN_MAP``"workcenter"` 映射改回 `WORKCENTER_GROUP` / `b.WORKCENTER_GROUP`
### D4: 兩支 SQL 同步修改
`performance_daily.sql``performance_daily_lot.sql` 需同步做相同變更,保持一致。
## Risks / Trade-offs
- `DW_MES_LOTWIPHISTORY` 有 5400 萬筆,但 `WIPTRACKINGGROUPKEYID` 有索引JOIN 效率可控
-`WIPTRACKINGGROUPKEYID` 為 NULL 的 reject 記錄WORKFLOWNAME 會顯示 (NA)——這是正確行為
- `DW_MES_CONTAINER` 的 PJ_TYPE / PRODUCTLINENAME 若為 NULL仍會顯示 (NA)——這代表該 container 確實沒有此資訊

View File

@@ -0,0 +1,28 @@
## Why
報廢歷史柏拉圖的多個維度顯示不正確package、type、equipment 全顯示 (NA)workflow 顯示成 spec nameworkcenter 維度應顯示 WORKCENTER_GROUP。原因是 SQL 資料來源選擇錯誤——目前錯誤地使用 `DW_MES_WIP` 作為 fallback應改為正確使用 `DW_MES_CONTAINER` 取 package/type`DW_MES_LOTWIPHISTORY` 取 workflow透過 `WIPTRACKINGGROUPKEYID` 精確對應報廢事件equipment 不做額外查找。
## What Changes
- 移除 `performance_daily.sql``performance_daily_lot.sql` 中的 `wip_lookup` CTE來自 `DW_MES_WIP`
- 新增 `LEFT JOIN DWH.DW_MES_LOTWIPHISTORY` 透過 `WIPTRACKINGGROUPKEYID` 取得報廢當下對應的 WORKFLOWNAME
- PJ_TYPE、PRODUCTLINENAME 還原為僅從 `DW_MES_CONTAINER` 取得
- EQUIPMENTNAME 還原為僅從 `DW_MES_LOTREJECTHISTORY` 取得(空就空,不額外查找)
- WORKFLOWNAME 改為從 `DW_MES_LOTWIPHISTORY` 取得(精確對應報廢事件的 WIP 步驟)
- 柏拉圖 workcenter 維度映射改回 `WORKCENTER_GROUP`Python service 層)
## Capabilities
### New Capabilities
(none)
### Modified Capabilities
- `reject-history-api`: Dimension Pareto 的 SQL 資料來源變更——移除 DW_MES_WIP fallback改用 DW_MES_LOTWIPHISTORY 取 workflowworkcenter 維度映射改回 WORKCENTER_GROUP
## Impact
- SQL: `performance_daily.sql`, `performance_daily_lot.sql` — CTE 結構變更JOIN 變更
- Python: `reject_dataset_cache.py`, `reject_history_service.py` — 維度映射常數調整
- 無前端變更、無 API 介面變更、無新增依賴

View File

@@ -0,0 +1,42 @@
## MODIFIED Requirements
### Requirement: Reject History SQL base query SHALL source dimension columns from correct tables
The base query (`performance_daily.sql`, `performance_daily_lot.sql`) SHALL source each dimension column from its authoritative table.
#### Scenario: PJ_TYPE sourced from DW_MES_CONTAINER
- **WHEN** the base query resolves PJ_TYPE
- **THEN** it SHALL use `DW_MES_CONTAINER.PJ_TYPE` only
- **THEN** it SHALL NOT fall back to `DW_MES_WIP`
#### Scenario: PRODUCTLINENAME sourced from DW_MES_CONTAINER
- **WHEN** the base query resolves PRODUCTLINENAME (package)
- **THEN** it SHALL use `DW_MES_CONTAINER.PRODUCTLINENAME` only
- **THEN** it SHALL NOT fall back to `DW_MES_WIP`
#### Scenario: EQUIPMENTNAME sourced from DW_MES_LOTREJECTHISTORY only
- **WHEN** the base query resolves EQUIPMENTNAME
- **THEN** it SHALL use `DW_MES_LOTREJECTHISTORY.EQUIPMENTNAME` only
- **THEN** it SHALL NOT perform any additional lookup when the value is NULL
#### Scenario: WORKFLOWNAME sourced from DW_MES_LOTWIPHISTORY via WIPTRACKINGGROUPKEYID
- **WHEN** the base query resolves WORKFLOWNAME
- **THEN** it SHALL LEFT JOIN `DW_MES_LOTWIPHISTORY` on `WIPTRACKINGGROUPKEYID`
- **THEN** it SHALL use `DW_MES_LOTWIPHISTORY.WORKFLOWNAME`
- **THEN** it SHALL NOT fall back to SPECNAME or any other field
#### Scenario: No DW_MES_WIP dependency in base query
- **WHEN** the base query CTEs are examined
- **THEN** there SHALL be no CTE or JOIN referencing `DW_MES_WIP`
### Requirement: Dimension Pareto workcenter dimension SHALL use WORKCENTER_GROUP
The workcenter dimension in Pareto analysis SHALL group by `WORKCENTER_GROUP`, not individual `WORKCENTERNAME`.
#### Scenario: Cache-based Pareto workcenter mapping
- **WHEN** `reject_dataset_cache.py` computes workcenter dimension Pareto
- **THEN** the dimension column SHALL be `WORKCENTER_GROUP`
#### Scenario: SQL-based Pareto workcenter mapping
- **WHEN** `reject_history_service.py` builds workcenter dimension Pareto SQL
- **THEN** the dimension column SHALL be `b.WORKCENTER_GROUP`

View File

@@ -0,0 +1,22 @@
## 1. SQL 基底查詢修正 — performance_daily.sql
- [x] 1.1 移除 `wip_lookup` CTE整個 DW_MES_WIP 相關區段)
- [x] 1.2 新增 `LEFT JOIN DWH.DW_MES_LOTWIPHISTORY lwh ON lwh.WIPTRACKINGGROUPKEYID = r.WIPTRACKINGGROUPKEYID`
- [x] 1.3 PJ_TYPE 還原為 `NVL(TRIM(c.PJ_TYPE), '(NA)')`
- [x] 1.4 PRODUCTLINENAME 還原為 `NVL(TRIM(c.PRODUCTLINENAME), '(NA)')`
- [x] 1.5 EQUIPMENTNAME 還原為 `NVL(TRIM(r.EQUIPMENTNAME), '(NA)')`PRIMARY_EQUIPMENTNAME 同步還原
- [x] 1.6 WORKFLOWNAME 改為 `NVL(TRIM(lwh.WORKFLOWNAME), '(NA)')`
## 2. SQL 基底查詢修正 — performance_daily_lot.sql
- [x] 2.1 移除 `wip_lookup` CTE
- [x] 2.2 新增 `LEFT JOIN DWH.DW_MES_LOTWIPHISTORY lwh ON lwh.WIPTRACKINGGROUPKEYID = r.WIPTRACKINGGROUPKEYID`
- [x] 2.3 PJ_TYPE 還原為 `NVL(TRIM(c.PJ_TYPE), '(NA)')`
- [x] 2.4 PRODUCTLINENAME 還原為 `NVL(TRIM(c.PRODUCTLINENAME), '(NA)')`
- [x] 2.5 EQUIPMENTNAME 還原為 `NVL(TRIM(r.EQUIPMENTNAME), '(NA)')`PRIMARY_EQUIPMENTNAME 同步還原
- [x] 2.6 WORKFLOWNAME 改為 `NVL(TRIM(lwh.WORKFLOWNAME), '(NA)')`
## 3. Python 維度映射修正
- [x] 3.1 `reject_dataset_cache.py` `_DIM_TO_DF_COLUMN` — workcenter 改回 `WORKCENTER_GROUP`
- [x] 3.2 `reject_history_service.py` `_DIMENSION_COLUMN_MAP` — workcenter 改回 `b.WORKCENTER_GROUP`

View File

@@ -0,0 +1,2 @@
schema: spec-driven
created: 2026-03-02

View File

@@ -0,0 +1,3 @@
# reject-history-pareto-ux-enhancements
Add pareto display scope options, multi-select drill-down, and filtered detail CSV export

View File

@@ -0,0 +1,73 @@
## Context
Reject History 已改為兩階段查詢:`/api/reject-history/query` 先查 Oracle 並快取 DataFrame`/api/reject-history/view``/api/reject-history/export-cached` 在快取資料上做補充篩選。現況存在三個 UX/一致性缺口:
1. `TYPE/WORKFLOW/機台` 維度在 80% 範圍下仍可能過多,不易閱讀。
2. Pareto 點選目前僅支援單選原因reason不支援多選且不支援其他維度。
3. 匯出需要和畫面篩選完全一致,但目前缺少 Pareto 多選情境的等價參數傳遞與後端套用。
本變更跨前端 VueFilterPanel/ParetoSection/App與後端 Flask+Pandasroutes/cache service屬跨模組一致性調整。
## Goals / Non-Goals
**Goals:**
-`TYPE/WORKFLOW/機台` 維度提供 Pareto 顯示範圍切換(`全部顯示` / `只顯示 TOP 20`)。
- 將「Pareto 僅顯示累計前 80%」控制移到補充篩選區域,維持預設啟用。
- Pareto 圖表與表格都支援多選,並即時聯動刷新明細列表。
- 匯出 CSV 套用完整篩選上下文主查詢、補充篩選、互動篩選、Pareto 多選)。
- 保持 UTF-8 BOM 與標準 CSV escaping。
**Non-Goals:**
- 不改動 Oracle SQL schema 與資料來源邏輯。
- 不新增新資料表或 Redis key 結構。
- 不重做 Reject History 整體版面,只做既有模組行為擴充。
## Decisions
### 1) Pareto 多選狀態由 App.vue 集中管理
- Decision: 新增 `selectedParetoValues`array`paretoDisplayScope``all`/`top20`)於 `App.vue`
- Why: 既有趨勢日期、補充篩選、明細分頁都由 App 協調;將 Pareto 多選納入同一狀態中心可確保 URL、view、export 一致。
- Alternative considered:
-`ParetoSection.vue` 內部持有選取狀態:會造成匯出與後端參數組裝需要額外同步機制,易出現狀態漂移。
### 2) `TOP 20` 僅在前端呈現層裁切
- Decision: 後端仍回傳完整(或 top80Pareto items`TOP 20` 由前端 computed 再切片。
- Why: `TOP 20` 是視覺呈現策略,不是資料語意;放前端可避免增加 API 分支與快取鍵複雜度。
- Alternative considered:
- API 增加 `display_scope=top20`:可行但會讓同一資料語意被多組 API 參數切分,且對快取命中率不利。
### 3) Pareto 多選篩選在後端 cache service 統一套用
- Decision: `apply_view()``export_csv_from_cache()` 新增 `pareto_dimension` + `pareto_values` 參數,透過共享 helper 套用到對應欄位。
- Why: 明細畫面與匯出都要使用「同一過濾函式」才能保證 parity。
- Alternative considered:
- 前端先過濾當頁明細再匯出:無法涵蓋全資料集,且分頁資料可能不完整。
### 4) 80% toggle 位置調整但語意不變
- Decision: checkbox UI 從主工具列移至補充篩選區塊;預設值與 URL 參數(`pareto_scope_all`)維持既有相容。
- Why: 80% 為二階段視圖篩選,移入補充篩選可降低使用者誤解。
## Risks / Trade-offs
- [Risk] 多選維度欄位映射錯誤(特別是 `equipment` 對應欄位)導致篩選失準。
→ Mitigation: 單元測試覆蓋各維度映射與無效維度 400。
- [Risk] 前端多種互動趨勢日期、reason、pareto 多選)同時作用時狀態難追蹤。
→ Mitigation: `activeFilterChips` 顯示所有活躍條件,並統一經 `refreshView()` + `updateUrlState()`
- [Risk] CSV 匯出和列表排序/篩選不一致造成信任問題。
→ Mitigation: 匯出重用與 view 相同 filter helper並新增 route/service parity 測試。
## Migration Plan
1. 先落地後端 `apply_view/export_csv_from_cache` 共同 Pareto 多選過濾與參數驗證。
2. 再調整前端控制項與事件多選、TOP20、補充篩選區
3. 補上 route/service 單元測試。
4. 驗證目標:`reject-history` 相關測試通過,手動檢查 CSV 編碼與欄位。
Rollback strategy:
- 若上線後出現篩選偏差,可暫時忽略 `pareto_dimension/pareto_values` 參數(後端回退到舊邏輯),不影響既有查詢主路徑。
## Open Questions
- `TOP 20` 是否僅限定 `TYPE/WORKFLOW/機台`(本次採用是),或未來要擴展到全部維度?
- 多選 Pareto 與補充篩選中的 `reason` 同時存在時,是否需要 UI 顯示「交集」提示(本次先不新增提示文案)。

View File

@@ -0,0 +1,26 @@
## Why
目前 Reject History 柏拉圖在 `TYPE``WORKFLOW``機台` 維度上,僅靠累計 80% 顯示時仍可能出現過多項目,影響閱讀與分析效率;同時圖表點選與明細匯出尚未完整對齊,造成追查流程不連續。需要補強互動式篩選與匯出一致性,讓使用者可直接從柏拉圖一路鑽取到可交付的明細結果。
## What Changes
- 在柏拉圖 `TYPE``WORKFLOW``機台` 三個維度新增顯示範圍選項:`全部顯示``只顯示 TOP 20`
- 將「Pareto 僅顯示累計前 80%」移入「補充篩選」區域,並維持預設啟用。
- 柏拉圖支援點選項目多選(bar/table),並同步套用到下方明細列表。
- 明細列表新增 `匯出 CSV`,匯出內容必須與當前明細可見結果完全一致(套用主篩選、補充篩選、Pareto 點選篩選、排序/分頁語意)。
- 匯出 CSV 強化字元編碼與欄位轉義處理,避免中文亂碼與欄位錯位。
## Capabilities
### New Capabilities
- `reject-history-detail-export-parity`: 明細匯出與畫面篩選完全一致的 CSV 匯出能力
### Modified Capabilities
- `reject-history-page`: 擴充柏拉圖顯示範圍控制、補充篩選位置調整、Pareto 多選聯動明細
- `reject-history-api`: 匯出端點需保證套用所有有效篩選(含 Pareto 衍生篩選),並提供穩定 CSV 編碼輸出
## Impact
- Frontend: `src/pages/reject-history`FilterPanel、ParetoSection、DetailTable、頁面狀態管理與查詢參數組裝
- Backend/API: reject-history list/export 查詢參數解析與 CSV 產生流程
- Tests: 補齊 page 互動測試(多選/聯動/顯示範圍)與 API 匯出一致性測試filter parity、encoding

View File

@@ -0,0 +1,19 @@
## MODIFIED Requirements
### Requirement: Reject History API SHALL provide CSV export endpoint
The API SHALL provide CSV export using the same filter and metric semantics as list/query APIs.
#### Scenario: Export payload consistency
- **WHEN** `GET /api/reject-history/export` is called with valid filters
- **THEN** CSV headers SHALL include both `REJECT_TOTAL_QTY` and `DEFECT_QTY`
- **THEN** export rows SHALL follow the same semantic definitions as summary/list endpoints
#### Scenario: Cached export supports full detail-filter parity
- **WHEN** `GET /api/reject-history/export-cached` is called with an existing `query_id`
- **THEN** the endpoint SHALL apply primary policy toggles, supplementary filters, trend-date filters, metric filter, and Pareto-selected item filters
- **THEN** returned rows SHALL match the same filtered detail dataset semantics used by `GET /api/reject-history/view`
#### Scenario: CSV encoding and escaping are stable
- **WHEN** either export endpoint returns CSV
- **THEN** response charset SHALL be `utf-8-sig`
- **THEN** values containing commas, quotes, or newlines SHALL be CSV-escaped correctly

View File

@@ -0,0 +1,18 @@
## ADDED Requirements
### Requirement: Cached reject-history export SHALL support Pareto multi-select filter parity
The cached export endpoint SHALL support Pareto multi-select context so that exported rows match the currently drilled-down detail scope.
#### Scenario: Apply selected Pareto dimension values
- **WHEN** export request provides `pareto_dimension` and one or more `pareto_values`
- **THEN** the backend SHALL apply an OR-match filter against the mapped dimension column
- **THEN** only rows matching selected values SHALL be exported
#### Scenario: No Pareto selection keeps existing behavior
- **WHEN** `pareto_values` is absent or empty
- **THEN** export SHALL apply no extra Pareto-selected-item filter
- **THEN** existing supplementary and interactive filters SHALL still apply
#### Scenario: Invalid Pareto dimension is rejected
- **WHEN** `pareto_dimension` is not one of supported dimensions
- **THEN** API SHALL return HTTP 400 with descriptive validation error

View File

@@ -0,0 +1,43 @@
## MODIFIED Requirements
### Requirement: Reject History page SHALL provide reason Pareto analysis
The page SHALL provide a Pareto view for loss reasons and support downstream filtering.
#### Scenario: Pareto rendering and ordering
- **WHEN** Pareto data is loaded
- **THEN** items SHALL be sorted by selected metric descending
- **THEN** a cumulative percentage line SHALL be shown
#### Scenario: Pareto 80% filter is managed in supplementary filters
- **WHEN** the page first loads Pareto
- **THEN** supplementary filters SHALL include "Pareto 僅顯示累計前 80%" control
- **THEN** the control SHALL default to enabled
#### Scenario: TYPE/WORKFLOW/機台 support display scope selector
- **WHEN** Pareto dimension is `TYPE`, `WORKFLOW`, or `機台`
- **THEN** the UI SHALL provide `全部顯示` and `只顯示 TOP 20` options
- **THEN** `全部顯示` SHALL still respect the current 80% cumulative filter setting
#### Scenario: Pareto click filtering supports multi-select
- **WHEN** user clicks Pareto bars or table rows
- **THEN** clicked items SHALL become active selected chips
- **THEN** multiple selected items SHALL be supported at the same time
- **THEN** detail list SHALL reload using current selected Pareto items as additional filter criteria
#### Scenario: Re-click clears selected item only
- **WHEN** user clicks an already selected Pareto item
- **THEN** only that item SHALL be removed from selection
- **THEN** remaining selected items SHALL stay active
### Requirement: Reject History page SHALL support CSV export from current filter context
The page SHALL allow users to export records using the exact active filters.
#### Scenario: Export with all active filters
- **WHEN** user clicks "匯出 CSV"
- **THEN** export request SHALL include current primary filters, supplementary filters, trend-date filters, metric filters, and Pareto-selected items
- **THEN** downloaded file SHALL contain exactly the same rows currently represented by the detail list filter context
#### Scenario: Export remains UTF-8 Excel compatible
- **WHEN** CSV export is downloaded
- **THEN** the file SHALL be encoded in UTF-8 with BOM
- **THEN** Chinese headers and values SHALL render correctly in common spreadsheet tools

View File

@@ -0,0 +1,19 @@
## 1. Frontend Pareto UX Enhancements
- [x] 1.1 在 `FilterPanel.vue` 將「Pareto 僅顯示累計前 80%」移至補充篩選區域並維持預設啟用
- [x] 1.2 在 `ParetoSection.vue` 新增 `全部顯示 / 只顯示 TOP 20` 控制(僅 `TYPE/WORKFLOW/機台` 顯示)
- [x] 1.3 在 `ParetoSection.vue` 支援圖表與表格點選多選、選取高亮與取消選取
- [x] 1.4 在 `App.vue` 新增 Pareto 多選狀態管理與 URL 狀態同步dimension + selected values + display scope
## 2. Backend Filter/Export Parity
- [x] 2.1 在 `reject_dataset_cache.py` 新增 Pareto 維度多選過濾 helper供 view/export 共用
- [x] 2.2 擴充 `apply_view()` 支援 `pareto_dimension` + `pareto_values` 並套用到明細過濾
- [x] 2.3 擴充 `export_csv_from_cache()` 支援與 view 相同的 Pareto 多選過濾語意
- [x] 2.4 更新 `reject_history_routes.py``/view``/export-cached` 參數解析與維度驗證(非法維度回 400
## 3. Validation and Regression Tests
- [x] 3.1 新增/更新 route 測試:驗證 `/view``/export-cached` 會傳遞 Pareto 多選參數且非法維度回 400
- [x] 3.2 新增/更新 cache service 測試:驗證 Pareto 多選在 `apply_view``export_csv_from_cache` 行為一致
- [x] 3.3 執行 reject-history 相關測試並確認無回歸

View File

@@ -87,6 +87,16 @@ The API SHALL provide CSV export using the same filter and metric semantics as l
- **THEN** CSV headers SHALL include both `REJECT_TOTAL_QTY` and `DEFECT_QTY`
- **THEN** export rows SHALL follow the same semantic definitions as summary/list endpoints
#### Scenario: Cached export supports full detail-filter parity
- **WHEN** `GET /api/reject-history/export-cached` is called with an existing `query_id`
- **THEN** the endpoint SHALL apply primary policy toggles, supplementary filters, trend-date filters, metric filter, and Pareto-selected item filters
- **THEN** returned rows SHALL match the same filtered detail dataset semantics used by `GET /api/reject-history/view`
#### Scenario: CSV encoding and escaping are stable
- **WHEN** either export endpoint returns CSV
- **THEN** response charset SHALL be `utf-8-sig`
- **THEN** values containing commas, quotes, or newlines SHALL be CSV-escaped correctly
### Requirement: Reject History API SHALL centralize SQL in reject_history SQL directory
The service SHALL load SQL from dedicated files under `src/mes_dashboard/sql/reject_history/`.

View File

@@ -0,0 +1,22 @@
# reject-history-detail-export-parity Specification
## Purpose
TBD — created by archiving change `reject-history-pareto-ux-enhancements`; update this Purpose section after the archive is finalized.
## Requirements
### Requirement: Cached reject-history export SHALL support Pareto multi-select filter parity
The cached export endpoint SHALL support Pareto multi-select context so that exported rows match the currently drilled-down detail scope.
#### Scenario: Apply selected Pareto dimension values
- **WHEN** export request provides `pareto_dimension` and one or more `pareto_values`
- **THEN** the backend SHALL apply an OR-match filter against the mapped dimension column
- **THEN** only rows matching selected values SHALL be exported
#### Scenario: No Pareto selection keeps existing behavior
- **WHEN** `pareto_values` is absent or empty
- **THEN** export SHALL apply no extra Pareto-selected-item filter
- **THEN** existing supplementary and interactive filters SHALL still apply
#### Scenario: Invalid Pareto dimension is rejected
- **WHEN** `pareto_dimension` is not one of supported dimensions
- **THEN** API SHALL return HTTP 400 with descriptive validation error

View File

@@ -77,25 +77,30 @@ The page SHALL show both quantity trend and rate trend to avoid mixing unit scal
The page SHALL provide a Pareto view for loss reasons and support downstream filtering.
#### Scenario: Pareto rendering and ordering
- **WHEN** reason Pareto data is loaded
- **WHEN** Pareto data is loaded
- **THEN** items SHALL be sorted by selected metric descending
- **THEN** a cumulative percentage line SHALL be shown
#### Scenario: Default 80% cumulative display mode
#### Scenario: Pareto 80% filter is managed in supplementary filters
- **WHEN** the page first loads Pareto
- **THEN** it SHALL default to "only cumulative top 80%" mode
- **THEN** Pareto SHALL only render categories within the cumulative 80% threshold under current filters
- **THEN** supplementary filters SHALL include "Pareto 僅顯示累計前 80%" control
- **THEN** the control SHALL default to enabled
#### Scenario: Full Pareto toggle mode
- **WHEN** user turns OFF the 80% cumulative display mode
- **THEN** Pareto SHALL render all categories after applying current filters
- **THEN** switching mode SHALL NOT reset existing time/reason/workcenter-group filters
#### Scenario: TYPE/WORKFLOW/機台 support display scope selector
- **WHEN** Pareto dimension is `TYPE`, `WORKFLOW`, or `機台`
- **THEN** the UI SHALL provide `全部顯示` and `只顯示 TOP 20` options
- **THEN** `全部顯示` SHALL still respect the current 80% cumulative filter setting
#### Scenario: Pareto click filtering
- **WHEN** user clicks a Pareto bar or row
- **THEN** the selected reason SHALL become an active filter chip
- **THEN** detail list SHALL reload with that reason
- **THEN** clicking the same reason again SHALL clear the reason filter
#### Scenario: Pareto click filtering supports multi-select
- **WHEN** user clicks Pareto bars or table rows
- **THEN** clicked items SHALL become active selected chips
- **THEN** multiple selected items SHALL be supported at the same time
- **THEN** detail list SHALL reload using the currently selected Pareto items as additional filter criteria
#### Scenario: Re-click clears selected item only
- **WHEN** user clicks an already selected Pareto item
- **THEN** only that item SHALL be removed from selection
- **THEN** remaining selected items SHALL stay active
### Requirement: Reject History page SHALL show paginated detail rows
The page SHALL provide a paginated detail table for investigation and traceability.
@@ -112,10 +117,15 @@ The page SHALL provide a paginated detail table for investigation and traceabili
### Requirement: Reject History page SHALL support CSV export from current filter context
The page SHALL allow users to export records using the exact active filters.
#### Scenario: Export with current filters
#### Scenario: Export with all active filters
- **WHEN** user clicks "匯出 CSV"
- **THEN** export request SHALL include the current filter state and active reason filter
- **THEN** downloaded file SHALL contain both `REJECT_TOTAL_QTY` and `DEFECT_QTY`
- **THEN** export request SHALL include current primary filters, supplementary filters, trend-date filters, metric filters, and Pareto-selected items
- **THEN** downloaded file SHALL contain exactly the same rows currently represented by the detail list filter context
#### Scenario: Export remains UTF-8 Excel compatible
- **WHEN** CSV export is downloaded
- **THEN** the file SHALL be encoded in UTF-8 with BOM
- **THEN** Chinese headers and values SHALL render correctly in common spreadsheet tools
### Requirement: Reject History page SHALL provide robust feedback states
The page SHALL provide loading, empty, and error states without breaking interactions.

View File

@@ -26,7 +26,7 @@ REDIS_ENABLED="${REDIS_ENABLED:-true}"
# Worker watchdog configuration
WATCHDOG_ENABLED="${WATCHDOG_ENABLED:-true}"
# RQ trace worker configuration
TRACE_WORKER_ENABLED="${TRACE_WORKER_ENABLED:-false}"
TRACE_WORKER_ENABLED="${TRACE_WORKER_ENABLED:-true}"
TRACE_WORKER_QUEUE="${TRACE_WORKER_QUEUE:-trace-events}"
# Colors for output

View File

@@ -113,6 +113,14 @@ def _extract_meta(
_VALID_BOOL_STRINGS = {"", "0", "false", "no", "n", "off", "1", "true", "yes", "y", "on"}
_VALID_PARETO_DIMENSIONS = {
"reason",
"package",
"type",
"workflow",
"workcenter",
"equipment",
}
def _parse_common_bools() -> tuple[Optional[tuple[dict, int]], bool, bool, bool]:
@@ -137,6 +145,22 @@ def _parse_common_bools() -> tuple[Optional[tuple[dict, int]], bool, bool, bool]
return None, include_excluded_scrap, exclude_material_scrap, exclude_pb_diode
def _parse_pareto_selection() -> tuple[Optional[tuple[dict, int]], Optional[str], Optional[list[str]]]:
    """Parse Pareto multi-select query parameters from the active request.

    Returns:
        A ``(error, dimension, values)`` triple. ``error`` is a
        ``(json_payload, http_status)`` pair when the requested dimension is
        not supported, otherwise ``None``. ``dimension`` and ``values`` are
        ``None`` when absent or empty.
    """
    dimension = request.args.get("pareto_dimension", "").strip().lower()
    values = _parse_multi_param("pareto_values")
    # Selected values without an explicit dimension fall back to "reason",
    # mirroring the service-layer default.
    if values and not dimension:
        dimension = "reason"
    if dimension and dimension not in _VALID_PARETO_DIMENSIONS:
        error_body = {
            "success": False,
            "error": f"Invalid pareto_dimension, supported: {', '.join(sorted(_VALID_PARETO_DIMENSIONS))}",
        }
        return (error_body, 400), None, None
    return None, dimension or None, values or None
@reject_history_bp.route("/api/reject-history/options", methods=["GET"])
def api_reject_history_options():
start_date, end_date, date_error = _parse_date_range(required=False)
@@ -304,6 +328,9 @@ def api_reject_history_reason_pareto():
workcenter_groups=_parse_multi_param("workcenter_groups") or None,
reason=request.args.get("reason", "").strip() or None,
trend_dates=_parse_multi_param("trend_dates") or None,
include_excluded_scrap=include_excluded_scrap,
exclude_material_scrap=exclude_material_scrap,
exclude_pb_diode=exclude_pb_diode,
)
if result is not None:
return jsonify({"success": True, "data": result, "meta": {}})
@@ -517,6 +544,9 @@ def api_reject_history_view():
metric_filter = request.args.get("metric_filter", "all").strip().lower() or "all"
reason = request.args.get("reason", "").strip() or None
detail_reason = request.args.get("detail_reason", "").strip() or None
pareto_error, pareto_dimension, pareto_values = _parse_pareto_selection()
if pareto_error:
return jsonify(pareto_error[0]), pareto_error[1]
include_excluded_scrap = request.args.get("include_excluded_scrap", "false").lower() == "true"
exclude_material_scrap = request.args.get("exclude_material_scrap", "true").lower() != "false"
@@ -531,6 +561,8 @@ def api_reject_history_view():
metric_filter=metric_filter,
trend_dates=_parse_multi_param("trend_dates") or None,
detail_reason=detail_reason,
pareto_dimension=pareto_dimension,
pareto_values=pareto_values,
page=page,
per_page=per_page,
include_excluded_scrap=include_excluded_scrap,
@@ -561,6 +593,9 @@ def api_reject_history_export_cached():
metric_filter = request.args.get("metric_filter", "all").strip().lower() or "all"
reason = request.args.get("reason", "").strip() or None
detail_reason = request.args.get("detail_reason", "").strip() or None
pareto_error, pareto_dimension, pareto_values = _parse_pareto_selection()
if pareto_error:
return jsonify(pareto_error[0]), pareto_error[1]
include_excluded_scrap = request.args.get("include_excluded_scrap", "false").lower() == "true"
exclude_material_scrap = request.args.get("exclude_material_scrap", "true").lower() != "false"
@@ -575,6 +610,8 @@ def api_reject_history_export_cached():
metric_filter=metric_filter,
trend_dates=_parse_multi_param("trend_dates") or None,
detail_reason=detail_reason,
pareto_dimension=pareto_dimension,
pareto_values=pareto_values,
include_excluded_scrap=include_excluded_scrap,
exclude_material_scrap=exclude_material_scrap,
exclude_pb_diode=exclude_pb_diode,

View File

@@ -53,6 +53,7 @@ logger = logging.getLogger("mes_dashboard.reject_dataset_cache")
_CACHE_TTL = 900 # 15 minutes
_CACHE_MAX_SIZE = 8
_REDIS_NAMESPACE = "reject_dataset"
_CACHE_SCHEMA_VERSION = 4
_dataset_cache = ProcessLevelCache(ttl_seconds=_CACHE_TTL, max_size=_CACHE_MAX_SIZE)
register_process_cache("reject_dataset", _dataset_cache, "Reject Dataset (L1, 15min)")
@@ -253,6 +254,7 @@ def execute_primary_query(
# ---- Compute query_id from base params only (policy filters applied in-memory) ----
query_id_input = {
"cache_schema_version": _CACHE_SCHEMA_VERSION,
"mode": mode,
"start_date": start_date,
"end_date": end_date,
@@ -394,6 +396,8 @@ def apply_view(
metric_filter: str = "all",
trend_dates: Optional[List[str]] = None,
detail_reason: Optional[str] = None,
pareto_dimension: Optional[str] = None,
pareto_values: Optional[List[str]] = None,
page: int = 1,
per_page: int = 50,
include_excluded_scrap: bool = False,
@@ -438,6 +442,11 @@ def apply_view(
detail_df = detail_df[
detail_df["LOSSREASONNAME"].str.strip() == detail_reason.strip()
]
detail_df = _apply_pareto_selection_filter(
detail_df,
pareto_dimension=pareto_dimension,
pareto_values=pareto_values,
)
detail_page = _paginate_detail(detail_df, page=page, per_page=per_page)
@@ -488,6 +497,46 @@ def _apply_supplementary_filters(
return df[mask]
def _normalize_pareto_values(values: Optional[List[str]]) -> List[str]:
    """Normalize Pareto selection values.

    Trims each value via ``_normalize_text``, drops empties, and
    de-duplicates while preserving first-seen order.
    """
    seen: set = set()
    result: List[str] = []
    for raw in values or []:
        cleaned = _normalize_text(raw)
        if cleaned and cleaned not in seen:
            seen.add(cleaned)
            result.append(cleaned)
    return result
def _apply_pareto_selection_filter(
    df: pd.DataFrame,
    *,
    pareto_dimension: Optional[str] = None,
    pareto_values: Optional[List[str]] = None,
) -> pd.DataFrame:
    """Filter detail/export rows down to the selected Pareto items.

    Rows whose mapped dimension column matches any selected value are kept
    (OR semantics). An empty selection passes the DataFrame through as-is.

    Raises:
        ValueError: when ``pareto_dimension`` maps to no known column.
    """
    if df is None or df.empty:
        return df
    selected = _normalize_pareto_values(pareto_values)
    if not selected:
        return df
    dimension = _normalize_text(pareto_dimension).lower() or "reason"
    column = _DIM_TO_DF_COLUMN.get(dimension)
    if not column:
        raise ValueError(f"不支援的 pareto_dimension: {pareto_dimension}")
    if column not in df.columns:
        # Dimension column absent from this dataset: nothing can match.
        return df.iloc[0:0]
    wanted = set(selected)
    # Normalize cell values the same way the Pareto labels are built,
    # mapping blanks to the "(未知)" placeholder before the membership test.
    labels = df[column].map(lambda cell: _normalize_text(cell) or "(未知)")
    return df[labels.isin(wanted)]
# ============================================================
# Derivation helpers
# ============================================================
@@ -732,12 +781,25 @@ def compute_dimension_pareto(
workcenter_groups: Optional[List[str]] = None,
reason: Optional[str] = None,
trend_dates: Optional[List[str]] = None,
include_excluded_scrap: bool = False,
exclude_material_scrap: bool = True,
exclude_pb_diode: bool = True,
) -> Optional[Dict[str, Any]]:
"""Compute dimension pareto from cached DataFrame (no Oracle query)."""
df = _get_cached_df(query_id)
if df is None:
return None
# Keep cache-based pareto behavior aligned with primary/view policy filters.
df = _apply_policy_filters(
df,
include_excluded_scrap=include_excluded_scrap,
exclude_material_scrap=exclude_material_scrap,
exclude_pb_diode=exclude_pb_diode,
)
if df is None or df.empty:
return {"items": [], "dimension": dimension, "metric_mode": metric_mode}
dim_col = _DIM_TO_DF_COLUMN.get(dimension, "LOSSREASONNAME")
if dim_col not in df.columns:
return {"items": [], "dimension": dimension, "metric_mode": metric_mode}
@@ -843,6 +905,8 @@ def export_csv_from_cache(
metric_filter: str = "all",
trend_dates: Optional[List[str]] = None,
detail_reason: Optional[str] = None,
pareto_dimension: Optional[str] = None,
pareto_values: Optional[List[str]] = None,
include_excluded_scrap: bool = False,
exclude_material_scrap: bool = True,
exclude_pb_diode: bool = True,
@@ -876,6 +940,11 @@ def export_csv_from_cache(
filtered = filtered[
filtered["LOSSREASONNAME"].str.strip() == detail_reason.strip()
]
filtered = _apply_pareto_selection_filter(
filtered,
pareto_dimension=pareto_dimension,
pareto_values=pareto_values,
)
rows = []
for _, row in filtered.iterrows():

View File

@@ -34,6 +34,7 @@ SELECT
p.EQUIPMENTNAME,
p.PRIMARY_EQUIPMENTNAME,
p.PRODUCTLINENAME,
p.SCRAP_OBJECTTYPE,
p.PJ_TYPE,
p.CONTAINERNAME,
p.PJ_FUNCTION,

View File

@@ -30,16 +30,6 @@ WITH spec_map AS (
WHERE SPEC IS NOT NULL
GROUP BY SPEC
),
workflow_lookup AS (
SELECT /*+ MATERIALIZE */ DISTINCT w.CONTAINERID, w.WORKFLOWNAME
FROM DWH.DW_MES_WIP w
WHERE w.PRODUCTLINENAME <> '點測'
AND w.CONTAINERID IN (
SELECT DISTINCT r0.CONTAINERID
FROM DWH.DW_MES_LOTREJECTHISTORY r0
WHERE {{ WORKFLOW_FILTER }}
)
),
reject_raw AS (
SELECT
TRUNC(r.TXNDATE) AS TXN_DAY,
@@ -56,9 +46,37 @@ reject_raw AS (
NVL(TRIM(r.EQUIPMENTNAME), '(NA)') AS EQUIPMENTNAME,
NVL(
TRIM(REGEXP_SUBSTR(r.EQUIPMENTNAME, '[^,]+', 1, 1)),
NVL(TRIM(r.EQUIPMENTNAME), '(NA)')
'(NA)'
) AS PRIMARY_EQUIPMENTNAME,
NVL(TRIM(wf.WORKFLOWNAME), NVL(TRIM(r.SPECNAME), '(NA)')) AS WORKFLOWNAME,
NVL(
TRIM(lwh.WORKFLOWNAME),
NVL(
TRIM((
SELECT w.WORKFLOWNAME
FROM DWH.DW_MES_WIP w
WHERE c.CONTAINERNAME IS NOT NULL
AND w.CONTAINERNAME = c.CONTAINERNAME
AND NVL(TRIM(w.SPECNAME), '-') = NVL(TRIM(r.SPECNAME), '-')
AND w.TXNDATE <= r.TXNDATE
AND TRIM(w.WORKFLOWNAME) IS NOT NULL
ORDER BY w.TXNDATE DESC
FETCH FIRST 1 ROW ONLY
)),
NVL(
TRIM((
SELECT w.WORKFLOWNAME
FROM DWH.DW_MES_WIP w
WHERE c.CONTAINERNAME IS NOT NULL
AND w.CONTAINERNAME = c.CONTAINERNAME
AND NVL(TRIM(w.SPECNAME), '-') = NVL(TRIM(r.SPECNAME), '-')
AND TRIM(w.WORKFLOWNAME) IS NOT NULL
ORDER BY w.TXNDATE DESC
FETCH FIRST 1 ROW ONLY
)),
'(NA)'
)
)
) AS WORKFLOWNAME,
NVL(TRIM(r.LOSSREASONNAME), '(未填寫)') AS LOSSREASONNAME,
NVL(
TRIM(REGEXP_SUBSTR(NVL(TRIM(r.LOSSREASONNAME), '(未填寫)'), '^[^_[:space:]-]+')),
@@ -87,10 +105,10 @@ reject_raw AS (
FROM DWH.DW_MES_LOTREJECTHISTORY r
LEFT JOIN DWH.DW_MES_CONTAINER c
ON c.CONTAINERID = r.CONTAINERID
LEFT JOIN DWH.DW_MES_LOTWIPHISTORY lwh
ON lwh.WIPTRACKINGGROUPKEYID = r.WIPTRACKINGGROUPKEYID
LEFT JOIN spec_map sm
ON sm.SPEC = TRIM(r.SPECNAME)
LEFT JOIN workflow_lookup wf
ON wf.CONTAINERID = r.CONTAINERID
WHERE {{ BASE_WHERE }}
),
daily_agg AS (

View File

@@ -19,16 +19,6 @@ WITH spec_map AS (
WHERE SPEC IS NOT NULL
GROUP BY SPEC
),
workflow_lookup AS (
SELECT /*+ MATERIALIZE */ DISTINCT w.CONTAINERID, w.WORKFLOWNAME
FROM DWH.DW_MES_WIP w
WHERE w.PRODUCTLINENAME <> '點測'
AND w.CONTAINERID IN (
SELECT DISTINCT r0.CONTAINERID
FROM DWH.DW_MES_LOTREJECTHISTORY r0
WHERE {{ WORKFLOW_FILTER }}
)
),
reject_raw AS (
SELECT
r.TXNDATE,
@@ -49,9 +39,37 @@ reject_raw AS (
NVL(TRIM(r.EQUIPMENTNAME), '(NA)') AS EQUIPMENTNAME,
NVL(
TRIM(REGEXP_SUBSTR(r.EQUIPMENTNAME, '[^,]+', 1, 1)),
NVL(TRIM(r.EQUIPMENTNAME), '(NA)')
'(NA)'
) AS PRIMARY_EQUIPMENTNAME,
NVL(TRIM(wf.WORKFLOWNAME), NVL(TRIM(r.SPECNAME), '(NA)')) AS WORKFLOWNAME,
NVL(
TRIM(lwh.WORKFLOWNAME),
NVL(
TRIM((
SELECT w.WORKFLOWNAME
FROM DWH.DW_MES_WIP w
WHERE c.CONTAINERNAME IS NOT NULL
AND w.CONTAINERNAME = c.CONTAINERNAME
AND NVL(TRIM(w.SPECNAME), '-') = NVL(TRIM(r.SPECNAME), '-')
AND w.TXNDATE <= r.TXNDATE
AND TRIM(w.WORKFLOWNAME) IS NOT NULL
ORDER BY w.TXNDATE DESC
FETCH FIRST 1 ROW ONLY
)),
NVL(
TRIM((
SELECT w.WORKFLOWNAME
FROM DWH.DW_MES_WIP w
WHERE c.CONTAINERNAME IS NOT NULL
AND w.CONTAINERNAME = c.CONTAINERNAME
AND NVL(TRIM(w.SPECNAME), '-') = NVL(TRIM(r.SPECNAME), '-')
AND TRIM(w.WORKFLOWNAME) IS NOT NULL
ORDER BY w.TXNDATE DESC
FETCH FIRST 1 ROW ONLY
)),
'(NA)'
)
)
) AS WORKFLOWNAME,
NVL(TRIM(r.LOSSREASONNAME), '(未填寫)') AS LOSSREASONNAME,
NVL(
TRIM(REGEXP_SUBSTR(NVL(TRIM(r.LOSSREASONNAME), '(未填寫)'), '^[^_[:space:]-]+')),
@@ -81,10 +99,10 @@ reject_raw AS (
FROM DWH.DW_MES_LOTREJECTHISTORY r
LEFT JOIN DWH.DW_MES_CONTAINER c
ON c.CONTAINERID = r.CONTAINERID
LEFT JOIN DWH.DW_MES_LOTWIPHISTORY lwh
ON lwh.WIPTRACKINGGROUPKEYID = r.WIPTRACKINGGROUPKEYID
LEFT JOIN spec_map sm
ON sm.SPEC = TRIM(r.SPECNAME)
LEFT JOIN workflow_lookup wf
ON wf.CONTAINERID = r.CONTAINERID
WHERE {{ BASE_WHERE }}
),
daily_agg AS (

View File

@@ -0,0 +1,188 @@
# -*- coding: utf-8 -*-
"""Unit tests for reject_dataset_cache helpers."""
from __future__ import annotations
import pandas as pd
import pytest
from mes_dashboard.services import reject_dataset_cache as cache_svc
def test_compute_dimension_pareto_applies_policy_filters_before_grouping(monkeypatch):
    """Cached pareto should honor the same policy toggles as view/query paths."""
    rows = [
        {
            "CONTAINERID": "C1",
            "LOSSREASONNAME": "001_A",
            "LOSSREASON_CODE": "001_A",
            "SCRAP_OBJECTTYPE": "MATERIAL",
            "PRODUCTLINENAME": "(NA)",
            "WORKCENTER_GROUP": "WB",
            "REJECT_TOTAL_QTY": 100,
            "DEFECT_QTY": 0,
            "MOVEIN_QTY": 1000,
        },
        {
            "CONTAINERID": "C2",
            "LOSSREASONNAME": "001_A",
            "LOSSREASON_CODE": "001_A",
            "SCRAP_OBJECTTYPE": "LOT",
            "PRODUCTLINENAME": "PKG-A",
            "WORKCENTER_GROUP": "WB",
            "REJECT_TOTAL_QTY": 50,
            "DEFECT_QTY": 0,
            "MOVEIN_QTY": 900,
        },
    ]
    cached_df = pd.DataFrame(rows)
    monkeypatch.setattr(cache_svc, "_get_cached_df", lambda _query_id: cached_df)
    monkeypatch.setattr(
        "mes_dashboard.services.scrap_reason_exclusion_cache.get_excluded_reasons",
        lambda: [],
    )

    # Only exclude_material_scrap varies between the two calls below.
    def run_pareto(exclude_material_scrap):
        return cache_svc.compute_dimension_pareto(
            query_id="qid-1",
            dimension="package",
            pareto_scope="all",
            include_excluded_scrap=False,
            exclude_material_scrap=exclude_material_scrap,
            exclude_pb_diode=True,
        )

    with_material_excluded = run_pareto(True)
    with_material_kept = run_pareto(False)

    labels_when_excluded = {item.get("reason") for item in with_material_excluded.get("items", [])}
    labels_when_kept = {item.get("reason") for item in with_material_kept.get("items", [])}

    # The MATERIAL row carries the "(NA)" product line; excluding material
    # scrap must drop it from the grouped pareto items.
    assert "PKG-A" in labels_when_excluded
    assert "(NA)" not in labels_when_excluded
    assert "(NA)" in labels_when_kept
def _build_detail_filter_df():
return pd.DataFrame(
[
{
"CONTAINERID": "C1",
"CONTAINERNAME": "LOT-001",
"TXN_DAY": pd.Timestamp("2026-02-01"),
"TXN_TIME": pd.Timestamp("2026-02-01 08:00:00"),
"WORKCENTERSEQUENCE_GROUP": 1,
"WORKCENTER_GROUP": "WB",
"WORKCENTERNAME": "WB-A",
"SPECNAME": "SPEC-A",
"WORKFLOWNAME": "WF-A",
"PRIMARY_EQUIPMENTNAME": "EQ-1",
"EQUIPMENTNAME": "EQ-1",
"PRODUCTLINENAME": "PKG-A",
"PJ_TYPE": "TYPE-A",
"LOSSREASONNAME": "001_A",
"LOSSREASON_CODE": "001_A",
"SCRAP_OBJECTTYPE": "LOT",
"MOVEIN_QTY": 100,
"REJECT_TOTAL_QTY": 30,
"DEFECT_QTY": 0,
},
{
"CONTAINERID": "C2",
"CONTAINERNAME": "LOT-002",
"TXN_DAY": pd.Timestamp("2026-02-01"),
"TXN_TIME": pd.Timestamp("2026-02-01 09:00:00"),
"WORKCENTERSEQUENCE_GROUP": 1,
"WORKCENTER_GROUP": "WB",
"WORKCENTERNAME": "WB-B",
"SPECNAME": "SPEC-B",
"WORKFLOWNAME": "WF-B",
"PRIMARY_EQUIPMENTNAME": "EQ-2",
"EQUIPMENTNAME": "EQ-2",
"PRODUCTLINENAME": "PKG-B",
"PJ_TYPE": "TYPE-B",
"LOSSREASONNAME": "001_A",
"LOSSREASON_CODE": "001_A",
"SCRAP_OBJECTTYPE": "LOT",
"MOVEIN_QTY": 100,
"REJECT_TOTAL_QTY": 20,
"DEFECT_QTY": 0,
},
{
"CONTAINERID": "C3",
"CONTAINERNAME": "LOT-003",
"TXN_DAY": pd.Timestamp("2026-02-01"),
"TXN_TIME": pd.Timestamp("2026-02-01 10:00:00"),
"WORKCENTERSEQUENCE_GROUP": 1,
"WORKCENTER_GROUP": "WB",
"WORKCENTERNAME": "WB-C",
"SPECNAME": "SPEC-C",
"WORKFLOWNAME": "WF-C",
"PRIMARY_EQUIPMENTNAME": "EQ-3",
"EQUIPMENTNAME": "EQ-3",
"PRODUCTLINENAME": "PKG-C",
"PJ_TYPE": "TYPE-C",
"LOSSREASONNAME": "002_B",
"LOSSREASON_CODE": "002_B",
"SCRAP_OBJECTTYPE": "LOT",
"MOVEIN_QTY": 100,
"REJECT_TOTAL_QTY": 10,
"DEFECT_QTY": 0,
},
]
)
def test_apply_view_and_export_share_same_pareto_multi_select_filter(monkeypatch):
    """apply_view and export_csv_from_cache must apply identical Pareto
    multi-select filter semantics."""
    frame = _build_detail_filter_df()
    monkeypatch.setattr(cache_svc, "_get_cached_df", lambda _query_id: frame)
    monkeypatch.setattr(
        "mes_dashboard.services.scrap_reason_exclusion_cache.get_excluded_reasons",
        lambda: [],
    )
    # Same selection forwarded to both entry points.
    selection = {"pareto_dimension": "type", "pareto_values": ["TYPE-A", "TYPE-C"]}

    view_result = cache_svc.apply_view(query_id="qid-2", **selection)
    export_rows = cache_svc.export_csv_from_cache(query_id="qid-2", **selection)

    detail_items = view_result["detail"]["items"]
    assert view_result["detail"]["pagination"]["total"] == 2
    assert {item["PJ_TYPE"] for item in detail_items} == {"TYPE-A", "TYPE-C"}
    assert {row["TYPE"] for row in export_rows} == {"TYPE-A", "TYPE-C"}
    assert len(export_rows) == 2
def test_apply_view_rejects_invalid_pareto_dimension(monkeypatch):
    """Both cached entry points surface ValueError for unknown dimensions."""
    frame = _build_detail_filter_df()
    monkeypatch.setattr(cache_svc, "_get_cached_df", lambda _query_id: frame)

    for entry_point in (cache_svc.apply_view, cache_svc.export_csv_from_cache):
        with pytest.raises(ValueError, match="不支援的 pareto_dimension"):
            entry_point(
                query_id="qid-3",
                pareto_dimension="invalid-dimension",
                pareto_values=["X"],
            )

View File

@@ -218,6 +218,121 @@ class TestRejectHistoryApiRoutes(TestRejectHistoryRoutesBase):
_, kwargs = mock_pareto.call_args
self.assertEqual(kwargs['dimension'], 'equipment')
@patch('mes_dashboard.routes.reject_history_routes.query_dimension_pareto')
@patch('mes_dashboard.routes.reject_history_routes.compute_dimension_pareto')
def test_dimension_pareto_with_query_id_passes_policy_flags_to_cached_path(
    self,
    mock_cached_pareto,
    mock_sql_pareto,
):
    """query_id requests should use the cached pareto path with policy flags
    forwarded, never falling back to the SQL pareto query."""
    mock_cached_pareto.return_value = {
        'items': [{'reason': 'PKG-A', 'metric_value': 100, 'pct': 100, 'cumPct': 100}],
        'dimension': 'package',
        'metric_mode': 'reject_total',
        'pareto_scope': 'all',
    }

    resp = self.client.get(
        '/api/reject-history/reason-pareto'
        '?start_date=2026-02-01'
        '&end_date=2026-02-07'
        '&query_id=qid-001'
        '&dimension=package'
        '&pareto_scope=all'
        '&include_excluded_scrap=true'
        '&exclude_material_scrap=false'
        '&exclude_pb_diode=false'
    )
    body = json.loads(resp.data)

    self.assertEqual(resp.status_code, 200)
    self.assertTrue(body['success'])

    _, kwargs = mock_cached_pareto.call_args
    for key, expected in {
        'query_id': 'qid-001',
        'dimension': 'package',
        'pareto_scope': 'all',
    }.items():
        self.assertEqual(kwargs[key], expected)
    # Policy toggles must arrive as real booleans, not strings.
    self.assertIs(kwargs['include_excluded_scrap'], True)
    self.assertIs(kwargs['exclude_material_scrap'], False)
    self.assertIs(kwargs['exclude_pb_diode'], False)
    mock_sql_pareto.assert_not_called()
@patch('mes_dashboard.routes.reject_history_routes.apply_view')
def test_view_passes_pareto_multi_select_filters(self, mock_apply_view):
    """/view should forward pareto_dimension plus repeated pareto_values."""
    empty_detail = {
        'items': [],
        'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
    }
    mock_apply_view.return_value = {
        'analytics_raw': [],
        'summary': {},
        'detail': empty_detail,
    }

    resp = self.client.get(
        '/api/reject-history/view'
        '?query_id=qid-001'
        '&pareto_dimension=workflow'
        '&pareto_values=WF-A'
        '&pareto_values=WF-B'
    )
    body = json.loads(resp.data)

    self.assertEqual(resp.status_code, 200)
    self.assertTrue(body['success'])
    _, kwargs = mock_apply_view.call_args
    self.assertEqual(kwargs['pareto_dimension'], 'workflow')
    self.assertEqual(kwargs['pareto_values'], ['WF-A', 'WF-B'])
@patch('mes_dashboard.routes.reject_history_routes.apply_view')
def test_view_invalid_pareto_dimension_returns_400(self, mock_apply_view):
    """An unknown pareto_dimension must be rejected before the service runs."""
    resp = self.client.get(
        '/api/reject-history/view'
        '?query_id=qid-001'
        '&pareto_dimension=invalid'
        '&pareto_values=X'
    )
    body = json.loads(resp.data)

    self.assertEqual(resp.status_code, 400)
    self.assertFalse(body['success'])
    mock_apply_view.assert_not_called()
@patch('mes_dashboard.routes.reject_history_routes._list_to_csv')
@patch('mes_dashboard.routes.reject_history_routes.export_csv_from_cache')
def test_export_cached_passes_pareto_multi_select_filters(
    self,
    mock_export_cached,
    mock_list_to_csv,
):
    """/export-cached should forward the Pareto multi-select filters."""
    mock_export_cached.return_value = [{'LOT': 'LOT-001'}]
    mock_list_to_csv.return_value = iter(['A,B\n', '1,2\n'])

    resp = self.client.get(
        '/api/reject-history/export-cached'
        '?query_id=qid-001'
        '&pareto_dimension=type'
        '&pareto_values=TYPE-A'
        '&pareto_values=TYPE-C'
    )

    self.assertEqual(resp.status_code, 200)
    _, kwargs = mock_export_cached.call_args
    self.assertEqual(kwargs['pareto_dimension'], 'type')
    self.assertEqual(kwargs['pareto_values'], ['TYPE-A', 'TYPE-C'])
@patch('mes_dashboard.routes.reject_history_routes.export_csv_from_cache')
def test_export_cached_invalid_pareto_dimension_returns_400(self, mock_export_cached):
    """An invalid dimension on export must return 400 and skip the cache call."""
    resp = self.client.get(
        '/api/reject-history/export-cached'
        '?query_id=qid-001'
        '&pareto_dimension=invalid'
        '&pareto_values=TYPE-A'
    )
    body = json.loads(resp.data)

    self.assertEqual(resp.status_code, 400)
    self.assertFalse(body['success'])
    mock_export_cached.assert_not_called()
@patch('mes_dashboard.routes.reject_history_routes.query_list')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 6))
def test_list_rate_limited_returns_429(self, _mock_limit, mock_list):