feat(mid-section-defect): harden with distributed lock, rate limit, filter separation, abort, SQL classification and tests

Address 6 code review findings (P0-P3): add Redis distributed lock to prevent
duplicate Oracle pipeline on cold cache, apply rate limiting to 3 high-cost
routes, separate UI filter state from committed query state, add AbortController
for request cancellation, push workcenter group classification into Oracle SQL
CASE WHEN, and add 18 route+service tests. Also add workcenter group selection
to job-query equipment selector and rename button to "查詢".

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
egg
2026-02-10 09:32:14 +08:00
parent 8b1b8da59b
commit af59031f95
16 changed files with 1461 additions and 601 deletions

View File

@@ -36,45 +36,45 @@ function renderTxnCell(txn, apiKey) {
return escapeHtml(safeText(txn[apiKey]));
}
// State
// allEquipments: full resource list fetched once from /api/job-query/resources.
let allEquipments = [];
// selectedEquipments: Set of RESOURCEID values currently checked in the dropdown.
let selectedEquipments = new Set();
// jobsData: rows returned by the last job query.
let jobsData = [];
// expandedJobs: Set of JOBID values whose transaction-history row is expanded.
let expandedJobs = new Set();
// Initialize
// On DOM ready: load the equipment list, default the date range, and close
// the equipment dropdown whenever a click lands outside the selector.
document.addEventListener('DOMContentLoaded', () => {
  loadEquipments();
  setLast90Days();
  // Close dropdown when clicking outside
  document.addEventListener('click', (e) => {
    const dropdown = document.getElementById('equipmentDropdown');
    const selector = document.querySelector('.equipment-selector');
    if (!selector.contains(e.target)) {
      dropdown.classList.remove('show');
    }
  });
});
// Load equipments from cache
/**
 * Fetch the equipment/resource list and render it into #equipmentList.
 * An API-level error ({error} in the payload) or a thrown/network error
 * replaces the list container with an inline error message instead.
 */
async function loadEquipments() {
  const listEl = document.getElementById('equipmentList');
  try {
    const result = await MesApi.get('/api/job-query/resources');
    if (result.error) {
      listEl.innerHTML = `<div class="error">${result.error}</div>`;
      return;
    }
    allEquipments = result.data;
    renderEquipmentList(allEquipments);
  } catch (error) {
    listEl.innerHTML = `<div class="error">載入失敗: ${error.message}</div>`;
  }
}
// Render equipment list
// State
// allEquipments: full resource list fetched once from /api/job-query/resources.
let allEquipments = [];
// selectedEquipments: Set of RESOURCEID values currently checked in the dropdown.
let selectedEquipments = new Set();
// jobsData: rows returned by the last job query.
let jobsData = [];
// expandedJobs: Set of JOBID values whose transaction-history row is expanded.
let expandedJobs = new Set();
// Initialize
// On DOM ready: load the equipment list, default the date range, and close
// the equipment dropdown whenever a click lands outside the selector.
document.addEventListener('DOMContentLoaded', () => {
  loadEquipments();
  setLast90Days();
  // Close dropdown when clicking outside
  document.addEventListener('click', (e) => {
    const dropdown = document.getElementById('equipmentDropdown');
    const selector = document.querySelector('.equipment-selector');
    if (!selector.contains(e.target)) {
      dropdown.classList.remove('show');
    }
  });
});
// Load equipments from cache
/**
 * Fetch the equipment/resource list and render it into #equipmentList.
 * An API-level error ({error} in the payload) or a thrown/network error
 * replaces the list container with an inline error message instead.
 */
async function loadEquipments() {
  try {
    const data = await MesApi.get('/api/job-query/resources');
    if (data.error) {
      // Backend signalled a handled error; show it in place of the list.
      document.getElementById('equipmentList').innerHTML = `<div class="error">${data.error}</div>`;
      return;
    }
    allEquipments = data.data;
    renderEquipmentList(allEquipments);
  } catch (error) {
    document.getElementById('equipmentList').innerHTML = `<div class="error">載入失敗: ${error.message}</div>`;
  }
}
// Render equipment list
function renderEquipmentList(equipments) {
const container = document.getElementById('equipmentList');
@@ -88,8 +88,18 @@ function renderTxnCell(txn, apiKey) {
const workcenters = sortBy(Object.keys(grouped), (name) => name);
workcenters.forEach((workcenterName) => {
html += `<div style="padding: 8px 15px; background: #f0f0f0; font-weight: 600; font-size: 12px; color: #666;">${escapeHtml(workcenterName)}</div>`;
grouped[workcenterName].forEach((eq) => {
const groupEquipments = grouped[workcenterName];
const groupIds = groupEquipments.map((eq) => eq.RESOURCEID);
const selectedInGroup = groupIds.filter((id) => selectedEquipments.has(id)).length;
const allSelected = selectedInGroup === groupIds.length;
const someSelected = selectedInGroup > 0 && !allSelected;
const escapedName = escapeHtml(workcenterName);
html += `<div class="workcenter-group-header" onclick="toggleWorkcenterGroup('${escapedName}')">
<input type="checkbox" ${allSelected ? 'checked' : ''} ${someSelected ? 'class="indeterminate"' : ''} onclick="event.stopPropagation(); toggleWorkcenterGroup('${escapedName}')">
<span class="workcenter-group-name">${escapedName}</span>
<span class="workcenter-group-count">${selectedInGroup}/${groupIds.length}</span>
</div>`;
groupEquipments.forEach((eq) => {
const isSelected = selectedEquipments.has(eq.RESOURCEID);
const resourceId = escapeHtml(safeText(eq.RESOURCEID));
const resourceName = escapeHtml(safeText(eq.RESOURCENAME));
@@ -109,145 +119,169 @@ function renderTxnCell(txn, apiKey) {
container.innerHTML = html;
}
// Toggle equipment dropdown
/** Show/hide the equipment dropdown by toggling its `show` class. */
function toggleEquipmentDropdown() {
  document.getElementById('equipmentDropdown').classList.toggle('show');
}
// Filter equipments by search
/**
 * Re-render the equipment list filtered by a free-text query.
 * Case-insensitive substring match against resource name, workcenter name
 * and resource family name.
 * @param {string} query - raw text from the search input.
 */
function filterEquipments(query) {
  const needle = query.toLowerCase();
  const matches = (value) => Boolean(value) && value.toLowerCase().includes(needle);
  const filtered = allEquipments.filter(
    (eq) => matches(eq.RESOURCENAME) || matches(eq.WORKCENTERNAME) || matches(eq.RESOURCEFAMILYNAME)
  );
  renderEquipmentList(filtered);
}
// Toggle equipment selection
/**
 * Toggle selection state of a single equipment, then refresh the summary
 * display and re-render the list through the active search filter.
 * @param {string} resourceId - RESOURCEID of the equipment row clicked.
 */
function toggleEquipment(resourceId) {
  if (selectedEquipments.has(resourceId)) {
    selectedEquipments.delete(resourceId);
  } else {
    selectedEquipments.add(resourceId);
  }
  updateSelectedDisplay();
  // Re-render through the shared search filter so the visible rows match
  // filterEquipments() exactly. The previous inline copy dropped the
  // RESOURCEFAMILYNAME match and called querySelector once per equipment
  // inside the filter callback.
  const search = document.querySelector('.equipment-search');
  filterEquipments(search?.value ?? '');
}
// Update selected display
/**
 * Refresh the dropdown header text. With no selection show a placeholder;
 * with up to three selections list their names plus a count caption;
 * with more, show only the count in the header.
 */
function updateSelectedDisplay() {
  const display = document.getElementById('equipmentDisplay');
  const count = document.getElementById('selectedCount');
  const total = selectedEquipments.size;
  if (total === 0) {
    display.textContent = '點擊選擇設備...';
    count.textContent = '';
    return;
  }
  const summary = `已選擇 ${total} 台設備`;
  if (total <= 3) {
    const names = allEquipments
      .filter((eq) => selectedEquipments.has(eq.RESOURCEID))
      .map((eq) => eq.RESOURCENAME);
    display.textContent = names.join(', ');
    count.textContent = summary;
  } else {
    display.textContent = summary;
    count.textContent = '';
  }
}
// Set last 90 days
/**
 * Default the date inputs to the last 90 days.
 * Uses local-time components instead of toISOString(): toISOString() is
 * always UTC, so in a non-UTC timezone (this site runs at +08:00) the
 * previous code could populate yesterday's/tomorrow's date near midnight.
 */
function setLast90Days() {
  const toLocalIsoDate = (d) => {
    const y = d.getFullYear();
    const m = String(d.getMonth() + 1).padStart(2, '0');
    const day = String(d.getDate()).padStart(2, '0');
    return `${y}-${m}-${day}`;
  };
  const today = new Date();
  const past = new Date();
  past.setDate(today.getDate() - 90);
  document.getElementById('dateFrom').value = toLocalIsoDate(past);
  document.getElementById('dateTo').value = toLocalIsoDate(today);
}
// Validate inputs
/**
 * Validate the query form before hitting the API.
 * Checks, in order: at least one equipment selected, both dates present,
 * end date not before start date, and range at most 365 days.
 * Shows a Toast for the first violation found.
 * @returns {boolean} true when all checks pass.
 */
function validateInputs() {
  if (selectedEquipments.size === 0) {
    Toast.error('請選擇至少一台設備');
    return false;
  }
  const dateFrom = document.getElementById('dateFrom').value;
  const dateTo = document.getElementById('dateTo').value;
  if (!dateFrom || !dateTo) {
    Toast.error('請指定日期範圍');
    return false;
  }
  const from = new Date(dateFrom);
  const to = new Date(dateTo);
  if (to < from) {
    Toast.error('結束日期不可早於起始日期');
    return false;
  }
  // Millisecond difference converted to whole days.
  const daysDiff = (to - from) / (1000 * 60 * 60 * 24);
  if (daysDiff > 365) {
    Toast.error('日期範圍不可超過 365 天');
    return false;
  }
  return true;
}
// Query jobs
/**
 * Run the job query: validate inputs, show a loading spinner, POST the
 * selected equipments and date range, then render the result table.
 * Buttons are disabled during the request; the query button is always
 * re-enabled in finally. NOTE(review): on API error or exception the
 * export button stays disabled — presumably intentional since there is
 * no data to export; confirm.
 */
async function queryJobs() {
  if (!validateInputs()) return;
  const resultSection = document.getElementById('resultSection');
  resultSection.innerHTML = `
<div class="loading">
<div class="loading-spinner"></div>
<br>查詢中...
</div>
`;
  document.getElementById('queryBtn').disabled = true;
  document.getElementById('exportBtn').disabled = true;
  try {
    const data = await MesApi.post('/api/job-query/jobs', {
      resource_ids: Array.from(selectedEquipments),
      start_date: document.getElementById('dateFrom').value,
      end_date: document.getElementById('dateTo').value
    });
    if (data.error) {
      resultSection.innerHTML = `<div class="error">${data.error}</div>`;
      return;
    }
    jobsData = data.data;
    // New result set: collapse any previously expanded history rows.
    expandedJobs.clear();
    renderJobsTable();
    document.getElementById('exportBtn').disabled = jobsData.length === 0;
  } catch (error) {
    resultSection.innerHTML = `<div class="error">查詢失敗: ${error.message}</div>`;
  } finally {
    document.getElementById('queryBtn').disabled = false;
  }
}
// Render jobs table
// Toggle equipment dropdown
function toggleEquipmentDropdown() {
const dropdown = document.getElementById('equipmentDropdown');
dropdown.classList.toggle('show');
}
// Filter equipments by search
/**
 * Re-render the equipment list filtered by a free-text query.
 * Case-insensitive substring match against resource name, workcenter name
 * and resource family name.
 * @param {string} query - raw text from the search input.
 */
function filterEquipments(query) {
  const q = query.toLowerCase();
  const filtered = allEquipments.filter(eq =>
    (eq.RESOURCENAME && eq.RESOURCENAME.toLowerCase().includes(q)) ||
    (eq.WORKCENTERNAME && eq.WORKCENTERNAME.toLowerCase().includes(q)) ||
    (eq.RESOURCEFAMILYNAME && eq.RESOURCEFAMILYNAME.toLowerCase().includes(q))
  );
  renderEquipmentList(filtered);
}
// Toggle equipment selection
/**
 * Toggle selection state of a single equipment, then refresh the summary
 * display and re-render the list through the active search filter.
 * @param {string} resourceId - RESOURCEID of the equipment row clicked.
 */
function toggleEquipment(resourceId) {
  if (selectedEquipments.has(resourceId)) {
    selectedEquipments.delete(resourceId);
  } else {
    selectedEquipments.add(resourceId);
  }
  updateSelectedDisplay();
  // Re-render through the shared search filter so the visible rows match
  // filterEquipments() exactly. The previous inline copy dropped the
  // RESOURCEFAMILYNAME match and called querySelector once per equipment
  // inside the filter callback.
  const search = document.querySelector('.equipment-search');
  filterEquipments(search?.value ?? '');
}
// Toggle entire workcenter group selection
/**
 * Toggle selection of every equipment in one workcenter group.
 * If all resources in the group are already selected, deselect them all;
 * otherwise select the whole group. Then refresh the summary display and
 * re-render the list through the active search filter.
 * @param {string} workcenterName - group label as rendered (safeText
 *   fallback '未分類' for rows without a workcenter name).
 */
function toggleWorkcenterGroup(workcenterName) {
  const groupIds = allEquipments
    .filter((eq) => safeText(eq.WORKCENTERNAME, '未分類') === workcenterName)
    .map((eq) => eq.RESOURCEID);
  const allSelected = groupIds.every((id) => selectedEquipments.has(id));
  groupIds.forEach((id) => {
    if (allSelected) {
      selectedEquipments.delete(id);
    } else {
      selectedEquipments.add(id);
    }
  });
  updateSelectedDisplay();
  // Re-render through the shared search filter so the visible rows match
  // filterEquipments() exactly. The previous inline copy dropped the
  // RESOURCEFAMILYNAME match and called querySelector once per equipment
  // inside the filter callback.
  const search = document.querySelector('.equipment-search');
  filterEquipments(search?.value ?? '');
}
// Update selected display
/**
 * Refresh the dropdown header text. With no selection show a placeholder;
 * with up to three selections list their names plus a count caption;
 * with more, show only the count in the header.
 */
function updateSelectedDisplay() {
  const display = document.getElementById('equipmentDisplay');
  const count = document.getElementById('selectedCount');
  if (selectedEquipments.size === 0) {
    display.textContent = '點擊選擇設備...';
    count.textContent = '';
  } else if (selectedEquipments.size <= 3) {
    const names = allEquipments
      .filter(eq => selectedEquipments.has(eq.RESOURCEID))
      .map(eq => eq.RESOURCENAME)
      .join(', ');
    display.textContent = names;
    count.textContent = `已選擇 ${selectedEquipments.size} 台設備`;
  } else {
    display.textContent = `已選擇 ${selectedEquipments.size} 台設備`;
    count.textContent = '';
  }
}
// Set last 90 days
/**
 * Default the date inputs to the last 90 days.
 * Uses local-time components instead of toISOString(): toISOString() is
 * always UTC, so in a non-UTC timezone (this site runs at +08:00) the
 * previous code could populate yesterday's/tomorrow's date near midnight.
 */
function setLast90Days() {
  const toLocalIsoDate = (d) => {
    const y = d.getFullYear();
    const m = String(d.getMonth() + 1).padStart(2, '0');
    const day = String(d.getDate()).padStart(2, '0');
    return `${y}-${m}-${day}`;
  };
  const today = new Date();
  const past = new Date();
  past.setDate(today.getDate() - 90);
  document.getElementById('dateFrom').value = toLocalIsoDate(past);
  document.getElementById('dateTo').value = toLocalIsoDate(today);
}
// Validate inputs
/**
 * Validate the query form before hitting the API.
 * Checks, in order: at least one equipment selected, both dates present,
 * end date not before start date, and range at most 365 days.
 * Shows a Toast for the first violation found.
 * @returns {boolean} true when all checks pass.
 */
function validateInputs() {
  if (selectedEquipments.size === 0) {
    Toast.error('請選擇至少一台設備');
    return false;
  }
  const dateFrom = document.getElementById('dateFrom').value;
  const dateTo = document.getElementById('dateTo').value;
  if (!dateFrom || !dateTo) {
    Toast.error('請指定日期範圍');
    return false;
  }
  const from = new Date(dateFrom);
  const to = new Date(dateTo);
  if (to < from) {
    Toast.error('結束日期不可早於起始日期');
    return false;
  }
  // Millisecond difference converted to whole days.
  const daysDiff = (to - from) / (1000 * 60 * 60 * 24);
  if (daysDiff > 365) {
    Toast.error('日期範圍不可超過 365 天');
    return false;
  }
  return true;
}
// Query jobs
/**
 * Run the job query: validate inputs, show a loading spinner, POST the
 * selected equipments and date range, then render the result table.
 * Buttons are disabled during the request; the query button is always
 * re-enabled in finally. NOTE(review): on API error or exception the
 * export button stays disabled — presumably intentional since there is
 * no data to export; confirm.
 */
async function queryJobs() {
  if (!validateInputs()) return;
  const resultSection = document.getElementById('resultSection');
  resultSection.innerHTML = `
<div class="loading">
<div class="loading-spinner"></div>
<br>查詢中...
</div>
`;
  document.getElementById('queryBtn').disabled = true;
  document.getElementById('exportBtn').disabled = true;
  try {
    const data = await MesApi.post('/api/job-query/jobs', {
      resource_ids: Array.from(selectedEquipments),
      start_date: document.getElementById('dateFrom').value,
      end_date: document.getElementById('dateTo').value
    });
    if (data.error) {
      resultSection.innerHTML = `<div class="error">${data.error}</div>`;
      return;
    }
    jobsData = data.data;
    // New result set: collapse any previously expanded history rows.
    expandedJobs.clear();
    renderJobsTable();
    document.getElementById('exportBtn').disabled = jobsData.length === 0;
  } catch (error) {
    resultSection.innerHTML = `<div class="error">查詢失敗: ${error.message}</div>`;
  } finally {
    document.getElementById('queryBtn').disabled = false;
  }
}
// Render jobs table
function renderJobsTable() {
const resultSection = document.getElementById('resultSection');
const jobHeaders = jobTableFields.map((field) => `<th>${escapeHtml(field.ui_label)}</th>`).join('');
@@ -256,19 +290,19 @@ function renderTxnCell(txn, apiKey) {
resultSection.innerHTML = `
<div class="empty-state">
<p>無符合條件的工單</p>
</div>
`;
return;
}
let html = `
<div class="result-header">
<div class="result-info">共 ${jobsData.length} 筆工單</div>
<div class="result-actions">
<button class="btn btn-secondary btn-sm" onclick="expandAll()">全部展開</button>
<button class="btn btn-secondary btn-sm" onclick="collapseAll()">全部收合</button>
</div>
</div>
</div>
`;
return;
}
let html = `
<div class="result-header">
<div class="result-info">共 ${jobsData.length} 筆工單</div>
<div class="result-actions">
<button class="btn btn-secondary btn-sm" onclick="expandAll()">全部展開</button>
<button class="btn btn-secondary btn-sm" onclick="collapseAll()">全部收合</button>
</div>
</div>
<div class="table-container">
<table>
<thead>
@@ -300,58 +334,58 @@ function renderTxnCell(txn, apiKey) {
${isExpanded ? '<div class="loading"><div class="loading-spinner"></div></div>' : ''}
</div>
</td>
</tr>
`;
});
html += `
</tbody>
</table>
</div>
`;
resultSection.innerHTML = html;
// Load expanded histories
expandedJobs.forEach(jobId => {
const idx = jobsData.findIndex(j => j.JOBID === jobId);
if (idx >= 0) loadJobHistory(jobId, idx);
});
}
// Toggle job history
/**
 * Expand or collapse the transaction-history row under a job row.
 * Collapsing only flips CSS classes; expanding additionally kicks off
 * loadJobHistory (fire-and-forget — not awaited, errors are rendered
 * inside the row by loadJobHistory itself).
 * @param {*} jobId - JOBID of the clicked job.
 * @param {number} idx - row index used to build element ids.
 */
async function toggleJobHistory(jobId, idx) {
  const txnRow = document.getElementById(`txn-row-${idx}`);
  const jobRow = document.getElementById(`job-row-${idx}`);
  const arrow = jobRow.querySelector('.arrow-icon');
  if (expandedJobs.has(jobId)) {
    expandedJobs.delete(jobId);
    txnRow.classList.remove('show');
    jobRow.classList.remove('expanded');
    arrow.classList.remove('rotated');
  } else {
    expandedJobs.add(jobId);
    txnRow.classList.add('show');
    jobRow.classList.add('expanded');
    arrow.classList.add('rotated');
    loadJobHistory(jobId, idx);
  }
}
// Load job history
async function loadJobHistory(jobId, idx) {
const container = document.getElementById(`txn-content-${idx}`);
container.innerHTML = '<div class="loading" style="padding: 20px;"><div class="loading-spinner"></div></div>';
try {
const data = await MesApi.get(`/api/job-query/txn/${jobId}`);
if (data.error) {
container.innerHTML = `<div class="error" style="margin: 10px 20px;">${data.error}</div>`;
return;
}
</tr>
`;
});
html += `
</tbody>
</table>
</div>
`;
resultSection.innerHTML = html;
// Load expanded histories
expandedJobs.forEach(jobId => {
const idx = jobsData.findIndex(j => j.JOBID === jobId);
if (idx >= 0) loadJobHistory(jobId, idx);
});
}
// Toggle job history
/**
 * Expand or collapse the transaction-history row under a job row.
 * Collapsing only flips CSS classes; expanding additionally kicks off
 * loadJobHistory (fire-and-forget — not awaited, errors are rendered
 * inside the row by loadJobHistory itself).
 * @param {*} jobId - JOBID of the clicked job.
 * @param {number} idx - row index used to build element ids.
 */
async function toggleJobHistory(jobId, idx) {
  const txnRow = document.getElementById(`txn-row-${idx}`);
  const jobRow = document.getElementById(`job-row-${idx}`);
  const arrow = jobRow.querySelector('.arrow-icon');
  if (expandedJobs.has(jobId)) {
    expandedJobs.delete(jobId);
    txnRow.classList.remove('show');
    jobRow.classList.remove('expanded');
    arrow.classList.remove('rotated');
  } else {
    expandedJobs.add(jobId);
    txnRow.classList.add('show');
    jobRow.classList.add('expanded');
    arrow.classList.add('rotated');
    loadJobHistory(jobId, idx);
  }
}
// Load job history
async function loadJobHistory(jobId, idx) {
const container = document.getElementById(`txn-content-${idx}`);
container.innerHTML = '<div class="loading" style="padding: 20px;"><div class="loading-spinner"></div></div>';
try {
const data = await MesApi.get(`/api/job-query/txn/${jobId}`);
if (data.error) {
container.innerHTML = `<div class="error" style="margin: 10px 20px;">${data.error}</div>`;
return;
}
if (!data.data || data.data.length === 0) {
container.innerHTML = '<div style="padding: 20px; color: #666;">無交易歷史記錄</div>';
return;
@@ -378,80 +412,80 @@ function renderTxnCell(txn, apiKey) {
</tr>
`;
});
html += '</tbody></table>';
container.innerHTML = html;
} catch (error) {
container.innerHTML = `<div class="error" style="margin: 10px 20px;">載入失敗: ${error.message}</div>`;
}
}
// Expand all
/**
 * Expand the transaction history of every job in the current result set,
 * then re-render (renderJobsTable triggers the per-job history loads).
 * Set.prototype.add is idempotent, so no membership check is needed;
 * the previous version also declared an unused `idx` parameter.
 */
function expandAll() {
  jobsData.forEach((job) => expandedJobs.add(job.JOBID));
  renderJobsTable();
}
// Collapse all
/** Collapse every expanded history row and re-render the table. */
function collapseAll() {
  expandedJobs.clear();
  renderJobsTable();
}
// Export CSV
/**
 * Export the current query parameters as a CSV download.
 * POSTs the same payload as queryJobs to /api/job-query/export, then
 * turns the binary response into a blob and triggers a synthetic <a>
 * click to download it. The export button is disabled and relabelled
 * while the request is in flight and always restored in finally.
 */
async function exportCsv() {
  if (!validateInputs()) return;
  document.getElementById('exportBtn').disabled = true;
  document.getElementById('exportBtn').textContent = '匯出中...';
  try {
    const response = await fetch('/api/job-query/export', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        resource_ids: Array.from(selectedEquipments),
        start_date: document.getElementById('dateFrom').value,
        end_date: document.getElementById('dateTo').value
      })
    });
    if (!response.ok) {
      // Error responses are JSON with an `error` message.
      const data = await response.json();
      throw new Error(data.error || '匯出失敗');
    }
    // Download file
    const blob = await response.blob();
    const url = window.URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.href = url;
    a.download = `job_history_${document.getElementById('dateFrom').value}_${document.getElementById('dateTo').value}.csv`;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    // Release the blob URL once the click has been dispatched.
    window.URL.revokeObjectURL(url);
    Toast.success('CSV 匯出完成');
  } catch (error) {
    Toast.error('匯出失敗: ' + error.message);
  } finally {
    document.getElementById('exportBtn').disabled = false;
    document.getElementById('exportBtn').textContent = '匯出 CSV';
  }
}
// Format date
/**
 * Format an ISO-8601 timestamp string for display: replace the first 'T'
 * with a space and truncate to 'YYYY-MM-DD HH:MM:SS' (19 chars).
 * @param {string|null|undefined} dateStr
 * @returns {string} formatted timestamp, or '' for a falsy input.
 */
function formatDate(dateStr) {
  if (!dateStr) {
    return '';
  }
  const normalized = dateStr.replace('T', ' ');
  return normalized.slice(0, 19);
}
html += '</tbody></table>';
container.innerHTML = html;
} catch (error) {
container.innerHTML = `<div class="error" style="margin: 10px 20px;">載入失敗: ${error.message}</div>`;
}
}
// Expand all
/**
 * Expand the transaction history of every job in the current result set,
 * then re-render (renderJobsTable triggers the per-job history loads).
 * Set.prototype.add is idempotent, so no membership check is needed;
 * the previous version also declared an unused `idx` parameter.
 */
function expandAll() {
  jobsData.forEach((job) => expandedJobs.add(job.JOBID));
  renderJobsTable();
}
// Collapse all
/** Collapse every expanded history row and re-render the table. */
function collapseAll() {
  expandedJobs.clear();
  renderJobsTable();
}
// Export CSV
/**
 * Export the current query parameters as a CSV download.
 * POSTs the same payload as queryJobs to /api/job-query/export, then
 * turns the binary response into a blob and triggers a synthetic <a>
 * click to download it. The export button is disabled and relabelled
 * while the request is in flight and always restored in finally.
 */
async function exportCsv() {
  if (!validateInputs()) return;
  document.getElementById('exportBtn').disabled = true;
  document.getElementById('exportBtn').textContent = '匯出中...';
  try {
    const response = await fetch('/api/job-query/export', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        resource_ids: Array.from(selectedEquipments),
        start_date: document.getElementById('dateFrom').value,
        end_date: document.getElementById('dateTo').value
      })
    });
    if (!response.ok) {
      // Error responses are JSON with an `error` message.
      const data = await response.json();
      throw new Error(data.error || '匯出失敗');
    }
    // Download file
    const blob = await response.blob();
    const url = window.URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.href = url;
    a.download = `job_history_${document.getElementById('dateFrom').value}_${document.getElementById('dateTo').value}.csv`;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    // Release the blob URL once the click has been dispatched.
    window.URL.revokeObjectURL(url);
    Toast.success('CSV 匯出完成');
  } catch (error) {
    Toast.error('匯出失敗: ' + error.message);
  } finally {
    document.getElementById('exportBtn').disabled = false;
    document.getElementById('exportBtn').textContent = '匯出 CSV';
  }
}
// Format date
/**
 * Format a timestamp for display as 'YYYY-MM-DD HH:MM:SS'.
 * Generalized to also accept a Date instance (serialized via toISOString,
 * i.e. UTC); string inputs behave exactly as before.
 * @param {string|Date|null|undefined} dateStr
 * @returns {string} formatted timestamp, or '' for a falsy input.
 */
function formatDate(dateStr) {
  if (!dateStr) return '';
  const s = dateStr instanceof Date ? dateStr.toISOString() : String(dateStr);
  return s.replace('T', ' ').substring(0, 19);
}
Object.assign(window, {
@@ -460,6 +494,7 @@ renderEquipmentList,
toggleEquipmentDropdown,
filterEquipments,
toggleEquipment,
toggleWorkcenterGroup,
updateSelectedDisplay,
setLast90Days,
validateInputs,

View File

@@ -15,11 +15,16 @@ ensureMesApiAvailable();
const API_TIMEOUT = 120000; // 2min (genealogy can be slow)
// Detail pagination page size.
const PAGE_SIZE = 200;
// UI filter state, two-way bound to the form controls.
const filters = reactive({
  startDate: '',
  endDate: '',
  lossReasons: [],
});
// UI filter state, two-way bound to the form controls.
const filters = reactive({
  startDate: '',
  endDate: '',
  lossReasons: [],
});
// Snapshot of the filters as of the last committed query; all API calls
// (paging, auto-refresh, export) read this instead of the live UI state.
const committedFilters = ref({
  startDate: '',
  endDate: '',
  lossReasons: [],
});
// Loss-reason options for the filter dropdown, loaded once at init.
const availableLossReasons = ref([]);
@@ -71,16 +76,25 @@ function unwrapApiResult(result, fallbackMessage) {
throw new Error(result?.error || fallbackMessage);
}
/**
 * Build the query-string params for the analysis endpoints from the
 * current filter state. `loss_reasons` is only included when at least
 * one reason is selected (comma-joined).
 * @returns {{start_date: string, end_date: string, loss_reasons?: string}}
 */
function buildFilterParams() {
  const params = {
    start_date: filters.startDate,
    end_date: filters.endDate,
  };
  if (filters.lossReasons.length) {
    params.loss_reasons = filters.lossReasons.join(',');
  }
  return params;
}
/**
 * Build the query-string params for the analysis endpoints from the
 * committed (last-queried) filter snapshot — never the live UI state.
 * `loss_reasons` is only included when at least one reason is selected.
 * @returns {{start_date: string, end_date: string, loss_reasons?: string}}
 */
function buildFilterParams() {
  const { startDate, endDate, lossReasons } = committedFilters.value;
  const params = {
    start_date: startDate,
    end_date: endDate,
  };
  if (lossReasons.length) {
    params.loss_reasons = lossReasons.join(',');
  }
  return params;
}
/**
 * Copy the live UI filter state into committedFilters. Called when the
 * user commits a query, so in-flight and follow-up API calls keep using
 * the parameters the query was issued with. lossReasons is shallow-copied
 * so later UI edits cannot mutate the snapshot.
 */
function snapshotFilters() {
  const { startDate, endDate, lossReasons } = filters;
  committedFilters.value = {
    startDate,
    endDate,
    lossReasons: [...lossReasons],
  };
}
async function loadLossReasons() {
try {
@@ -92,46 +106,52 @@ async function loadLossReasons() {
}
}
async function loadDetail(page = 1) {
detailLoading.value = true;
try {
const params = {
...buildFilterParams(),
page,
page_size: PAGE_SIZE,
};
const result = await apiGet('/api/mid-section-defect/analysis/detail', {
params,
timeout: API_TIMEOUT,
});
async function loadDetail(page = 1, signal = null) {
detailLoading.value = true;
try {
const params = {
...buildFilterParams(),
page,
page_size: PAGE_SIZE,
};
const result = await apiGet('/api/mid-section-defect/analysis/detail', {
params,
timeout: API_TIMEOUT,
signal,
});
const unwrapped = unwrapApiResult(result, '載入明細失敗');
detailData.value = unwrapped.data?.detail || [];
detailPagination.value = unwrapped.data?.pagination || {
page: 1, page_size: PAGE_SIZE, total_count: 0, total_pages: 1,
};
} catch (err) {
console.error('Detail load failed:', err.message);
detailData.value = [];
} finally {
detailLoading.value = false;
}
} catch (err) {
if (err?.name === 'AbortError') {
return;
}
console.error('Detail load failed:', err.message);
detailData.value = [];
} finally {
detailLoading.value = false;
}
}
async function loadAnalysis() {
queryError.value = '';
loading.querying = true;
try {
const params = buildFilterParams();
// Fire summary and detail page 1 in parallel
const [summaryResult] = await Promise.all([
apiGet('/api/mid-section-defect/analysis', {
params,
timeout: API_TIMEOUT,
}),
loadDetail(1),
]);
async function loadAnalysis() {
queryError.value = '';
loading.querying = true;
const signal = createAbortSignal('msd-analysis');
try {
const params = buildFilterParams();
// Fire summary and detail page 1 in parallel
const [summaryResult] = await Promise.all([
apiGet('/api/mid-section-defect/analysis', {
params,
timeout: API_TIMEOUT,
signal,
}),
loadDetail(1, signal),
]);
const unwrapped = unwrapApiResult(summaryResult, '查詢失敗');
analysisData.value = unwrapped.data;
@@ -142,60 +162,66 @@ async function loadAnalysis() {
autoRefreshStarted = true;
startAutoRefresh();
}
} catch (err) {
queryError.value = err.message || '查詢失敗,請稍後再試';
} finally {
loading.querying = false;
}
}
} catch (err) {
if (err?.name === 'AbortError') {
return;
}
queryError.value = err.message || '查詢失敗,請稍後再試';
} finally {
loading.querying = false;
}
}
/**
 * Merge a filter patch from the child component into the reactive UI
 * filter state. Does NOT commit — the query still uses committedFilters
 * until the user presses the query button.
 * @param {Object} updated - partial filter fields to overwrite.
 */
function handleUpdateFilters(updated) {
  Object.assign(filters, updated);
}
/** Query-button handler: run the analysis query. */
function handleQuery() {
  loadAnalysis();
}
/** Load the previous detail page; no-op when already on page 1. */
function prevPage() {
  if (detailPagination.value.page <= 1) return;
  loadDetail(detailPagination.value.page - 1);
}
/** Load the next detail page; no-op when already on the last page. */
function nextPage() {
  if (detailPagination.value.page >= detailPagination.value.total_pages) return;
  loadDetail(detailPagination.value.page + 1);
}
/**
 * Trigger a CSV download of the current analysis via a synthetic <a>
 * click pointing at the export endpoint with the current filter params.
 */
function exportCsv() {
  const params = new URLSearchParams({
    start_date: filters.startDate,
    end_date: filters.endDate,
  });
  if (filters.lossReasons.length) {
    params.set('loss_reasons', filters.lossReasons.join(','));
  }
  const link = document.createElement('a');
  link.href = `/api/mid-section-defect/export?${params}`;
  link.download = `mid_section_defect_${filters.startDate}_to_${filters.endDate}.csv`;
  document.body.appendChild(link);
  link.click();
  document.body.removeChild(link);
}
// Armed once, after the first successful query (see loadAnalysis).
let autoRefreshStarted = false;
const { startAutoRefresh } = useAutoRefresh({
  onRefresh: () => loadAnalysis(),
  intervalMs: 5 * 60 * 1000, // refresh every 5 minutes
  autoStart: false,
  refreshOnVisible: true,
});
/** Page bootstrap: default the date range and load filter options. */
function initPage() {
  setDefaultDates();
  loadLossReasons();
}
/**
 * Query-button handler: commit the UI filters into committedFilters,
 * then run the analysis query with that snapshot.
 */
function handleQuery() {
  snapshotFilters();
  loadAnalysis();
}
/**
 * Load the previous detail page; no-op on page 1. A fresh keyed abort
 * signal cancels any still-running previous page navigation.
 */
function prevPage() {
  if (detailPagination.value.page <= 1) return;
  loadDetail(detailPagination.value.page - 1, createAbortSignal('msd-detail'));
}
/**
 * Load the next detail page; no-op on the last page. A fresh keyed abort
 * signal cancels any still-running previous page navigation.
 */
function nextPage() {
  if (detailPagination.value.page >= detailPagination.value.total_pages) return;
  loadDetail(detailPagination.value.page + 1, createAbortSignal('msd-detail'));
}
/**
 * Trigger a CSV download of the current analysis via a synthetic <a>
 * click. Reads the committed filter snapshot so the export matches the
 * data shown, even if the UI filters have been edited since the query.
 */
function exportCsv() {
  const snapshot = committedFilters.value;
  const params = new URLSearchParams({
    start_date: snapshot.startDate,
    end_date: snapshot.endDate,
  });
  if (snapshot.lossReasons.length) {
    params.set('loss_reasons', snapshot.lossReasons.join(','));
  }
  const link = document.createElement('a');
  link.href = `/api/mid-section-defect/export?${params}`;
  link.download = `mid_section_defect_${snapshot.startDate}_to_${snapshot.endDate}.csv`;
  document.body.appendChild(link);
  link.click();
  document.body.removeChild(link);
}
// Armed once, after the first successful query (see loadAnalysis).
let autoRefreshStarted = false;
const { createAbortSignal, startAutoRefresh } = useAutoRefresh({
  onRefresh: () => loadAnalysis(),
  intervalMs: 5 * 60 * 1000, // refresh every 5 minutes
  autoStart: false,
  refreshOnVisible: true,
});
/**
 * Page bootstrap: default the date range, seed committedFilters from the
 * initial UI state, and load the loss-reason filter options.
 */
function initPage() {
  setDefaultDates();
  snapshotFilters();
  loadLossReasons();
}
void initPage();
</script>

View File

@@ -0,0 +1,2 @@
schema: spec-driven
created: 2026-02-10

View File

@@ -0,0 +1,61 @@
## Context
`/mid-section-defect` 頁面已上線,功能完整但缺乏生產環境保護機制。Code review 揭示 6 個問題,依嚴重度分為 P0-P3。現有基礎建設(`try_acquire_lock`、`configured_rate_limit`、`createAbortSignal`)均可直接複用,無需新增框架。
**現有架構**
- Backend: `query_analysis()` (line 82-184) 含 5min Redis 快取 → `query_analysis_detail()` (line 187-224) 呼叫前者取快取結果再分頁
- Frontend: `Promise.all([summary, detail])` 平行載入 → `useAutoRefresh` 5min 自動刷新
- 上游歷史: SQL 查全量 → Python `get_workcenter_group()` 逐行分類 + order 4-11 過濾
## Goals / Non-Goals
**Goals:**
- 消除首次查詢的雙倍 Oracle 管線執行P0
- 保護高成本路由免受暴衝流量P1a
- 確保 UI 篩選變更不會汙染進行中的 API 呼叫P1b
- 新查詢自動取消舊的進行中請求P2a
- 善用 Oracle Server 做 workcenter 分類支援全線歷程追蹤P2b
- 基礎測試覆蓋防止回歸P3
**Non-Goals:**
- 不改變 API response 格式
- 不重構 `query_analysis()` 管線邏輯
- 不加入前端 UI 新功能
- 不處理 `export_csv()` 的串流效能(目前可接受)
- 不做 DuckDB 中間層或背景預計算
## Decisions
### D1: 分散式鎖策略 — Redis SET NX 輪詢等待
**選擇**: 使用既有 `try_acquire_lock()` + 輪詢 `cache_get()` 等待模式。
**替代方案**: (A) Pub/Sub 通知 — 複雜度高,需新增 channel 管理;(B) 前端序列化 — 改 `Promise.all` 為先 summary 再 detail但仍有自動刷新與手動查詢並行問題。
**理由**: 鎖機制在 service 層統一保護所有入口包含未來新路由fail-open 設計確保 Redis 故障不阻塞。輪詢 0.5s 間隔在 5-35s 典型管線執行時間下損耗可忽略。
### D2: Rate limit 預設值 — 依路由成本分級
**選擇**: `/analysis` 6/60s、`/detail` 15/60s、`/export` 3/60s。
**理由**: `/analysis` 冷查詢 35s每分鐘最多 6 次已足夠(含自動刷新)。`/detail` 分頁翻頁頻率高但走快取15 次寬裕。`/export` 觸發全量串流3 次防誤操作。`/loss-reasons` 已有 24h 快取,無需限速。
### D3: 篩選分離 — committedFilters ref 快照
**選擇**: 新增 `committedFilters` ref按「查詢」時從 `filters` reactive 快照。所有 API 函式讀 `committedFilters`
**替代方案**: (A) deep watch + debounce — 會在使用者輸入中途觸發查詢;(B) URL params 持久化 — 此頁面不需要書籤分享功能。
**理由**: 最小改動,與 `resource-history/App.vue``buildQueryString()` 模式一致。`filters` reactive 繼續作為 UI 雙向繫結,`committedFilters` 是「上次查詢使用的參數」。
### D4: AbortController — keyed signal 設計
**選擇**: `'msd-analysis'` key 用於查詢summary + detail page 1 共用),`'msd-detail'` key 用於獨立翻頁。
**理由**: 新查詢取消舊查詢的所有請求(含翻頁中的 detail翻頁取消前一次翻頁但不影響進行中的查詢。與 `wip-detail/App.vue` 相同模式。
### D5: 上游歷史 SQL 端分類 — CASE WHEN 全線保留
**選擇**: SQL CTE 內加 `CASE WHEN``WORKCENTERNAME` 分類為 `WORKCENTER_GROUP`12 組 + NULL fallbackPython 端直接讀取分類結果,不過濾任何站點。
**替代方案**: (A) Oracle 自訂函式 — 需 DBA 部署;(B) 維持 Python 端分類但移除過濾 — 仍有 10K+ 行逐行 regex 開銷。
**理由**: CASE WHEN 在 Oracle 查詢引擎內原生執行,無 row-by-row function call 開銷。分類邏輯與 `workcenter_groups.py` 的 patterns 完全對齊,但需注意 CASE 順序exclude-first: `元件切割``切割` 之前)。
## Risks / Trade-offs
- **[P0 鎖等待超時]** 若管線執行 >90s極大日期範圍等待方可能超時後自行查詢 → 緩解API_TIMEOUT 本身 120s鎖 TTL 120s 會自動釋放,最壞情況退化為當前行為(雙查詢)
- **[P2b SQL 分類與 Python 不一致]** 若 `workcenter_groups.py` 新增/修改 pattern 但忘記同步 SQL → 緩解SQL 的 NULL fallback 確保不會遺失行,僅分類名稱可能為 NULL
- **[Rate limit 誤擋]** 高頻翻頁或自動刷新可能觸發限速 → 緩解:`/detail` 15/60s 已足夠正常翻頁(每 4s 一頁),自動刷新 5min 間隔遠低於 `/analysis` 6/60s 門檻

View File

@@ -0,0 +1,31 @@
## Why
Code review 發現中段製程不良追溯分析(`/mid-section-defect`)有 6 個問題:首次查詢觸發雙倍 Oracle 管線P0、高成本路由無節流P1a、篩選與查詢狀態耦合P1b、無請求取消機制P2a、上游歷史 workcenter 分類在 Python 端逐行計算而非善用 DB ServerP2b、零測試覆蓋P3。需在功能穩定後立即修復防止 DB 過載與前端競態問題。
## What Changes
- **P0 分散式鎖**`query_analysis()` 加入 `try_acquire_lock` / `release_lock` 包裹計算區段,第二個平行請求等待快取而非重跑管線
- **P1a 路由限速**`/analysis`6/60s`/analysis/detail`15/60s`/export`3/60s加入 `configured_rate_limit` decorator
- **P1b 篩選分離**:新增 `committedFilters` ref所有 API 呼叫(翻頁/自動刷新/匯出)讀取已提交的篩選快照
- **P2a 請求取消**`loadAnalysis()``loadDetail()` 加入 `createAbortSignal(key)` keyed abort新查詢自動取消舊請求
- **P2b SQL 端分類**:上游歷史 SQL 加入 `CASE WHEN` workcenter group 分類(全線歷程不排除任何站點),移除 Python 端 `get_workcenter_group()` 逐行呼叫與 order 4-11 過濾
- **P3 測試覆蓋**:新增 `test_mid_section_defect_routes.py`9 個測試)和 `test_mid_section_defect_service.py`9 個測試)
## Capabilities
### New Capabilities
(無新增能力,本次為既有功能的強化修復)
### Modified Capabilities
- `api-safety-hygiene`: 新增 mid-section-defect 3 個路由的 rate limit 與分散式鎖機制
- `vue-vite-page-architecture`: mid-section-defect 前端加入 committedFilters 篩選分離與 AbortController 請求取消
## Impact
- **Backend**: `mid_section_defect_service.py`(分散式鎖 + 移除 Python 端 workcenter 過濾)、`mid_section_defect_routes.py`rate limit`upstream_history.sql`CASE WHEN 分類)
- **Frontend**: `mid-section-defect/App.vue`committedFilters + abort signal
- **Tests**: 2 個新測試檔案(`test_mid_section_defect_routes.py``test_mid_section_defect_service.py`
- **API 行為變更**: 超過限速門檻回傳 429上游歷史回傳含 `WORKCENTER_GROUP` 欄位(但 API response 格式不變,分類邏輯內部調整)
- **無破壞性變更**: API response 結構、快取 key、前端元件介面均不變

View File

@@ -0,0 +1,73 @@
## ADDED Requirements
### Requirement: Mid-section defect analysis endpoints SHALL apply distributed lock to prevent duplicate pipeline execution
The `/api/mid-section-defect/analysis` pipeline SHALL use a Redis distributed lock to prevent concurrent identical queries from executing the full Oracle pipeline in parallel.
#### Scenario: Two parallel requests with cold cache
- **WHEN** two requests with identical parameters arrive simultaneously and no cache exists
- **THEN** the first request SHALL acquire the lock and execute the full pipeline
- **THEN** the second request SHALL wait by polling the cache until the first request completes
- **THEN** only ONE full Oracle pipeline execution SHALL occur
#### Scenario: Lock wait timeout
- **WHEN** a waiting request does not see a cache result within 90 seconds
- **THEN** the request SHALL proceed with its own pipeline execution (fail-open)
#### Scenario: Redis unavailable
- **WHEN** Redis is unavailable during lock acquisition
- **THEN** the lock function SHALL return acquired=true (fail-open)
- **THEN** the request SHALL proceed normally without blocking
#### Scenario: Pipeline exception with lock held
- **WHEN** the pipeline throws an exception while the lock is held
- **THEN** the lock SHALL be released in a finally block
- **THEN** subsequent requests SHALL NOT be blocked by a stale lock
### Requirement: Mid-section defect routes SHALL apply rate limiting
The `/analysis`, `/analysis/detail`, and `/export` endpoints SHALL apply per-client rate limiting using the existing `configured_rate_limit` mechanism.
#### Scenario: Analysis endpoint rate limit exceeded
- **WHEN** a client sends more than 6 requests to `/api/mid-section-defect/analysis` within 60 seconds
- **THEN** the endpoint SHALL return HTTP 429 with a `Retry-After` header
- **THEN** the service function SHALL NOT be called
#### Scenario: Detail endpoint rate limit exceeded
- **WHEN** a client sends more than 15 requests to `/api/mid-section-defect/analysis/detail` within 60 seconds
- **THEN** the endpoint SHALL return HTTP 429 with a `Retry-After` header
#### Scenario: Export endpoint rate limit exceeded
- **WHEN** a client sends more than 3 requests to `/api/mid-section-defect/export` within 60 seconds
- **THEN** the endpoint SHALL return HTTP 429 with a `Retry-After` header
#### Scenario: Loss reasons endpoint not rate limited
- **WHEN** a client sends requests to `/api/mid-section-defect/loss-reasons`
- **THEN** no rate limiting SHALL be applied (endpoint is lightweight with 24h cache)
### Requirement: Mid-section defect upstream history SHALL classify workcenters in SQL
The upstream history SQL query SHALL classify `WORKCENTERNAME` into workcenter groups using Oracle `CASE WHEN` expressions, returning the full production line history without excluding any stations.
#### Scenario: Workcenter group classification in SQL
- **WHEN** the upstream history query executes
- **THEN** each row SHALL include a `WORKCENTER_GROUP` column derived from `CASE WHEN` pattern matching
- **THEN** the classification SHALL match the patterns defined in `workcenter_groups.py`
#### Scenario: Unknown workcenter name
- **WHEN** a `WORKCENTERNAME` does not match any known pattern
- **THEN** `WORKCENTER_GROUP` SHALL be NULL
- **THEN** the row SHALL still be included in the result (not filtered out)
#### Scenario: Full production line retention
- **WHEN** the upstream history is fetched for ancestor CIDs
- **THEN** ALL stations SHALL be included (cutting, welding, mid-section, testing)
- **THEN** no order-based filtering SHALL be applied
### Requirement: Mid-section defect routes and service SHALL have test coverage
Route and service test files SHALL exist and cover core behaviors.
#### Scenario: Route tests exist
- **WHEN** pytest discovers tests
- **THEN** `tests/test_mid_section_defect_routes.py` SHALL contain tests for success, parameter validation (400), service failure (500), and rate limiting (429)
#### Scenario: Service tests exist
- **WHEN** pytest discovers tests
- **THEN** `tests/test_mid_section_defect_service.py` SHALL contain tests for date validation, pagination logic, and loss reasons caching

View File

@@ -0,0 +1,37 @@
## ADDED Requirements
### Requirement: Mid-section defect page SHALL separate filter state from query state
The mid-section defect page SHALL maintain separate reactive state for UI input (`filters`) and committed query parameters (`committedFilters`).
#### Scenario: User changes date without clicking query
- **WHEN** user modifies the date range in the filter bar but does not click "查詢"
- **THEN** auto-refresh, pagination, and CSV export SHALL continue using the previously committed filter values
- **THEN** the new date range SHALL NOT affect any API calls until "查詢" is clicked
#### Scenario: User clicks query button
- **WHEN** user clicks "查詢"
- **THEN** the current `filters` state SHALL be snapshotted into `committedFilters`
- **THEN** all subsequent API calls SHALL use the committed values
#### Scenario: CSV export uses committed filters
- **WHEN** user clicks "匯出 CSV" after modifying filters without re-querying
- **THEN** the export SHALL use the committed filter values from the last query
- **THEN** the export SHALL NOT use the current UI filter values
### Requirement: Mid-section defect page SHALL cancel in-flight requests on new query
The mid-section defect page SHALL use `AbortController` to cancel in-flight API requests when a new query is initiated.
#### Scenario: New query cancels previous query
- **WHEN** user clicks "查詢" while a previous query is still in-flight
- **THEN** the previous query's summary and detail requests SHALL be aborted
- **THEN** the AbortError SHALL be handled silently (no error banner shown)
#### Scenario: Page navigation cancels previous detail request
- **WHEN** user clicks next page while a previous page request is still in-flight
- **THEN** the previous page request SHALL be aborted
- **THEN** the new page request SHALL proceed independently
#### Scenario: Query and pagination use independent abort keys
- **WHEN** a query is in-flight and user triggers pagination
- **THEN** the query SHALL NOT be cancelled by the pagination request
- **THEN** the pagination SHALL use a separate abort key from the query

View File

@@ -0,0 +1,35 @@
## 1. P0 — 分散式鎖防止重複管線執行
- [x] 1.1 在 `mid_section_defect_service.py``query_analysis()` cache miss 後加入 `try_acquire_lock` / `release_lock` 包裹計算區段
- [x] 1.2 實作 lock-or-wait 邏輯:未取得鎖時輪詢 `cache_get()` 每 0.5s,最多 90s超時 fail-open
- [x] 1.3 在 `finally` 區塊確保鎖釋放,取得鎖後再做 double-check cache
## 2. P1a — 高成本路由限速
- [x] 2.1 在 `mid_section_defect_routes.py` import `configured_rate_limit` 並建立 3 個限速器analysis 6/60s、detail 15/60s、export 3/60s
- [x] 2.2 將限速 decorator 套用到 `/analysis``/analysis/detail``/export` 三個路由
## 3. P1b + P2a — 前端篩選分離與請求取消
- [x] 3.1 在 `App.vue` 新增 `committedFilters` ref`handleQuery()` 時從 `filters` 快照
- [x] 3.2 修改 `buildFilterParams()``exportCsv()` 讀取 `committedFilters` 而非 `filters`
- [x] 3.3 `initPage()` 設定預設日期後同步快照到 `committedFilters`
- [x] 3.4 從 `useAutoRefresh` 解構 `createAbortSignal`,在 `loadAnalysis()` 加入 `'msd-analysis'` signal
- [x] 3.5 `loadDetail()` 接受外部 signal 參數,獨立翻頁時使用 `'msd-detail'` key
- [x] 3.6 `loadAnalysis()``loadDetail()` catch 區塊靜默處理 `AbortError`
## 4. P2b — 上游歷史 SQL 端分類
- [x] 4.1 修改 `upstream_history.sql` CTE 加入 `CASE WHEN``WORKCENTERNAME` 分類為 `WORKCENTER_GROUP`12 組 + NULL fallback
- [x] 4.2 確保 CASE 順序正確(`元件切割`/`PKG_SAW``切割` 之前)
- [x] 4.3 修改 `_fetch_upstream_history()` 讀取 SQL 回傳的 `WORKCENTER_GROUP` 欄位,移除 `get_workcenter_group()` 逐行呼叫與 order 4-11 過濾
## 5. P3 — 測試覆蓋
- [x] 5.1 建立 `tests/test_mid_section_defect_routes.py`success、400 參數驗證、500 service 失敗、429 rate limit共 9 個測試)
- [x] 5.2 建立 `tests/test_mid_section_defect_service.py`日期驗證、分頁邏輯、loss reasons 快取(共 9 個測試)
## 6. 驗證
- [x] 6.1 `npm run build` 前端建置通過
- [x] 6.2 `pytest tests/test_mid_section_defect_routes.py tests/test_mid_section_defect_service.py -v` 全部通過

View File

@@ -31,3 +31,75 @@ Boolean query parsing in routes SHALL use shared helper behavior.
- **WHEN** routes parse common boolean query parameters
- **THEN** parsing behavior MUST be consistent across routes via shared utility
### Requirement: Mid-section defect analysis endpoints SHALL apply distributed lock to prevent duplicate pipeline execution
The `/api/mid-section-defect/analysis` pipeline SHALL use a Redis distributed lock to prevent concurrent identical queries from executing the full Oracle pipeline in parallel.
#### Scenario: Two parallel requests with cold cache
- **WHEN** two requests with identical parameters arrive simultaneously and no cache exists
- **THEN** the first request SHALL acquire the lock and execute the full pipeline
- **THEN** the second request SHALL wait by polling the cache until the first request completes
- **THEN** only ONE full Oracle pipeline execution SHALL occur
#### Scenario: Lock wait timeout
- **WHEN** a waiting request does not see a cache result within 90 seconds
- **THEN** the request SHALL proceed with its own pipeline execution (fail-open)
#### Scenario: Redis unavailable
- **WHEN** Redis is unavailable during lock acquisition
- **THEN** the lock function SHALL return acquired=true (fail-open)
- **THEN** the request SHALL proceed normally without blocking
#### Scenario: Pipeline exception with lock held
- **WHEN** the pipeline throws an exception while the lock is held
- **THEN** the lock SHALL be released in a finally block
- **THEN** subsequent requests SHALL NOT be blocked by a stale lock
### Requirement: Mid-section defect routes SHALL apply rate limiting
The `/analysis`, `/analysis/detail`, and `/export` endpoints SHALL apply per-client rate limiting using the existing `configured_rate_limit` mechanism.
#### Scenario: Analysis endpoint rate limit exceeded
- **WHEN** a client sends more than 6 requests to `/api/mid-section-defect/analysis` within 60 seconds
- **THEN** the endpoint SHALL return HTTP 429 with a `Retry-After` header
- **THEN** the service function SHALL NOT be called
#### Scenario: Detail endpoint rate limit exceeded
- **WHEN** a client sends more than 15 requests to `/api/mid-section-defect/analysis/detail` within 60 seconds
- **THEN** the endpoint SHALL return HTTP 429 with a `Retry-After` header
#### Scenario: Export endpoint rate limit exceeded
- **WHEN** a client sends more than 3 requests to `/api/mid-section-defect/export` within 60 seconds
- **THEN** the endpoint SHALL return HTTP 429 with a `Retry-After` header
#### Scenario: Loss reasons endpoint not rate limited
- **WHEN** a client sends requests to `/api/mid-section-defect/loss-reasons`
- **THEN** no rate limiting SHALL be applied (endpoint is lightweight with 24h cache)
### Requirement: Mid-section defect upstream history SHALL classify workcenters in SQL
The upstream history SQL query SHALL classify `WORKCENTERNAME` into workcenter groups using Oracle `CASE WHEN` expressions, returning the full production line history without excluding any stations.
#### Scenario: Workcenter group classification in SQL
- **WHEN** the upstream history query executes
- **THEN** each row SHALL include a `WORKCENTER_GROUP` column derived from `CASE WHEN` pattern matching
- **THEN** the classification SHALL match the patterns defined in `workcenter_groups.py`
#### Scenario: Unknown workcenter name
- **WHEN** a `WORKCENTERNAME` does not match any known pattern
- **THEN** `WORKCENTER_GROUP` SHALL be NULL
- **THEN** the row SHALL still be included in the result (not filtered out)
#### Scenario: Full production line retention
- **WHEN** the upstream history is fetched for ancestor CIDs
- **THEN** ALL stations SHALL be included (cutting, welding, mid-section, testing)
- **THEN** no order-based filtering SHALL be applied
### Requirement: Mid-section defect routes and service SHALL have test coverage
Route and service test files SHALL exist and cover core behaviors.
#### Scenario: Route tests exist
- **WHEN** pytest discovers tests
- **THEN** `tests/test_mid_section_defect_routes.py` SHALL contain tests for success, parameter validation (400), service failure (500), and rate limiting (429)
#### Scenario: Service tests exist
- **WHEN** pytest discovers tests
- **THEN** `tests/test_mid_section_defect_service.py` SHALL contain tests for date validation, pagination logic, and loss reasons caching

View File

@@ -87,3 +87,39 @@ Pages that require server-side parameter validation before serving SHALL validat
- **WHEN** the pure Vite hold-detail page loads
- **THEN** the page SHALL read `reason` from URL parameters
- **THEN** if `reason` is empty or missing, the page SHALL redirect to `/wip-overview`
### Requirement: Mid-section defect page SHALL separate filter state from query state
The mid-section defect page SHALL maintain separate reactive state for UI input (`filters`) and committed query parameters (`committedFilters`).
#### Scenario: User changes date without clicking query
- **WHEN** user modifies the date range in the filter bar but does not click "查詢"
- **THEN** auto-refresh, pagination, and CSV export SHALL continue using the previously committed filter values
- **THEN** the new date range SHALL NOT affect any API calls until "查詢" is clicked
#### Scenario: User clicks query button
- **WHEN** user clicks "查詢"
- **THEN** the current `filters` state SHALL be snapshotted into `committedFilters`
- **THEN** all subsequent API calls SHALL use the committed values
#### Scenario: CSV export uses committed filters
- **WHEN** user clicks "匯出 CSV" after modifying filters without re-querying
- **THEN** the export SHALL use the committed filter values from the last query
- **THEN** the export SHALL NOT use the current UI filter values
### Requirement: Mid-section defect page SHALL cancel in-flight requests on new query
The mid-section defect page SHALL use `AbortController` to cancel in-flight API requests when a new query is initiated.
#### Scenario: New query cancels previous query
- **WHEN** user clicks "查詢" while a previous query is still in-flight
- **THEN** the previous query's summary and detail requests SHALL be aborted
- **THEN** the AbortError SHALL be handled silently (no error banner shown)
#### Scenario: Page navigation cancels previous detail request
- **WHEN** user clicks next page while a previous page request is still in-flight
- **THEN** the previous page request SHALL be aborted
- **THEN** the new page request SHALL proceed independently
#### Scenario: Query and pagination use independent abort keys
- **WHEN** a query is in-flight and user triggers pagination
- **THEN** the query SHALL NOT be cancelled by the pagination request
- **THEN** the pagination SHALL use a separate abort key from the query

View File

@@ -4,24 +4,50 @@
Reverse traceability from TMTT (test) station back to upstream production stations.
"""
from flask import Blueprint, jsonify, request, Response
from mes_dashboard.services.mid_section_defect_service import (
query_analysis,
query_analysis_detail,
query_all_loss_reasons,
export_csv,
from flask import Blueprint, jsonify, request, Response
from mes_dashboard.core.rate_limit import configured_rate_limit
from mes_dashboard.services.mid_section_defect_service import (
query_analysis,
query_analysis_detail,
query_all_loss_reasons,
export_csv,
)
mid_section_defect_bp = Blueprint(
'mid_section_defect',
__name__,
url_prefix='/api/mid-section-defect'
)
@mid_section_defect_bp.route('/analysis', methods=['GET'])
def api_analysis():
mid_section_defect_bp = Blueprint(
'mid_section_defect',
__name__,
url_prefix='/api/mid-section-defect'
)
# Per-client rate limiters for the three high-cost routes.
# Each limiter is tunable via environment variables; the default_*
# values apply only when the corresponding env vars are unset.

# /analysis triggers the full Oracle pipeline on cache miss -- the most
# expensive path -- so it gets the lowest ceiling (6 requests / 60s).
_ANALYSIS_RATE_LIMIT = configured_rate_limit(
    bucket="mid-section-defect-analysis",
    max_attempts_env="MID_SECTION_DEFECT_ANALYSIS_RATE_LIMIT_MAX_REQUESTS",
    window_seconds_env="MID_SECTION_DEFECT_ANALYSIS_RATE_LIMIT_WINDOW_SECONDS",
    default_max_attempts=6,
    default_window_seconds=60,
)
# /analysis/detail serves paginated rows (cheaper, cache-backed); a
# higher ceiling (15 / 60s) still allows one page flip every ~4 seconds.
_DETAIL_RATE_LIMIT = configured_rate_limit(
    bucket="mid-section-defect-analysis-detail",
    max_attempts_env="MID_SECTION_DEFECT_DETAIL_RATE_LIMIT_MAX_REQUESTS",
    window_seconds_env="MID_SECTION_DEFECT_DETAIL_RATE_LIMIT_WINDOW_SECONDS",
    default_max_attempts=15,
    default_window_seconds=60,
)
# /export streams a full CSV of the detail data; exports are rare in
# normal use, so the tightest window (3 / 60s) is applied.
_EXPORT_RATE_LIMIT = configured_rate_limit(
    bucket="mid-section-defect-export",
    max_attempts_env="MID_SECTION_DEFECT_EXPORT_RATE_LIMIT_MAX_REQUESTS",
    window_seconds_env="MID_SECTION_DEFECT_EXPORT_RATE_LIMIT_WINDOW_SECONDS",
    default_max_attempts=3,
    default_window_seconds=60,
)
@mid_section_defect_bp.route('/analysis', methods=['GET'])
@_ANALYSIS_RATE_LIMIT
def api_analysis():
"""API: Get mid-section defect traceability analysis (summary).
Returns kpi, charts, daily_trend, available_loss_reasons, genealogy_status,
@@ -66,8 +92,9 @@ def api_analysis():
return jsonify({'success': True, 'data': summary})
@mid_section_defect_bp.route('/analysis/detail', methods=['GET'])
def api_analysis_detail():
@mid_section_defect_bp.route('/analysis/detail', methods=['GET'])
@_DETAIL_RATE_LIMIT
def api_analysis_detail():
"""API: Get paginated detail table for mid-section defect analysis.
Query Parameters:
@@ -124,8 +151,9 @@ def api_loss_reasons():
return jsonify({'success': True, 'data': result})
@mid_section_defect_bp.route('/export', methods=['GET'])
def api_export():
@mid_section_defect_bp.route('/export', methods=['GET'])
@_EXPORT_RATE_LIMIT
def api_export():
"""API: Export mid-section defect detail data as CSV.
Query Parameters:

View File

@@ -18,32 +18,35 @@ Attribution Method (Sum):
rate = attributed_rejectqty / attributed_trackinqty × 100
"""
import csv
import io
import logging
import math
from collections import defaultdict
from datetime import datetime
from typing import Optional, Dict, List, Any, Set, Tuple, Generator
import pandas as pd
from mes_dashboard.core.database import read_sql_df
from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key
from mes_dashboard.sql import SQLLoader, QueryBuilder
from mes_dashboard.config.workcenter_groups import get_workcenter_group
logger = logging.getLogger('mes_dashboard.mid_section_defect')
import csv
import hashlib
import io
import logging
import math
import time
from collections import defaultdict
from datetime import datetime
from typing import Optional, Dict, List, Any, Set, Tuple, Generator
import pandas as pd
from mes_dashboard.core.database import read_sql_df
from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key
from mes_dashboard.core.redis_client import try_acquire_lock, release_lock
from mes_dashboard.sql import SQLLoader, QueryBuilder
logger = logging.getLogger('mes_dashboard.mid_section_defect')
# Constants
MAX_QUERY_DAYS = 180
CACHE_TTL_TMTT = 300 # 5 min for TMTT detection data
CACHE_TTL_LOSS_REASONS = 86400 # 24h for loss reason list (daily sync)
ORACLE_IN_BATCH_SIZE = 1000 # Oracle IN clause limit
# Mid-section workcenter group order range (成型 through 測試)
MID_SECTION_ORDER_MIN = 4 # 成型
MID_SECTION_ORDER_MAX = 11 # 測試
ORACLE_IN_BATCH_SIZE = 1000 # Oracle IN clause limit
# Distributed lock settings for query_analysis cold-cache path
ANALYSIS_LOCK_TTL_SECONDS = 120
ANALYSIS_LOCK_WAIT_TIMEOUT_SECONDS = 90
ANALYSIS_LOCK_POLL_INTERVAL_SECONDS = 0.5
# Top N for chart display (rest grouped as "其他")
TOP_N = 10
@@ -107,81 +110,113 @@ def query_analysis(
'loss_reasons': sorted(loss_reasons) if loss_reasons else None,
},
)
cached = cache_get(cache_key)
if cached is not None:
return cached
# Stage 1: TMTT detection data
tmtt_df = _fetch_tmtt_data(start_date, end_date)
if tmtt_df is None:
return None
if tmtt_df.empty:
return _empty_result()
# Extract available loss reasons before filtering
available_loss_reasons = sorted(
tmtt_df.loc[tmtt_df['REJECTQTY'] > 0, 'LOSSREASONNAME']
.dropna().unique().tolist()
)
# Apply loss reason filter if specified
if loss_reasons:
filtered_df = tmtt_df[
(tmtt_df['LOSSREASONNAME'].isin(loss_reasons))
| (tmtt_df['REJECTQTY'] == 0)
| (tmtt_df['LOSSREASONNAME'].isna())
].copy()
else:
filtered_df = tmtt_df
# Stage 2: Genealogy resolution (split chain + merge expansion)
tmtt_cids = tmtt_df['CONTAINERID'].unique().tolist()
tmtt_names = {}
for _, r in tmtt_df.drop_duplicates('CONTAINERID').iterrows():
tmtt_names[r['CONTAINERID']] = _safe_str(r.get('CONTAINERNAME'))
ancestors = {}
genealogy_status = 'ready'
if tmtt_cids:
try:
ancestors = _resolve_full_genealogy(tmtt_cids, tmtt_names)
except Exception as exc:
logger.error(f"Genealogy resolution failed: {exc}", exc_info=True)
genealogy_status = 'error'
# Stage 3: Upstream history for ALL CIDs (TMTT lots + ancestors)
all_query_cids = set(tmtt_cids)
for anc_set in ancestors.values():
all_query_cids.update(anc_set)
# Filter out any non-string values (NaN/None from pandas)
all_query_cids = {c for c in all_query_cids if isinstance(c, str) and c}
upstream_by_cid = {}
if all_query_cids:
try:
upstream_by_cid = _fetch_upstream_history(list(all_query_cids))
except Exception as exc:
logger.error(f"Upstream history query failed: {exc}", exc_info=True)
genealogy_status = 'error'
tmtt_data = _build_tmtt_lookup(filtered_df)
attribution = _attribute_defects(
tmtt_data, ancestors, upstream_by_cid, loss_reasons,
)
result = {
'kpi': _build_kpi(filtered_df, attribution, loss_reasons),
'available_loss_reasons': available_loss_reasons,
'charts': _build_all_charts(attribution, tmtt_data),
'daily_trend': _build_daily_trend(filtered_df, loss_reasons),
'detail': _build_detail_table(filtered_df, ancestors, upstream_by_cid),
'genealogy_status': genealogy_status,
}
# Only cache successful results (don't cache upstream errors)
if genealogy_status == 'ready':
cache_set(cache_key, result, ttl=CACHE_TTL_TMTT)
return result
cached = cache_get(cache_key)
if cached is not None:
return cached
lock_name = (
f"mid_section_defect:analysis:{hashlib.md5(cache_key.encode('utf-8')).hexdigest()}"
)
lock_acquired = False
# Prevent duplicate cold-cache pipeline execution across workers.
lock_acquired = try_acquire_lock(lock_name, ttl_seconds=ANALYSIS_LOCK_TTL_SECONDS)
if not lock_acquired:
wait_start = time.monotonic()
while (
time.monotonic() - wait_start
< ANALYSIS_LOCK_WAIT_TIMEOUT_SECONDS
):
cached = cache_get(cache_key)
if cached is not None:
return cached
time.sleep(ANALYSIS_LOCK_POLL_INTERVAL_SECONDS)
logger.warning(
"Timed out waiting for in-flight mid_section_defect analysis cache; "
"continuing with fail-open pipeline execution"
)
else:
# Double-check cache after lock acquisition.
cached = cache_get(cache_key)
if cached is not None:
return cached
try:
# Stage 1: TMTT detection data
tmtt_df = _fetch_tmtt_data(start_date, end_date)
if tmtt_df is None:
return None
if tmtt_df.empty:
return _empty_result()
# Extract available loss reasons before filtering
available_loss_reasons = sorted(
tmtt_df.loc[tmtt_df['REJECTQTY'] > 0, 'LOSSREASONNAME']
.dropna().unique().tolist()
)
# Apply loss reason filter if specified
if loss_reasons:
filtered_df = tmtt_df[
(tmtt_df['LOSSREASONNAME'].isin(loss_reasons))
| (tmtt_df['REJECTQTY'] == 0)
| (tmtt_df['LOSSREASONNAME'].isna())
].copy()
else:
filtered_df = tmtt_df
# Stage 2: Genealogy resolution (split chain + merge expansion)
tmtt_cids = tmtt_df['CONTAINERID'].unique().tolist()
tmtt_names = {}
for _, r in tmtt_df.drop_duplicates('CONTAINERID').iterrows():
tmtt_names[r['CONTAINERID']] = _safe_str(r.get('CONTAINERNAME'))
ancestors = {}
genealogy_status = 'ready'
if tmtt_cids:
try:
ancestors = _resolve_full_genealogy(tmtt_cids, tmtt_names)
except Exception as exc:
logger.error(f"Genealogy resolution failed: {exc}", exc_info=True)
genealogy_status = 'error'
# Stage 3: Upstream history for ALL CIDs (TMTT lots + ancestors)
all_query_cids = set(tmtt_cids)
for anc_set in ancestors.values():
all_query_cids.update(anc_set)
# Filter out any non-string values (NaN/None from pandas)
all_query_cids = {c for c in all_query_cids if isinstance(c, str) and c}
upstream_by_cid = {}
if all_query_cids:
try:
upstream_by_cid = _fetch_upstream_history(list(all_query_cids))
except Exception as exc:
logger.error(f"Upstream history query failed: {exc}", exc_info=True)
genealogy_status = 'error'
tmtt_data = _build_tmtt_lookup(filtered_df)
attribution = _attribute_defects(
tmtt_data, ancestors, upstream_by_cid, loss_reasons,
)
result = {
'kpi': _build_kpi(filtered_df, attribution, loss_reasons),
'available_loss_reasons': available_loss_reasons,
'charts': _build_all_charts(attribution, tmtt_data),
'daily_trend': _build_daily_trend(filtered_df, loss_reasons),
'detail': _build_detail_table(filtered_df, ancestors, upstream_by_cid),
'genealogy_status': genealogy_status,
}
# Only cache successful results (don't cache upstream errors)
if genealogy_status == 'ready':
cache_set(cache_key, result, ttl=CACHE_TTL_TMTT)
return result
finally:
if lock_acquired:
release_lock(lock_name)
def query_analysis_detail(
@@ -600,13 +635,13 @@ def _log_genealogy_summary(
# Query 3: Upstream Production History
# ============================================================
def _fetch_upstream_history(
all_cids: List[str],
) -> Dict[str, List[Dict[str, Any]]]:
def _fetch_upstream_history(
all_cids: List[str],
) -> Dict[str, List[Dict[str, Any]]]:
"""Fetch upstream production history for ancestor CONTAINERIDs.
Batches queries to respect Oracle IN clause limit.
Filters by mid-section workcenter groups (order 4-11) in Python.
Batches queries to respect Oracle IN clause limit.
WORKCENTER_GROUP classification is computed in SQL (CASE WHEN).
Returns:
{containerid: [{'workcenter_group': ..., 'equipment_name': ..., ...}, ...]}
@@ -646,28 +681,24 @@ def _fetch_upstream_history(
combined = pd.concat(all_rows, ignore_index=True)
# Filter by mid-section workcenter groups in Python
result: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
for _, row in combined.iterrows():
wc_name = row.get('WORKCENTERNAME', '')
group_name, order = get_workcenter_group(wc_name)
if group_name is None or order < MID_SECTION_ORDER_MIN or order > MID_SECTION_ORDER_MAX:
continue
cid = row['CONTAINERID']
result[cid].append({
'workcenter_group': group_name,
'workcenter_group_order': order,
'equipment_id': _safe_str(row.get('EQUIPMENTID')),
'equipment_name': _safe_str(row.get('EQUIPMENTNAME')),
'spec_name': _safe_str(row.get('SPECNAME')),
'track_in_time': _safe_str(row.get('TRACKINTIMESTAMP')),
})
logger.info(
f"Upstream history: {len(result)} lots with mid-section records, "
f"from {len(unique_cids)} queried CIDs"
)
result: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
for _, row in combined.iterrows():
cid = row['CONTAINERID']
group_name = _safe_str(row.get('WORKCENTER_GROUP'))
if not group_name:
group_name = '(未知)'
result[cid].append({
'workcenter_group': group_name,
'equipment_id': _safe_str(row.get('EQUIPMENTID')),
'equipment_name': _safe_str(row.get('EQUIPMENTNAME')),
'spec_name': _safe_str(row.get('SPECNAME')),
'track_in_time': _safe_str(row.get('TRACKINTIMESTAMP')),
})
logger.info(
f"Upstream history: {len(result)} lots with classified records, "
f"from {len(unique_cids)} queried CIDs"
)
return dict(result)

View File

@@ -11,14 +11,44 @@
-- CONTAINERID has index. Batch IN clause (up to 1000 per query).
-- Estimated 1-5s per batch.
--
WITH ranked_history AS (
SELECT
h.CONTAINERID,
h.WORKCENTERNAME,
h.EQUIPMENTID,
h.EQUIPMENTNAME,
h.SPECNAME,
h.TRACKINTIMESTAMP,
WITH ranked_history AS (
SELECT
h.CONTAINERID,
h.WORKCENTERNAME,
CASE
WHEN UPPER(h.WORKCENTERNAME) LIKE '%元件切割%'
OR UPPER(h.WORKCENTERNAME) LIKE '%PKG_SAW%' THEN '元件切割'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%切割%' THEN '切割'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%焊接_DB%'
OR UPPER(h.WORKCENTERNAME) LIKE '%焊_DB_料%'
OR UPPER(h.WORKCENTERNAME) LIKE '%焊_DB%' THEN '焊接_DB'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%焊接_WB%'
OR UPPER(h.WORKCENTERNAME) LIKE '%焊_WB_料%'
OR UPPER(h.WORKCENTERNAME) LIKE '%焊_WB%' THEN '焊接_WB'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%焊接_DW%'
OR UPPER(h.WORKCENTERNAME) LIKE '%焊_DW%'
OR UPPER(h.WORKCENTERNAME) LIKE '%焊_DW_料%' THEN '焊接_DW'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%成型%'
OR UPPER(h.WORKCENTERNAME) LIKE '%成型_料%' THEN '成型'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%去膠%' THEN '去膠'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%水吹砂%' THEN '水吹砂'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%掛鍍%'
OR UPPER(h.WORKCENTERNAME) LIKE '%滾鍍%'
OR UPPER(h.WORKCENTERNAME) LIKE '%條鍍%'
OR UPPER(h.WORKCENTERNAME) LIKE '%電鍍%'
OR UPPER(h.WORKCENTERNAME) LIKE '%補鍍%'
OR UPPER(h.WORKCENTERNAME) LIKE '%TOTAI%'
OR UPPER(h.WORKCENTERNAME) LIKE '%BANDL%' THEN '電鍍'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%移印%' THEN '移印'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%切彎腳%' THEN '切彎腳'
WHEN UPPER(h.WORKCENTERNAME) LIKE '%TMTT%'
OR UPPER(h.WORKCENTERNAME) LIKE '%測試%' THEN '測試'
ELSE NULL
END AS WORKCENTER_GROUP,
h.EQUIPMENTID,
h.EQUIPMENTNAME,
h.SPECNAME,
h.TRACKINTIMESTAMP,
ROW_NUMBER() OVER (
PARTITION BY h.CONTAINERID, h.WORKCENTERNAME, h.EQUIPMENTNAME
ORDER BY h.TRACKINTIMESTAMP DESC
@@ -28,13 +58,14 @@ WITH ranked_history AS (
AND h.EQUIPMENTID IS NOT NULL
AND h.TRACKINTIMESTAMP IS NOT NULL
)
SELECT
CONTAINERID,
WORKCENTERNAME,
EQUIPMENTID,
EQUIPMENTNAME,
SPECNAME,
TRACKINTIMESTAMP
SELECT
CONTAINERID,
WORKCENTERNAME,
WORKCENTER_GROUP,
EQUIPMENTID,
EQUIPMENTNAME,
SPECNAME,
TRACKINTIMESTAMP
FROM ranked_history
WHERE rn = 1
ORDER BY CONTAINERID, TRACKINTIMESTAMP

View File

@@ -125,6 +125,41 @@
overflow-y: auto;
}
/* Collapsible group header row shown above each workcenter's equipment
   list in the dropdown; whole row is clickable to toggle the group. */
.workcenter-group-header {
    display: flex;
    align-items: center;
    gap: 8px;
    padding: 8px 15px;
    background: #f0f0f0;
    font-weight: 600;
    font-size: 12px;
    color: #666;
    cursor: pointer;
    user-select: none; /* avoid text selection on rapid toggling */
}
.workcenter-group-header:hover {
    background: #e8e8e8;
}
.workcenter-group-header input[type="checkbox"] {
    cursor: pointer;
}
/* Dimmed checkbox stands in for a true indeterminate state when only
   part of the group is selected (set via a CSS class, not the DOM
   `indeterminate` property). */
.workcenter-group-header input[type="checkbox"].indeterminate {
    opacity: 0.6;
}
/* Group label expands to push the selection count to the right edge. */
.workcenter-group-name {
    flex: 1;
}
/* "selected/total" counter, e.g. "3/12". */
.workcenter-group-count {
    font-weight: 400;
    font-size: 11px;
    color: #999;
}
.equipment-item {
display: flex;
align-items: center;
@@ -460,7 +495,7 @@
<!-- Actions -->
<div class="filter-actions">
<button class="btn btn-primary" onclick="queryJobs()" id="queryBtn">
查詢工單
查詢
</button>
<button class="btn btn-success" onclick="exportCsv()" id="exportBtn" disabled>
匯出 CSV
@@ -471,19 +506,19 @@
<!-- Result Section -->
<div class="result-section" id="resultSection">
<div class="empty-state">
<p>請選擇設備和日期範圍後,點擊「查詢工單</p>
<p>請選擇設備和日期範圍後,點擊「查詢」</p>
</div>
</div>
</div>
</div>
{% endblock %}
{% block scripts %}
{% set job_query_js = frontend_asset('job-query.js') %}
{% if job_query_js %}
<script type="module" src="{{ job_query_js }}"></script>
{% else %}
<script>
{% block scripts %}
{% set job_query_js = frontend_asset('job-query.js') %}
{% if job_query_js %}
<script type="module" src="{{ job_query_js }}"></script>
{% else %}
<script>
// State
let allEquipments = [];
let selectedEquipments = new Set();
@@ -531,26 +566,39 @@
}
let html = '';
let currentWorkcenter = null;
// Group by workcenter
const grouped = {};
equipments.forEach(eq => {
const isSelected = selectedEquipments.has(eq.RESOURCEID);
const wc = eq.WORKCENTERNAME || '未分類';
if (!grouped[wc]) grouped[wc] = [];
grouped[wc].push(eq);
});
const workcenters = Object.keys(grouped).sort();
// Group header
if (eq.WORKCENTERNAME !== currentWorkcenter) {
currentWorkcenter = eq.WORKCENTERNAME;
html += `<div style="padding: 8px 15px; background: #f0f0f0; font-weight: 600; font-size: 12px; color: #666;">${currentWorkcenter || '未分類'}</div>`;
}
html += `
<div class="equipment-item ${isSelected ? 'selected' : ''}" onclick="toggleEquipment('${eq.RESOURCEID}')">
<input type="checkbox" ${isSelected ? 'checked' : ''} onclick="event.stopPropagation(); toggleEquipment('${eq.RESOURCEID}')">
<div class="equipment-info">
<div class="equipment-name">${eq.RESOURCENAME}</div>
<div class="equipment-workcenter">${eq.RESOURCEFAMILYNAME || ''}</div>
workcenters.forEach(workcenterName => {
const groupEquipments = grouped[workcenterName];
const groupIds = groupEquipments.map(eq => eq.RESOURCEID);
const selectedInGroup = groupIds.filter(id => selectedEquipments.has(id)).length;
const allSelected = selectedInGroup === groupIds.length;
const someSelected = selectedInGroup > 0 && !allSelected;
html += `<div class="workcenter-group-header" onclick="toggleWorkcenterGroup('${workcenterName}')">
<input type="checkbox" ${allSelected ? 'checked' : ''} ${someSelected ? 'class="indeterminate"' : ''} onclick="event.stopPropagation(); toggleWorkcenterGroup('${workcenterName}')">
<span class="workcenter-group-name">${workcenterName}</span>
<span class="workcenter-group-count">${selectedInGroup}/${groupIds.length}</span>
</div>`;
groupEquipments.forEach(eq => {
const isSelected = selectedEquipments.has(eq.RESOURCEID);
html += `
<div class="equipment-item ${isSelected ? 'selected' : ''}" onclick="toggleEquipment('${eq.RESOURCEID}')">
<input type="checkbox" ${isSelected ? 'checked' : ''} onclick="event.stopPropagation(); toggleEquipment('${eq.RESOURCEID}')">
<div class="equipment-info">
<div class="equipment-name">${eq.RESOURCENAME}</div>
<div class="equipment-workcenter">${eq.RESOURCEFAMILYNAME || ''}</div>
</div>
</div>
</div>
`;
`;
});
});
container.innerHTML = html;
@@ -590,6 +638,30 @@
}));
}
// Toggle entire workcenter group selection.
// If every equipment in the group is already selected, deselect the whole
// group; otherwise select all of it. Then refresh the selection display and
// re-render the list, honouring the current search-box filter.
function toggleWorkcenterGroup(workcenterName) {
  const groupIds = allEquipments
    .filter((eq) => (eq.WORKCENTERNAME || '未分類') === workcenterName)
    .map((eq) => eq.RESOURCEID);
  const allSelected = groupIds.every((id) => selectedEquipments.has(id));
  for (const id of groupIds) {
    if (allSelected) {
      selectedEquipments.delete(id);
    } else {
      selectedEquipments.add(id);
    }
  }
  updateSelectedDisplay();
  // Hoist the DOM lookup and query normalisation out of the filter callback:
  // the original re-ran document.querySelector('.equipment-search') and
  // toLowerCase() once per equipment row.
  const search = document.querySelector('.equipment-search');
  const q = search && search.value ? search.value.toLowerCase() : null;
  renderEquipmentList(allEquipments.filter((eq) => {
    if (q === null) return true; // no active search: show everything
    return (eq.RESOURCENAME && eq.RESOURCENAME.toLowerCase().includes(q)) ||
           (eq.WORKCENTERNAME && eq.WORKCENTERNAME.toLowerCase().includes(q));
  }));
}
// Update selected display
function updateSelectedDisplay() {
const display = document.getElementById('equipmentDisplay');
@@ -918,6 +990,6 @@
if (!dateStr) return '';
return dateStr.replace('T', ' ').substring(0, 19);
}
</script>
{% endif %}
{% endblock %}
</script>
{% endif %}
{% endblock %}

View File

@@ -0,0 +1,162 @@
# -*- coding: utf-8 -*-
"""Route tests for mid-section defect APIs."""
from __future__ import annotations
from unittest.mock import patch
import mes_dashboard.core.database as db
from mes_dashboard.app import create_app
from mes_dashboard.core.rate_limit import reset_rate_limits_for_tests
def _client():
    """Build a fresh Flask test client with the cached DB engine cleared."""
    # Drop any engine left over from a previous test so create_app starts clean.
    db._ENGINE = None
    application = create_app('testing')
    application.config['TESTING'] = True
    return application.test_client()
def setup_function():
    """Reset rate-limit counters so each test starts with a clean limiter."""
    reset_rate_limits_for_tests()


def teardown_function():
    """Reset rate-limit counters so this test cannot leak into the next one."""
    reset_rate_limits_for_tests()
@patch('mes_dashboard.routes.mid_section_defect_routes.query_analysis')
def test_analysis_success(mock_query_analysis):
    """A valid analysis request returns 200 with KPI data and a detail count."""
    mock_query_analysis.return_value = {
        'kpi': {'total_input': 100},
        'charts': {'by_station': []},
        'daily_trend': [],
        'available_loss_reasons': ['A'],
        'genealogy_status': 'ready',
        'detail': [{}, {}],
    }
    resp = _client().get(
        '/api/mid-section-defect/analysis?start_date=2025-01-01&end_date=2025-01-31&loss_reasons=A,B'
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body['success'] is True
    # detail_total_count is derived from the two stub detail rows above.
    assert body['data']['detail_total_count'] == 2
    assert body['data']['kpi']['total_input'] == 100
    # The comma-separated loss_reasons param is split before the service call.
    mock_query_analysis.assert_called_once_with('2025-01-01', '2025-01-31', ['A', 'B'])
def test_analysis_missing_dates_returns_400():
    """Omitting end_date is rejected with a 400 validation error."""
    resp = _client().get('/api/mid-section-defect/analysis?start_date=2025-01-01')
    assert resp.status_code == 400
    assert resp.get_json()['success'] is False
@patch('mes_dashboard.routes.mid_section_defect_routes.query_analysis')
def test_analysis_service_failure_returns_500(mock_query_analysis):
    """A None result from the service layer maps to a 500 response."""
    mock_query_analysis.return_value = None
    resp = _client().get(
        '/api/mid-section-defect/analysis?start_date=2025-01-01&end_date=2025-01-31'
    )
    assert resp.status_code == 500
    assert resp.get_json()['success'] is False
@patch('mes_dashboard.routes.mid_section_defect_routes.query_analysis')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 7))
def test_analysis_rate_limited_returns_429(_mock_rate_limit, mock_query_analysis):
    """When the limiter trips, the route answers 429 + Retry-After and skips the service."""
    resp = _client().get(
        '/api/mid-section-defect/analysis?start_date=2025-01-01&end_date=2025-01-31'
    )
    assert resp.status_code == 429
    # Retry-After carries the seconds value supplied by the limiter stub.
    assert resp.headers.get('Retry-After') == '7'
    assert resp.get_json()['error']['code'] == 'TOO_MANY_REQUESTS'
    mock_query_analysis.assert_not_called()
@patch('mes_dashboard.routes.mid_section_defect_routes.query_analysis_detail')
def test_detail_success(mock_query_detail):
    """A paged detail request returns 200 and forwards paging args to the service."""
    mock_query_detail.return_value = {
        'detail': [{'CONTAINERNAME': 'LOT-1'}],
        'pagination': {'page': 2, 'page_size': 200, 'total_count': 350, 'total_pages': 2},
    }
    resp = _client().get(
        '/api/mid-section-defect/analysis/detail?start_date=2025-01-01&end_date=2025-01-31&page=2&page_size=200'
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body['success'] is True
    assert body['data']['pagination']['page'] == 2
    # No loss_reasons param -> None positional; paging is passed by keyword.
    mock_query_detail.assert_called_once_with(
        '2025-01-01',
        '2025-01-31',
        None,
        page=2,
        page_size=200,
    )
def test_detail_missing_dates_returns_400():
    """A detail request without start_date is rejected with 400."""
    resp = _client().get('/api/mid-section-defect/analysis/detail?end_date=2025-01-31')
    assert resp.status_code == 400
    assert resp.get_json()['success'] is False
@patch('mes_dashboard.routes.mid_section_defect_routes.query_analysis_detail')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 5))
def test_detail_rate_limited_returns_429(_mock_rate_limit, mock_query_detail):
    """Rate-limited detail requests get 429 + Retry-After without hitting the service."""
    resp = _client().get(
        '/api/mid-section-defect/analysis/detail?start_date=2025-01-01&end_date=2025-01-31'
    )
    assert resp.status_code == 429
    assert resp.headers.get('Retry-After') == '5'
    assert resp.get_json()['error']['code'] == 'TOO_MANY_REQUESTS'
    mock_query_detail.assert_not_called()
@patch('mes_dashboard.routes.mid_section_defect_routes.export_csv')
def test_export_success(mock_export_csv):
    """CSV export streams text/csv as an attachment with split loss reasons."""
    # The service yields chunks; the first is a UTF-8 BOM.
    mock_export_csv.return_value = iter([
        '\ufeff',
        'LOT ID,TYPE\r\n',
        'A001,T1\r\n',
    ])
    resp = _client().get(
        '/api/mid-section-defect/export?start_date=2025-01-01&end_date=2025-01-31&loss_reasons=A,B'
    )
    assert resp.status_code == 200
    assert 'text/csv' in resp.content_type
    assert 'attachment;' in resp.headers.get('Content-Disposition', '')
    mock_export_csv.assert_called_once_with('2025-01-01', '2025-01-31', ['A', 'B'])
@patch('mes_dashboard.routes.mid_section_defect_routes.export_csv')
@patch('mes_dashboard.core.rate_limit.check_and_record', return_value=(True, 9))
def test_export_rate_limited_returns_429(_mock_rate_limit, mock_export_csv):
    """Rate-limited export requests get 429 + Retry-After and never build a CSV."""
    resp = _client().get(
        '/api/mid-section-defect/export?start_date=2025-01-01&end_date=2025-01-31'
    )
    assert resp.status_code == 429
    assert resp.headers.get('Retry-After') == '9'
    assert resp.get_json()['error']['code'] == 'TOO_MANY_REQUESTS'
    mock_export_csv.assert_not_called()

View File

@@ -0,0 +1,128 @@
# -*- coding: utf-8 -*-
"""Service tests for mid-section defect analysis."""
from __future__ import annotations
from unittest.mock import patch
import pandas as pd
from mes_dashboard.services.mid_section_defect_service import (
query_analysis,
query_analysis_detail,
query_all_loss_reasons,
)
def test_query_analysis_invalid_date_format_returns_error():
    """Dates not in YYYY-MM-DD form are rejected with a format hint."""
    outcome = query_analysis('2025/01/01', '2025-01-31')
    assert 'error' in outcome
    assert 'YYYY-MM-DD' in outcome['error']


def test_query_analysis_start_after_end_returns_error():
    """A start date later than the end date is rejected."""
    outcome = query_analysis('2025-02-01', '2025-01-31')
    assert 'error' in outcome
    assert '起始日期不能晚於結束日期' in outcome['error']


def test_query_analysis_exceeds_max_days_returns_error():
    """Ranges longer than the 180-day cap are rejected."""
    outcome = query_analysis('2025-01-01', '2025-12-31')
    assert 'error' in outcome
    assert '180' in outcome['error']
@patch('mes_dashboard.services.mid_section_defect_service.query_analysis')
def test_query_analysis_detail_returns_sorted_first_page(mock_query_analysis):
    """Detail rows come back ordered by DEFECT_RATE descending, cut to page one."""
    mock_query_analysis.return_value = {
        'detail': [
            {'CONTAINERNAME': 'C', 'DEFECT_RATE': 0.3},
            {'CONTAINERNAME': 'A', 'DEFECT_RATE': 5.2},
            {'CONTAINERNAME': 'B', 'DEFECT_RATE': 3.1},
        ]
    }
    paged = query_analysis_detail('2025-01-01', '2025-01-31', page=1, page_size=2)
    # Highest defect rates first; only page_size rows are returned.
    assert [row['CONTAINERNAME'] for row in paged['detail']] == ['A', 'B']
    assert paged['pagination'] == {
        'page': 1,
        'page_size': 2,
        'total_count': 3,
        'total_pages': 2,
    }
@patch('mes_dashboard.services.mid_section_defect_service.query_analysis')
def test_query_analysis_detail_clamps_page_to_last_page(mock_query_analysis):
    """Requesting a page past the end is clamped to the final (partial) page."""
    mock_query_analysis.return_value = {
        'detail': [
            {'CONTAINERNAME': 'A', 'DEFECT_RATE': 9.9},
            {'CONTAINERNAME': 'B', 'DEFECT_RATE': 8.8},
            {'CONTAINERNAME': 'C', 'DEFECT_RATE': 7.7},
        ]
    }
    paged = query_analysis_detail('2025-01-01', '2025-01-31', page=10, page_size=2)
    assert paged['pagination']['page'] == 2
    assert paged['pagination']['total_pages'] == 2
    # Three rows at page_size=2 leave one row on the last page.
    assert len(paged['detail']) == 1
    assert paged['detail'][0]['CONTAINERNAME'] == 'C'
@patch('mes_dashboard.services.mid_section_defect_service.query_analysis')
def test_query_analysis_detail_returns_error_passthrough(mock_query_analysis):
    """Validation errors from query_analysis are returned to the caller unchanged."""
    mock_query_analysis.return_value = {'error': '日期格式無效'}
    outcome = query_analysis_detail('2025-01-01', '2025-01-31', page=1, page_size=200)
    assert outcome == {'error': '日期格式無效'}
@patch('mes_dashboard.services.mid_section_defect_service.query_analysis')
def test_query_analysis_detail_returns_none_on_service_failure(mock_query_analysis):
    """A None result from query_analysis propagates as None."""
    mock_query_analysis.return_value = None
    assert query_analysis_detail('2025-01-01', '2025-01-31', page=1, page_size=200) is None
@patch('mes_dashboard.services.mid_section_defect_service.cache_get')
@patch('mes_dashboard.services.mid_section_defect_service.read_sql_df')
def test_query_all_loss_reasons_cache_hit_skips_query(mock_read_sql_df, mock_cache_get):
    """A cache hit returns the cached payload and never touches the database."""
    cached = {'loss_reasons': ['Cached_A', 'Cached_B']}
    mock_cache_get.return_value = cached
    assert query_all_loss_reasons() == cached
    mock_read_sql_df.assert_not_called()
@patch('mes_dashboard.services.mid_section_defect_service.cache_get', return_value=None)
@patch('mes_dashboard.services.mid_section_defect_service.cache_set')
@patch('mes_dashboard.services.mid_section_defect_service.read_sql_df')
@patch('mes_dashboard.services.mid_section_defect_service.SQLLoader.load')
def test_query_all_loss_reasons_cache_miss_queries_and_caches_sorted_values(
    mock_sql_load,
    mock_read_sql_df,
    mock_cache_set,
    _mock_cache_get,
):
    """On a cache miss the DB result is deduped, null-dropped, sorted and cached."""
    mock_sql_load.return_value = 'SELECT ...'
    mock_read_sql_df.return_value = pd.DataFrame(
        {'LOSSREASONNAME': ['B_REASON', None, 'A_REASON', 'B_REASON']}
    )
    outcome = query_all_loss_reasons()
    # Duplicate and None entries are removed; values come back sorted.
    assert outcome == {'loss_reasons': ['A_REASON', 'B_REASON']}
    # ttl=86400 -> cached for one day under the parameterless cache key.
    mock_cache_set.assert_called_once_with(
        'mid_section_loss_reasons:None:',
        {'loss_reasons': ['A_REASON', 'B_REASON']},
        ttl=86400,
    )