fix(query-tool): finalize raw-material tab/export and resolve ORA-00918

This commit is contained in:
egg
2026-02-22 17:59:27 +08:00
parent 9890586191
commit 97872cca97
11 changed files with 238 additions and 54 deletions

View File

@@ -16,9 +16,52 @@ const props = defineProps({
type: String, type: String,
default: '無資料', default: '無資料',
}, },
hiddenColumns: {
type: Array,
default: () => [],
},
columnLabels: {
type: Object,
default: () => ({}),
},
columnOrder: {
type: Array,
default: () => [],
},
}); });
const columns = computed(() => Object.keys(props.rows[0] || {})); const columns = computed(() => {
const baseColumns = Object.keys(props.rows[0] || {});
if (baseColumns.length === 0) {
return [];
}
const hidden = new Set((props.hiddenColumns || []).map((value) => String(value)));
const visible = baseColumns.filter((column) => !hidden.has(column));
if (!Array.isArray(props.columnOrder) || props.columnOrder.length === 0) {
return visible;
}
const ordered = [];
const seen = new Set();
props.columnOrder.forEach((column) => {
if (visible.includes(column) && !seen.has(column)) {
ordered.push(column);
seen.add(column);
}
});
visible.forEach((column) => {
if (!seen.has(column)) {
ordered.push(column);
}
});
return ordered;
});
/**
 * Resolve the display label for a table column.
 * Prefers a configured override from `props.columnLabels`; any falsy
 * override (missing key or empty string) falls back to the raw column key.
 */
function resolveColumnLabel(column) {
  const override = props.columnLabels?.[column];
  return override || column;
}
</script> </script>
<template> <template>
@@ -40,7 +83,7 @@ const columns = computed(() => Object.keys(props.rows[0] || {}));
:key="column" :key="column"
class="whitespace-nowrap border-b border-stroke-soft px-2 py-1.5 text-left font-semibold" class="whitespace-nowrap border-b border-stroke-soft px-2 py-1.5 text-left font-semibold"
> >
{{ column }} {{ resolveColumnLabel(column) }}
</th> </th>
</tr> </tr>
</thead> </thead>

View File

@@ -69,7 +69,7 @@ const emit = defineEmits(['change-sub-tab', 'update-workcenter-groups', 'export-
const tabMeta = Object.freeze({ const tabMeta = Object.freeze({
history: { label: '歷程', emptyText: '無歷程資料' }, history: { label: '歷程', emptyText: '無歷程資料' },
materials: { label: '物料', emptyText: '無物料資料' }, materials: { label: '物料', emptyText: '無物料資料' },
rejects: { label: '報廢', emptyText: '無報廢資料' }, rejects: { label: '報廢', emptyText: '無報廢資料' },
holds: { label: 'Hold', emptyText: '無 Hold 資料' }, holds: { label: 'Hold', emptyText: '無 Hold 資料' },
jobs: { label: 'Job', emptyText: '無 Job 資料' }, jobs: { label: 'Job', emptyText: '無 Job 資料' },
@@ -104,6 +104,39 @@ const activeEmptyText = computed(() => {
return tabMeta[props.activeSubTab]?.emptyText || '無資料'; return tabMeta[props.activeSubTab]?.emptyText || '無資料';
}); });
/** True while the materials sub-tab is active — the only tab with column overrides. */
const isMaterialsTab = () => props.activeSubTab === 'materials';

/** Column-key → display-label overrides for the active sub-tab. */
const activeColumnLabels = computed(() =>
  isMaterialsTab() ? { CONTAINERNAME: 'LOT ID' } : {},
);

/** Columns hidden from the active sub-tab's table (raw id is internal-only). */
const activeHiddenColumns = computed(() =>
  isMaterialsTab() ? ['CONTAINERID'] : [],
);

/** Preferred column ordering for the active sub-tab's table. */
const activeColumnOrder = computed(() =>
  isMaterialsTab()
    ? [
        'CONTAINERNAME',
        'MATERIALPARTNAME',
        'MATERIALLOTNAME',
        'QTYCONSUMED',
        'WORKCENTERNAME',
        'SPECNAME',
        'EQUIPMENTNAME',
        'TXNDATE',
        'WORKCENTER_GROUP',
      ]
    : [],
);
const canExport = computed(() => { const canExport = computed(() => {
return !activeLoading.value && activeRows.value.length > 0; return !activeLoading.value && activeRows.value.length > 0;
}); });
@@ -194,6 +227,9 @@ const detailCountLabel = computed(() => {
:rows="activeRows" :rows="activeRows"
:loading="activeLoading" :loading="activeLoading"
:empty-text="activeLoaded ? activeEmptyText : '尚未查詢此分頁資料'" :empty-text="activeLoaded ? activeEmptyText : '尚未查詢此分頁資料'"
:column-labels="activeColumnLabels"
:hidden-columns="activeHiddenColumns"
:column-order="activeColumnOrder"
/> />
</template> </template>
</section> </section>

View File

@@ -348,7 +348,7 @@ def export_csv():
return Response( return Response(
csv_content, csv_content,
mimetype='text/csv; charset=utf-8', mimetype='text/csv; charset=utf-8-sig',
headers={ headers={
'Content-Disposition': 'attachment; filename=query_result.csv' 'Content-Disposition': 'attachment; filename=query_result.csv'
} }

View File

@@ -190,10 +190,10 @@ def export_jobs():
return jsonify({'error': validation_error}), 400 return jsonify({'error': validation_error}), 400
# Stream CSV response # Stream CSV response
return Response( return Response(
export_jobs_with_history(resource_ids, start_date, end_date), export_jobs_with_history(resource_ids, start_date, end_date),
mimetype='text/csv; charset=utf-8', mimetype='text/csv; charset=utf-8-sig',
headers={ headers={
'Content-Disposition': 'attachment; filename=job_history_export.csv' 'Content-Disposition': 'attachment; filename=job_history_export.csv'
} }
) )

View File

@@ -40,7 +40,7 @@ from mes_dashboard.services.query_tool_service import (
) )
# Create Blueprint # Create Blueprint
query_tool_bp = Blueprint('query_tool', __name__) query_tool_bp = Blueprint('query_tool', __name__)
_QUERY_TOOL_RESOLVE_RATE_LIMIT = configured_rate_limit( _QUERY_TOOL_RESOLVE_RATE_LIMIT = configured_rate_limit(
bucket="query-tool-resolve", bucket="query-tool-resolve",
@@ -77,13 +77,31 @@ _QUERY_TOOL_EQUIPMENT_RATE_LIMIT = configured_rate_limit(
default_max_attempts=5, default_max_attempts=5,
default_window_seconds=60, default_window_seconds=60,
) )
_QUERY_TOOL_EXPORT_RATE_LIMIT = configured_rate_limit( _QUERY_TOOL_EXPORT_RATE_LIMIT = configured_rate_limit(
bucket="query-tool-export", bucket="query-tool-export",
max_attempts_env="QT_EXPORT_RATE_MAX_REQUESTS", max_attempts_env="QT_EXPORT_RATE_MAX_REQUESTS",
window_seconds_env="QT_EXPORT_RATE_WINDOW_SECONDS", window_seconds_env="QT_EXPORT_RATE_WINDOW_SECONDS",
default_max_attempts=3, default_max_attempts=3,
default_window_seconds=60, default_window_seconds=60,
) )
def _format_lot_materials_export_rows(rows):
"""Normalize LOT material export columns for UI/CSV consistency."""
normalized_rows = []
for row in rows or []:
lot_id = row.get('CONTAINERNAME') or row.get('CONTAINERID') or ''
normalized_rows.append({
'LOT ID': lot_id,
'MATERIALPARTNAME': row.get('MATERIALPARTNAME', ''),
'MATERIALLOTNAME': row.get('MATERIALLOTNAME', ''),
'QTYCONSUMED': row.get('QTYCONSUMED', ''),
'WORKCENTERNAME': row.get('WORKCENTERNAME', ''),
'SPECNAME': row.get('SPECNAME', ''),
'EQUIPMENTNAME': row.get('EQUIPMENTNAME', ''),
'TXNDATE': row.get('TXNDATE', ''),
})
return normalized_rows
# ============================================================ # ============================================================
@@ -498,10 +516,10 @@ def export_csv():
) )
filename = 'adjacent_lots.csv' filename = 'adjacent_lots.csv'
elif export_type == 'lot_materials': elif export_type == 'lot_materials':
container_id = params.get('container_id') container_id = params.get('container_id')
result = get_lot_materials(container_id) result = get_lot_materials(container_id)
filename = f'lot_materials_{container_id}.csv' filename = f'lot_raw_materials_{container_id}.csv'
elif export_type == 'lot_rejects': elif export_type == 'lot_rejects':
container_id = params.get('container_id') container_id = params.get('container_id')
@@ -589,18 +607,21 @@ def export_csv():
error_msg = result.get('error', '查詢失敗') if result else '查詢失敗' error_msg = result.get('error', '查詢失敗') if result else '查詢失敗'
return jsonify({'error': error_msg}), 400 return jsonify({'error': error_msg}), 400
export_data = result.get('data', []) export_data = result.get('data', [])
if not export_data: if not export_data:
return jsonify({'error': '查無資料'}), 404 return jsonify({'error': '查無資料'}), 404
# Stream CSV response if export_type == 'lot_materials':
return Response( export_data = _format_lot_materials_export_rows(export_data)
generate_csv_stream(export_data),
mimetype='text/csv; charset=utf-8', # Stream CSV response
headers={ return Response(
'Content-Disposition': f'attachment; filename={filename}' generate_csv_stream(export_data),
} mimetype='text/csv; charset=utf-8-sig',
) headers={
'Content-Disposition': f'attachment; filename={filename}'
}
)
except Exception as exc: except Exception as exc:
return jsonify({'error': f'匯出失敗: {str(exc)}'}), 500 return jsonify({'error': f'匯出失敗: {str(exc)}'}), 500

View File

@@ -6,6 +6,7 @@ from __future__ import annotations
import hashlib import hashlib
import logging import logging
import os import os
import re
from collections import defaultdict from collections import defaultdict
from typing import Any, Dict, List from typing import Any, Dict, List
@@ -28,7 +29,7 @@ _DOMAIN_SPECS: Dict[str, Dict[str, Any]] = {
"default_window": 60, "default_window": 60,
}, },
"materials": { "materials": {
"filter_column": "CONTAINERID", "filter_column": "m.CONTAINERID",
"cache_ttl": 300, "cache_ttl": 300,
"bucket": "event-materials", "bucket": "event-materials",
"max_env": "EVT_MATERIALS_RATE_MAX_REQUESTS", "max_env": "EVT_MATERIALS_RATE_MAX_REQUESTS",
@@ -106,12 +107,31 @@ def _normalize_ids(container_ids: List[str]) -> List[str]:
class EventFetcher: class EventFetcher:
"""Fetches container-scoped event records with cache and batching.""" """Fetches container-scoped event records with cache and batching."""
_CONTAINER_EQ_PARAM_PATTERN = re.compile(
r"(?:[A-Za-z_][A-Za-z0-9_]*\.)?CONTAINERID\s*=\s*:container_id",
re.IGNORECASE,
)
@staticmethod @staticmethod
def _cache_key(domain: str, container_ids: List[str]) -> str: def _cache_key(domain: str, container_ids: List[str]) -> str:
normalized = sorted(_normalize_ids(container_ids)) normalized = sorted(_normalize_ids(container_ids))
digest = hashlib.md5("|".join(normalized).encode("utf-8")).hexdigest()[:12] digest = hashlib.md5("|".join(normalized).encode("utf-8")).hexdigest()[:12]
return f"evt:{domain}:{digest}" return f"evt:{domain}:{digest}"
@staticmethod
def _replace_container_filter(sql: str, condition_sql: str) -> str:
"""Replace single-CID predicate with batched predicate in domain SQL.

Args:
sql: Domain SQL containing a ``CONTAINERID = :container_id`` predicate,
optionally alias-qualified (e.g. ``m.CONTAINERID``).
condition_sql: Replacement predicate (e.g. an ``IN (:p0, :p1, ...)`` clause).

Returns:
The SQL with the first matching predicate swapped for ``condition_sql``;
the input unchanged if no predicate matched.
"""
# count=1: only the first CONTAINERID predicate is the batching target;
# subn also reports how many replacements actually happened.
replaced_sql, replacements = EventFetcher._CONTAINER_EQ_PARAM_PATTERN.subn(
condition_sql,
sql,
count=1,
)
# A miss means the domain SQL drifted from the expected predicate shape
# (e.g. a new alias or renamed bind) — log instead of failing hard, since
# the unmodified SQL may still run for the single-container case.
if replacements == 0:
logger.warning(
"EventFetcher container filter replacement missed target predicate"
)
return replaced_sql
@staticmethod @staticmethod
def _get_rate_limit_config(domain: str) -> Dict[str, int | str]: def _get_rate_limit_config(domain: str) -> Dict[str, int | str]:
spec = _DOMAIN_SPECS.get(domain) spec = _DOMAIN_SPECS.get(domain)
@@ -133,20 +153,20 @@ class EventFetcher:
if domain == "history": if domain == "history":
sql = SQLLoader.load("query_tool/lot_history") sql = SQLLoader.load("query_tool/lot_history")
sql = sql.replace("h.CONTAINERID = :container_id", condition_sql) sql = EventFetcher._replace_container_filter(sql, condition_sql)
return sql.replace("{{ WORKCENTER_FILTER }}", "") return sql.replace("{{ WORKCENTER_FILTER }}", "")
if domain == "materials": if domain == "materials":
sql = SQLLoader.load("query_tool/lot_materials") sql = SQLLoader.load("query_tool/lot_materials")
return sql.replace("CONTAINERID = :container_id", condition_sql) return EventFetcher._replace_container_filter(sql, condition_sql)
if domain == "rejects": if domain == "rejects":
sql = SQLLoader.load("query_tool/lot_rejects") sql = SQLLoader.load("query_tool/lot_rejects")
return sql.replace("CONTAINERID = :container_id", condition_sql) return EventFetcher._replace_container_filter(sql, condition_sql)
if domain == "holds": if domain == "holds":
sql = SQLLoader.load("query_tool/lot_holds") sql = SQLLoader.load("query_tool/lot_holds")
return sql.replace("CONTAINERID = :container_id", condition_sql) return EventFetcher._replace_container_filter(sql, condition_sql)
if domain == "jobs": if domain == "jobs":
return f""" return f"""

View File

@@ -526,6 +526,7 @@ def export_csv(
# Write CSV header # Write CSV header
output = io.StringIO() output = io.StringIO()
output.write('\ufeff') # UTF-8 BOM for Excel compatibility
writer = csv.writer(output) writer = csv.writer(output)
writer.writerow(headers) writer.writerow(headers)
yield output.getvalue() yield output.getvalue()

View File

@@ -9,14 +9,17 @@
-- Uses TXNDATE (NOT TXNDATETIME) -- Uses TXNDATE (NOT TXNDATETIME)
SELECT SELECT
CONTAINERID, m.CONTAINERID,
MATERIALPARTNAME, c.CONTAINERNAME,
MATERIALLOTNAME, m.MATERIALPARTNAME,
QTYCONSUMED, m.MATERIALLOTNAME,
WORKCENTERNAME, m.QTYCONSUMED,
SPECNAME, m.WORKCENTERNAME,
EQUIPMENTNAME, m.SPECNAME,
TXNDATE m.EQUIPMENTNAME,
FROM DWH.DW_MES_LOTMATERIALSHISTORY m.TXNDATE
WHERE CONTAINERID = :container_id FROM DWH.DW_MES_LOTMATERIALSHISTORY m
ORDER BY TXNDATE LEFT JOIN DWH.DW_MES_CONTAINER c
ON c.CONTAINERID = m.CONTAINERID
WHERE m.CONTAINERID = :container_id
ORDER BY m.TXNDATE

View File

@@ -89,3 +89,26 @@ def test_fetch_events_history_branch_replaces_container_filter(
assert "h.CONTAINERID = :container_id" not in sql assert "h.CONTAINERID = :container_id" not in sql
assert "{{ WORKCENTER_FILTER }}" not in sql assert "{{ WORKCENTER_FILTER }}" not in sql
assert params == {"p0": "CID-1"} assert params == {"p0": "CID-1"}
# Regression test for the ORA-00918 fix: the materials SQL now qualifies
# CONTAINERID with a table alias ("m."), so the batched-filter rewrite must
# match the alias-qualified predicate rather than only a bare CONTAINERID.
@patch("mes_dashboard.services.event_fetcher.cache_set")
@patch("mes_dashboard.services.event_fetcher.cache_get", return_value=None)
@patch("mes_dashboard.services.event_fetcher.read_sql_df")
@patch("mes_dashboard.services.event_fetcher.SQLLoader.load")
def test_fetch_events_materials_branch_replaces_aliased_container_filter(
mock_sql_load,
mock_read_sql_df,
_mock_cache_get,
_mock_cache_set,
):
# Alias-qualified predicate, mirroring query_tool/lot_materials.sql.
mock_sql_load.return_value = (
"SELECT * FROM t m WHERE m.CONTAINERID = :container_id ORDER BY TXNDATE"
)
# No result rows needed; only the SQL/params handed to the driver matter.
mock_read_sql_df.return_value = pd.DataFrame([])
EventFetcher.fetch_events(["CID-1", "CID-2"], "materials")
sql, params = mock_read_sql_df.call_args.args
# The single-bind predicate must be gone, replaced by an IN (...) clause
# with one bind parameter per container id.
assert "m.CONTAINERID = :container_id" not in sql
assert "IN" in sql.upper()
assert params == {"p0": "CID-1", "p1": "CID-2"}

View File

@@ -123,5 +123,5 @@ def test_resource_history_export_uses_contract_headers(
chunks = list(export_resource_history_csv('2024-01-01', '2024-01-10')) chunks = list(export_resource_history_csv('2024-01-01', '2024-01-10'))
assert chunks assert chunks
header_row = next(csv.reader(io.StringIO(chunks[0]))) header_row = next(csv.reader(io.StringIO(chunks[0].lstrip('\ufeff'))))
assert header_row == export_headers assert header_row == export_headers

View File

@@ -690,10 +690,10 @@ class TestExportCsvEndpoint:
assert '不支援' in data['error'] or 'type' in data['error'].lower() assert '不支援' in data['error'] or 'type' in data['error'].lower()
@patch('mes_dashboard.routes.query_tool_routes.get_lot_history') @patch('mes_dashboard.routes.query_tool_routes.get_lot_history')
def test_export_lot_history_success(self, mock_get_history, client): def test_export_lot_history_success(self, mock_get_history, client):
"""Should return CSV for lot history.""" """Should return CSV for lot history."""
mock_get_history.return_value = { mock_get_history.return_value = {
'data': [ 'data': [
{ {
'EQUIPMENTNAME': 'ASSY-01', 'EQUIPMENTNAME': 'ASSY-01',
'SPECNAME': 'SPEC-001', 'SPECNAME': 'SPEC-001',
@@ -709,9 +709,46 @@ class TestExportCsvEndpoint:
'export_type': 'lot_history', 'export_type': 'lot_history',
'params': {'container_id': '488103800029578b'} 'params': {'container_id': '488103800029578b'}
} }
) )
assert response.status_code == 200 assert response.status_code == 200
assert 'text/csv' in response.content_type assert 'text/csv' in response.content_type
# Verifies the lot-materials CSV export: rows are normalized so the
# human-readable CONTAINERNAME is exported under the 'LOT ID' header, the
# download filename uses the new lot_raw_materials_ prefix, and the body
# decodes with the utf-8-sig (BOM) encoding used for Excel compatibility.
@patch('mes_dashboard.routes.query_tool_routes.get_lot_materials')
def test_export_lot_materials_uses_container_name_as_lot_id(
self,
mock_get_materials,
client,
):
# One representative material row with both the raw id and display name.
mock_get_materials.return_value = {
'data': [
{
'CONTAINERID': '488103800029578b',
'CONTAINERNAME': 'GA25010001-A01',
'MATERIALPARTNAME': 'M-001',
'MATERIALLOTNAME': 'LOT-MAT-01',
'QTYCONSUMED': 10,
'WORKCENTERNAME': 'DB',
'SPECNAME': 'SPEC-DB',
'EQUIPMENTNAME': 'EQ-01',
'TXNDATE': '2026-02-22 10:00:00',
}
],
'total': 1,
}
response = client.post(
'/api/query-tool/export-csv',
json={
'export_type': 'lot_materials',
'params': {'container_id': '488103800029578b'}
}
)
assert response.status_code == 200
# Filename keeps the container id but uses the raw-materials prefix.
assert 'lot_raw_materials_488103800029578b.csv' in response.headers.get('Content-Disposition', '')
# utf-8-sig strips the BOM the exporter prepends for Excel.
decoded = response.data.decode('utf-8-sig')
assert 'LOT ID' in decoded
assert 'GA25010001-A01' in decoded
class TestEquipmentListEndpoint: class TestEquipmentListEndpoint: