diff --git a/frontend/src/query-tool/components/LotAssociationTable.vue b/frontend/src/query-tool/components/LotAssociationTable.vue index 964d8bd..dc95350 100644 --- a/frontend/src/query-tool/components/LotAssociationTable.vue +++ b/frontend/src/query-tool/components/LotAssociationTable.vue @@ -16,9 +16,52 @@ const props = defineProps({ type: String, default: '無資料', }, + hiddenColumns: { + type: Array, + default: () => [], + }, + columnLabels: { + type: Object, + default: () => ({}), + }, + columnOrder: { + type: Array, + default: () => [], + }, }); -const columns = computed(() => Object.keys(props.rows[0] || {})); +const columns = computed(() => { + const baseColumns = Object.keys(props.rows[0] || {}); + if (baseColumns.length === 0) { + return []; + } + + const hidden = new Set((props.hiddenColumns || []).map((value) => String(value))); + const visible = baseColumns.filter((column) => !hidden.has(column)); + + if (!Array.isArray(props.columnOrder) || props.columnOrder.length === 0) { + return visible; + } + + const ordered = []; + const seen = new Set(); + props.columnOrder.forEach((column) => { + if (visible.includes(column) && !seen.has(column)) { + ordered.push(column); + seen.add(column); + } + }); + visible.forEach((column) => { + if (!seen.has(column)) { + ordered.push(column); + } + }); + return ordered; +}); + +function resolveColumnLabel(column) { + return props.columnLabels?.[column] || column; +} diff --git a/src/mes_dashboard/routes/excel_query_routes.py b/src/mes_dashboard/routes/excel_query_routes.py index ca10761..2ef479a 100644 --- a/src/mes_dashboard/routes/excel_query_routes.py +++ b/src/mes_dashboard/routes/excel_query_routes.py @@ -348,7 +348,7 @@ def export_csv(): return Response( csv_content, - mimetype='text/csv; charset=utf-8', + mimetype='text/csv; charset=utf-8-sig', headers={ 'Content-Disposition': 'attachment; filename=query_result.csv' } diff --git a/src/mes_dashboard/routes/job_query_routes.py 
b/src/mes_dashboard/routes/job_query_routes.py index 3424a93..890ca8a 100644 --- a/src/mes_dashboard/routes/job_query_routes.py +++ b/src/mes_dashboard/routes/job_query_routes.py @@ -190,10 +190,10 @@ def export_jobs(): return jsonify({'error': validation_error}), 400 # Stream CSV response - return Response( - export_jobs_with_history(resource_ids, start_date, end_date), - mimetype='text/csv; charset=utf-8', - headers={ - 'Content-Disposition': 'attachment; filename=job_history_export.csv' - } - ) + return Response( + export_jobs_with_history(resource_ids, start_date, end_date), + mimetype='text/csv; charset=utf-8-sig', + headers={ + 'Content-Disposition': 'attachment; filename=job_history_export.csv' + } + ) diff --git a/src/mes_dashboard/routes/query_tool_routes.py b/src/mes_dashboard/routes/query_tool_routes.py index 139c228..60c7313 100644 --- a/src/mes_dashboard/routes/query_tool_routes.py +++ b/src/mes_dashboard/routes/query_tool_routes.py @@ -40,7 +40,7 @@ from mes_dashboard.services.query_tool_service import ( ) # Create Blueprint -query_tool_bp = Blueprint('query_tool', __name__) +query_tool_bp = Blueprint('query_tool', __name__) _QUERY_TOOL_RESOLVE_RATE_LIMIT = configured_rate_limit( bucket="query-tool-resolve", @@ -77,13 +77,31 @@ _QUERY_TOOL_EQUIPMENT_RATE_LIMIT = configured_rate_limit( default_max_attempts=5, default_window_seconds=60, ) -_QUERY_TOOL_EXPORT_RATE_LIMIT = configured_rate_limit( +_QUERY_TOOL_EXPORT_RATE_LIMIT = configured_rate_limit( bucket="query-tool-export", max_attempts_env="QT_EXPORT_RATE_MAX_REQUESTS", window_seconds_env="QT_EXPORT_RATE_WINDOW_SECONDS", default_max_attempts=3, default_window_seconds=60, -) +) + + +def _format_lot_materials_export_rows(rows): + """Normalize LOT material export columns for UI/CSV consistency.""" + normalized_rows = [] + for row in rows or []: + lot_id = row.get('CONTAINERNAME') or row.get('CONTAINERID') or '' + normalized_rows.append({ + 'LOT ID': lot_id, + 'MATERIALPARTNAME': 
row.get('MATERIALPARTNAME', ''), + 'MATERIALLOTNAME': row.get('MATERIALLOTNAME', ''), + 'QTYCONSUMED': row.get('QTYCONSUMED', ''), + 'WORKCENTERNAME': row.get('WORKCENTERNAME', ''), + 'SPECNAME': row.get('SPECNAME', ''), + 'EQUIPMENTNAME': row.get('EQUIPMENTNAME', ''), + 'TXNDATE': row.get('TXNDATE', ''), + }) + return normalized_rows # ============================================================ @@ -498,10 +516,10 @@ def export_csv(): ) filename = 'adjacent_lots.csv' - elif export_type == 'lot_materials': - container_id = params.get('container_id') - result = get_lot_materials(container_id) - filename = f'lot_materials_{container_id}.csv' + elif export_type == 'lot_materials': + container_id = params.get('container_id') + result = get_lot_materials(container_id) + filename = f'lot_raw_materials_{container_id}.csv' elif export_type == 'lot_rejects': container_id = params.get('container_id') @@ -589,18 +607,21 @@ def export_csv(): error_msg = result.get('error', '查詢失敗') if result else '查詢失敗' return jsonify({'error': error_msg}), 400 - export_data = result.get('data', []) - if not export_data: - return jsonify({'error': '查無資料'}), 404 - - # Stream CSV response - return Response( - generate_csv_stream(export_data), - mimetype='text/csv; charset=utf-8', - headers={ - 'Content-Disposition': f'attachment; filename={filename}' - } - ) + export_data = result.get('data', []) + if not export_data: + return jsonify({'error': '查無資料'}), 404 + + if export_type == 'lot_materials': + export_data = _format_lot_materials_export_rows(export_data) + + # Stream CSV response + return Response( + generate_csv_stream(export_data), + mimetype='text/csv; charset=utf-8-sig', + headers={ + 'Content-Disposition': f'attachment; filename={filename}' + } + ) except Exception as exc: return jsonify({'error': f'匯出失敗: {str(exc)}'}), 500 diff --git a/src/mes_dashboard/services/event_fetcher.py b/src/mes_dashboard/services/event_fetcher.py index c56b1a9..fb362c8 100644 ---
a/src/mes_dashboard/services/event_fetcher.py +++ b/src/mes_dashboard/services/event_fetcher.py @@ -6,6 +6,7 @@ from __future__ import annotations import hashlib import logging import os +import re from collections import defaultdict from typing import Any, Dict, List @@ -28,7 +29,7 @@ _DOMAIN_SPECS: Dict[str, Dict[str, Any]] = { "default_window": 60, }, "materials": { - "filter_column": "CONTAINERID", + "filter_column": "m.CONTAINERID", "cache_ttl": 300, "bucket": "event-materials", "max_env": "EVT_MATERIALS_RATE_MAX_REQUESTS", @@ -106,12 +107,31 @@ def _normalize_ids(container_ids: List[str]) -> List[str]: class EventFetcher: """Fetches container-scoped event records with cache and batching.""" + _CONTAINER_EQ_PARAM_PATTERN = re.compile( + r"(?:[A-Za-z_][A-Za-z0-9_]*\.)?CONTAINERID\s*=\s*:container_id", + re.IGNORECASE, + ) + @staticmethod def _cache_key(domain: str, container_ids: List[str]) -> str: normalized = sorted(_normalize_ids(container_ids)) digest = hashlib.md5("|".join(normalized).encode("utf-8")).hexdigest()[:12] return f"evt:{domain}:{digest}" + @staticmethod + def _replace_container_filter(sql: str, condition_sql: str) -> str: + """Replace single-CID predicate with batched predicate in domain SQL.""" + replaced_sql, replacements = EventFetcher._CONTAINER_EQ_PARAM_PATTERN.subn( + condition_sql, + sql, + count=1, + ) + if replacements == 0: + logger.warning( + "EventFetcher container filter replacement missed target predicate" + ) + return replaced_sql + @staticmethod def _get_rate_limit_config(domain: str) -> Dict[str, int | str]: spec = _DOMAIN_SPECS.get(domain) @@ -133,20 +153,20 @@ class EventFetcher: if domain == "history": sql = SQLLoader.load("query_tool/lot_history") - sql = sql.replace("h.CONTAINERID = :container_id", condition_sql) + sql = EventFetcher._replace_container_filter(sql, condition_sql) return sql.replace("{{ WORKCENTER_FILTER }}", "") if domain == "materials": sql = SQLLoader.load("query_tool/lot_materials") - return 
sql.replace("CONTAINERID = :container_id", condition_sql) + return EventFetcher._replace_container_filter(sql, condition_sql) if domain == "rejects": sql = SQLLoader.load("query_tool/lot_rejects") - return sql.replace("CONTAINERID = :container_id", condition_sql) + return EventFetcher._replace_container_filter(sql, condition_sql) if domain == "holds": sql = SQLLoader.load("query_tool/lot_holds") - return sql.replace("CONTAINERID = :container_id", condition_sql) + return EventFetcher._replace_container_filter(sql, condition_sql) if domain == "jobs": return f""" diff --git a/src/mes_dashboard/services/resource_history_service.py b/src/mes_dashboard/services/resource_history_service.py index 703e1a8..0e61d5b 100644 --- a/src/mes_dashboard/services/resource_history_service.py +++ b/src/mes_dashboard/services/resource_history_service.py @@ -526,6 +526,7 @@ def export_csv( # Write CSV header output = io.StringIO() + output.write('\ufeff') # UTF-8 BOM for Excel compatibility writer = csv.writer(output) writer.writerow(headers) yield output.getvalue() diff --git a/src/mes_dashboard/sql/query_tool/lot_materials.sql b/src/mes_dashboard/sql/query_tool/lot_materials.sql index 05de4db..c7372e6 100644 --- a/src/mes_dashboard/sql/query_tool/lot_materials.sql +++ b/src/mes_dashboard/sql/query_tool/lot_materials.sql @@ -9,14 +9,17 @@ -- Uses TXNDATE (NOT TXNDATETIME) SELECT - CONTAINERID, - MATERIALPARTNAME, - MATERIALLOTNAME, - QTYCONSUMED, - WORKCENTERNAME, - SPECNAME, - EQUIPMENTNAME, - TXNDATE -FROM DWH.DW_MES_LOTMATERIALSHISTORY -WHERE CONTAINERID = :container_id -ORDER BY TXNDATE + m.CONTAINERID, + c.CONTAINERNAME, + m.MATERIALPARTNAME, + m.MATERIALLOTNAME, + m.QTYCONSUMED, + m.WORKCENTERNAME, + m.SPECNAME, + m.EQUIPMENTNAME, + m.TXNDATE +FROM DWH.DW_MES_LOTMATERIALSHISTORY m +LEFT JOIN DWH.DW_MES_CONTAINER c + ON c.CONTAINERID = m.CONTAINERID +WHERE m.CONTAINERID = :container_id +ORDER BY m.TXNDATE diff --git a/tests/test_event_fetcher.py b/tests/test_event_fetcher.py index 
9a81d3f..9edaa83 100644 --- a/tests/test_event_fetcher.py +++ b/tests/test_event_fetcher.py @@ -89,3 +89,26 @@ def test_fetch_events_history_branch_replaces_container_filter( assert "h.CONTAINERID = :container_id" not in sql assert "{{ WORKCENTER_FILTER }}" not in sql assert params == {"p0": "CID-1"} + + +@patch("mes_dashboard.services.event_fetcher.cache_set") +@patch("mes_dashboard.services.event_fetcher.cache_get", return_value=None) +@patch("mes_dashboard.services.event_fetcher.read_sql_df") +@patch("mes_dashboard.services.event_fetcher.SQLLoader.load") +def test_fetch_events_materials_branch_replaces_aliased_container_filter( + mock_sql_load, + mock_read_sql_df, + _mock_cache_get, + _mock_cache_set, +): + mock_sql_load.return_value = ( + "SELECT * FROM t m WHERE m.CONTAINERID = :container_id ORDER BY TXNDATE" + ) + mock_read_sql_df.return_value = pd.DataFrame([]) + + EventFetcher.fetch_events(["CID-1", "CID-2"], "materials") + + sql, params = mock_read_sql_df.call_args.args + assert "m.CONTAINERID = :container_id" not in sql + assert "IN" in sql.upper() + assert params == {"p0": "CID-1", "p1": "CID-2"} diff --git a/tests/test_field_contracts.py b/tests/test_field_contracts.py index a93577c..8ea6748 100644 --- a/tests/test_field_contracts.py +++ b/tests/test_field_contracts.py @@ -123,5 +123,5 @@ def test_resource_history_export_uses_contract_headers( chunks = list(export_resource_history_csv('2024-01-01', '2024-01-10')) assert chunks - header_row = next(csv.reader(io.StringIO(chunks[0]))) + header_row = next(csv.reader(io.StringIO(chunks[0].lstrip('\ufeff')))) assert header_row == export_headers diff --git a/tests/test_query_tool_routes.py b/tests/test_query_tool_routes.py index edf22f7..c102cf7 100644 --- a/tests/test_query_tool_routes.py +++ b/tests/test_query_tool_routes.py @@ -690,10 +690,10 @@ class TestExportCsvEndpoint: assert '不支援' in data['error'] or 'type' in data['error'].lower() @patch('mes_dashboard.routes.query_tool_routes.get_lot_history') - def 
test_export_lot_history_success(self, mock_get_history, client): - """Should return CSV for lot history.""" - mock_get_history.return_value = { - 'data': [ + def test_export_lot_history_success(self, mock_get_history, client): + """Should return CSV for lot history.""" + mock_get_history.return_value = { + 'data': [ { 'EQUIPMENTNAME': 'ASSY-01', 'SPECNAME': 'SPEC-001', @@ -709,9 +709,46 @@ class TestExportCsvEndpoint: 'export_type': 'lot_history', 'params': {'container_id': '488103800029578b'} } - ) - assert response.status_code == 200 - assert 'text/csv' in response.content_type + ) + assert response.status_code == 200 + assert 'text/csv' in response.content_type + + @patch('mes_dashboard.routes.query_tool_routes.get_lot_materials') + def test_export_lot_materials_uses_container_name_as_lot_id( + self, + mock_get_materials, + client, + ): + mock_get_materials.return_value = { + 'data': [ + { + 'CONTAINERID': '488103800029578b', + 'CONTAINERNAME': 'GA25010001-A01', + 'MATERIALPARTNAME': 'M-001', + 'MATERIALLOTNAME': 'LOT-MAT-01', + 'QTYCONSUMED': 10, + 'WORKCENTERNAME': 'DB', + 'SPECNAME': 'SPEC-DB', + 'EQUIPMENTNAME': 'EQ-01', + 'TXNDATE': '2026-02-22 10:00:00', + } + ], + 'total': 1, + } + + response = client.post( + '/api/query-tool/export-csv', + json={ + 'export_type': 'lot_materials', + 'params': {'container_id': '488103800029578b'} + } + ) + + assert response.status_code == 200 + assert 'lot_raw_materials_488103800029578b.csv' in response.headers.get('Content-Disposition', '') + decoded = response.data.decode('utf-8-sig') + assert 'LOT ID' in decoded + assert 'GA25010001-A01' in decoded class TestEquipmentListEndpoint: