diff --git a/frontend/src/query-tool/components/EquipmentLotsTable.vue b/frontend/src/query-tool/components/EquipmentLotsTable.vue index fcd8572..2fe20fb 100644 --- a/frontend/src/query-tool/components/EquipmentLotsTable.vue +++ b/frontend/src/query-tool/components/EquipmentLotsTable.vue @@ -27,10 +27,21 @@ const props = defineProps({ const emit = defineEmits(['export']); +const COLUMN_LABELS = Object.freeze({ + CONTAINERNAME: 'LOT ID', + WAFER_LOT_ID: 'WAFER LOT', + PJ_TYPE: 'TYPE', + PJ_BOP: 'BOP', + PJ_WORKORDER: 'WORKORDER', +}); + const columns = Object.freeze([ - 'CONTAINERID', 'CONTAINERNAME', + 'WAFER_LOT_ID', + 'PJ_TYPE', + 'PJ_BOP', 'SPECNAME', + 'PJ_WORKORDER', 'TRACKINTIMESTAMP', 'TRACKOUTTIMESTAMP', 'TRACKINQTY', @@ -69,7 +80,7 @@ const columns = Object.freeze([ - {{ column }} + {{ COLUMN_LABELS[column] || column }} diff --git a/frontend/src/query-tool/components/LotHistoryTable.vue b/frontend/src/query-tool/components/LotHistoryTable.vue index 94aed00..9949f96 100644 --- a/frontend/src/query-tool/components/LotHistoryTable.vue +++ b/frontend/src/query-tool/components/LotHistoryTable.vue @@ -27,6 +27,13 @@ const emit = defineEmits(['update:workcenterGroups']); const HIDDEN_COLUMNS = new Set(['CONTAINERID', 'EQUIPMENTID', 'RESOURCEID']); +const COLUMN_LABELS = Object.freeze({ + CONTAINERNAME: 'LOT ID', + PJ_TYPE: 'TYPE', + PJ_BOP: 'BOP', + PJ_WORKORDER: 'WORKORDER', +}); + const columns = computed(() => Object.keys(props.rows[0] || {}).filter((col) => !HIDDEN_COLUMNS.has(col)), ); @@ -73,7 +80,7 @@ const workcenterOptions = computed(() => { - {{ column }} + {{ COLUMN_LABELS[column] || column }} diff --git a/frontend/src/query-tool/components/LotRejectTable.vue b/frontend/src/query-tool/components/LotRejectTable.vue index 98c6747..e4a1745 100644 --- a/frontend/src/query-tool/components/LotRejectTable.vue +++ b/frontend/src/query-tool/components/LotRejectTable.vue @@ -109,7 +109,7 @@ const sortedRows = computed(() => { - + diff --git 
a/frontend/src/query-tool/composables/useEquipmentQuery.js b/frontend/src/query-tool/composables/useEquipmentQuery.js index 8b400fc..a6d50a9 100644 --- a/frontend/src/query-tool/composables/useEquipmentQuery.js +++ b/frontend/src/query-tool/composables/useEquipmentQuery.js @@ -81,7 +81,7 @@ export function useEquipmentQuery(initial = {}) { const equipmentOptionItems = computed(() => { return equipmentOptions.value.map((item) => ({ value: String(item.RESOURCEID), - label: item.RESOURCENAME ? `${item.RESOURCENAME} (${item.RESOURCEID})` : String(item.RESOURCEID), + label: item.RESOURCENAME || String(item.RESOURCEID), })); }); diff --git a/src/mes_dashboard/routes/query_tool_routes.py b/src/mes_dashboard/routes/query_tool_routes.py index 1c8b58f..47fb259 100644 --- a/src/mes_dashboard/routes/query_tool_routes.py +++ b/src/mes_dashboard/routes/query_tool_routes.py @@ -9,15 +9,15 @@ Contains Flask Blueprint for batch tracing and equipment period query endpoints: - CSV export functionality """ -import hashlib - -from flask import Blueprint, jsonify, request, Response, render_template, current_app - -from mes_dashboard.core.cache import cache_get, cache_set -from mes_dashboard.core.modernization_policy import maybe_redirect_to_canonical_shell -from mes_dashboard.core.rate_limit import configured_rate_limit -from mes_dashboard.core.request_validation import parse_json_payload -from mes_dashboard.services.query_tool_service import ( +import hashlib + +from flask import Blueprint, jsonify, request, Response, render_template, current_app + +from mes_dashboard.core.cache import cache_get, cache_set +from mes_dashboard.core.modernization_policy import maybe_redirect_to_canonical_shell +from mes_dashboard.core.rate_limit import configured_rate_limit +from mes_dashboard.core.request_validation import parse_json_payload +from mes_dashboard.services.query_tool_service import ( resolve_lots, get_lot_history, get_lot_history_batch, @@ -41,7 +41,7 @@ from 
mes_dashboard.services.query_tool_service import ( ) # Create Blueprint -query_tool_bp = Blueprint('query_tool', __name__) +query_tool_bp = Blueprint('query_tool', __name__) _QUERY_TOOL_RESOLVE_RATE_LIMIT = configured_rate_limit( bucket="query-tool-resolve", @@ -78,69 +78,130 @@ _QUERY_TOOL_EQUIPMENT_RATE_LIMIT = configured_rate_limit( default_max_attempts=5, default_window_seconds=60, ) -_QUERY_TOOL_EXPORT_RATE_LIMIT = configured_rate_limit( +_QUERY_TOOL_EXPORT_RATE_LIMIT = configured_rate_limit( bucket="query-tool-export", max_attempts_env="QT_EXPORT_RATE_MAX_REQUESTS", window_seconds_env="QT_EXPORT_RATE_WINDOW_SECONDS", default_max_attempts=3, default_window_seconds=60, -) - - -def _query_tool_max_container_ids() -> int: - try: - value = int(current_app.config.get("QUERY_TOOL_MAX_CONTAINER_IDS", 200)) - except Exception: - value = 200 - return max(value, 1) - - -def _reject_if_batch_too_large(container_ids: list[str]): - max_ids = _query_tool_max_container_ids() - if len(container_ids) <= max_ids: - return None - return jsonify({'error': f'container_ids 數量不可超過 {max_ids} 筆'}), 413 - - -def _format_lot_materials_export_rows(rows): - """Normalize LOT material export columns for UI/CSV consistency.""" - normalized_rows = [] - for row in rows or []: - lot_id = row.get('CONTAINERNAME') or row.get('CONTAINERID') or '' - normalized_rows.append({ - 'LOT ID': lot_id, - 'MATERIALPARTNAME': row.get('MATERIALPARTNAME', ''), - 'MATERIALLOTNAME': row.get('MATERIALLOTNAME', ''), - 'QTYCONSUMED': row.get('QTYCONSUMED', ''), - 'WORKCENTERNAME': row.get('WORKCENTERNAME', ''), - 'SPECNAME': row.get('SPECNAME', ''), - 'EQUIPMENTNAME': row.get('EQUIPMENTNAME', ''), - 'TXNDATE': row.get('TXNDATE', ''), - }) - return normalized_rows - - -def _format_lot_holds_export_rows(rows): - """Normalize LOT hold export columns for UI/CSV consistency.""" - normalized_rows = [] - for row in rows or []: - lot_id = row.get('CONTAINERNAME') or row.get('CONTAINERID') or '' - normalized_rows.append({ - 
'LOT ID': lot_id, - 'WORKCENTERNAME': row.get('WORKCENTERNAME', ''), - 'HOLDTXNDATE': row.get('HOLDTXNDATE', ''), - 'RELEASETXNDATE': row.get('RELEASETXNDATE', ''), - 'HOLD_STATUS': row.get('HOLD_STATUS', ''), - 'HOLD_HOURS': row.get('HOLD_HOURS', ''), - 'HOLDREASONNAME': row.get('HOLDREASONNAME', ''), - 'HOLDCOMMENTS': row.get('HOLDCOMMENTS', ''), - 'HOLDEMP': row.get('HOLDEMP', ''), - 'HOLDEMPDEPTNAME': row.get('HOLDEMPDEPTNAME', ''), - 'RELEASEEMP': row.get('RELEASEEMP', ''), - 'RELEASECOMMENTS': row.get('RELEASECOMMENTS', ''), - 'NCRID': row.get('NCRID', ''), - }) - return normalized_rows +) + + +def _query_tool_max_container_ids() -> int: + try: + value = int(current_app.config.get("QUERY_TOOL_MAX_CONTAINER_IDS", 200)) + except Exception: + value = 200 + return max(value, 1) + + +def _reject_if_batch_too_large(container_ids: list[str]): + max_ids = _query_tool_max_container_ids() + if len(container_ids) <= max_ids: + return None + return jsonify({'error': f'container_ids 數量不可超過 {max_ids} 筆'}), 413 + + +def _format_lot_materials_export_rows(rows): + """Normalize LOT material export columns for UI/CSV consistency.""" + normalized_rows = [] + for row in rows or []: + lot_id = row.get('CONTAINERNAME') or row.get('CONTAINERID') or '' + normalized_rows.append({ + 'LOT ID': lot_id, + 'MATERIALPARTNAME': row.get('MATERIALPARTNAME', ''), + 'MATERIALLOTNAME': row.get('MATERIALLOTNAME', ''), + 'QTYCONSUMED': row.get('QTYCONSUMED', ''), + 'WORKCENTERNAME': row.get('WORKCENTERNAME', ''), + 'SPECNAME': row.get('SPECNAME', ''), + 'EQUIPMENTNAME': row.get('EQUIPMENTNAME', ''), + 'TXNDATE': row.get('TXNDATE', ''), + }) + return normalized_rows + + +def _format_lot_holds_export_rows(rows): + """Normalize LOT hold export columns for UI/CSV consistency.""" + normalized_rows = [] + for row in rows or []: + lot_id = row.get('CONTAINERNAME') or row.get('CONTAINERID') or '' + normalized_rows.append({ + 'LOT ID': lot_id, + 'WORKCENTERNAME': row.get('WORKCENTERNAME', ''), + 
'HOLDTXNDATE': row.get('HOLDTXNDATE', ''), + 'RELEASETXNDATE': row.get('RELEASETXNDATE', ''), + 'HOLD_STATUS': row.get('HOLD_STATUS', ''), + 'HOLD_HOURS': row.get('HOLD_HOURS', ''), + 'HOLDREASONNAME': row.get('HOLDREASONNAME', ''), + 'HOLDCOMMENTS': row.get('HOLDCOMMENTS', ''), + 'HOLDEMP': row.get('HOLDEMP', ''), + 'HOLDEMPDEPTNAME': row.get('HOLDEMPDEPTNAME', ''), + 'RELEASEEMP': row.get('RELEASEEMP', ''), + 'RELEASECOMMENTS': row.get('RELEASECOMMENTS', ''), + 'NCRID': row.get('NCRID', ''), + }) + return normalized_rows + + +def _format_equipment_lots_export_rows(rows): + """Normalize equipment lots export columns for UI/CSV consistency.""" + normalized_rows = [] + for row in rows or []: + normalized_rows.append({ + 'LOT ID': row.get('CONTAINERNAME') or row.get('CONTAINERID') or '', + 'WAFER LOT': row.get('WAFER_LOT_ID', ''), + 'TYPE': row.get('PJ_TYPE', ''), + 'BOP': row.get('PJ_BOP', ''), + 'SPECNAME': row.get('SPECNAME', ''), + 'WORKORDER': row.get('PJ_WORKORDER', ''), + 'TRACKINTIMESTAMP': row.get('TRACKINTIMESTAMP', ''), + 'TRACKOUTTIMESTAMP': row.get('TRACKOUTTIMESTAMP', ''), + 'TRACKINQTY': row.get('TRACKINQTY', ''), + 'TRACKOUTQTY': row.get('TRACKOUTQTY', ''), + 'EQUIPMENTNAME': row.get('EQUIPMENTNAME', ''), + 'WORKCENTERNAME': row.get('WORKCENTERNAME', ''), + }) + return normalized_rows + + +_LOT_HISTORY_COLUMN_RENAMES = { + 'CONTAINERNAME': 'LOT ID', + 'PJ_TYPE': 'TYPE', + 'PJ_BOP': 'BOP', + 'PJ_WORKORDER': 'WORKORDER', +} + +_LOT_HISTORY_HIDDEN = {'CONTAINERID', 'EQUIPMENTID', 'RESOURCEID'} + + +def _format_lot_history_export_rows(rows): + """Rename columns in lot history export to match frontend labels.""" + normalized_rows = [] + for row in rows or []: + out = {} + for key, value in row.items(): + if key in _LOT_HISTORY_HIDDEN: + continue + label = _LOT_HISTORY_COLUMN_RENAMES.get(key, key) + out[label] = value if value is not None else '' + normalized_rows.append(out) + return normalized_rows + + +def _format_lot_rejects_export_rows(rows): + 
"""Rename CONTAINERNAME to LOT ID in lot rejects export.""" + normalized_rows = [] + for row in rows or []: + out = {} + for key, value in row.items(): + if key == 'CONTAINERID': + continue + if key == 'CONTAINERNAME': + out['LOT ID'] = value or '' + else: + out[key] = value if value is not None else '' + normalized_rows.append(out) + return normalized_rows # ============================================================ @@ -162,14 +223,14 @@ def query_tool_page(): @query_tool_bp.route('/api/query-tool/resolve', methods=['POST']) @_QUERY_TOOL_RESOLVE_RATE_LIMIT -def resolve_lot_input(): +def resolve_lot_input(): """Resolve user input to CONTAINERID list. Expects JSON body: - { - "input_type": "lot_id" | "wafer_lot" | "serial_number" | "work_order" | "gd_work_order" | "gd_lot_id", - "values": ["value1", "value2", ...] - } + { + "input_type": "lot_id" | "wafer_lot" | "serial_number" | "work_order" | "gd_work_order" | "gd_lot_id", + "values": ["value1", "value2", ...] + } Returns: { @@ -179,15 +240,15 @@ def resolve_lot_input(): "not_found": ["value3"] } """ - data, payload_error = parse_json_payload(require_non_empty_object=True) - if payload_error is not None: - return jsonify({'error': payload_error.message}), payload_error.status_code - - input_type = data.get('input_type') + data, payload_error = parse_json_payload(require_non_empty_object=True) + if payload_error is not None: + return jsonify({'error': payload_error.message}), payload_error.status_code + + input_type = data.get('input_type') values = data.get('values', []) # Validate input type - valid_types = ['lot_id', 'wafer_lot', 'serial_number', 'work_order', 'gd_work_order', 'gd_lot_id'] + valid_types = ['lot_id', 'wafer_lot', 'serial_number', 'work_order', 'gd_work_order', 'gd_lot_id'] if input_type not in valid_types: return jsonify({'error': f'不支援的查詢類型: {input_type}'}), 400 @@ -228,7 +289,7 @@ def resolve_lot_input(): @query_tool_bp.route('/api/query-tool/lot-history', methods=['GET']) 
@_QUERY_TOOL_HISTORY_RATE_LIMIT -def query_lot_history(): +def query_lot_history(): """Query production history for one or more LOTs. Query params: @@ -251,15 +312,15 @@ def query_lot_history(): ] # Batch mode: container_ids takes precedence - if container_ids_param: - cids = [c.strip() for c in container_ids_param.split(',') if c.strip()] - if not cids: - return jsonify({'error': '請指定 CONTAINERID'}), 400 - too_large = _reject_if_batch_too_large(cids) - if too_large is not None: - return too_large - result = get_lot_history_batch(cids, workcenter_groups=workcenter_groups) - elif container_id: + if container_ids_param: + cids = [c.strip() for c in container_ids_param.split(',') if c.strip()] + if not cids: + return jsonify({'error': '請指定 CONTAINERID'}), 400 + too_large = _reject_if_batch_too_large(cids) + if too_large is not None: + return too_large + result = get_lot_history_batch(cids, workcenter_groups=workcenter_groups) + elif container_id: result = get_lot_history(container_id, workcenter_groups=workcenter_groups) else: return jsonify({'error': '請指定 CONTAINERID'}), 400 @@ -327,21 +388,21 @@ def query_lot_associations(): container_id = request.args.get('container_id') assoc_type = request.args.get('type') - valid_types = ['materials', 'rejects', 'holds', 'splits', 'jobs'] + valid_types = ['materials', 'rejects', 'holds', 'splits', 'jobs'] if assoc_type not in valid_types: return jsonify({'error': f'不支援的關聯類型: {assoc_type}'}), 400 # Batch mode for materials/rejects/holds - batch_types = {'materials', 'rejects', 'holds'} - if container_ids_param and assoc_type in batch_types: - cids = [c.strip() for c in container_ids_param.split(',') if c.strip()] - if not cids: - return jsonify({'error': '請指定 CONTAINERID'}), 400 - too_large = _reject_if_batch_too_large(cids) - if too_large is not None: - return too_large - result = get_lot_associations_batch(cids, assoc_type) - else: + batch_types = {'materials', 'rejects', 'holds'} + if container_ids_param and assoc_type in 
batch_types: + cids = [c.strip() for c in container_ids_param.split(',') if c.strip()] + if not cids: + return jsonify({'error': '請指定 CONTAINERID'}), 400 + too_large = _reject_if_batch_too_large(cids) + if too_large is not None: + return too_large + result = get_lot_associations_batch(cids, assoc_type) + else: if not container_id: return jsonify({'error': '請指定 CONTAINERID'}), 400 @@ -349,12 +410,12 @@ def query_lot_associations(): result = get_lot_materials(container_id) elif assoc_type == 'rejects': result = get_lot_rejects(container_id) - elif assoc_type == 'holds': - result = get_lot_holds(container_id) - elif assoc_type == 'splits': - full_history = str(request.args.get('full_history', '')).strip().lower() in {'1', 'true', 'yes'} - result = get_lot_splits(container_id, full_history=full_history) - elif assoc_type == 'jobs': + elif assoc_type == 'holds': + result = get_lot_holds(container_id) + elif assoc_type == 'splits': + full_history = str(request.args.get('full_history', '')).strip().lower() in {'1', 'true', 'yes'} + result = get_lot_splits(container_id, full_history=full_history) + elif assoc_type == 'jobs': equipment_id = request.args.get('equipment_id') time_start = request.args.get('time_start') time_end = request.args.get('time_end') @@ -376,7 +437,7 @@ def query_lot_associations(): @query_tool_bp.route('/api/query-tool/equipment-period', methods=['POST']) @_QUERY_TOOL_EQUIPMENT_RATE_LIMIT -def query_equipment_period(): +def query_equipment_period(): """Query equipment data for a time period. Expects JSON body: @@ -390,11 +451,11 @@ def query_equipment_period(): Returns data based on query_type. 
""" - data, payload_error = parse_json_payload(require_non_empty_object=True) - if payload_error is not None: - return jsonify({'error': payload_error.message}), payload_error.status_code - - equipment_ids = data.get('equipment_ids', []) + data, payload_error = parse_json_payload(require_non_empty_object=True) + if payload_error is not None: + return jsonify({'error': payload_error.message}), payload_error.status_code + + equipment_ids = data.get('equipment_ids', []) equipment_names = data.get('equipment_names', []) start_date = data.get('start_date') end_date = data.get('end_date') @@ -517,7 +578,7 @@ def get_workcenter_groups_list(): @query_tool_bp.route('/api/query-tool/export-csv', methods=['POST']) @_QUERY_TOOL_EXPORT_RATE_LIMIT -def export_csv(): +def export_csv(): """Export query results as CSV. Expects JSON body: @@ -531,11 +592,11 @@ def export_csv(): Returns streaming CSV response. """ - data, payload_error = parse_json_payload(require_non_empty_object=True) - if payload_error is not None: - return jsonify({'error': payload_error.message}), payload_error.status_code - - export_type = data.get('export_type') + data, payload_error = parse_json_payload(require_non_empty_object=True) + if payload_error is not None: + return jsonify({'error': payload_error.message}), payload_error.status_code + + export_type = data.get('export_type') params = data.get('params', {}) # Get data based on export type @@ -558,10 +619,10 @@ def export_csv(): ) filename = 'adjacent_lots.csv' - elif export_type == 'lot_materials': - container_id = params.get('container_id') - result = get_lot_materials(container_id) - filename = f'lot_raw_materials_{container_id}.csv' + elif export_type == 'lot_materials': + container_id = params.get('container_id') + result = get_lot_materials(container_id) + filename = f'lot_raw_materials_{container_id}.csv' elif export_type == 'lot_rejects': container_id = params.get('container_id') @@ -649,23 +710,29 @@ def export_csv(): error_msg = 
result.get('error', '查詢失敗') if result else '查詢失敗' return jsonify({'error': error_msg}), 400 - export_data = result.get('data', []) - if not export_data: - return jsonify({'error': '查無資料'}), 404 - - if export_type == 'lot_materials': - export_data = _format_lot_materials_export_rows(export_data) - elif export_type == 'lot_holds': - export_data = _format_lot_holds_export_rows(export_data) - - # Stream CSV response - return Response( - generate_csv_stream(export_data), - mimetype='text/csv; charset=utf-8-sig', - headers={ - 'Content-Disposition': f'attachment; filename={filename}' - } - ) + export_data = result.get('data', []) + if not export_data: + return jsonify({'error': '查無資料'}), 404 + + if export_type == 'lot_materials': + export_data = _format_lot_materials_export_rows(export_data) + elif export_type == 'lot_holds': + export_data = _format_lot_holds_export_rows(export_data) + elif export_type == 'equipment_lots': + export_data = _format_equipment_lots_export_rows(export_data) + elif export_type == 'lot_history': + export_data = _format_lot_history_export_rows(export_data) + elif export_type == 'lot_rejects': + export_data = _format_lot_rejects_export_rows(export_data) + + # Stream CSV response + return Response( + generate_csv_stream(export_data), + mimetype='text/csv; charset=utf-8-sig', + headers={ + 'Content-Disposition': f'attachment; filename={filename}' + } + ) except Exception as exc: return jsonify({'error': f'匯出失敗: {str(exc)}'}), 500
-LOT
+LOT ID
 WORKCENTER
 Package
 FUNCTION