harden released pages and archive openspec change

This commit is contained in:
egg
2026-02-23 17:48:32 +08:00
parent 6e2ff9813e
commit e5d7700b36
47 changed files with 2126 additions and 141 deletions

View File

@@ -31,6 +31,7 @@ DB_CALL_TIMEOUT_MS=55000 # Must stay below worker timeout
# Flask Configuration # Flask Configuration
# ============================================================ # ============================================================
# Environment mode: development | production | testing # Environment mode: development | production | testing
# If omitted, runtime defaults to production (fail-safe)
FLASK_ENV=development FLASK_ENV=development
# Debug mode: 0 for production, 1 for development # Debug mode: 0 for production, 1 for development
@@ -43,6 +44,24 @@ SECRET_KEY=your-secret-key-change-in-production
# Session timeout in seconds (default: 28800 = 8 hours) # Session timeout in seconds (default: 28800 = 8 hours)
SESSION_LIFETIME=28800 SESSION_LIFETIME=28800
# JSON request payload upper bound in bytes (default: 262144 = 256KB)
MAX_JSON_BODY_BYTES=262144
# Route input-budget guardrails
QUERY_TOOL_MAX_CONTAINER_IDS=200
RESOURCE_DETAIL_DEFAULT_LIMIT=500
RESOURCE_DETAIL_MAX_LIMIT=500
# Trust boundary for forwarded headers (safe default: false)
# Direct-exposure deployment (no reverse proxy): keep this false
TRUST_PROXY_HEADERS=false
# Required when TRUST_PROXY_HEADERS=true. Supports comma-separated IP/CIDR entries.
# Example: TRUSTED_PROXY_IPS=127.0.0.1,10.0.0.0/24
TRUSTED_PROXY_IPS=
# CSP opt-in compatibility flag (default false = safer)
CSP_ALLOW_UNSAFE_EVAL=false
# ============================================================ # ============================================================
# Authentication Configuration # Authentication Configuration
# ============================================================ # ============================================================

View File

@@ -0,0 +1,61 @@
name: released-pages-hardening-gates
on:
pull_request:
paths:
- "src/mes_dashboard/**"
- "frontend/src/job-query/**"
- "tests/test_query_tool_routes.py"
- "tests/test_job_query_routes.py"
- "tests/test_resource_routes.py"
- "tests/test_rate_limit_identity.py"
- "tests/test_page_registry.py"
- "tests/test_redis_client.py"
- "tests/test_runtime_hardening.py"
- "tests/test_hold_routes.py"
- "tests/test_wip_routes.py"
- "tests/test_job_query_frontend_safety.py"
- ".github/workflows/released-pages-hardening-gates.yml"
push:
branches: [ main ]
paths:
- "src/mes_dashboard/**"
- "frontend/src/job-query/**"
- "tests/test_query_tool_routes.py"
- "tests/test_job_query_routes.py"
- "tests/test_resource_routes.py"
- "tests/test_rate_limit_identity.py"
- "tests/test_page_registry.py"
- "tests/test_redis_client.py"
- "tests/test_runtime_hardening.py"
- "tests/test_hold_routes.py"
- "tests/test_wip_routes.py"
- "tests/test_job_query_frontend_safety.py"
- ".github/workflows/released-pages-hardening-gates.yml"
jobs:
released-pages-hardening:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- name: Install test dependencies
run: |
python -m pip install --upgrade pip
python -m pip install -e . pytest
- name: Run released-pages hardening regression suite
run: |
python -m pytest -q \
tests/test_query_tool_routes.py \
tests/test_job_query_routes.py \
tests/test_resource_routes.py \
tests/test_rate_limit_identity.py \
tests/test_page_registry.py \
tests/test_redis_client.py \
tests/test_runtime_hardening.py \
tests/test_hold_routes.py \
tests/test_wip_routes.py \
tests/test_job_query_frontend_safety.py \
-k "not TestJobQueryPage and not TestHoldDetailPageRoute and not TestPageRoutes"

View File

@@ -277,6 +277,19 @@ DB_PASSWORD=your_password
# Flask 設定 # Flask 設定
FLASK_ENV=production # production | development FLASK_ENV=production # production | development
SECRET_KEY=your-secret-key # 生產環境請更換 SECRET_KEY=your-secret-key # 生產環境請更換
MAX_JSON_BODY_BYTES=262144 # JSON 請求大小上限(bytes)
# 輸入預算保護(Released 高成本 API)
QUERY_TOOL_MAX_CONTAINER_IDS=200
RESOURCE_DETAIL_DEFAULT_LIMIT=500
RESOURCE_DETAIL_MAX_LIMIT=500
# 反向代理信任邊界(無反向代理時務必維持 false)
TRUST_PROXY_HEADERS=false
TRUSTED_PROXY_IPS=127.0.0.1
# CSP 相容開關(預設 false,僅在必要時啟用)
CSP_ALLOW_UNSAFE_EVAL=false
# Gunicorn 設定 # Gunicorn 設定
GUNICORN_BIND=0.0.0.0:8080 # 服務監聽位址 GUNICORN_BIND=0.0.0.0:8080 # 服務監聽位址

View File

@@ -0,0 +1,22 @@
{
"in_scope_required_assets": {
"/wip-overview": ["wip-overview.js"],
"/wip-detail": ["wip-detail.js"],
"/hold-overview": ["hold-overview.js"],
"/hold-detail": ["hold-detail.js"],
"/hold-history": ["hold-history.js"],
"/reject-history": ["reject-history.js"],
"/resource": ["resource-status.js"],
"/resource-history": ["resource-history.js"],
"/qc-gate": ["qc-gate.js"],
"/job-query": ["job-query.js"],
"/tmtt-defect": ["tmtt-defect.js"],
"/admin/pages": ["admin-pages.js"],
"/admin/performance": ["admin-performance.js"],
"/tables": ["tables.js"],
"/excel-query": ["excel-query.js"],
"/query-tool": ["query-tool.js"],
"/mid-section-defect": ["mid-section-defect.js"]
},
"deferred_routes": []
}

View File

@@ -0,0 +1,3 @@
{
"records": []
}

View File

@@ -0,0 +1,123 @@
{
"entries": [
{
"id": "style-legacy-wip-overview",
"scope": "/wip-overview",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-wip-detail",
"scope": "/wip-detail",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-hold-overview",
"scope": "/hold-overview",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-hold-detail",
"scope": "/hold-detail",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-hold-history",
"scope": "/hold-history",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-reject-history",
"scope": "/reject-history",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-resource",
"scope": "/resource",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-resource-history",
"scope": "/resource-history",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-qc-gate",
"scope": "/qc-gate",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-job-query",
"scope": "/job-query",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-tmtt-defect",
"scope": "/tmtt-defect",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-admin-pages",
"scope": "/admin/pages",
"owner": "frontend-platform-admin",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-admin-performance",
"scope": "/admin/performance",
"owner": "frontend-platform-admin",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-tables",
"scope": "/tables",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-excel-query",
"scope": "/excel-query",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-query-tool",
"scope": "/query-tool",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
},
{
"id": "style-legacy-mid-section-defect",
"scope": "/mid-section-defect",
"owner": "frontend-mes-reporting",
"milestone": "full-modernization-phase2",
"reason": "Legacy styles pending full token and scope migration"
}
]
}

View File

@@ -0,0 +1,21 @@
{
"routes": {
"/wip-overview": { "known_issues": [] },
"/wip-detail": { "known_issues": [] },
"/hold-overview": { "known_issues": [] },
"/hold-detail": { "known_issues": [] },
"/hold-history": { "known_issues": [] },
"/reject-history": { "known_issues": [] },
"/resource": { "known_issues": [] },
"/resource-history": { "known_issues": [] },
"/qc-gate": { "known_issues": [] },
"/job-query": { "known_issues": [] },
"/tmtt-defect": { "known_issues": [] },
"/tables": { "known_issues": [] },
"/excel-query": { "known_issues": [] },
"/query-tool": { "known_issues": [] },
"/mid-section-defect": { "known_issues": [] },
"/admin/pages": { "known_issues": [] },
"/admin/performance": { "known_issues": [] }
}
}

View File

@@ -0,0 +1,3 @@
{
"records": []
}

View File

@@ -0,0 +1,6 @@
{
"severity_mode": {
"current": "block"
},
"deferred_routes_excluded": []
}

View File

@@ -0,0 +1,208 @@
{
"routes": [
{
"route": "/wip-overview",
"route_id": "wip-overview",
"title": "WIP Overview",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/wip-overview",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/wip-detail",
"route_id": "wip-detail",
"title": "WIP Detail",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/wip-detail",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/hold-overview",
"route_id": "hold-overview",
"title": "Hold Overview",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/hold-overview",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/hold-detail",
"route_id": "hold-detail",
"title": "Hold Detail",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/hold-detail",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/hold-history",
"route_id": "hold-history",
"title": "Hold History",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/hold-history",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/reject-history",
"route_id": "reject-history",
"title": "Reject History",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/reject-history",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/resource",
"route_id": "resource",
"title": "Resource",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/resource",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/resource-history",
"route_id": "resource-history",
"title": "Resource History",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/resource-history",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/qc-gate",
"route_id": "qc-gate",
"title": "QC Gate",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/qc-gate",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/job-query",
"route_id": "job-query",
"title": "Job Query",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/job-query",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/tmtt-defect",
"route_id": "tmtt-defect",
"title": "TMTT Defect",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/tmtt-defect",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/admin/pages",
"route_id": "admin-pages",
"title": "Admin Pages",
"scope": "in-scope",
"render_mode": "external",
"owner": "frontend-platform-admin",
"visibility_policy": "admin_only",
"canonical_shell_path": "/portal-shell/admin/pages",
"rollback_strategy": "external_route_reversion",
"compatibility_policy": "external_target_redirect"
},
{
"route": "/admin/performance",
"route_id": "admin-performance",
"title": "Admin Performance",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-platform-admin",
"visibility_policy": "admin_only",
"canonical_shell_path": "/portal-shell/admin/performance",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/tables",
"route_id": "tables",
"title": "Tables",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/tables",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/excel-query",
"route_id": "excel-query",
"title": "Excel Query",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/excel-query",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/query-tool",
"route_id": "query-tool",
"title": "Query Tool",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/query-tool",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
},
{
"route": "/mid-section-defect",
"route_id": "mid-section-defect",
"title": "Mid Section Defect",
"scope": "in-scope",
"render_mode": "native",
"owner": "frontend-mes-reporting",
"visibility_policy": "released_or_admin",
"canonical_shell_path": "/portal-shell/mid-section-defect",
"rollback_strategy": "fallback_to_legacy_route",
"compatibility_policy": "redirect_to_shell_when_spa_enabled"
}
]
}

View File

@@ -0,0 +1,22 @@
{
"in_scope": [
{ "route": "/wip-overview", "category": "report" },
{ "route": "/wip-detail", "category": "report" },
{ "route": "/hold-overview", "category": "report" },
{ "route": "/hold-detail", "category": "report" },
{ "route": "/hold-history", "category": "report" },
{ "route": "/reject-history", "category": "report" },
{ "route": "/resource", "category": "report" },
{ "route": "/resource-history", "category": "report" },
{ "route": "/qc-gate", "category": "report" },
{ "route": "/job-query", "category": "report" },
{ "route": "/tmtt-defect", "category": "report" },
{ "route": "/tables", "category": "report" },
{ "route": "/excel-query", "category": "report" },
{ "route": "/query-tool", "category": "report" },
{ "route": "/mid-section-defect", "category": "report" },
{ "route": "/admin/pages", "category": "admin" },
{ "route": "/admin/performance", "category": "admin" }
],
"deferred": []
}

View File

@@ -0,0 +1,4 @@
{
"routes": {},
"notes": "Baseline placeholder inventory"
}

View File

@@ -0,0 +1,46 @@
{
"source": "current frontend API consumption contracts",
"apis": {
"/api/wip/overview/summary": {
"required_keys": [
"dataUpdateDate",
"runLots",
"queueLots",
"holdLots"
],
"notes": "summary header and cards depend on these fields"
},
"/api/wip/overview/matrix": {
"required_keys": [
"workcenters",
"packages",
"matrix",
"workcenter_totals"
],
"notes": "matrix table rendering contract"
},
"/api/wip/hold-detail/summary": {
"required_keys": [
"workcenterCount",
"packageCount",
"lotCount"
],
"notes": "hold detail summary cards contract"
},
"/api/resource/history/summary": {
"required_keys": [
"kpi",
"trend",
"heatmap",
"workcenter_comparison"
],
"notes": "resource history chart summary contract"
},
"/api/resource/history/detail": {
"required_keys": [
"data"
],
"notes": "detail table contract (plus truncated/max_records metadata when present)"
}
}
}

View File

@@ -0,0 +1,4 @@
{
"source": "data/page_status.json",
"errors": []
}

View File

@@ -0,0 +1,201 @@
{
"source": "data/page_status.json",
"admin": [
{
"id": "reports",
"name": "即時報表",
"order": 1,
"admin_only": false,
"pages": [
{
"route": "/wip-overview",
"name": "WIP 即時概況",
"status": "released",
"order": 1
},
{
"route": "/hold-overview",
"name": "Hold 即時概況",
"status": "released",
"order": 2
},
{
"route": "/resource",
"name": "設備即時概況",
"status": "released",
"order": 4
},
{
"route": "/qc-gate",
"name": "QC-GATE 狀態",
"status": "released",
"order": 6
}
]
},
{
"id": "drawer-2",
"name": "歷史報表",
"order": 2,
"admin_only": false,
"pages": [
{
"route": "/hold-history",
"name": "Hold 歷史績效",
"status": "released",
"order": 3
},
{
"route": "/reject-history",
"name": "報廢歷史查詢",
"status": "dev",
"order": 4
},
{
"route": "/resource-history",
"name": "設備歷史績效",
"status": "released",
"order": 5
}
]
},
{
"id": "drawer",
"name": "查詢工具",
"order": 3,
"admin_only": false,
"pages": [
{
"route": "/job-query",
"name": "設備維修查詢",
"status": "released",
"order": 1
},
{
"route": "/query-tool",
"name": "批次追蹤工具",
"status": "released",
"order": 2
}
]
},
{
"id": "dev-tools",
"name": "開發工具",
"order": 4,
"admin_only": true,
"pages": [
{
"route": "/tables",
"name": "表格總覽",
"status": "dev",
"order": 1
},
{
"route": "/admin/pages",
"name": "頁面管理",
"status": "released",
"order": 1
},
{
"route": "/excel-query",
"name": "Excel 批次查詢",
"status": "dev",
"order": 2
},
{
"route": "/admin/performance",
"name": "效能監控",
"status": "dev",
"order": 2
},
{
"route": "/tmtt-defect",
"name": "TMTT印字腳型不良分析",
"status": "dev",
"order": 5
},
{
"route": "/mid-section-defect",
"name": "中段製程不良追溯",
"status": "dev",
"order": 6
}
]
}
],
"non_admin": [
{
"id": "reports",
"name": "即時報表",
"order": 1,
"admin_only": false,
"pages": [
{
"route": "/wip-overview",
"name": "WIP 即時概況",
"status": "released",
"order": 1
},
{
"route": "/hold-overview",
"name": "Hold 即時概況",
"status": "released",
"order": 2
},
{
"route": "/resource",
"name": "設備即時概況",
"status": "released",
"order": 4
},
{
"route": "/qc-gate",
"name": "QC-GATE 狀態",
"status": "released",
"order": 6
}
]
},
{
"id": "drawer-2",
"name": "歷史報表",
"order": 2,
"admin_only": false,
"pages": [
{
"route": "/hold-history",
"name": "Hold 歷史績效",
"status": "released",
"order": 3
},
{
"route": "/resource-history",
"name": "設備歷史績效",
"status": "released",
"order": 5
}
]
},
{
"id": "drawer",
"name": "查詢工具",
"order": 3,
"admin_only": false,
"pages": [
{
"route": "/job-query",
"name": "設備維修查詢",
"status": "released",
"order": 1
},
{
"route": "/query-tool",
"name": "批次追蹤工具",
"status": "released",
"order": 2
}
]
}
]
}

View File

@@ -0,0 +1,46 @@
{
"source": "frontend route parsing and current parity matrix",
"routes": {
"/wip-overview": {
"query_keys": [
"workorder",
"lotid",
"package",
"type",
"status"
],
"notes": "filters + status URL state must remain compatible"
},
"/wip-detail": {
"query_keys": [
"workcenter",
"workorder",
"lotid",
"package",
"type",
"status"
],
"notes": "workcenter deep-link and back-link query continuity"
},
"/hold-detail": {
"query_keys": [
"reason"
],
"notes": "reason required for normal access flow"
},
"/resource-history": {
"query_keys": [
"start_date",
"end_date",
"granularity",
"workcenter_groups",
"families",
"resource_ids",
"is_production",
"is_key",
"is_monitor"
],
"notes": "query/export params must remain compatible"
}
}
}

View File

@@ -11,10 +11,27 @@ window.__FIELD_CONTRACTS__['job_query:txn_table'] = getPageContract('job_query',
const jobTableFields = getPageContract('job_query', 'jobs_table'); const jobTableFields = getPageContract('job_query', 'jobs_table');
const txnTableFields = getPageContract('job_query', 'txn_table'); const txnTableFields = getPageContract('job_query', 'txn_table');
function toDataToken(value) {
return encodeURIComponent(safeText(value));
}
function fromDataToken(value) {
if (!value) {
return '';
}
try {
return decodeURIComponent(value);
} catch (_error) {
return value;
}
}
function renderJobCell(job, apiKey) { function renderJobCell(job, apiKey) {
if (apiKey === 'JOBSTATUS') { if (apiKey === 'JOBSTATUS') {
const value = safeText(job[apiKey]); const value = safeText(job[apiKey]);
return `<span class="status-badge ${value}">${value}</span>`; const classToken = safeText(value).replace(/[^A-Za-z0-9_-]/g, '_');
const escaped = escapeHtml(value);
return `<span class="status-badge ${classToken}">${escaped}</span>`;
} }
if (apiKey === 'CREATEDATE' || apiKey === 'COMPLETEDATE') { if (apiKey === 'CREATEDATE' || apiKey === 'COMPLETEDATE') {
return formatDate(job[apiKey]); return formatDate(job[apiKey]);
@@ -25,7 +42,9 @@ function renderJobCell(job, apiKey) {
function renderTxnCell(txn, apiKey) { function renderTxnCell(txn, apiKey) {
if (apiKey === 'FROMJOBSTATUS' || apiKey === 'JOBSTATUS') { if (apiKey === 'FROMJOBSTATUS' || apiKey === 'JOBSTATUS') {
const value = safeText(txn[apiKey], '-'); const value = safeText(txn[apiKey], '-');
return `<span class="status-badge ${escapeHtml(value)}">${escapeHtml(value)}</span>`; const classToken = safeText(value).replace(/[^A-Za-z0-9_-]/g, '_');
const escaped = escapeHtml(value);
return `<span class="status-badge ${classToken}">${escaped}</span>`;
} }
if (apiKey === 'TXNDATE') { if (apiKey === 'TXNDATE') {
return formatDate(txn[apiKey]); return formatDate(txn[apiKey]);
@@ -48,6 +67,16 @@ function renderTxnCell(txn, apiKey) {
loadEquipments(); loadEquipments();
setLast90Days(); setLast90Days();
const equipmentList = document.getElementById('equipmentList');
if (equipmentList) {
equipmentList.addEventListener('click', handleEquipmentListClick);
}
const resultSection = document.getElementById('resultSection');
if (resultSection) {
resultSection.addEventListener('click', handleResultSectionClick);
}
// Close dropdown when clicking outside // Close dropdown when clicking outside
document.addEventListener('click', (e) => { document.addEventListener('click', (e) => {
const dropdown = document.getElementById('equipmentDropdown'); const dropdown = document.getElementById('equipmentDropdown');
@@ -94,20 +123,22 @@ function renderTxnCell(txn, apiKey) {
const allSelected = selectedInGroup === groupIds.length; const allSelected = selectedInGroup === groupIds.length;
const someSelected = selectedInGroup > 0 && !allSelected; const someSelected = selectedInGroup > 0 && !allSelected;
const escapedName = escapeHtml(workcenterName); const escapedName = escapeHtml(workcenterName);
html += `<div class="workcenter-group-header" onclick="toggleWorkcenterGroup('${escapedName}')"> const workcenterToken = toDataToken(workcenterName);
<input type="checkbox" ${allSelected ? 'checked' : ''} ${someSelected ? 'class="indeterminate"' : ''} onclick="event.stopPropagation(); toggleWorkcenterGroup('${escapedName}')"> html += `<div class="workcenter-group-header" data-action="toggle-workcenter-group" data-workcenter="${workcenterToken}">
<input type="checkbox" ${allSelected ? 'checked' : ''} ${someSelected ? 'class="indeterminate"' : ''} data-action="toggle-workcenter-group" data-workcenter="${workcenterToken}">
<span class="workcenter-group-name">${escapedName}</span> <span class="workcenter-group-name">${escapedName}</span>
<span class="workcenter-group-count">${selectedInGroup}/${groupIds.length}</span> <span class="workcenter-group-count">${selectedInGroup}/${groupIds.length}</span>
</div>`; </div>`;
groupEquipments.forEach((eq) => { groupEquipments.forEach((eq) => {
const isSelected = selectedEquipments.has(eq.RESOURCEID); const isSelected = selectedEquipments.has(eq.RESOURCEID);
const resourceId = escapeHtml(safeText(eq.RESOURCEID)); const resourceId = safeText(eq.RESOURCEID);
const resourceIdToken = toDataToken(resourceId);
const resourceName = escapeHtml(safeText(eq.RESOURCENAME)); const resourceName = escapeHtml(safeText(eq.RESOURCENAME));
const familyName = escapeHtml(safeText(eq.RESOURCEFAMILYNAME)); const familyName = escapeHtml(safeText(eq.RESOURCEFAMILYNAME));
html += ` html += `
<div class="equipment-item ${isSelected ? 'selected' : ''}" onclick="toggleEquipment('${resourceId}')"> <div class="equipment-item ${isSelected ? 'selected' : ''}" data-action="toggle-equipment" data-resource-id="${resourceIdToken}">
<input type="checkbox" ${isSelected ? 'checked' : ''} onclick="event.stopPropagation(); toggleEquipment('${resourceId}')"> <input type="checkbox" ${isSelected ? 'checked' : ''} data-action="toggle-equipment" data-resource-id="${resourceIdToken}">
<div class="equipment-info"> <div class="equipment-info">
<div class="equipment-name">${resourceName}</div> <div class="equipment-name">${resourceName}</div>
<div class="equipment-workcenter">${familyName}</div> <div class="equipment-workcenter">${familyName}</div>
@@ -120,6 +151,30 @@ function renderTxnCell(txn, apiKey) {
container.innerHTML = html; container.innerHTML = html;
} }
function handleEquipmentListClick(event) {
const trigger = event.target.closest('[data-action]');
if (!trigger) {
return;
}
if (trigger.dataset.action === 'toggle-workcenter-group') {
const workcenterName = fromDataToken(trigger.dataset.workcenter);
if (!workcenterName) {
return;
}
toggleWorkcenterGroup(workcenterName);
return;
}
if (trigger.dataset.action === 'toggle-equipment') {
const resourceId = fromDataToken(trigger.dataset.resourceId);
if (!resourceId) {
return;
}
toggleEquipment(resourceId);
}
}
// Toggle equipment dropdown // Toggle equipment dropdown
function toggleEquipmentDropdown() { function toggleEquipmentDropdown() {
const dropdown = document.getElementById('equipmentDropdown'); const dropdown = document.getElementById('equipmentDropdown');
@@ -299,8 +354,8 @@ function renderTxnCell(txn, apiKey) {
<div class="result-header"> <div class="result-header">
<div class="result-info">共 ${jobsData.length} 筆工單</div> <div class="result-info">共 ${jobsData.length} 筆工單</div>
<div class="result-actions"> <div class="result-actions">
<button class="btn btn-secondary btn-sm" onclick="expandAll()">全部展開</button> <button type="button" class="btn btn-secondary btn-sm" data-action="expand-all">全部展開</button>
<button class="btn btn-secondary btn-sm" onclick="collapseAll()">全部收合</button> <button type="button" class="btn btn-secondary btn-sm" data-action="collapse-all">全部收合</button>
</div> </div>
</div> </div>
<div class="table-container"> <div class="table-container">
@@ -316,13 +371,14 @@ function renderTxnCell(txn, apiKey) {
jobsData.forEach((job, idx) => { jobsData.forEach((job, idx) => {
const isExpanded = expandedJobs.has(job.JOBID); const isExpanded = expandedJobs.has(job.JOBID);
const jobIdToken = toDataToken(job.JOBID);
const jobCells = jobTableFields const jobCells = jobTableFields
.map((field) => `<td>${renderJobCell(job, field.api_key)}</td>`) .map((field) => `<td>${renderJobCell(job, field.api_key)}</td>`)
.join(''); .join('');
html += ` html += `
<tr class="job-row ${isExpanded ? 'expanded' : ''}" id="job-row-${idx}"> <tr class="job-row ${isExpanded ? 'expanded' : ''}" id="job-row-${idx}">
<td> <td>
<button class="expand-btn" onclick="toggleJobHistory('${escapeHtml(safeText(job.JOBID))}', ${idx})"> <button type="button" class="expand-btn" data-action="toggle-job-history" data-job-id="${jobIdToken}" data-row-index="${idx}">
<span class="arrow-icon ${isExpanded ? 'rotated' : ''}">▶</span> <span class="arrow-icon ${isExpanded ? 'rotated' : ''}">▶</span>
</button> </button>
</td> </td>
@@ -355,6 +411,31 @@ function renderTxnCell(txn, apiKey) {
void loadHistoriesBatched(pendingLoads); void loadHistoriesBatched(pendingLoads);
} }
function handleResultSectionClick(event) {
const trigger = event.target.closest('[data-action]');
if (!trigger) {
return;
}
const action = trigger.dataset.action;
if (action === 'expand-all') {
expandAll();
return;
}
if (action === 'collapse-all') {
collapseAll();
return;
}
if (action === 'toggle-job-history') {
const idx = Number.parseInt(trigger.dataset.rowIndex || '', 10);
const jobId = fromDataToken(trigger.dataset.jobId);
if (!Number.isInteger(idx) || !jobId) {
return;
}
void toggleJobHistory(jobId, idx);
}
}
// Toggle job history // Toggle job history
async function toggleJobHistory(jobId, idx) { async function toggleJobHistory(jobId, idx) {
const txnRow = document.getElementById(`txn-row-${idx}`); const txnRow = document.getElementById(`txn-row-${idx}`);

View File

@@ -0,0 +1,2 @@
schema: spec-driven
created: 2026-02-23

View File

@@ -0,0 +1,90 @@
## Context
Released 頁面已直接使用於生產,且現行部署為單層對外服務(無反向代理)。現況存在多個交叉風險:
- JSON 解析錯誤可能透過全域 exception handler 回落為 500。
- 部分高成本查詢端點缺乏批量輸入與查詢筆數上限。
- rate-limit client key 可能受 `X-Forwarded-For` spoofing 影響。
- 設定載入在缺漏時存在偏寬鬆預設(含 API 可見性、環境模式)。
- 記錄連線 URL 時可能暴露敏感資訊。
- 前端仍有 inline handler 字串插值路徑。
本變更屬跨模組 hardening(routes/core/config/frontend/tests),且要求在不破壞 Released 正常流程下補齊安全與穩定性基線。
## Goals / Non-Goals
**Goals:**
- 將 Released 高風險端點的輸入錯誤語義固定為可預期 4xx。
- 對 batch / detail 查詢導入可設定的硬上限與拒絕策略。
- 在無 proxy 預設下建立正確的 rate-limit 信任邊界。
- 將生產安全設定調整為 fail-safe 預設並加入啟動檢查。
- 移除已知前端 inline 插值風險點並補強測試,確保無回歸。
**Non-Goals:**
- 不重寫 Released 頁面的商業邏輯或資料模型。
- 不改動 Oracle schema 或新增外部服務。
- 不一次性移除全站所有 legacy inline script(以風險最高路徑優先)。
## Decisions
### Decision 1: 建立一致的 JSON 輸入驗證邊界,將解析失敗明確轉為 4xx
- 選擇:在 Released 相關 JSON routes 採一致的 request parsing helper(含 content-type 與 malformed JSON 驗證),回傳 400/415;僅真正未預期例外才走 500。
- 理由:修正「客戶端錯誤被誤判為服務端錯誤」並提升可觀測性。
- 替代方案:維持各 route 自行 `get_json()` + 全域 handler。
- 未採用原因:行為不一致且易再次回歸 500。
### Decision 2: 以設定驅動的輸入預算(input budget)治理高成本端點
- 選擇:新增集中化上限設定(例如 `QUERY_TOOL_MAX_CONTAINER_IDS`、`RESOURCE_DETAIL_MAX_LIMIT`、`MAX_JSON_BODY_BYTES`),route 先驗證再呼叫 service。
- 理由:避免 hardcode 分散、便於環境調優與壓測。
- 替代方案:在 service 層被動截斷或依 DB timeout 自然保護。
- 未採用原因:無法在入口即時拒絕,仍浪費應用資源。
### Decision 3: 以「預設不信任 proxy headers」實作 rate-limit identity
- 選擇:新增 `TRUST_PROXY_HEADERS=false` 預設;只有顯式開啟且來源符合 trusted proxy 條件時才使用 `X-Forwarded-For`。
- 理由:符合當前無反向代理部署現況,避免 IP spoofing 使限流失效。
- 替代方案:永遠信任 XFF。
- 未採用原因:對外直連部署下可被任意偽造。
### Decision 4: 生產安全設定 fail-safe 與敏感資訊遮罩
- 選擇:`api_public` 缺值或配置錯誤時預設 false;`SECRET_KEY` 等關鍵安全變數缺失時拒絕啟動或進入明確受限模式;所有 URL 型密鑰資訊在 log 遮罩。
- 理由:把「配置失誤」從安全事件轉為可診斷的啟動錯誤。
- 替代方案:保留寬鬆 fallback(例如預設公開 API)。
- 未採用原因:與生產最小暴露原則衝突。
### Decision 5: 前端高風險 inline handler 先行替換為安全事件綁定
- 選擇:針對 Released 且已觀察到風險的 job-query 動作欄位,改為 data attribute + addEventListener避免 raw 字串 `onclick` 插值。
- 理由:以最小變更降低 XSS/斷裂風險且不影響 UX。
- 替代方案:一次性重構所有頁面事件綁定。
- 未採用原因:變更面過大,不利快速風險收斂。
### Decision 6: 以「負向測試 + 既有契約測試」雙軌防回歸
- 選擇:新增 hardening 專屬負向測試(invalid JSON、超量輸入、限流來源、secret redaction),並保留既有 released route 正向契約測試,兩者皆納入 CI gate。
- 理由:確保防護生效且既有功能不被破壞。
- 替代方案:僅補單元測試或手動驗證。
- 未採用原因:無法長期防止行為漂移。
## Risks / Trade-offs
- [Risk] 新增 4xx 驗證可能影響少量既有錯誤處理流程 → Mitigation: 僅對 JSON-only endpoint 啟用,並以契約測試固定成功路徑。
- [Risk] 輸入上限過低可能影響查詢體驗 → Mitigation: 上限參數化並透過壓測/實際流量校準。
- [Risk] fail-safe 設定可能在配置不完整時阻擋啟動 → Mitigation: 發布前檢查清單與啟動時清楚錯誤訊息。
- [Risk] 前端事件綁定改動造成局部互動差異 → Mitigation: 補 UI 行為測試與手動 smoke 驗證。
## Migration Plan
1. 新增設定鍵與預設值(輸入上限、proxy trust、安全啟動檢查),保留清楚註解與環境文件。
2. 先改 route 層 JSON 驗證與批量上限檢查,再補 service 防線(雙層保護)。
3. 更新 rate-limit client identity resolver預設走 `remote_addr`
4. 加入 Redis URL log redaction 與 page registry fail-safe 預設。
5. 調整 job-query 前端事件綁定,移除高風險 inline 插值。
6. 補齊測試:負向 API、限流信任邊界、設定 fail-safe、log redaction、既有 released route 契約。
7. CI 全綠後部署;若出現非預期拒絕,僅允許透過設定值調整上限,不回退安全語義。
Rollback Strategy:
- 若發生突發相容性問題,優先調整上限配置與 trusted proxy 配置;
- 嚴禁回退到「信任任意 XFF」或「invalid JSON 回 500」行為;
- 必要時暫時放寬單一端點上限,但保留防護機制本身。
## Open Questions
- `container_ids` 與 `resource detail limit` 的正式預設值是否以現網 P95 請求分佈定版(例如 200 / 500)?
- trusted proxy 是否需要 CIDR allowlist(而非單純 bool),以支援未來拓樸演進?

View File

@@ -0,0 +1,37 @@
## Why
Released 頁面目前直接套用到生產環境,且部署型態為無反向代理的單層對外服務;現況在 API 輸入驗證、流量防護、設定安全預設、與錯誤處理上仍有可導致 500、資源耗盡或安全邊界被繞過的風險。需要以一次性治理方式補齊基線並建立可重複執行的無回歸驗證避免修正後再次退化。
## What Changes
- 統一 Released 頁面相關高成本 API 的輸入驗證與錯誤語義:非 JSON 或格式錯誤請求回覆 4xx不再落入 500。
- 為 query-tool 與 resource 等批次/明細查詢加入明確上限(批量 ID、limit、payload size與拒絕策略降低 DoS 與慢查風險。
- 強化 rate-limit 客戶端識別信任邊界:在無 trusted proxy 情境下不可直接信任 `X-Forwarded-For`
- 對生產安全設定採 fail-safe 預設:`api_public``FLASK_ENV``SECRET_KEY`、Redis URL log masking 等。
- 收斂前端可注入風險(如 inline handler 字串插值)與 CSP 風險設定,降低 XSS 面。
- 建立 Released 頁面專屬無回歸驗證矩陣(正向、負向、壓力邊界、契約),納入 CI gate。
## Capabilities
### New Capabilities
- `released-pages-production-hardening`: 定義 Released 頁面在生產環境的輸入驗證、資源保護、信任邊界、安全預設與回歸防線要求。
### Modified Capabilities
- None.
## Impact
- Affected code:
- `src/mes_dashboard/routes/job_query_routes.py`
- `src/mes_dashboard/routes/query_tool_routes.py`
- `src/mes_dashboard/routes/resource_routes.py`
- `src/mes_dashboard/routes/hold_routes.py`
- `src/mes_dashboard/routes/wip_routes.py`
- `src/mes_dashboard/core/rate_limit.py`
- `src/mes_dashboard/core/redis_client.py`
- `src/mes_dashboard/config/settings.py`
- `src/mes_dashboard/app.py`
- `frontend/src/job-query/main.js`
- `data/page_status.json`
- APIs/routes: Released route 對應 API包含 `/api/query-tool/*`, `/api/job-query/*`, `/api/resource/*` 等)會新增/明確化 4xx 與 429 邊界行為。
- Tests/quality gates: 新增與擴充 Released 頁面 API 的負向驗證、限流、上限邊界與模板整合回歸測試CI 需納入通過條件。

View File

@@ -0,0 +1,81 @@
## ADDED Requirements
### Requirement: Released Query APIs SHALL Return 4xx for Invalid JSON Inputs
Released 頁面對應的 JSON API 在收到非 JSON、Malformed JSON、或型別不符 payload 時MUST 回覆可預期的 4xx 錯誤,且 MUST NOT 因 JSON 解析失敗回落為 500。
#### Scenario: Non-JSON request to JSON-only endpoint
- **WHEN** client 以 `Content-Type: text/plain` 或缺少 JSON body 呼叫 JSON-only endpoint例如 `/api/query-tool/*``/api/job-query/*``/api/resource/detail`
- **THEN** endpoint MUST 回覆 400 或 415並提供一致的錯誤訊息
- **THEN** service layer MUST NOT 執行高成本查詢
#### Scenario: Malformed JSON payload
- **WHEN** client 送出無法解析的 JSON 內容
- **THEN** endpoint MUST 回覆 400
- **THEN** response MUST 指出 payload 格式錯誤,而非 generic 500
### Requirement: High-Cost Batch Inputs SHALL Enforce Hard Upper Bounds
Released 頁面高成本查詢端點 MUST 對批量輸入與查詢筆數上限施加硬限制,避免單次請求造成過量資料讀取或計算。
#### Scenario: Query-tool batch container IDs exceed limit
- **WHEN** `container_ids` 數量超過設定上限
- **THEN** endpoint MUST 回覆 400 或 413且 MUST 附帶可操作的上限資訊
- **THEN** backend MUST NOT 執行 Oracle/Redis 高成本查詢流程
#### Scenario: Resource detail limit exceeds limit
- **WHEN** `/api/resource/detail``limit` 超過設定上限
- **THEN** endpoint MUST 拒絕請求或安全夾制至上限,並在契約中明確定義行為
- **THEN** response 行為 MUST 於測試中固定化,避免版本漂移
### Requirement: Rate-Limit Client Identity SHALL Respect Trust Boundary
Rate limiting 的 client identity 解析 MUST 依部署信任邊界運作,未啟用 trusted proxy 時 MUST NOT 直接信任 `X-Forwarded-For`
#### Scenario: Direct internet deployment without reverse proxy
- **WHEN** 服務直接對外且未啟用 trusted proxy 模式
- **THEN** rate-limit key MUST 使用 `remote_addr`(或等價來源)
- **THEN** 來自 request header 的 `X-Forwarded-For` MUST 被忽略
#### Scenario: Deployment with trusted reverse proxy enabled
- **WHEN** 系統明確配置 trusted proxy 名單或模式
- **THEN** rate-limit key MAY 使用 `X-Forwarded-For` 的可信 client IP
- **THEN** 非可信來源 MUST 回退至 `remote_addr`
### Requirement: Production Security Defaults SHALL Fail Safe
生產設定在缺漏或格式錯誤時 MUST 採 fail-safe 預設,避免 API 無意外暴露或低安全模式啟動。
#### Scenario: page status config missing or invalid
- **WHEN** `page_status.json` 缺失、破損或缺少 `api_public` 設定
- **THEN** runtime MUST 預設為 API 非公開(`api_public=false`
- **THEN** 需要明確配置才可開啟公開 API 行為
#### Scenario: runtime environment variables incomplete
- **WHEN** 生產啟動缺少關鍵安全變數(例如 `SECRET_KEY`
- **THEN** 系統 MUST 以安全方式拒絕啟動或進入受限模式,且輸出可診斷訊息
### Requirement: Sensitive Configuration Values SHALL Be Redacted in Logs
任何含憑證的連線字串(例如 Redis URL在 log 輸出時 MUST 進行遮罩,避免密碼外洩。
#### Scenario: Redis URL includes password
- **WHEN** 應用程式記錄 Redis 連線設定
- **THEN** log 中的 URL MUST 隱藏密碼(例如 `redis://***@host:port/db`
- **THEN** 原始明文密碼 MUST NOT 出現在任何應用層日誌
### Requirement: Released Frontend Views SHALL Avoid Unsafe Inline Interpolation
Released 頁面前端 MUST 避免將不受信資料直接插入 inline JavaScript 或 HTML 屬性字串,降低 XSS 與 handler 斷裂風險。
#### Scenario: Rendering action controls with user-derived values
- **WHEN** 前端渲染按鈕或互動控制(例如 job-query 操作欄)且內容含資料列值
- **THEN** MUST 透過安全資料綁定data-* attribute 或事件監聽)實作
- **THEN** MUST NOT 依賴 raw string `onclick="...${value}..."` 拼接
### Requirement: Released Hardening SHALL Be Protected by Regression Gates
本次 hardening 的行為 MUST 由自動化測試固定,並納入 CI gate避免日後回歸。
#### Scenario: Negative-path regression suite execution
- **WHEN** CI 執行 Released 頁面 API 測試
- **THEN** MUST 覆蓋 invalid JSON、超量輸入、rate-limit、security default、與 log redaction 斷言
- **THEN** 任一關鍵斷言失敗 MUST 阻擋合併
#### Scenario: Existing released behavior parity
- **WHEN** hardening 變更部署後執行既有 Released route 測試
- **THEN** 成功路徑與既有回應契約 MUST 維持相容
- **THEN** 僅新增已定義的防護錯誤路徑4xx/429

View File

@@ -0,0 +1,34 @@
## 1. Config and Core Safety Baseline
- [x] 1.1 Add centralized hardening config keys (`TRUST_PROXY_HEADERS`, trusted proxy source config, JSON/body/input limits) with production-safe defaults.
- [x] 1.2 Change page registry fallback behavior so `api_public` defaults to false when config is missing/invalid.
- [x] 1.3 Implement secret redaction utility for connection-string logging and apply it to Redis URL logs.
- [x] 1.4 Enforce startup validation for required production security variables (including `SECRET_KEY`) with actionable diagnostics.
- [x] 1.5 Update environment documentation (`.env.example`/README/deploy docs) to match new hardening settings.
## 2. Released API Input Validation and Budget Guards
- [x] 2.1 Introduce a shared JSON request parsing/validation helper and adopt it in released JSON-only endpoints (`query-tool`, `job-query`, `resource` related routes).
- [x] 2.2 Ensure invalid/malformed/non-JSON payloads return deterministic 400/415 and do not fall through to generic 500 handlers.
- [x] 2.3 Add configurable hard caps for query-tool batch inputs (including `container_ids`) and reject overflow requests before service execution.
- [x] 2.4 Add configurable `limit` bounds for `/api/resource/detail` and normalize/reject invalid pagination limits consistently.
- [x] 2.5 Fix released route numeric query parsing edge cases to avoid `TypeError`/500 regressions.
## 3. Rate-Limit Trust Boundary Hardening
- [x] 3.1 Refactor rate-limit client identity resolution to ignore `X-Forwarded-For` by default and use `remote_addr` in direct-exposure deployments.
- [x] 3.2 Add trusted-proxy mode behavior so forwarded IP is used only when explicit trust configuration is enabled.
- [x] 3.3 Add tests for spoofed header attempts, direct mode behavior, and trusted-proxy behavior.
## 4. Frontend Injection-Surface Reduction
- [x] 4.1 Refactor `job-query` action rendering to remove raw inline `onclick` interpolation and use safe event binding/data attributes.
- [x] 4.2 Review and tighten applicable CSP/script-safety configuration for released routes without breaking current module/fallback loading.
- [x] 4.3 Add frontend/template tests to lock down safe rendering behavior for quoted/special-character data.
## 5. Regression Gates and Verification
- [x] 5.1 Add negative-path tests for invalid JSON, oversized batch input, bounded `limit`, and no-service-call-on-reject behavior.
- [x] 5.2 Add config hardening tests for `api_public` fail-safe fallback, production env validation, and Redis URL redaction.
- [x] 5.3 Run released-route focused pytest suite and update/repair affected contract tests to reflect explicit new 4xx/429 boundaries only.
- [x] 5.4 Ensure CI requires the new hardening test set to pass before merge.

View File

@@ -0,0 +1,85 @@
# released-pages-production-hardening Specification
## Purpose
TBD - created by archiving change released-pages-production-hardening. Update Purpose after archive.
## Requirements
### Requirement: Released Query APIs SHALL Return 4xx for Invalid JSON Inputs
Released 頁面對應的 JSON API 在收到非 JSON、Malformed JSON、或型別不符 payload 時MUST 回覆可預期的 4xx 錯誤,且 MUST NOT 因 JSON 解析失敗回落為 500。
#### Scenario: Non-JSON request to JSON-only endpoint
- **WHEN** client 以 `Content-Type: text/plain` 或缺少 JSON body 呼叫 JSON-only endpoint例如 `/api/query-tool/*``/api/job-query/*``/api/resource/detail`
- **THEN** endpoint MUST 回覆 400 或 415並提供一致的錯誤訊息
- **THEN** service layer MUST NOT 執行高成本查詢
#### Scenario: Malformed JSON payload
- **WHEN** client 送出無法解析的 JSON 內容
- **THEN** endpoint MUST 回覆 400
- **THEN** response MUST 指出 payload 格式錯誤,而非 generic 500
### Requirement: High-Cost Batch Inputs SHALL Enforce Hard Upper Bounds
Released 頁面高成本查詢端點 MUST 對批量輸入與查詢筆數上限施加硬限制,避免單次請求造成過量資料讀取或計算。
#### Scenario: Query-tool batch container IDs exceed limit
- **WHEN** `container_ids` 數量超過設定上限
- **THEN** endpoint MUST 回覆 400 或 413且 MUST 附帶可操作的上限資訊
- **THEN** backend MUST NOT 執行 Oracle/Redis 高成本查詢流程
#### Scenario: Resource detail limit exceeds limit
- **WHEN** `/api/resource/detail``limit` 超過設定上限
- **THEN** endpoint MUST 拒絕請求或安全夾制至上限,並在契約中明確定義行為
- **THEN** response 行為 MUST 於測試中固定化,避免版本漂移
### Requirement: Rate-Limit Client Identity SHALL Respect Trust Boundary
Rate limiting 的 client identity 解析 MUST 依部署信任邊界運作,未啟用 trusted proxy 時 MUST NOT 直接信任 `X-Forwarded-For`
#### Scenario: Direct internet deployment without reverse proxy
- **WHEN** 服務直接對外且未啟用 trusted proxy 模式
- **THEN** rate-limit key MUST 使用 `remote_addr`(或等價來源)
- **THEN** 來自 request header 的 `X-Forwarded-For` MUST 被忽略
#### Scenario: Deployment with trusted reverse proxy enabled
- **WHEN** 系統明確配置 trusted proxy 名單或模式
- **THEN** rate-limit key MAY 使用 `X-Forwarded-For` 的可信 client IP
- **THEN** 非可信來源 MUST 回退至 `remote_addr`
### Requirement: Production Security Defaults SHALL Fail Safe
生產設定在缺漏或格式錯誤時 MUST 採 fail-safe 預設,避免 API 無意外暴露或低安全模式啟動。
#### Scenario: page status config missing or invalid
- **WHEN** `page_status.json` 缺失、破損或缺少 `api_public` 設定
- **THEN** runtime MUST 預設為 API 非公開(`api_public=false`
- **THEN** 需要明確配置才可開啟公開 API 行為
#### Scenario: runtime environment variables incomplete
- **WHEN** 生產啟動缺少關鍵安全變數(例如 `SECRET_KEY`
- **THEN** 系統 MUST 以安全方式拒絕啟動或進入受限模式,且輸出可診斷訊息
### Requirement: Sensitive Configuration Values SHALL Be Redacted in Logs
任何含憑證的連線字串(例如 Redis URL在 log 輸出時 MUST 進行遮罩,避免密碼外洩。
#### Scenario: Redis URL includes password
- **WHEN** 應用程式記錄 Redis 連線設定
- **THEN** log 中的 URL MUST 隱藏密碼(例如 `redis://***@host:port/db`
- **THEN** 原始明文密碼 MUST NOT 出現在任何應用層日誌
### Requirement: Released Frontend Views SHALL Avoid Unsafe Inline Interpolation
Released 頁面前端 MUST 避免將不受信資料直接插入 inline JavaScript 或 HTML 屬性字串,降低 XSS 與 handler 斷裂風險。
#### Scenario: Rendering action controls with user-derived values
- **WHEN** 前端渲染按鈕或互動控制(例如 job-query 操作欄)且內容含資料列值
- **THEN** MUST 透過安全資料綁定data-* attribute 或事件監聽)實作
- **THEN** MUST NOT 依賴 raw string `onclick="...${value}..."` 拼接
### Requirement: Released Hardening SHALL Be Protected by Regression Gates
本次 hardening 的行為 MUST 由自動化測試固定,並納入 CI gate避免日後回歸。
#### Scenario: Negative-path regression suite execution
- **WHEN** CI 執行 Released 頁面 API 測試
- **THEN** MUST 覆蓋 invalid JSON、超量輸入、rate-limit、security default、與 log redaction 斷言
- **THEN** 任一關鍵斷言失敗 MUST 阻擋合併
#### Scenario: Existing released behavior parity
- **WHEN** hardening 變更部署後執行既有 Released route 測試
- **THEN** 成功路徑與既有回應契約 MUST 維持相容
- **THEN** 僅新增已定義的防護錯誤路徑4xx/429

View File

@@ -114,11 +114,15 @@ def _is_production_env(app: Flask) -> bool:
return env_value in {"prod", "production"} return env_value in {"prod", "production"}
def _build_security_headers(production: bool) -> dict[str, str]: def _build_security_headers(production: bool, *, allow_unsafe_eval: bool = False) -> dict[str, str]:
script_directives = ["'self'", "'unsafe-inline'"]
if allow_unsafe_eval:
script_directives.append("'unsafe-eval'")
headers = { headers = {
"Content-Security-Policy": ( "Content-Security-Policy": (
"default-src 'self'; " "default-src 'self'; "
"script-src 'self' 'unsafe-inline' 'unsafe-eval'; " f"script-src {' '.join(script_directives)}; "
"style-src 'self' 'unsafe-inline'; " "style-src 'self' 'unsafe-inline'; "
"img-src 'self' data: blob:; " "img-src 'self' data: blob:; "
"font-src 'self' data:; " "font-src 'self' data:; "
@@ -155,6 +159,42 @@ def _resolve_secret_key(app: Flask) -> str:
return "dev-local-only-secret-key" return "dev-local-only-secret-key"
def _validate_production_security_settings(app: Flask) -> None:
    """Validate production security-sensitive runtime settings.

    In production, enabling ``TRUST_PROXY_HEADERS`` without an explicit
    ``TRUSTED_PROXY_IPS`` allowlist would trust forwarded headers from any
    source, so startup is rejected with an actionable error instead.

    Raises:
        RuntimeError: when proxy trust is enabled but no trusted proxy
            sources are configured.
    """
    if not _is_production_env(app):
        return
    proxy_trust_enabled = resolve_bool_flag(
        "TRUST_PROXY_HEADERS",
        config=app.config,
        default=bool(app.config.get("TRUST_PROXY_HEADERS", False)),
    )
    if not proxy_trust_enabled:
        return
    # Environment variable wins; fall back to app config when unset.
    raw_sources = os.getenv("TRUSTED_PROXY_IPS")
    if raw_sources is None:
        raw_sources = app.config.get("TRUSTED_PROXY_IPS")
    if isinstance(raw_sources, str):
        entries = [chunk.strip() for chunk in raw_sources.split(",")]
        trusted_sources = tuple(entry for entry in entries if entry)
    else:
        trusted_sources = tuple(raw_sources or ())
    if not trusted_sources:
        raise RuntimeError(
            "TRUST_PROXY_HEADERS=true requires TRUSTED_PROXY_IPS in production."
        )
def _resolve_csp_allow_unsafe_eval(app: Flask) -> bool:
    """Resolve the CSP 'unsafe-eval' opt-in flag (safe default: False)."""
    fallback = bool(app.config.get("CSP_ALLOW_UNSAFE_EVAL", False))
    return resolve_bool_flag(
        "CSP_ALLOW_UNSAFE_EVAL",
        config=app.config,
        default=fallback,
    )
def _resolve_portal_spa_enabled(app: Flask) -> bool: def _resolve_portal_spa_enabled(app: Flask) -> bool:
"""Resolve cutover flag for SPA shell navigation. """Resolve cutover flag for SPA shell navigation.
@@ -367,6 +407,7 @@ def create_app(config_name: str | None = None) -> Flask:
# Session configuration with environment-aware secret validation. # Session configuration with environment-aware secret validation.
app.secret_key = _resolve_secret_key(app) app.secret_key = _resolve_secret_key(app)
app.config["SECRET_KEY"] = app.secret_key app.config["SECRET_KEY"] = app.secret_key
_validate_production_security_settings(app)
# Session cookie security settings # Session cookie security settings
# SECURE: Only send cookie over HTTPS in production. # SECURE: Only send cookie over HTTPS in production.
@@ -380,7 +421,10 @@ def create_app(config_name: str | None = None) -> Flask:
_configure_logging(app) _configure_logging(app)
_validate_runtime_contract(app) _validate_runtime_contract(app)
_validate_in_scope_asset_readiness(app) _validate_in_scope_asset_readiness(app)
security_headers = _build_security_headers(_is_production_env(app)) security_headers = _build_security_headers(
_is_production_env(app),
allow_unsafe_eval=_resolve_csp_allow_unsafe_eval(app),
)
# Route-level cache backend (L1 memory + optional L2 Redis) # Route-level cache backend (L1 memory + optional L2 Redis)
app.extensions["cache"] = create_default_cache_backend() app.extensions["cache"] = create_default_cache_backend()

View File

@@ -27,6 +27,13 @@ def _bool_env(name: str, default: bool) -> bool:
return value.strip().lower() in {"1", "true", "yes", "on"} return value.strip().lower() in {"1", "true", "yes", "on"}
def _csv_env(name: str, default: str = "") -> tuple[str, ...]:
value = os.getenv(name, default)
if not value:
return tuple()
return tuple(item.strip() for item in value.split(",") if item.strip())
class Config: class Config:
"""Base configuration.""" """Base configuration."""
@@ -51,6 +58,15 @@ class Config:
CSRF_ENABLED = _bool_env("CSRF_ENABLED", True) CSRF_ENABLED = _bool_env("CSRF_ENABLED", True)
PORTAL_SPA_ENABLED = _bool_env("PORTAL_SPA_ENABLED", True) PORTAL_SPA_ENABLED = _bool_env("PORTAL_SPA_ENABLED", True)
# Hardening configuration (safe-by-default)
MAX_JSON_BODY_BYTES = _int_env("MAX_JSON_BODY_BYTES", 262144) # 256 KB
QUERY_TOOL_MAX_CONTAINER_IDS = _int_env("QUERY_TOOL_MAX_CONTAINER_IDS", 200)
RESOURCE_DETAIL_DEFAULT_LIMIT = _int_env("RESOURCE_DETAIL_DEFAULT_LIMIT", 500)
RESOURCE_DETAIL_MAX_LIMIT = _int_env("RESOURCE_DETAIL_MAX_LIMIT", 500)
TRUST_PROXY_HEADERS = _bool_env("TRUST_PROXY_HEADERS", False)
TRUSTED_PROXY_IPS = _csv_env("TRUSTED_PROXY_IPS")
CSP_ALLOW_UNSAFE_EVAL = _bool_env("CSP_ALLOW_UNSAFE_EVAL", False)
# Session configuration # Session configuration
PERMANENT_SESSION_LIFETIME = _int_env("SESSION_LIFETIME", 28800) # 8 hours PERMANENT_SESSION_LIFETIME = _int_env("SESSION_LIFETIME", 28800) # 8 hours
@@ -117,7 +133,7 @@ class TestingConfig(Config):
def get_config(env: str | None = None) -> Type[Config]: def get_config(env: str | None = None) -> Type[Config]:
"""Select config class based on environment name.""" """Select config class based on environment name."""
value = (env or os.getenv("FLASK_ENV", "development")).lower() value = (env or os.getenv("FLASK_ENV", "production")).lower()
if value in {"prod", "production"}: if value in {"prod", "production"}:
return ProductionConfig return ProductionConfig
if value in {"test", "testing"}: if value in {"test", "testing"}:

View File

@@ -6,6 +6,7 @@ from __future__ import annotations
import os import os
import threading import threading
import time import time
from ipaddress import ip_address, ip_network
from collections import defaultdict, deque from collections import defaultdict, deque
from functools import wraps from functools import wraps
from typing import Callable, Deque from typing import Callable, Deque
@@ -29,11 +30,66 @@ def _env_int(name: str, default: int) -> int:
return max(value, 1) return max(value, 1)
def _env_bool(name: str, default: bool) -> bool:
raw = os.getenv(name)
if raw is None:
return default
return raw.strip().lower() in {"1", "true", "yes", "on"}
def _trusted_proxy_networks() -> list:
raw = os.getenv("TRUSTED_PROXY_IPS", "")
if not raw:
return []
networks = []
for token in raw.split(","):
candidate = token.strip()
if not candidate:
continue
try:
if "/" in candidate:
networks.append(ip_network(candidate, strict=False))
else:
if ":" in candidate:
networks.append(ip_network(f"{candidate}/128", strict=False))
else:
networks.append(ip_network(f"{candidate}/32", strict=False))
except ValueError:
continue
return networks
def _is_trusted_proxy_source(remote_addr: str | None) -> bool:
    """Return True when forwarded headers from *remote_addr* may be trusted.

    Trust requires all of: TRUST_PROXY_HEADERS enabled, a non-empty
    trusted-network allowlist, and a remote address that parses and falls
    inside one of those networks. Anything else fails closed.
    """
    if not _env_bool("TRUST_PROXY_HEADERS", False):
        return False
    if not remote_addr:
        return False
    allowlist = _trusted_proxy_networks()
    if not allowlist:
        # Explicit proxy trust mode still demands an explicit source list.
        return False
    try:
        source_ip = ip_address(remote_addr.strip())
    except ValueError:
        return False
    for network in allowlist:
        if source_ip in network:
            return True
    return False
def _client_identifier() -> str:
    """Resolve the rate-limit client identity within the trust boundary.

    NOTE(review): these lines were diff-merged (old and new text on the
    same line); this is the reconstructed post-change implementation.

    Uses the socket-level remote address by default. Only when that
    address is a configured trusted proxy is the first X-Forwarded-For
    entry honored, and only if it parses as a valid IP address.
    """
    remote = request.remote_addr
    if _is_trusted_proxy_source(remote):
        forwarded = request.headers.get("X-Forwarded-For", "").strip()
        if forwarded:
            candidate = forwarded.split(",")[0].strip()
            try:
                # Normalize and validate before using a forwarded value.
                return str(ip_address(candidate))
            except ValueError:
                pass
    return remote or "unknown"
def check_and_record( def check_and_record(

View File

@@ -3,9 +3,10 @@
from __future__ import annotations from __future__ import annotations
import logging import logging
import os import os
from typing import Optional from typing import Optional
from urllib.parse import urlsplit, urlunsplit
import redis import redis
@@ -23,10 +24,35 @@ REDIS_KEY_PREFIX = os.getenv('REDIS_KEY_PREFIX', 'mes_wip')
# Redis Client Singleton # Redis Client Singleton
# ============================================================ # ============================================================
_REDIS_CLIENT: Optional[redis.Redis] = None _REDIS_CLIENT: Optional[redis.Redis] = None
def redact_connection_url(url: str) -> str:
    """Redact credentials in URL-like connection strings.

    Returns the URL with any password replaced by ``***`` (for example
    ``redis://user:***@host:6379/0``). Empty strings, URLs without a
    userinfo section, and unparseable values are returned unchanged.
    """
    if not url:
        return url
    try:
        parsed = urlsplit(url)
    except Exception:
        return url
    netloc = parsed.netloc
    if "@" not in netloc:
        return url
    credentials, host = netloc.rsplit("@", 1)
    if ":" in credentials:
        user, _password = credentials.split(":", 1)
        masked = f"{user}:***" if user else "***"
    else:
        # Userinfo with no password separator: hide it entirely.
        masked = "***"
    return urlunsplit(
        (parsed.scheme, f"{masked}@{host}", parsed.path, parsed.query, parsed.fragment)
    )
def get_redis_client() -> Optional[redis.Redis]:
"""Get Redis client with connection pooling and health check. """Get Redis client with connection pooling and health check.
Returns: Returns:
@@ -47,10 +73,10 @@ def get_redis_client() -> Optional[redis.Redis]:
socket_connect_timeout=5, socket_connect_timeout=5,
retry_on_timeout=True, retry_on_timeout=True,
health_check_interval=30 health_check_interval=30
) )
# Test connection # Test connection
_REDIS_CLIENT.ping() _REDIS_CLIENT.ping()
logger.info(f"Redis client connected to {REDIS_URL}") logger.info("Redis client connected to %s", redact_connection_url(REDIS_URL))
except redis.RedisError as e: except redis.RedisError as e:
logger.warning(f"Failed to connect to Redis: {e}") logger.warning(f"Failed to connect to Redis: {e}")
_REDIS_CLIENT = None _REDIS_CLIENT = None

View File

@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
"""Request validation helpers for API routes."""
from __future__ import annotations
import os
from dataclasses import dataclass
from typing import Any
from flask import current_app, request
@dataclass(frozen=True)
class JsonPayloadError:
    """Deterministic JSON payload-validation failure: message plus HTTP status."""

    # Human-readable error message returned to the client.
    message: str
    # HTTP status code to respond with (400, 413, or 415).
    status_code: int
def _resolve_max_json_body_bytes(explicit_max: int | None = None) -> int:
if explicit_max is not None:
return max(int(explicit_max), 1)
try:
value = int(current_app.config.get("MAX_JSON_BODY_BYTES", 262144))
return max(value, 1)
except Exception:
pass
try:
return max(int(os.getenv("MAX_JSON_BODY_BYTES", "262144")), 1)
except Exception:
return 262144
def parse_json_payload(
    *,
    require_object: bool = True,
    require_non_empty_object: bool = False,
    max_body_bytes: int | None = None,
) -> tuple[Any | None, JsonPayloadError | None]:
    """Parse and validate JSON request payload with deterministic 4xx errors.

    Returns ``(payload, None)`` on success or ``(None, error)`` where
    *error* carries a client-facing message and HTTP status: 413 for an
    oversized body, 415 for a non-JSON content type, 400 for malformed
    JSON, a non-object payload, or an empty object when one is required.
    """
    limit = _resolve_max_json_body_bytes(max_body_bytes)
    declared_size = request.content_length
    # Reject oversized bodies before any parsing work happens.
    if declared_size is not None and declared_size > limit:
        return None, JsonPayloadError(f"請求內容過大,限制 {limit} bytes", 413)
    if not request.is_json:
        return None, JsonPayloadError("Content-Type 必須為 application/json", 415)
    body = request.get_json(silent=True)
    if body is None:
        return None, JsonPayloadError("JSON 格式錯誤", 400)
    if require_object and not isinstance(body, dict):
        return None, JsonPayloadError("JSON 內容必須為物件", 400)
    if require_non_empty_object and isinstance(body, dict) and not body:
        return None, JsonPayloadError("請求內容不可為空", 400)
    return body, None

View File

@@ -154,15 +154,18 @@ def api_hold_detail_lots():
if not reason: if not reason:
return jsonify({'success': False, 'error': '缺少必要參數: reason'}), 400 return jsonify({'success': False, 'error': '缺少必要參數: reason'}), 400
workcenter = request.args.get('workcenter', '').strip() or None workcenter = request.args.get('workcenter', '').strip() or None
package = request.args.get('package', '').strip() or None package = request.args.get('package', '').strip() or None
age_range = request.args.get('age_range', '').strip() or None age_range = request.args.get('age_range', '').strip() or None
include_dummy = parse_bool_query(request.args.get('include_dummy')) include_dummy = parse_bool_query(request.args.get('include_dummy'))
page = request.args.get('page', 1, type=int) page = request.args.get('page', 1, type=int)
per_page = min(request.args.get('per_page', 50, type=int), 200) per_page_value = request.args.get('per_page', 50, type=int)
if per_page_value is None:
if page < 1: per_page_value = 50
page = 1 per_page = min(max(per_page_value, 1), 200)
if page is None or page < 1:
page = 1
# Validate age_range parameter # Validate age_range parameter
if age_range and age_range not in ('0-1', '1-3', '3-7', '7+'): if age_range and age_range not in ('0-1', '1-3', '3-7', '7+'):

View File

@@ -13,6 +13,7 @@ from flask import Blueprint, jsonify, request, Response, render_template
from mes_dashboard.core.rate_limit import configured_rate_limit from mes_dashboard.core.rate_limit import configured_rate_limit
from mes_dashboard.core.modernization_policy import maybe_redirect_to_canonical_shell from mes_dashboard.core.modernization_policy import maybe_redirect_to_canonical_shell
from mes_dashboard.core.request_validation import parse_json_payload
from mes_dashboard.services.job_query_service import ( from mes_dashboard.services.job_query_service import (
get_jobs_by_resources, get_jobs_by_resources,
get_job_txn_history, get_job_txn_history,
@@ -98,7 +99,7 @@ def get_resources():
@job_query_bp.route('/api/job-query/jobs', methods=['POST']) @job_query_bp.route('/api/job-query/jobs', methods=['POST'])
@_JOB_QUERY_RATE_LIMIT @_JOB_QUERY_RATE_LIMIT
def query_jobs(): def query_jobs():
"""Query jobs for selected resources. """Query jobs for selected resources.
Expects JSON body: Expects JSON body:
@@ -110,9 +111,11 @@ def query_jobs():
Returns job list. Returns job list.
""" """
data = request.get_json() data, payload_error = parse_json_payload(require_non_empty_object=True)
if payload_error is not None:
resource_ids = data.get('resource_ids', []) return jsonify({'error': payload_error.message}), payload_error.status_code
resource_ids = data.get('resource_ids', [])
start_date = data.get('start_date') start_date = data.get('start_date')
end_date = data.get('end_date') end_date = data.get('end_date')
@@ -159,7 +162,7 @@ def query_job_txn_history(job_id: str):
@job_query_bp.route('/api/job-query/export', methods=['POST']) @job_query_bp.route('/api/job-query/export', methods=['POST'])
@_JOB_EXPORT_RATE_LIMIT @_JOB_EXPORT_RATE_LIMIT
def export_jobs(): def export_jobs():
"""Export jobs with full transaction history as CSV. """Export jobs with full transaction history as CSV.
Expects JSON body: Expects JSON body:
@@ -171,9 +174,11 @@ def export_jobs():
Returns streaming CSV response. Returns streaming CSV response.
""" """
data = request.get_json() data, payload_error = parse_json_payload(require_non_empty_object=True)
if payload_error is not None:
resource_ids = data.get('resource_ids', []) return jsonify({'error': payload_error.message}), payload_error.status_code
resource_ids = data.get('resource_ids', [])
start_date = data.get('start_date') start_date = data.get('start_date')
end_date = data.get('end_date') end_date = data.get('end_date')

View File

@@ -9,14 +9,15 @@ Contains Flask Blueprint for batch tracing and equipment period query endpoints:
- CSV export functionality - CSV export functionality
""" """
import hashlib import hashlib
from flask import Blueprint, jsonify, request, Response, render_template from flask import Blueprint, jsonify, request, Response, render_template, current_app
from mes_dashboard.core.cache import cache_get, cache_set from mes_dashboard.core.cache import cache_get, cache_set
from mes_dashboard.core.modernization_policy import maybe_redirect_to_canonical_shell from mes_dashboard.core.modernization_policy import maybe_redirect_to_canonical_shell
from mes_dashboard.core.rate_limit import configured_rate_limit from mes_dashboard.core.rate_limit import configured_rate_limit
from mes_dashboard.services.query_tool_service import ( from mes_dashboard.core.request_validation import parse_json_payload
from mes_dashboard.services.query_tool_service import (
resolve_lots, resolve_lots,
get_lot_history, get_lot_history,
get_lot_history_batch, get_lot_history_batch,
@@ -86,6 +87,21 @@ _QUERY_TOOL_EXPORT_RATE_LIMIT = configured_rate_limit(
) )
def _query_tool_max_container_ids() -> int:
try:
value = int(current_app.config.get("QUERY_TOOL_MAX_CONTAINER_IDS", 200))
except Exception:
value = 200
return max(value, 1)
def _reject_if_batch_too_large(container_ids: list[str]):
    """Return a 413 JSON response when *container_ids* exceeds the cap, else None."""
    cap = _query_tool_max_container_ids()
    if len(container_ids) > cap:
        return jsonify({'error': f'container_ids 數量不可超過 {cap}'}), 413
    return None
def _format_lot_materials_export_rows(rows): def _format_lot_materials_export_rows(rows):
"""Normalize LOT material export columns for UI/CSV consistency.""" """Normalize LOT material export columns for UI/CSV consistency."""
normalized_rows = [] normalized_rows = []
@@ -146,7 +162,7 @@ def query_tool_page():
@query_tool_bp.route('/api/query-tool/resolve', methods=['POST']) @query_tool_bp.route('/api/query-tool/resolve', methods=['POST'])
@_QUERY_TOOL_RESOLVE_RATE_LIMIT @_QUERY_TOOL_RESOLVE_RATE_LIMIT
def resolve_lot_input(): def resolve_lot_input():
"""Resolve user input to CONTAINERID list. """Resolve user input to CONTAINERID list.
Expects JSON body: Expects JSON body:
@@ -163,12 +179,11 @@ def resolve_lot_input():
"not_found": ["value3"] "not_found": ["value3"]
} }
""" """
data = request.get_json() data, payload_error = parse_json_payload(require_non_empty_object=True)
if payload_error is not None:
if not data: return jsonify({'error': payload_error.message}), payload_error.status_code
return jsonify({'error': '請求內容不可為空'}), 400
input_type = data.get('input_type')
input_type = data.get('input_type')
values = data.get('values', []) values = data.get('values', [])
# Validate input type # Validate input type
@@ -213,7 +228,7 @@ def resolve_lot_input():
@query_tool_bp.route('/api/query-tool/lot-history', methods=['GET']) @query_tool_bp.route('/api/query-tool/lot-history', methods=['GET'])
@_QUERY_TOOL_HISTORY_RATE_LIMIT @_QUERY_TOOL_HISTORY_RATE_LIMIT
def query_lot_history(): def query_lot_history():
"""Query production history for one or more LOTs. """Query production history for one or more LOTs.
Query params: Query params:
@@ -236,12 +251,15 @@ def query_lot_history():
] ]
# Batch mode: container_ids takes precedence # Batch mode: container_ids takes precedence
if container_ids_param: if container_ids_param:
cids = [c.strip() for c in container_ids_param.split(',') if c.strip()] cids = [c.strip() for c in container_ids_param.split(',') if c.strip()]
if not cids: if not cids:
return jsonify({'error': '請指定 CONTAINERID'}), 400 return jsonify({'error': '請指定 CONTAINERID'}), 400
result = get_lot_history_batch(cids, workcenter_groups=workcenter_groups) too_large = _reject_if_batch_too_large(cids)
elif container_id: if too_large is not None:
return too_large
result = get_lot_history_batch(cids, workcenter_groups=workcenter_groups)
elif container_id:
result = get_lot_history(container_id, workcenter_groups=workcenter_groups) result = get_lot_history(container_id, workcenter_groups=workcenter_groups)
else: else:
return jsonify({'error': '請指定 CONTAINERID'}), 400 return jsonify({'error': '請指定 CONTAINERID'}), 400
@@ -315,12 +333,15 @@ def query_lot_associations():
# Batch mode for materials/rejects/holds # Batch mode for materials/rejects/holds
batch_types = {'materials', 'rejects', 'holds'} batch_types = {'materials', 'rejects', 'holds'}
if container_ids_param and assoc_type in batch_types: if container_ids_param and assoc_type in batch_types:
cids = [c.strip() for c in container_ids_param.split(',') if c.strip()] cids = [c.strip() for c in container_ids_param.split(',') if c.strip()]
if not cids: if not cids:
return jsonify({'error': '請指定 CONTAINERID'}), 400 return jsonify({'error': '請指定 CONTAINERID'}), 400
result = get_lot_associations_batch(cids, assoc_type) too_large = _reject_if_batch_too_large(cids)
else: if too_large is not None:
return too_large
result = get_lot_associations_batch(cids, assoc_type)
else:
if not container_id: if not container_id:
return jsonify({'error': '請指定 CONTAINERID'}), 400 return jsonify({'error': '請指定 CONTAINERID'}), 400
@@ -355,7 +376,7 @@ def query_lot_associations():
@query_tool_bp.route('/api/query-tool/equipment-period', methods=['POST']) @query_tool_bp.route('/api/query-tool/equipment-period', methods=['POST'])
@_QUERY_TOOL_EQUIPMENT_RATE_LIMIT @_QUERY_TOOL_EQUIPMENT_RATE_LIMIT
def query_equipment_period(): def query_equipment_period():
"""Query equipment data for a time period. """Query equipment data for a time period.
Expects JSON body: Expects JSON body:
@@ -369,12 +390,11 @@ def query_equipment_period():
Returns data based on query_type. Returns data based on query_type.
""" """
data = request.get_json() data, payload_error = parse_json_payload(require_non_empty_object=True)
if payload_error is not None:
if not data: return jsonify({'error': payload_error.message}), payload_error.status_code
return jsonify({'error': '請求內容不可為空'}), 400
equipment_ids = data.get('equipment_ids', [])
equipment_ids = data.get('equipment_ids', [])
equipment_names = data.get('equipment_names', []) equipment_names = data.get('equipment_names', [])
start_date = data.get('start_date') start_date = data.get('start_date')
end_date = data.get('end_date') end_date = data.get('end_date')
@@ -497,7 +517,7 @@ def get_workcenter_groups_list():
@query_tool_bp.route('/api/query-tool/export-csv', methods=['POST']) @query_tool_bp.route('/api/query-tool/export-csv', methods=['POST'])
@_QUERY_TOOL_EXPORT_RATE_LIMIT @_QUERY_TOOL_EXPORT_RATE_LIMIT
def export_csv(): def export_csv():
"""Export query results as CSV. """Export query results as CSV.
Expects JSON body: Expects JSON body:
@@ -511,12 +531,11 @@ def export_csv():
Returns streaming CSV response. Returns streaming CSV response.
""" """
data = request.get_json() data, payload_error = parse_json_payload(require_non_empty_object=True)
if payload_error is not None:
if not data: return jsonify({'error': payload_error.message}), payload_error.status_code
return jsonify({'error': '請求內容不可為空'}), 400
export_type = data.get('export_type')
export_type = data.get('export_type')
params = data.get('params', {}) params = data.get('params', {})
# Get data based on export type # Get data based on export type

View File

@@ -6,7 +6,7 @@ Contains Flask Blueprint for resource/equipment-related API endpoints.
import math import math
import logging import logging
from flask import Blueprint, jsonify, request from flask import Blueprint, jsonify, request, current_app
from mes_dashboard.core.database import ( from mes_dashboard.core.database import (
get_db_connection, get_db_connection,
@@ -15,6 +15,7 @@ from mes_dashboard.core.database import (
) )
from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key
from mes_dashboard.core.rate_limit import configured_rate_limit from mes_dashboard.core.rate_limit import configured_rate_limit
from mes_dashboard.core.request_validation import parse_json_payload
from mes_dashboard.core.response import INTERNAL_ERROR, error_response from mes_dashboard.core.response import INTERNAL_ERROR, error_response
from mes_dashboard.core.utils import get_days_back, parse_bool_query from mes_dashboard.core.utils import get_days_back, parse_bool_query
@@ -147,6 +148,14 @@ def _optional_bool_arg(name: str):
return parse_bool_query(text) return parse_bool_query(text)
def _config_int(name: str, default: int, minimum: int = 1) -> int:
try:
parsed = int(current_app.config.get(name, default))
except Exception:
parsed = int(default)
return max(parsed, minimum)
@resource_bp.route('/by_status') @resource_bp.route('/by_status')
def api_resource_by_status(): def api_resource_by_status():
"""API: Resource count by status.""" """API: Resource count by status."""
@@ -205,10 +214,33 @@ def api_resource_workcenter_status_matrix():
@_RESOURCE_DETAIL_RATE_LIMIT @_RESOURCE_DETAIL_RATE_LIMIT
def api_resource_detail(): def api_resource_detail():
"""API: Resource detail with filters.""" """API: Resource detail with filters."""
data = request.get_json() or {} data, payload_error = parse_json_payload(require_object=True)
if payload_error is not None:
return jsonify({'success': False, 'error': payload_error.message}), payload_error.status_code
filters = data.get('filters') filters = data.get('filters')
limit = data.get('limit', 500) if filters is not None and not isinstance(filters, dict):
offset = data.get('offset', 0) return jsonify({'success': False, 'error': 'filters 必須為物件'}), 400
default_limit = _config_int("RESOURCE_DETAIL_DEFAULT_LIMIT", 500)
max_limit = _config_int("RESOURCE_DETAIL_MAX_LIMIT", default_limit)
try:
limit = int(data.get('limit', default_limit))
except (TypeError, ValueError):
return jsonify({'success': False, 'error': 'limit 必須為整數'}), 400
if limit < 1:
return jsonify({'success': False, 'error': 'limit 必須大於 0'}), 400
if limit > max_limit:
return jsonify({'success': False, 'error': f'limit 不可超過 {max_limit}'}), 413
try:
offset = int(data.get('offset', 0))
except (TypeError, ValueError):
return jsonify({'success': False, 'error': 'offset 必須為整數'}), 400
if offset < 0:
return jsonify({'success': False, 'error': 'offset 不可小於 0'}), 400
days_back = get_days_back(filters) days_back = get_days_back(filters)
df = query_resource_detail(filters, limit, offset, days_back) df = query_resource_detail(filters, limit, offset, days_back)

View File

@@ -371,7 +371,10 @@ def api_meta_search():
""" """
search_field = request.args.get('field', '').strip().lower() search_field = request.args.get('field', '').strip().lower()
q = request.args.get('q', '').strip() q = request.args.get('q', '').strip()
limit = min(request.args.get('limit', 20, type=int), 50) limit_value = request.args.get('limit', 20, type=int)
if limit_value is None:
limit_value = 20
limit = min(max(limit_value, 1), 50)
include_dummy = parse_bool_query(request.args.get('include_dummy')) include_dummy = parse_bool_query(request.args.get('include_dummy'))
# Cross-filter parameters # Cross-filter parameters

View File

@@ -88,11 +88,11 @@ def _load() -> dict:
logger.debug("Loaded page status from %s", DATA_FILE) logger.debug("Loaded page status from %s", DATA_FILE)
except (json.JSONDecodeError, OSError) as e: except (json.JSONDecodeError, OSError) as e:
logger.warning("Failed to load page status: %s", e) logger.warning("Failed to load page status: %s", e)
_cache = {"pages": [], "api_public": True} _cache = {"pages": [], "api_public": False}
_cache_mtime = 0.0 _cache_mtime = 0.0
else: else:
logger.info("Page status file not found, using defaults") logger.info("Page status file not found, using defaults")
_cache = {"pages": [], "api_public": True} _cache = {"pages": [], "api_public": False}
_cache_mtime = 0.0 _cache_mtime = 0.0
if _migrate_navigation_schema(_cache): if _migrate_navigation_schema(_cache):
@@ -487,7 +487,14 @@ def is_api_public() -> bool:
True if API endpoints bypass permission checks True if API endpoints bypass permission checks
""" """
with _lock: with _lock:
return _load().get("api_public", True) value = _load().get("api_public", False)
if isinstance(value, bool):
return value
if isinstance(value, str):
return value.strip().lower() in {"1", "true", "yes", "on"}
if isinstance(value, (int, float)):
return bool(value)
return False
def reload_cache() -> None: def reload_cache() -> None:

View File

@@ -18,6 +18,7 @@ Architecture:
import csv import csv
import io import io
import logging import logging
import os
import re import re
from datetime import datetime, timedelta from datetime import datetime, timedelta
from decimal import Decimal from decimal import Decimal
@@ -47,11 +48,18 @@ MAX_EQUIPMENTS = 20
MAX_DATE_RANGE_DAYS = 90 MAX_DATE_RANGE_DAYS = 90
DEFAULT_TIME_WINDOW_HOURS = 168 # 1 week for better PJ_TYPE detection DEFAULT_TIME_WINDOW_HOURS = 168 # 1 week for better PJ_TYPE detection
ADJACENT_LOTS_COUNT = 3 ADJACENT_LOTS_COUNT = 3
# ============================================================ def _max_batch_container_ids() -> int:
# Validation Functions try:
# ============================================================ return max(int(os.getenv("QUERY_TOOL_MAX_CONTAINER_IDS", "200")), 1)
except (TypeError, ValueError):
return 200
# ============================================================
# Validation Functions
# ============================================================
def validate_date_range(start_date: str, end_date: str, max_days: int = MAX_DATE_RANGE_DAYS) -> Optional[str]: def validate_date_range(start_date: str, end_date: str, max_days: int = MAX_DATE_RANGE_DAYS) -> Optional[str]:
"""Validate date range. """Validate date range.
@@ -866,7 +874,7 @@ def get_adjacent_lots(
# LOT Batch Query Functions # LOT Batch Query Functions
# ============================================================ # ============================================================
def get_lot_history_batch( def get_lot_history_batch(
container_ids: List[str], container_ids: List[str],
workcenter_groups: Optional[List[str]] = None, workcenter_groups: Optional[List[str]] = None,
) -> Dict[str, Any]: ) -> Dict[str, Any]:
@@ -878,11 +886,14 @@ def get_lot_history_batch(
Returns: Returns:
Dict with 'data' (merged history records) and 'total'. Dict with 'data' (merged history records) and 'total'.
""" """
if not container_ids: if not container_ids:
return {'error': '請指定 CONTAINERID'} return {'error': '請指定 CONTAINERID'}
max_ids = _max_batch_container_ids()
try: if len(container_ids) > max_ids:
return {'error': f'container_ids 數量不可超過 {max_ids}'}
try:
events_by_cid = EventFetcher.fetch_events(container_ids, "history") events_by_cid = EventFetcher.fetch_events(container_ids, "history")
rows = [] rows = []
@@ -930,11 +941,14 @@ def get_lot_associations_batch(
Returns: Returns:
Dict with 'data' (merged records) and 'total'. Dict with 'data' (merged records) and 'total'.
""" """
if not container_ids: if not container_ids:
return {'error': '請指定 CONTAINERID'} return {'error': '請指定 CONTAINERID'}
max_ids = _max_batch_container_ids()
valid_batch_types = {'materials', 'rejects', 'holds'} if len(container_ids) > max_ids:
return {'error': f'container_ids 數量不可超過 {max_ids}'}
valid_batch_types = {'materials', 'rejects', 'holds'}
if assoc_type not in valid_batch_types: if assoc_type not in valid_batch_types:
return {'error': f'批次查詢不支援類型: {assoc_type}'} return {'error': f'批次查詢不支援類型: {assoc_type}'}

View File

@@ -5,6 +5,7 @@ Provides functions to query equipment status from DWH.DW_MES_RESOURCE and DWH.DW
""" """
import logging import logging
import os
import pandas as pd import pandas as pd
from typing import Optional, Dict, List, Any from typing import Optional, Dict, List, Any
@@ -173,6 +174,22 @@ def query_resource_detail(
DataFrame with resource details or None if query fails. DataFrame with resource details or None if query fails.
""" """
try: try:
try:
max_limit = max(int(os.getenv("RESOURCE_DETAIL_MAX_LIMIT", "500")), 1)
except (TypeError, ValueError):
max_limit = 500
try:
limit = int(limit)
except (TypeError, ValueError):
limit = 500
try:
offset = int(offset)
except (TypeError, ValueError):
offset = 0
limit = max(1, min(limit, max_limit))
offset = max(offset, 0)
base_sql = get_resource_latest_status_subquery(days_back) base_sql = get_resource_latest_status_subquery(days_back)
# Use QueryBuilder for safe parameterized conditions # Use QueryBuilder for safe parameterized conditions

View File

@@ -130,6 +130,34 @@ class AppFactoryTests(unittest.TestCase):
else: else:
os.environ["PORTAL_SPA_ENABLED"] = old os.environ["PORTAL_SPA_ENABLED"] = old
def test_default_env_is_production_when_flask_env_missing(self):
old_flask_env = os.environ.pop("FLASK_ENV", None)
old_secret = os.environ.get("SECRET_KEY")
old_runtime_contract = os.environ.get("RUNTIME_CONTRACT_ENFORCE")
old_realtime_cache = os.environ.get("REALTIME_EQUIPMENT_CACHE_ENABLED")
try:
os.environ["SECRET_KEY"] = "test-production-secret-key"
os.environ["RUNTIME_CONTRACT_ENFORCE"] = "false"
os.environ["REALTIME_EQUIPMENT_CACHE_ENABLED"] = "false"
app = create_app()
self.assertEqual(app.config.get("ENV"), "production")
finally:
if old_flask_env is not None:
os.environ["FLASK_ENV"] = old_flask_env
if old_secret is None:
os.environ.pop("SECRET_KEY", None)
else:
os.environ["SECRET_KEY"] = old_secret
if old_runtime_contract is None:
os.environ.pop("RUNTIME_CONTRACT_ENFORCE", None)
else:
os.environ["RUNTIME_CONTRACT_ENFORCE"] = old_runtime_contract
if old_realtime_cache is None:
os.environ.pop("REALTIME_EQUIPMENT_CACHE_ENABLED", None)
else:
os.environ["REALTIME_EQUIPMENT_CACHE_ENABLED"] = old_realtime_cache
if __name__ == "__main__": if __name__ == "__main__":
unittest.main() unittest.main()

View File

@@ -297,8 +297,8 @@ class TestHoldDetailLotsRoute(TestHoldRoutesBase):
call_args = mock_get_lots.call_args call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page_size'], 200) self.assertEqual(call_args.kwargs['page_size'], 200)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_page_less_than_one(self, mock_get_lots): def test_handles_page_less_than_one(self, mock_get_lots):
"""Page number less than 1 should be set to 1.""" """Page number less than 1 should be set to 1."""
mock_get_lots.return_value = { mock_get_lots.return_value = {
'lots': [], 'lots': [],
@@ -308,8 +308,36 @@ class TestHoldDetailLotsRoute(TestHoldRoutesBase):
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=0') response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=0')
call_args = mock_get_lots.call_args call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page'], 1) self.assertEqual(call_args.kwargs['page'], 1)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_invalid_page_type(self, mock_get_lots):
mock_get_lots.return_value = {
'lots': [],
'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
'filters': {'workcenter': None, 'package': None, 'ageRange': None}
}
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=abc')
self.assertEqual(response.status_code, 200)
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page'], 1)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_handles_invalid_per_page_type(self, mock_get_lots):
mock_get_lots.return_value = {
'lots': [],
'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
'filters': {'workcenter': None, 'package': None, 'ageRange': None}
}
response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&per_page=abc')
self.assertEqual(response.status_code, 200)
call_args = mock_get_lots.call_args
self.assertEqual(call_args.kwargs['page_size'], 50)
@patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
def test_returns_error_on_failure(self, mock_get_lots): def test_returns_error_on_failure(self, mock_get_lots):

View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
"""Frontend safety contract tests for job-query module rendering."""
from __future__ import annotations
from pathlib import Path
def test_job_query_module_avoids_inline_onclick_string_interpolation():
source = (
Path(__file__).resolve().parents[1]
/ "frontend"
/ "src"
/ "job-query"
/ "main.js"
).read_text(encoding="utf-8")
assert "onclick=" not in source
assert 'data-action="toggle-equipment"' in source
assert 'data-action="toggle-job-history"' in source
assert "encodeURIComponent(safeText(value))" in source
assert "decodeURIComponent(value)" in source

View File

@@ -87,8 +87,45 @@ class TestGetResources:
assert 'ORA-01017' not in data['error'] assert 'ORA-01017' not in data['error']
class TestQueryJobs: class TestQueryJobs:
"""Tests for /api/job-query/jobs endpoint.""" """Tests for /api/job-query/jobs endpoint."""
@patch('mes_dashboard.routes.job_query_routes.get_jobs_by_resources')
def test_non_json_payload_returns_415(self, mock_query, client):
response = client.post(
'/api/job-query/jobs',
data='plain-text',
content_type='text/plain',
)
assert response.status_code == 415
payload = response.get_json()
assert 'error' in payload
mock_query.assert_not_called()
@patch('mes_dashboard.routes.job_query_routes.get_jobs_by_resources')
def test_malformed_json_returns_400(self, mock_query, client):
response = client.post(
'/api/job-query/jobs',
data='{"resource_ids":',
content_type='application/json',
)
assert response.status_code == 400
payload = response.get_json()
assert 'error' in payload
mock_query.assert_not_called()
@patch('mes_dashboard.routes.job_query_routes.get_jobs_by_resources')
def test_payload_too_large_returns_413(self, mock_query, client):
client.application.config['MAX_JSON_BODY_BYTES'] = 8
response = client.post(
'/api/job-query/jobs',
data='{"resource_ids":["RES001"]}',
content_type='application/json',
)
assert response.status_code == 413
payload = response.get_json()
assert 'error' in payload
mock_query.assert_not_called()
def test_missing_resource_ids(self, client): def test_missing_resource_ids(self, client):
"""Should return error without resource_ids.""" """Should return error without resource_ids."""
@@ -256,8 +293,32 @@ class TestQueryJobTxnHistory:
assert 'error' in data assert 'error' in data
class TestExportJobs: class TestExportJobs:
"""Tests for /api/job-query/export endpoint.""" """Tests for /api/job-query/export endpoint."""
@patch('mes_dashboard.routes.job_query_routes.export_jobs_with_history')
def test_non_json_payload_returns_415(self, mock_export, client):
response = client.post(
'/api/job-query/export',
data='plain-text',
content_type='text/plain',
)
assert response.status_code == 415
payload = response.get_json()
assert 'error' in payload
mock_export.assert_not_called()
@patch('mes_dashboard.routes.job_query_routes.export_jobs_with_history')
def test_malformed_json_returns_400(self, mock_export, client):
response = client.post(
'/api/job-query/export',
data='{"resource_ids":',
content_type='application/json',
)
assert response.status_code == 400
payload = response.get_json()
assert 'error' in payload
mock_export.assert_not_called()
def test_missing_resource_ids(self, client): def test_missing_resource_ids(self, client):
"""Should return error without resource_ids.""" """Should return error without resource_ids."""

View File

@@ -196,6 +196,22 @@ class TestIsApiPublic:
assert page_registry.is_api_public() is False assert page_registry.is_api_public() is False
def test_api_public_defaults_false_when_key_missing(self, mock_registry, temp_data_file):
data = json.loads(temp_data_file.read_text())
data.pop("api_public", None)
temp_data_file.write_text(json.dumps(data))
page_registry._cache = None
assert page_registry.is_api_public() is False
def test_api_public_invalid_value_defaults_false(self, mock_registry, temp_data_file):
data = json.loads(temp_data_file.read_text())
data["api_public"] = "not-a-bool"
temp_data_file.write_text(json.dumps(data))
page_registry._cache = None
assert page_registry.is_api_public() is False
class TestReloadCache: class TestReloadCache:
"""Tests for reload_cache function.""" """Tests for reload_cache function."""

View File

@@ -53,8 +53,32 @@ class TestQueryToolPage:
assert b'html' in response.data.lower() assert b'html' in response.data.lower()
class TestResolveEndpoint: class TestResolveEndpoint:
"""Tests for /api/query-tool/resolve endpoint.""" """Tests for /api/query-tool/resolve endpoint."""
@patch('mes_dashboard.routes.query_tool_routes.resolve_lots')
def test_non_json_payload_returns_415(self, mock_resolve, client):
response = client.post(
'/api/query-tool/resolve',
data='plain-text',
content_type='text/plain',
)
assert response.status_code == 415
payload = response.get_json()
assert 'error' in payload
mock_resolve.assert_not_called()
@patch('mes_dashboard.routes.query_tool_routes.resolve_lots')
def test_malformed_json_returns_400(self, mock_resolve, client):
response = client.post(
'/api/query-tool/resolve',
data='{"input_type":',
content_type='application/json',
)
assert response.status_code == 400
payload = response.get_json()
assert 'error' in payload
mock_resolve.assert_not_called()
def test_missing_input_type(self, client): def test_missing_input_type(self, client):
"""Should return error without input_type.""" """Should return error without input_type."""
@@ -238,7 +262,7 @@ class TestResolveEndpoint:
assert mock_cache_set.call_args.kwargs['ttl'] == 60 assert mock_cache_set.call_args.kwargs['ttl'] == 60
class TestLotHistoryEndpoint: class TestLotHistoryEndpoint:
"""Tests for /api/query-tool/lot-history endpoint.""" """Tests for /api/query-tool/lot-history endpoint."""
def test_missing_container_id(self, client): def test_missing_container_id(self, client):
@@ -270,15 +294,24 @@ class TestLotHistoryEndpoint:
assert 'data' in data assert 'data' in data
assert data['total'] == 1 assert data['total'] == 1
@patch('mes_dashboard.routes.query_tool_routes.get_lot_history') @patch('mes_dashboard.routes.query_tool_routes.get_lot_history')
def test_lot_history_service_error(self, mock_query, client): def test_lot_history_service_error(self, mock_query, client):
"""Should return error from service.""" """Should return error from service."""
mock_query.return_value = {'error': '查詢失敗'} mock_query.return_value = {'error': '查詢失敗'}
response = client.get('/api/query-tool/lot-history?container_id=invalid') response = client.get('/api/query-tool/lot-history?container_id=invalid')
assert response.status_code == 400 assert response.status_code == 400
data = json.loads(response.data) data = json.loads(response.data)
assert 'error' in data assert 'error' in data
@patch('mes_dashboard.routes.query_tool_routes.get_lot_history_batch')
def test_lot_history_batch_over_limit_returns_413(self, mock_batch, client):
client.application.config['QUERY_TOOL_MAX_CONTAINER_IDS'] = 2
response = client.get('/api/query-tool/lot-history?container_ids=A,B,C')
assert response.status_code == 413
payload = response.get_json()
assert 'error' in payload
mock_batch.assert_not_called()
class TestAdjacentLotsEndpoint: class TestAdjacentLotsEndpoint:
@@ -425,6 +458,17 @@ class TestLotAssociationsEndpoint:
assert response.status_code == 200 assert response.status_code == 200
mock_query.assert_called_once_with('488103800029578b', full_history=True) mock_query.assert_called_once_with('488103800029578b', full_history=True)
@patch('mes_dashboard.routes.query_tool_routes.get_lot_associations_batch')
def test_lot_associations_batch_over_limit_returns_413(self, mock_batch, client):
client.application.config['QUERY_TOOL_MAX_CONTAINER_IDS'] = 1
response = client.get(
'/api/query-tool/lot-associations?type=materials&container_ids=A,B'
)
assert response.status_code == 413
payload = response.get_json()
assert 'error' in payload
mock_batch.assert_not_called()
class TestQueryToolRateLimit: class TestQueryToolRateLimit:
"""Rate-limit behavior for high-cost query-tool endpoints.""" """Rate-limit behavior for high-cost query-tool endpoints."""
@@ -532,8 +576,32 @@ class TestQueryToolRateLimit:
mock_history.assert_not_called() mock_history.assert_not_called()
class TestEquipmentPeriodEndpoint: class TestEquipmentPeriodEndpoint:
"""Tests for /api/query-tool/equipment-period endpoint.""" """Tests for /api/query-tool/equipment-period endpoint."""
@patch('mes_dashboard.routes.query_tool_routes.get_equipment_status_hours')
def test_non_json_payload_returns_415(self, mock_query, client):
response = client.post(
'/api/query-tool/equipment-period',
data='plain-text',
content_type='text/plain',
)
assert response.status_code == 415
payload = response.get_json()
assert 'error' in payload
mock_query.assert_not_called()
@patch('mes_dashboard.routes.query_tool_routes.get_equipment_status_hours')
def test_malformed_json_returns_400(self, mock_query, client):
response = client.post(
'/api/query-tool/equipment-period',
data='{"equipment_ids":',
content_type='application/json',
)
assert response.status_code == 400
payload = response.get_json()
assert 'error' in payload
mock_query.assert_not_called()
def test_missing_query_type(self, client): def test_missing_query_type(self, client):
"""Should return error without query_type.""" """Should return error without query_type."""
@@ -660,8 +728,32 @@ class TestEquipmentPeriodEndpoint:
assert 'data' in data assert 'data' in data
class TestExportCsvEndpoint: class TestExportCsvEndpoint:
"""Tests for /api/query-tool/export-csv endpoint.""" """Tests for /api/query-tool/export-csv endpoint."""
@patch('mes_dashboard.routes.query_tool_routes.get_lot_history')
def test_non_json_payload_returns_415(self, mock_get_history, client):
response = client.post(
'/api/query-tool/export-csv',
data='plain-text',
content_type='text/plain',
)
assert response.status_code == 415
payload = response.get_json()
assert 'error' in payload
mock_get_history.assert_not_called()
@patch('mes_dashboard.routes.query_tool_routes.get_lot_history')
def test_malformed_json_returns_400(self, mock_get_history, client):
response = client.post(
'/api/query-tool/export-csv',
data='{"export_type":',
content_type='application/json',
)
assert response.status_code == 400
payload = response.get_json()
assert 'error' in payload
mock_get_history.assert_not_called()
def test_missing_export_type(self, client): def test_missing_export_type(self, client):
"""Should return error without export_type.""" """Should return error without export_type."""

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
"""Tests for rate-limit client identity trust boundary behavior."""
from flask import Flask
from mes_dashboard.core.rate_limit import _client_identifier
def _app() -> Flask:
return Flask(__name__)
def test_client_identifier_ignores_xff_when_proxy_trust_disabled(monkeypatch):
monkeypatch.setenv("TRUST_PROXY_HEADERS", "false")
monkeypatch.delenv("TRUSTED_PROXY_IPS", raising=False)
app = _app()
with app.test_request_context(
"/",
headers={"X-Forwarded-For": "1.2.3.4"},
environ_base={"REMOTE_ADDR": "9.9.9.9"},
):
assert _client_identifier() == "9.9.9.9"
def test_client_identifier_uses_xff_for_trusted_proxy_source(monkeypatch):
monkeypatch.setenv("TRUST_PROXY_HEADERS", "true")
monkeypatch.setenv("TRUSTED_PROXY_IPS", "127.0.0.1")
app = _app()
with app.test_request_context(
"/",
headers={"X-Forwarded-For": "1.2.3.4, 5.6.7.8"},
environ_base={"REMOTE_ADDR": "127.0.0.1"},
):
assert _client_identifier() == "1.2.3.4"
def test_client_identifier_rejects_untrusted_proxy_source(monkeypatch):
monkeypatch.setenv("TRUST_PROXY_HEADERS", "true")
monkeypatch.setenv("TRUSTED_PROXY_IPS", "127.0.0.1")
app = _app()
with app.test_request_context(
"/",
headers={"X-Forwarded-For": "1.2.3.4"},
environ_base={"REMOTE_ADDR": "10.10.10.10"},
):
assert _client_identifier() == "10.10.10.10"
def test_client_identifier_requires_allowlist_when_proxy_trust_enabled(monkeypatch):
monkeypatch.setenv("TRUST_PROXY_HEADERS", "true")
monkeypatch.delenv("TRUSTED_PROXY_IPS", raising=False)
app = _app()
with app.test_request_context(
"/",
headers={"X-Forwarded-For": "1.2.3.4"},
environ_base={"REMOTE_ADDR": "127.0.0.1"},
):
assert _client_identifier() == "127.0.0.1"

View File

@@ -83,13 +83,41 @@ class TestRedisClient:
key = rc.get_key('mykey') key = rc.get_key('mykey')
assert key == 'test_prefix:mykey' assert key == 'test_prefix:mykey'
def test_get_key_without_prefix(self): def test_get_key_without_prefix(self):
"""Test get_key works with empty prefix.""" """Test get_key works with empty prefix."""
import mes_dashboard.core.redis_client as rc import mes_dashboard.core.redis_client as rc
with patch.object(rc, 'REDIS_KEY_PREFIX', ''): with patch.object(rc, 'REDIS_KEY_PREFIX', ''):
key = rc.get_key('mykey') key = rc.get_key('mykey')
assert key == ':mykey' assert key == ':mykey'
def test_redact_connection_url_masks_password(self):
import mes_dashboard.core.redis_client as rc
redacted = rc.redact_connection_url("redis://user:secret@localhost:6379/0")
assert redacted == "redis://user:***@localhost:6379/0"
def test_redact_connection_url_without_credentials(self):
import mes_dashboard.core.redis_client as rc
redacted = rc.redact_connection_url("redis://localhost:6379/0")
assert redacted == "redis://localhost:6379/0"
def test_get_redis_client_logs_redacted_url(self, reset_module):
import mes_dashboard.core.redis_client as rc
with patch.object(rc, 'REDIS_ENABLED', True):
with patch.object(rc, 'REDIS_URL', 'redis://user:secret@localhost:6379/0'):
with patch.object(rc.redis.Redis, 'from_url') as mock_from_url:
with patch.object(rc.logger, 'info') as mock_info:
mock_client = MagicMock()
mock_client.ping.return_value = True
mock_from_url.return_value = mock_client
rc.get_redis_client()
logged_url = mock_info.call_args.args[1]
assert logged_url == 'redis://user:***@localhost:6379/0'
class TestRedisClientSingleton: class TestRedisClientSingleton:

View File

@@ -72,3 +72,77 @@ def test_resource_status_masks_internal_error_details(_mock_status):
assert payload["error"]["code"] == "INTERNAL_ERROR" assert payload["error"]["code"] == "INTERNAL_ERROR"
assert payload["error"]["message"] == "服務暫時無法使用" assert payload["error"]["message"] == "服務暫時無法使用"
assert "sensitive sql context" not in str(payload) assert "sensitive sql context" not in str(payload)
@patch("mes_dashboard.routes.resource_routes.query_resource_detail")
def test_resource_detail_non_json_payload_returns_415(mock_query):
response = _client().post(
"/api/resource/detail",
data="plain-text",
content_type="text/plain",
)
assert response.status_code == 415
payload = response.get_json()
assert payload["success"] is False
assert "error" in payload
mock_query.assert_not_called()
@patch("mes_dashboard.routes.resource_routes.query_resource_detail")
def test_resource_detail_malformed_json_returns_400(mock_query):
response = _client().post(
"/api/resource/detail",
data='{"filters":',
content_type="application/json",
)
assert response.status_code == 400
payload = response.get_json()
assert payload["success"] is False
assert "error" in payload
mock_query.assert_not_called()
@patch("mes_dashboard.routes.resource_routes.query_resource_detail")
def test_resource_detail_rejects_limit_over_configured_max(mock_query):
client = _client()
client.application.config["RESOURCE_DETAIL_MAX_LIMIT"] = 100
response = client.post(
"/api/resource/detail",
json={"limit": 101, "offset": 0, "filters": {}},
)
assert response.status_code == 413
payload = response.get_json()
assert payload["success"] is False
assert "limit" in payload["error"]
mock_query.assert_not_called()
@patch("mes_dashboard.routes.resource_routes.query_resource_detail")
def test_resource_detail_rejects_invalid_limit_type(mock_query):
response = _client().post(
"/api/resource/detail",
json={"limit": "abc", "offset": 0, "filters": {}},
)
assert response.status_code == 400
payload = response.get_json()
assert payload["success"] is False
assert "limit" in payload["error"]
mock_query.assert_not_called()
@patch("mes_dashboard.routes.resource_routes.query_resource_detail")
def test_resource_detail_rejects_negative_offset(mock_query):
response = _client().post(
"/api/resource/detail",
json={"limit": 10, "offset": -1, "filters": {}},
)
assert response.status_code == 400
payload = response.get_json()
assert payload["success"] is False
assert "offset" in payload["error"]
mock_query.assert_not_called()

View File

@@ -159,6 +159,7 @@ def test_security_headers_applied_globally(testing_app_factory):
assert response.status_code == 200 assert response.status_code == 200
assert "Content-Security-Policy" in response.headers assert "Content-Security-Policy" in response.headers
assert "frame-ancestors 'self'" in response.headers["Content-Security-Policy"] assert "frame-ancestors 'self'" in response.headers["Content-Security-Policy"]
assert "'unsafe-eval'" not in response.headers["Content-Security-Policy"]
assert response.headers["X-Frame-Options"] == "SAMEORIGIN" assert response.headers["X-Frame-Options"] == "SAMEORIGIN"
assert response.headers["X-Content-Type-Options"] == "nosniff" assert response.headers["X-Content-Type-Options"] == "nosniff"
assert "Referrer-Policy" in response.headers assert "Referrer-Policy" in response.headers
@@ -181,3 +182,32 @@ def test_hsts_header_enabled_in_production(monkeypatch):
assert "Strict-Transport-Security" in response.headers assert "Strict-Transport-Security" in response.headers
_shutdown(app) _shutdown(app)
def test_csp_unsafe_eval_can_be_enabled_via_env(monkeypatch):
    """CSP_ALLOW_UNSAFE_EVAL=true must opt 'unsafe-eval' into the CSP header.

    Builds the app directly (rather than via a fixture) so the patched
    environment is read during app creation.
    """
    monkeypatch.setenv("CSP_ALLOW_UNSAFE_EVAL", "true")
    # Build app directly to control env behavior.
    monkeypatch.setenv("REALTIME_EQUIPMENT_CACHE_ENABLED", "false")
    # Reset cached engines so create_app rebuilds against the patched env.
    db._ENGINE = None
    db._HEALTH_ENGINE = None
    app = create_app("testing")
    app.config["TESTING"] = True
    try:
        response = app.test_client().get("/", follow_redirects=True)
        assert response.status_code == 200
        assert "'unsafe-eval'" in response.headers["Content-Security-Policy"]
    finally:
        # Always tear the app down, even on assertion failure, so leaked
        # engines/background resources cannot poison subsequent tests.
        _shutdown(app)
def test_production_trusted_proxy_requires_allowlist(monkeypatch):
    """Production must refuse to start when proxy trust is on without an allowlist."""
    env = {
        "SECRET_KEY": "test-production-secret-key",
        "REALTIME_EQUIPMENT_CACHE_ENABLED": "false",
        "RUNTIME_CONTRACT_ENFORCE": "false",
        "TRUST_PROXY_HEADERS": "true",
    }
    for name, value in env.items():
        monkeypatch.setenv(name, value)
    # Ensure the allowlist is truly absent, not merely empty.
    monkeypatch.delenv("TRUSTED_PROXY_IPS", raising=False)
    # Reset cached engines so create_app rebuilds against the patched env.
    db._ENGINE = None
    db._HEALTH_ENGINE = None
    with pytest.raises(RuntimeError, match="TRUSTED_PROXY_IPS"):
        create_app("production")

View File

@@ -535,6 +535,32 @@ class TestMetaFilterOptionsRoute(TestWipRoutesBase):
self.assertFalse(data['success'])
class TestMetaSearchRoute(TestWipRoutesBase):
    """Exercise GET /api/wip/meta/search limit parsing and clamping."""

    @patch('mes_dashboard.routes.wip_routes.search_workorders')
    def test_invalid_limit_type_falls_back_to_default(self, mock_search):
        # A non-numeric limit is ignored and the default of 20 is used.
        mock_search.return_value = []
        resp = self.client.get('/api/wip/meta/search?field=workorder&q=WO&limit=abc')
        body = json.loads(resp.data)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(body['success'])
        self.assertEqual(mock_search.call_args.kwargs['limit'], 20)

    @patch('mes_dashboard.routes.wip_routes.search_workorders')
    def test_limit_is_bounded_with_upper_cap(self, mock_search):
        # Oversized limits are clamped to the 50-result ceiling.
        mock_search.return_value = []
        resp = self.client.get('/api/wip/meta/search?field=workorder&q=WO&limit=999')
        body = json.loads(resp.data)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(body['success'])
        self.assertEqual(mock_search.call_args.kwargs['limit'], 50)
class TestPageRoutes(TestWipRoutesBase):
    """Test page routes for WIP dashboards."""