fix(review): harden security, stability, and efficiency across 7 dashboard pages

Security: sanitize innerHTML with escapeHtml in job-query, add rate limiting
to job-query and job-export endpoints, upgrade login rate limiter to Redis
cross-worker with in-memory fallback, cap resource_ids array at 50, limit
CSV export date range to 365 days.

Stability: wrap initPage calls in onMounted for wip-overview, resource-status,
and resource-history; unload inactive iframes in portal to free memory; add
±15% jitter to auto-refresh timers in useAutoRefresh and useQcGateData; batch
expanded job history loads with concurrency limit of 5.

Config: reorganize sidebar drawers, move query-tool to dev status.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
egg
2026-02-11 10:29:49 +08:00
parent 7b3f4b2cc1
commit dfaf0bc611
12 changed files with 251 additions and 112 deletions

View File

@@ -23,7 +23,7 @@
"route": "/hold-history",
"name": "Hold 歷史績效",
"status": "dev",
"drawer_id": "reports",
"drawer_id": "drawer-2",
"order": 3
},
{
@@ -40,7 +40,7 @@
"route": "/resource-history",
"name": "設備歷史績效",
"status": "released",
"drawer_id": "reports",
"drawer_id": "drawer-2",
"order": 5
},
{
@@ -75,14 +75,14 @@
"route": "/job-query",
"name": "設備維修查詢",
"status": "released",
"drawer_id": "queries",
"drawer_id": "drawer",
"order": 3
},
{
"route": "/query-tool",
"name": "批次追蹤工具",
"status": "released",
"drawer_id": "queries",
"status": "dev",
"drawer_id": "dev-tools",
"order": 4
},
{
@@ -128,12 +128,6 @@
"order": 1,
"admin_only": false
},
{
"id": "queries",
"name": "查詢類",
"order": 3,
"admin_only": false
},
{
"id": "dev-tools",
"name": "開發工具",
@@ -143,6 +137,12 @@
{
"id": "drawer",
"name": "查詢工具",
"order": 3,
"admin_only": false
},
{
"id": "drawer-2",
"name": "歷史報表",
"order": 2,
"admin_only": false
}

View File

@@ -63,14 +63,14 @@ function renderTxnCell(txn, apiKey) {
try {
const data = await MesApi.get('/api/job-query/resources');
if (data.error) {
document.getElementById('equipmentList').innerHTML = `<div class="error">${data.error}</div>`;
document.getElementById('equipmentList').innerHTML = `<div class="error">${escapeHtml(data.error)}</div>`;
return;
}
allEquipments = data.data;
renderEquipmentList(allEquipments);
} catch (error) {
document.getElementById('equipmentList').innerHTML = `<div class="error">載入失敗: ${error.message}</div>`;
document.getElementById('equipmentList').innerHTML = `<div class="error">載入失敗: ${escapeHtml(error.message)}</div>`;
}
}
@@ -264,7 +264,7 @@ function renderTxnCell(txn, apiKey) {
});
if (data.error) {
resultSection.innerHTML = `<div class="error">${data.error}</div>`;
resultSection.innerHTML = `<div class="error">${escapeHtml(data.error)}</div>`;
return;
}
@@ -275,7 +275,7 @@ function renderTxnCell(txn, apiKey) {
document.getElementById('exportBtn').disabled = jobsData.length === 0;
} catch (error) {
resultSection.innerHTML = `<div class="error">查詢失敗: ${error.message}</div>`;
resultSection.innerHTML = `<div class="error">查詢失敗: ${escapeHtml(error.message)}</div>`;
} finally {
document.getElementById('queryBtn').disabled = false;
}
@@ -346,11 +346,13 @@ function renderTxnCell(txn, apiKey) {
resultSection.innerHTML = html;
// Load expanded histories
// Load expanded histories in batches to avoid thundering herd
const pendingLoads = [];
expandedJobs.forEach(jobId => {
const idx = jobsData.findIndex(j => j.JOBID === jobId);
if (idx >= 0) loadJobHistory(jobId, idx);
if (idx >= 0) pendingLoads.push({ jobId, idx });
});
void loadHistoriesBatched(pendingLoads);
}
// Toggle job history
@@ -382,7 +384,7 @@ function renderTxnCell(txn, apiKey) {
const data = await MesApi.get(`/api/job-query/txn/${jobId}`);
if (data.error) {
container.innerHTML = `<div class="error" style="margin: 10px 20px;">${data.error}</div>`;
container.innerHTML = `<div class="error" style="margin: 10px 20px;">${escapeHtml(data.error)}</div>`;
return;
}
@@ -417,7 +419,16 @@ function renderTxnCell(txn, apiKey) {
container.innerHTML = html;
} catch (error) {
container.innerHTML = `<div class="error" style="margin: 10px 20px;">載入失敗: ${error.message}</div>`;
container.innerHTML = `<div class="error" style="margin: 10px 20px;">載入失敗: ${escapeHtml(error.message)}</div>`;
}
}
// Load multiple job histories with concurrency limit
const BATCH_CONCURRENCY = 5;
async function loadHistoriesBatched(items) {
for (let i = 0; i < items.length; i += BATCH_CONCURRENCY) {
const batch = items.slice(i, i + BATCH_CONCURRENCY);
await Promise.all(batch.map(({ jobId, idx }) => loadJobHistory(jobId, idx)));
}
}

View File

@@ -29,7 +29,17 @@ import './portal.css';
function activateTab(targetId, toolSrc) {
sidebarItems.forEach((item) => item.classList.remove('active'));
frames.forEach((frame) => frame.classList.remove('active'));
// Unload inactive iframes to free memory and stop their timers
frames.forEach((frame) => {
if (frame.classList.contains('active') && frame.id !== targetId) {
if (frame.src) {
frame.dataset.src = frame.src;
}
frame.removeAttribute('src');
}
frame.classList.remove('active');
});
const activeItems = document.querySelectorAll(`.sidebar-item[data-target="${targetId}"]`);
activeItems.forEach((item) => {

View File

@@ -3,6 +3,12 @@ import { computed, onBeforeUnmount, onMounted, ref } from 'vue';
import { apiGet } from '../../core/api.js';
const REFRESH_INTERVAL_MS = 10 * 60 * 1000;
const JITTER_FACTOR = 0.15;

// Spread refresh timers by up to ±15% of the base period so parallel
// dashboard tabs do not hit the backend in lockstep; never schedule a
// refresh faster than once per second.
function jitteredInterval(baseMs) {
  const randomSign = Math.random() * 2 - 1; // uniform in [-1, 1)
  const offsetMs = baseMs * JITTER_FACTOR * randomSign;
  return Math.max(1000, Math.round(baseMs + offsetMs));
}
const API_TIMEOUT_MS = 60000;
const BUCKET_KEYS = ['lt_6h', '6h_12h', '12h_24h', 'gt_24h'];
@@ -106,18 +112,23 @@ export function useQcGateData() {
const stopAutoRefresh = () => {
if (refreshTimer) {
clearInterval(refreshTimer);
clearTimeout(refreshTimer);
refreshTimer = null;
}
};
const startAutoRefresh = () => {
const scheduleNextRefresh = () => {
stopAutoRefresh();
refreshTimer = setInterval(() => {
refreshTimer = setTimeout(() => {
if (!document.hidden) {
void fetchData({ background: true });
}
}, REFRESH_INTERVAL_MS);
scheduleNextRefresh();
}, jitteredInterval(REFRESH_INTERVAL_MS));
};
const startAutoRefresh = () => {
scheduleNextRefresh();
};
const resetAutoRefresh = () => {

View File

@@ -1,5 +1,5 @@
<script setup>
import { computed, reactive, ref } from 'vue';
import { computed, onMounted, reactive, ref } from 'vue';
import { apiGet, ensureMesApiAvailable } from '../core/api.js';
import { buildResourceKpiFromHours } from '../core/compute.js';
@@ -306,7 +306,9 @@ async function initPage() {
await executeQuery();
}
void initPage();
onMounted(() => {
void initPage();
});
</script>
<template>

View File

@@ -1,5 +1,5 @@
<script setup>
import { computed, reactive, ref } from 'vue';
import { computed, onMounted, reactive, ref } from 'vue';
import { apiGet, ensureMesApiAvailable } from '../core/api.js';
import { useAutoRefresh } from '../wip-shared/composables/useAutoRefresh.js';
@@ -430,7 +430,9 @@ async function initPage() {
await loadData(true);
}
void initPage();
onMounted(() => {
void initPage();
});
</script>
<template>

View File

@@ -1,5 +1,5 @@
<script setup>
import { computed, reactive, ref } from 'vue';
import { computed, onMounted, reactive, ref } from 'vue';
import { apiGet } from '../core/api.js';
import {
@@ -267,7 +267,9 @@ async function initializePage() {
await loadAllData(true);
}
void initializePage();
onMounted(() => {
void initializePage();
});
</script>
<template>

View File

@@ -1,6 +1,12 @@
import { onBeforeUnmount, onMounted } from 'vue';
const DEFAULT_REFRESH_INTERVAL_MS = 10 * 60 * 1000;
const JITTER_FACTOR = 0.15; // ±15% random jitter to prevent synchronized requests

/**
 * Compute the next refresh delay: the base interval nudged by a uniformly
 * random offset within ±JITTER_FACTOR, floored at one second.
 */
function jitteredInterval(baseMs) {
  const unit = Math.random() * 2 - 1; // uniform in [-1, 1)
  const delta = baseMs * JITTER_FACTOR * unit;
  return Math.max(1000, Math.round(baseMs + delta));
}
export function useAutoRefresh({
onRefresh,
@@ -14,18 +20,23 @@ export function useAutoRefresh({
function stopAutoRefresh() {
if (refreshTimer) {
clearInterval(refreshTimer);
clearTimeout(refreshTimer);
refreshTimer = null;
}
}
function startAutoRefresh() {
function scheduleNextRefresh() {
stopAutoRefresh();
refreshTimer = setInterval(() => {
refreshTimer = setTimeout(() => {
if (!document.hidden) {
void onRefresh?.();
}
}, intervalMs);
scheduleNextRefresh();
}, jitteredInterval(intervalMs));
}
function startAutoRefresh() {
scheduleNextRefresh();
}
function resetAutoRefresh() {

View File

@@ -3,17 +3,17 @@
from __future__ import annotations
import logging
import time
from collections import defaultdict
from datetime import datetime
from threading import Lock
from urllib.parse import urlparse
import logging
import time
from collections import defaultdict
from datetime import datetime
from threading import Lock
from urllib.parse import urlparse
from flask import Blueprint, flash, redirect, render_template, request, session, url_for
from mes_dashboard.core.csrf import rotate_csrf_token
from mes_dashboard.services.auth_service import authenticate, is_admin
from flask import Blueprint, flash, redirect, render_template, request, session, url_for
from mes_dashboard.core.csrf import rotate_csrf_token
from mes_dashboard.services.auth_service import authenticate, is_admin
logger = logging.getLogger('mes_dashboard.auth_routes')
auth_bp = Blueprint("auth", __name__, url_prefix="/admin")
@@ -22,63 +22,100 @@ auth_bp = Blueprint("auth", __name__, url_prefix="/admin")
# ============================================================
# Rate Limiting for Login Endpoint
# ============================================================
# Simple in-memory rate limiter to prevent brute force attacks
# Redis-backed rate limiter (cross-worker) with in-memory fallback.
# Configuration: max 5 attempts per IP per 5 minutes
_rate_limit_lock = Lock()
_login_attempts: dict = defaultdict(list) # IP -> list of timestamps
RATE_LIMIT_MAX_ATTEMPTS = 5
RATE_LIMIT_WINDOW_SECONDS = 300 # 5 minutes
def _sanitize_next_url(next_url: str | None) -> str:
"""Return a safe post-login redirect URL limited to local paths."""
fallback = url_for("portal_index")
if not next_url:
return fallback
parsed = urlparse(next_url)
if parsed.scheme or parsed.netloc:
logger.warning("Blocked external next redirect: %s", next_url)
return fallback
if not next_url.startswith("/") or next_url.startswith("//"):
return fallback
return next_url
_last_cleanup = time.time()
RATE_LIMIT_MAX_ATTEMPTS = 5
RATE_LIMIT_WINDOW_SECONDS = 300 # 5 minutes
_CLEANUP_INTERVAL = 600 # Sweep stale entries every 10 minutes
_REDIS_LOGIN_KEY_PREFIX = "mes:login_attempts:"
def _get_redis():
"""Get Redis client if available."""
try:
from mes_dashboard.core.redis_client import get_redis_client
return get_redis_client()
except Exception:
return None
def _sanitize_next_url(next_url: str | None) -> str:
    """Return a safe post-login redirect target, restricted to local paths.

    Anything empty, absolute (has a scheme or netloc), protocol-relative
    (``//...``), or not starting with ``/`` falls back to the portal index.
    """
    default_target = url_for("portal_index")
    if not next_url:
        return default_target
    parts = urlparse(next_url)
    if parts.scheme or parts.netloc:
        # Reject absolute URLs outright; log for audit visibility.
        logger.warning("Blocked external next redirect: %s", next_url)
        return default_target
    if next_url.startswith("//") or not next_url.startswith("/"):
        return default_target
    return next_url
def _cleanup_stale_entries() -> None:
    """Drop in-memory attempt lists whose newest entry left the window.

    Throttled to at most one sweep per ``_CLEANUP_INTERVAL`` seconds.
    Callers are expected to already hold ``_rate_limit_lock`` (see
    ``_is_rate_limited``).
    """
    global _last_cleanup
    now = time.time()
    if now - _last_cleanup < _CLEANUP_INTERVAL:
        return
    _last_cleanup = now
    cutoff = now - RATE_LIMIT_WINDOW_SECONDS
    expired = [
        ip
        for ip, stamps in _login_attempts.items()
        if not stamps or stamps[-1] <= cutoff
    ]
    for ip in expired:
        del _login_attempts[ip]
def _is_rate_limited(ip: str) -> bool:
    """Check if an IP address is rate limited.

    Uses Redis when available for cross-worker consistency, and falls back
    to the in-memory per-process dict otherwise. (This reconstruction also
    removes the duplicated docstring fragments and the unreachable duplicate
    return statement left behind by a bad merge.)

    Args:
        ip: Client IP address.

    Returns:
        True if the IP has reached RATE_LIMIT_MAX_ATTEMPTS within the
        window, False otherwise.
    """
    redis_client = _get_redis()
    if redis_client:
        try:
            key = f"{_REDIS_LOGIN_KEY_PREFIX}{ip}"
            count = redis_client.get(key)
            return int(count or 0) >= RATE_LIMIT_MAX_ATTEMPTS
        except Exception:
            pass  # Redis hiccup: fall through to in-memory tracking
    current_time = time.time()
    window_start = current_time - RATE_LIMIT_WINDOW_SECONDS
    with _rate_limit_lock:
        # Opportunistic sweep of stale IPs, then prune this IP's window.
        _cleanup_stale_entries()
        _login_attempts[ip] = [
            ts for ts in _login_attempts[ip] if ts > window_start
        ]
        return len(_login_attempts[ip]) >= RATE_LIMIT_MAX_ATTEMPTS
def _record_login_attempt(ip: str) -> None:
    """Record a login attempt for rate limiting.

    Increments the Redis counter (with a sliding TTL of the rate-limit
    window) when Redis is available; otherwise appends a timestamp to the
    in-memory attempt list. (This reconstruction removes the stray docstring
    fragment that a bad merge left in the middle of the function body.)

    Args:
        ip: Client IP address.
    """
    redis_client = _get_redis()
    if redis_client:
        try:
            key = f"{_REDIS_LOGIN_KEY_PREFIX}{ip}"
            # INCR + EXPIRE in one pipeline round-trip; EXPIRE resets the
            # window on every attempt.
            pipe = redis_client.pipeline()
            pipe.incr(key)
            pipe.expire(key, RATE_LIMIT_WINDOW_SECONDS)
            pipe.execute()
            return
        except Exception:
            pass  # Fall through to in-memory
    with _rate_limit_lock:
        _login_attempts[ip].append(time.time())
@@ -108,27 +145,27 @@ def login():
user = authenticate(username, password)
if user is None:
error = "帳號或密碼錯誤"
elif not is_admin(user):
error = "您不是管理員,無法登入後台"
else:
# Login successful
session.clear()
session["admin"] = {
"username": user.get("username"),
"displayName": user.get("displayName"),
"mail": user.get("mail"),
"department": user.get("department"),
"login_time": datetime.now().isoformat(),
}
rotate_csrf_token()
next_url = _sanitize_next_url(request.args.get("next"))
return redirect(next_url)
elif not is_admin(user):
error = "您不是管理員,無法登入後台"
else:
# Login successful
session.clear()
session["admin"] = {
"username": user.get("username"),
"displayName": user.get("displayName"),
"mail": user.get("mail"),
"department": user.get("department"),
"login_time": datetime.now().isoformat(),
}
rotate_csrf_token()
next_url = _sanitize_next_url(request.args.get("next"))
return redirect(next_url)
return render_template("login.html", error=error)
@auth_bp.route("/logout")
def logout():
    """Admin logout: clear the session and return to the portal index.

    (Deduplicates the repeated ``def logout()`` body left by a bad merge.)
    """
    session.clear()
    return redirect(url_for("portal_index"))

View File

@@ -1,16 +1,17 @@
# -*- coding: utf-8 -*-
"""Job Query API routes.
"""Job Query API routes.
Contains Flask Blueprint for maintenance job query endpoints:
- Job list query by resources
- Job transaction history detail
- CSV export with full history
"""
import logging
from flask import Blueprint, jsonify, request, Response, render_template
"""
import logging
from flask import Blueprint, jsonify, request, Response, render_template
from mes_dashboard.core.rate_limit import configured_rate_limit
from mes_dashboard.services.job_query_service import (
get_jobs_by_resources,
get_job_txn_history,
@@ -18,9 +19,27 @@ from mes_dashboard.services.job_query_service import (
validate_date_range,
)
# Create Blueprint
job_query_bp = Blueprint('job_query', __name__)
logger = logging.getLogger('mes_dashboard.job_query_routes')
# Create Blueprint
job_query_bp = Blueprint('job_query', __name__)
logger = logging.getLogger('mes_dashboard.job_query_routes')
# Upper bound on resource_ids accepted per query/export request; requests
# above this are rejected with HTTP 400 in the route handlers below.
MAX_RESOURCE_IDS = 50

# Per-endpoint rate limits. Defaults are overridable via the named
# environment variables; buckets keep query and export quotas independent.
_JOB_QUERY_RATE_LIMIT = configured_rate_limit(
    bucket="job-query",
    max_attempts_env="JOB_QUERY_RATE_LIMIT_MAX_REQUESTS",
    window_seconds_env="JOB_QUERY_RATE_LIMIT_WINDOW_SECONDS",
    default_max_attempts=60,
    default_window_seconds=60,
)

# CSV export is heavier (full transaction history per job), so its default
# quota is much tighter than the interactive query endpoints.
_JOB_EXPORT_RATE_LIMIT = configured_rate_limit(
    bucket="job-export",
    max_attempts_env="JOB_EXPORT_RATE_LIMIT_MAX_REQUESTS",
    window_seconds_env="JOB_EXPORT_RATE_LIMIT_WINDOW_SECONDS",
    default_max_attempts=10,
    default_window_seconds=60,
)
# ============================================================
@@ -68,12 +87,13 @@ def get_resources():
'total': len(data)
})
except Exception as exc:
logger.exception("Failed to load job-query resources: %s", exc)
return jsonify({'error': '服務暫時無法使用'}), 500
except Exception as exc:
logger.exception("Failed to load job-query resources: %s", exc)
return jsonify({'error': '服務暫時無法使用'}), 500
@job_query_bp.route('/api/job-query/jobs', methods=['POST'])
@_JOB_QUERY_RATE_LIMIT
def query_jobs():
"""Query jobs for selected resources.
@@ -95,6 +115,8 @@ def query_jobs():
# Validation
if not resource_ids:
return jsonify({'error': '請選擇至少一台設備'}), 400
if len(resource_ids) > MAX_RESOURCE_IDS:
return jsonify({'error': f'設備數量不可超過 {MAX_RESOURCE_IDS}'}), 400
if not start_date or not end_date:
return jsonify({'error': '請指定日期範圍'}), 400
@@ -111,6 +133,7 @@ def query_jobs():
@job_query_bp.route('/api/job-query/txn/<job_id>', methods=['GET'])
@_JOB_QUERY_RATE_LIMIT
def query_job_txn_history(job_id: str):
"""Query transaction history for a single job.
@@ -131,6 +154,7 @@ def query_job_txn_history(job_id: str):
@job_query_bp.route('/api/job-query/export', methods=['POST'])
@_JOB_EXPORT_RATE_LIMIT
def export_jobs():
"""Export jobs with full transaction history as CSV.
@@ -152,6 +176,8 @@ def export_jobs():
# Validation
if not resource_ids:
return jsonify({'error': '請選擇至少一台設備'}), 400
if len(resource_ids) > MAX_RESOURCE_IDS:
return jsonify({'error': f'設備數量不可超過 {MAX_RESOURCE_IDS}'}), 400
if not start_date or not end_date:
return jsonify({'error': '請指定日期範圍'}), 400

View File

@@ -4,6 +4,8 @@
Contains Flask Blueprint for historical equipment performance analysis endpoints.
"""
from datetime import datetime
from flask import Blueprint, jsonify, request, redirect, Response
from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key
@@ -218,6 +220,21 @@ def api_resource_history_export():
'error': '必須提供 start_date 和 end_date 參數'
}), 400
# Validate export date range (max 365 days)
try:
sd = datetime.strptime(start_date, '%Y-%m-%d')
ed = datetime.strptime(end_date, '%Y-%m-%d')
if (ed - sd).days > 365:
return jsonify({
'success': False,
'error': 'CSV 匯出範圍不可超過一年 (365 天)'
}), 400
except ValueError:
return jsonify({
'success': False,
'error': '日期格式錯誤,請使用 YYYY-MM-DD'
}), 400
# Generate filename
filename = f"resource_history_{start_date}_to_{end_date}.csv"

View File

@@ -423,7 +423,17 @@
function activateTab(targetId, toolSrc) {
sidebarItems.forEach(item => item.classList.remove('active'));
frames.forEach(frame => frame.classList.remove('active'));
// Unload inactive iframes to free memory and stop their timers
frames.forEach(frame => {
if (frame.classList.contains('active') && frame.id !== targetId) {
if (frame.src) {
frame.dataset.src = frame.src;
}
frame.removeAttribute('src');
}
frame.classList.remove('active');
});
const activeItems = document.querySelectorAll(`.sidebar-item[data-target="${targetId}"]`);
activeItems.forEach(item => {