feat: 資料庫連線穩定性改進與 UI 優化
- 新增結構化錯誤記錄:logging 模組取代 print,含 ORA 錯誤碼擷取
- 新增查詢時間統計:>1s 慢查詢標記為 WARNING
- 密碼 URL 編碼:處理特殊字元避免連線字串解析錯誤
- 新增日誌輪替:服務啟動時自動歸檔舊日誌,保留最近 10 份
- 修正 Equipment 欄位:EQUIPMENTNAME → EQUIPMENTS
- UI 簡化:移除 WIP 狀態卡片的 pcs 後綴

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -159,9 +159,34 @@ run_all_checks() {
|
||||
# ============================================================
|
||||
ensure_dirs() {
|
||||
mkdir -p "${LOG_DIR}"
|
||||
mkdir -p "${LOG_DIR}/archive"
|
||||
mkdir -p "${ROOT}/tmp"
|
||||
}
|
||||
|
||||
rotate_logs() {
|
||||
# Archive existing logs with timestamp before starting new session
|
||||
local ts=$(date '+%Y%m%d_%H%M%S')
|
||||
|
||||
if [ -f "$ACCESS_LOG" ] && [ -s "$ACCESS_LOG" ]; then
|
||||
mv "$ACCESS_LOG" "${LOG_DIR}/archive/access_${ts}.log"
|
||||
log_info "Archived access.log -> archive/access_${ts}.log"
|
||||
fi
|
||||
|
||||
if [ -f "$ERROR_LOG" ] && [ -s "$ERROR_LOG" ]; then
|
||||
mv "$ERROR_LOG" "${LOG_DIR}/archive/error_${ts}.log"
|
||||
log_info "Archived error.log -> archive/error_${ts}.log"
|
||||
fi
|
||||
|
||||
# Clean up old archives (keep last 10)
|
||||
cd "${LOG_DIR}/archive" 2>/dev/null && \
|
||||
ls -t access_*.log 2>/dev/null | tail -n +11 | xargs -r rm -f && \
|
||||
ls -t error_*.log 2>/dev/null | tail -n +11 | xargs -r rm -f
|
||||
cd "$ROOT"
|
||||
|
||||
# Create fresh log files
|
||||
touch "$ACCESS_LOG" "$ERROR_LOG"
|
||||
}
|
||||
|
||||
get_pid() {
|
||||
if [ -f "$PID_FILE" ]; then
|
||||
local pid=$(cat "$PID_FILE" 2>/dev/null)
|
||||
@@ -205,6 +230,7 @@ do_start() {
|
||||
log_info "Starting ${APP_NAME} server..."
|
||||
|
||||
ensure_dirs
|
||||
rotate_logs # Archive old logs before starting new session
|
||||
conda activate "$CONDA_ENV"
|
||||
export PYTHONPATH="${ROOT}/src:${PYTHONPATH:-}"
|
||||
cd "$ROOT"
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from flask import Flask, jsonify, render_template, request
|
||||
|
||||
from mes_dashboard.config.tables import TABLES_CONFIG
|
||||
@@ -12,6 +15,34 @@ from mes_dashboard.core.database import get_table_data, get_table_columns, get_e
|
||||
from mes_dashboard.routes import register_routes
|
||||
|
||||
|
||||
def _configure_logging(app: Flask) -> None:
|
||||
"""Configure application logging.
|
||||
|
||||
Sets up logging to stderr (captured by Gunicorn's --capture-output).
|
||||
Log levels:
|
||||
- DEBUG: Query completion times, connection events
|
||||
- WARNING: Slow queries (>1s)
|
||||
- ERROR: Connection failures, query errors with ORA codes
|
||||
"""
|
||||
# Configure the mes_dashboard logger
|
||||
logger = logging.getLogger('mes_dashboard')
|
||||
logger.setLevel(logging.DEBUG if app.debug else logging.INFO)
|
||||
|
||||
# Only add handler if not already configured (avoid duplicates)
|
||||
if not logger.handlers:
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter(
|
||||
'%(asctime)s [%(levelname)s] %(name)s: %(message)s',
|
||||
datefmt='%Y-%m-%d %H:%M:%S'
|
||||
)
|
||||
handler.setFormatter(formatter)
|
||||
logger.addHandler(handler)
|
||||
|
||||
# Prevent propagation to root logger (avoid duplicate logs)
|
||||
logger.propagate = False
|
||||
|
||||
|
||||
def create_app(config_name: str | None = None) -> Flask:
|
||||
"""Create and configure the Flask app instance."""
|
||||
app = Flask(__name__, template_folder="templates")
|
||||
@@ -19,6 +50,9 @@ def create_app(config_name: str | None = None) -> Flask:
|
||||
config_class = get_config(config_name)
|
||||
app.config.from_object(config_class)
|
||||
|
||||
# Configure logging first
|
||||
_configure_logging(app)
|
||||
|
||||
# Default cache backend (no-op)
|
||||
app.extensions["cache"] = NoOpCache()
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ Loads credentials from environment variables (.env file).
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
# Load .env file if python-dotenv is available
|
||||
try:
|
||||
@@ -25,7 +26,7 @@ DB_SERVICE = os.getenv('DB_SERVICE', 'DWDB')
|
||||
DB_USER = os.getenv('DB_USER', '')
|
||||
DB_PASSWORD = os.getenv('DB_PASSWORD', '')
|
||||
|
||||
# Oracle Database connection config
|
||||
# Oracle Database connection config (for direct oracledb connections)
|
||||
DB_CONFIG = {
|
||||
'user': DB_USER,
|
||||
'password': DB_PASSWORD,
|
||||
@@ -33,6 +34,8 @@ DB_CONFIG = {
|
||||
}
|
||||
|
||||
# SQLAlchemy connection string
|
||||
# Note: Password is URL-encoded to handle special characters (@:/?# etc.)
|
||||
CONNECTION_STRING = (
|
||||
f"oracle+oracledb://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/?service_name={DB_SERVICE}"
|
||||
f"oracle+oracledb://{DB_USER}:{quote_plus(DB_PASSWORD)}"
|
||||
f"@{DB_HOST}:{DB_PORT}/?service_name={DB_SERVICE}"
|
||||
)
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
import oracledb
|
||||
@@ -14,6 +17,9 @@ from sqlalchemy.pool import NullPool
|
||||
from mes_dashboard.config.database import DB_CONFIG, CONNECTION_STRING
|
||||
from mes_dashboard.config.settings import DevelopmentConfig
|
||||
|
||||
# Configure module logger
|
||||
logger = logging.getLogger('mes_dashboard.database')
|
||||
|
||||
# ============================================================
|
||||
# SQLAlchemy Engine (NullPool - no connection pooling)
|
||||
# ============================================================
|
||||
@@ -42,6 +48,7 @@ def get_engine():
|
||||
"retry_delay": 1, # 1s delay between retries
|
||||
}
|
||||
)
|
||||
logger.info("Database engine created with NullPool")
|
||||
return _ENGINE
|
||||
|
||||
|
||||
@@ -77,7 +84,7 @@ def init_db(app) -> None:
|
||||
|
||||
def start_keepalive():
|
||||
"""No-op: Keep-alive not needed with NullPool."""
|
||||
print("[DB] Using NullPool - no keep-alive needed")
|
||||
logger.debug("Using NullPool - no keep-alive needed")
|
||||
|
||||
|
||||
def stop_keepalive():
|
||||
@@ -96,24 +103,58 @@ def get_db_connection():
|
||||
Used for operations that need direct cursor access.
|
||||
"""
|
||||
try:
|
||||
return oracledb.connect(
|
||||
conn = oracledb.connect(
|
||||
**DB_CONFIG,
|
||||
tcp_connect_timeout=10, # TCP connect timeout 10s
|
||||
retry_count=1, # Retry once on connection failure
|
||||
retry_delay=1, # 1s delay between retries
|
||||
)
|
||||
logger.debug("Direct oracledb connection established")
|
||||
return conn
|
||||
except Exception as exc:
|
||||
print(f"Database connection failed: {exc}")
|
||||
ora_code = _extract_ora_code(exc)
|
||||
logger.error(f"Database connection failed - ORA-{ora_code}: {exc}")
|
||||
return None
|
||||
|
||||
|
||||
def _extract_ora_code(exc: Exception) -> str:
|
||||
"""Extract ORA error code from exception message."""
|
||||
match = re.search(r'ORA-(\d+)', str(exc))
|
||||
return match.group(1) if match else 'UNKNOWN'
|
||||
|
||||
|
||||
def read_sql_df(sql: str, params: Optional[Dict[str, Any]] = None) -> pd.DataFrame:
|
||||
"""Execute SQL query and return results as a DataFrame."""
|
||||
"""Execute SQL query and return results as a DataFrame.
|
||||
|
||||
Includes query timing and error logging with ORA codes.
|
||||
"""
|
||||
start_time = time.time()
|
||||
engine = get_engine()
|
||||
with engine.connect() as conn:
|
||||
df = pd.read_sql(text(sql), conn, params=params)
|
||||
df.columns = [str(c).upper() for c in df.columns]
|
||||
return df
|
||||
|
||||
try:
|
||||
with engine.connect() as conn:
|
||||
df = pd.read_sql(text(sql), conn, params=params)
|
||||
df.columns = [str(c).upper() for c in df.columns]
|
||||
|
||||
elapsed = time.time() - start_time
|
||||
# Log slow queries (>1 second) as warnings
|
||||
if elapsed > 1.0:
|
||||
# Truncate SQL for logging (first 100 chars)
|
||||
sql_preview = sql.strip().replace('\n', ' ')[:100]
|
||||
logger.warning(f"Slow query ({elapsed:.2f}s): {sql_preview}...")
|
||||
else:
|
||||
logger.debug(f"Query completed in {elapsed:.3f}s, rows={len(df)}")
|
||||
|
||||
return df
|
||||
|
||||
except Exception as exc:
|
||||
elapsed = time.time() - start_time
|
||||
ora_code = _extract_ora_code(exc)
|
||||
sql_preview = sql.strip().replace('\n', ' ')[:100]
|
||||
logger.error(
|
||||
f"Query failed after {elapsed:.2f}s - ORA-{ora_code}: {exc} | SQL: {sql_preview}..."
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
# ============================================================
|
||||
@@ -223,6 +264,8 @@ def get_table_data(
|
||||
'row_count': len(data)
|
||||
}
|
||||
except Exception as exc:
|
||||
ora_code = _extract_ora_code(exc)
|
||||
logger.error(f"get_table_data failed - ORA-{ora_code}: {exc}")
|
||||
if connection:
|
||||
connection.close()
|
||||
return {'error': f'查詢失敗: {str(exc)}'}
|
||||
|
||||
@@ -40,8 +40,8 @@ def api_overview_summary():
|
||||
lotid: Optional LOTID filter (fuzzy match)
|
||||
include_dummy: Include DUMMY lots (default: false)
|
||||
|
||||
Returns:
|
||||
JSON with totalLots, totalQtyPcs, byWipStatus, dataUpdateDate
|
||||
Returns:
|
||||
JSON with totalLots, totalQtyPcs, byWipStatus, dataUpdateDate
|
||||
"""
|
||||
workorder = request.args.get('workorder', '').strip() or None
|
||||
lotid = request.args.get('lotid', '').strip() or None
|
||||
|
||||
@@ -380,7 +380,7 @@ def get_wip_detail(
|
||||
SELECT * FROM (
|
||||
SELECT
|
||||
LOTID,
|
||||
EQUIPMENTNAME,
|
||||
EQUIPMENTS,
|
||||
STATUS,
|
||||
HOLDREASONNAME,
|
||||
QTY,
|
||||
@@ -404,7 +404,7 @@ def get_wip_detail(
|
||||
for _, row in lots_df.iterrows():
|
||||
lots.append({
|
||||
'lotId': _safe_value(row['LOTID']),
|
||||
'equipment': _safe_value(row['EQUIPMENTNAME']),
|
||||
'equipment': _safe_value(row['EQUIPMENTS']),
|
||||
'wipStatus': _safe_value(row['WIP_STATUS']),
|
||||
'holdReason': _safe_value(row['HOLDREASONNAME']),
|
||||
'qty': int(row['QTY'] or 0),
|
||||
|
||||
@@ -979,7 +979,7 @@
|
||||
);
|
||||
updateElementWithTransition(
|
||||
'runQty',
|
||||
runQty === null || runQty === undefined ? '-' : `${formatNumber(runQty)} pcs`
|
||||
runQty === null || runQty === undefined ? '-' : formatNumber(runQty)
|
||||
);
|
||||
updateElementWithTransition(
|
||||
'queueLots',
|
||||
@@ -987,7 +987,7 @@
|
||||
);
|
||||
updateElementWithTransition(
|
||||
'queueQty',
|
||||
queueQty === null || queueQty === undefined ? '-' : `${formatNumber(queueQty)} pcs`
|
||||
queueQty === null || queueQty === undefined ? '-' : formatNumber(queueQty)
|
||||
);
|
||||
updateElementWithTransition(
|
||||
'holdLots',
|
||||
@@ -995,7 +995,7 @@
|
||||
);
|
||||
updateElementWithTransition(
|
||||
'holdQty',
|
||||
holdQty === null || holdQty === undefined ? '-' : `${formatNumber(holdQty)} pcs`
|
||||
holdQty === null || holdQty === undefined ? '-' : formatNumber(holdQty)
|
||||
);
|
||||
|
||||
if (data.dataUpdateDate) {
|
||||
|
||||
Reference in New Issue
Block a user