From b56e80381bc3e67a8289063c8c5db8530252486b Mon Sep 17 00:00:00 2001 From: beabigegg Date: Sun, 8 Feb 2026 08:30:48 +0800 Subject: [PATCH] chore: reinitialize project with vite architecture --- .env.example | 181 + .gitignore | 56 + README.md | 658 + README.mdj | 61 + data/page_status.json | 56 + data/table_schema_info.json | 12093 ++++++++++++++++ deploy/mes-dashboard-watchdog.service | 40 + deploy/mes-dashboard.service | 43 + docs/DW_PJ_LOT_V_POWERBI_SQL.txt | 60 + docs/MES_Core_Tables_Analysis_Report.md | 2334 +++ docs/MES_Database_Reference.md | 1379 ++ docs/Oracle_Authorized_Objects.md | 36 + docs/architecture_findings.md | 936 ++ docs/environment_gaps_and_mitigation.md | 34 + docs/frontend_compute_shift_plan.md | 42 + docs/migration_gates_and_runbook.md | 113 + docs/migration_validation_evidence.md | 60 + docs/page_architecture_map.md | 44 + docs/root_cutover_inventory.md | 56 + docs/root_refactor_validation_notes.md | 37 + environment.yml | 46 + frontend/.gitignore | 2 + frontend/package-lock.json | 1105 ++ frontend/package.json | 14 + frontend/src/core/api.js | 82 + frontend/src/core/autocomplete.js | 69 + frontend/src/core/compute.js | 59 + frontend/src/core/field-contracts.js | 25 + frontend/src/core/table-tree.js | 44 + frontend/src/excel-query/main.js | 624 + frontend/src/hold-detail/main.js | 336 + frontend/src/job-query/main.js | 474 + frontend/src/portal/main.js | 193 + frontend/src/portal/portal.css | 29 + frontend/src/resource-history/main.js | 844 ++ frontend/src/resource-status/main.js | 853 ++ frontend/src/tables/main.js | 236 + frontend/src/wip-detail/main.js | 844 ++ frontend/src/wip-overview/main.js | 829 ++ frontend/tests/autocomplete.test.js | 57 + frontend/vite.config.js | 29 + frontend_design/Hold_detail.pen | 2182 +++ frontend_design/WIP_main.pen | 614 + gunicorn.conf.py | 38 + .../.openspec.yaml | 2 + .../design.md | 79 + .../proposal.md | 32 + .../cache-observability-hardening/spec.md | 15 + 
.../specs/field-contract-governance/spec.md | 19 + .../specs/frontend-compute-shift/spec.md | 15 + .../full-vite-page-modularization/spec.md | 19 + .../specs/migration-gates-and-rollout/spec.md | 15 + .../specs/root-cutover-finalization/spec.md | 19 + .../tasks.md | 42 + .../.openspec.yaml | 2 + .../design.md | 64 + .../proposal.md | 29 + .../specs/field-name-consistency/spec.md | 12 + .../specs/layered-route-cache/spec.md | 19 + .../specs/portal-drawer-navigation/spec.md | 15 + .../specs/root-project-restructure/spec.md | 12 + .../vite-single-port-integration/spec.md | 15 + .../tasks.md | 26 + .../.openspec.yaml | 2 + .../design.md | 50 + .../proposal.md | 26 + .../specs/field-contract-governance/spec.md | 8 + .../full-vite-page-modularization/spec.md | 12 + .../specs/report-effects-parity/spec.md | 8 + .../tasks.md | 17 + .../.openspec.yaml | 2 + .../design.md | 101 + .../proposal.md | 39 + .../cache-observability-hardening/spec.md | 22 + .../conda-systemd-runtime-alignment/spec.md | 22 + .../specs/frontend-compute-shift/spec.md | 22 + .../full-vite-page-modularization/spec.md | 22 + .../specs/layered-route-cache/spec.md | 22 + .../specs/migration-gates-and-rollout/spec.md | 22 + .../specs/runtime-resilience-recovery/spec.md | 29 + .../tasks.md | 36 + .../.openspec.yaml | 2 + .../design.md | 65 + .../proposal.md | 28 + .../specs/field-contract-governance/spec.md | 15 + .../specs/frontend-compute-shift/spec.md | 15 + .../full-vite-page-modularization/spec.md | 19 + .../specs/report-effects-parity/spec.md | 19 + .../specs/runtime-resilience-recovery/spec.md | 8 + .../tasks.md | 28 + .../.openspec.yaml | 2 + .../design.md | 67 + .../proposal.md | 40 + .../full-vite-page-modularization/spec.md | 12 + .../specs/migration-gates-and-rollout/spec.md | 12 + .../specs/runtime-resilience-recovery/spec.md | 12 + .../tasks.md | 23 + openspec/config.yaml | 20 + .../cache-observability-hardening/spec.md | 38 + .../conda-systemd-runtime-alignment/spec.md | 26 + 
.../specs/field-contract-governance/spec.md | 42 + openspec/specs/field-name-consistency/spec.md | 16 + openspec/specs/frontend-compute-shift/spec.md | 52 + .../full-vite-page-modularization/spec.md | 81 + openspec/specs/layered-route-cache/spec.md | 42 + .../specs/migration-gates-and-rollout/spec.md | 48 + .../specs/portal-drawer-navigation/spec.md | 19 + openspec/specs/report-effects-parity/spec.md | 30 + .../specs/root-cutover-finalization/spec.md | 23 + .../specs/root-project-restructure/spec.md | 16 + .../specs/runtime-resilience-recovery/spec.md | 50 + .../vite-single-port-integration/spec.md | 19 + pyproject.toml | 72 + pytest.ini | 12 + requirements.txt | 36 + scripts/deploy.sh | 289 + scripts/run_stress_tests.py | 195 + scripts/start_server.sh | 689 + scripts/worker_watchdog.py | 302 + shared/field_contracts.json | 110 + src/mes_dashboard/__init__.py | 5 + src/mes_dashboard/__main__.py | 12 + src/mes_dashboard/app.py | 366 + src/mes_dashboard/config/__init__.py | 47 + src/mes_dashboard/config/constants.py | 122 + src/mes_dashboard/config/database.py | 42 + src/mes_dashboard/config/field_contracts.py | 40 + src/mes_dashboard/config/settings.py | 115 + src/mes_dashboard/config/tables.py | 150 + src/mes_dashboard/config/workcenter_groups.py | 138 + src/mes_dashboard/core/__init__.py | 39 + src/mes_dashboard/core/cache.py | 437 + src/mes_dashboard/core/cache_updater.py | 328 + src/mes_dashboard/core/circuit_breaker.py | 305 + src/mes_dashboard/core/database.py | 693 + src/mes_dashboard/core/log_store.py | 529 + src/mes_dashboard/core/metrics.py | 232 + src/mes_dashboard/core/permissions.py | 66 + src/mes_dashboard/core/redis_client.py | 170 + src/mes_dashboard/core/resilience.py | 148 + src/mes_dashboard/core/response.py | 261 + src/mes_dashboard/core/utils.py | 258 + src/mes_dashboard/routes/__init__.py | 39 + src/mes_dashboard/routes/admin_routes.py | 538 + src/mes_dashboard/routes/auth_routes.py | 113 + src/mes_dashboard/routes/dashboard_routes.py | 113 + 
.../routes/excel_query_routes.py | 355 + src/mes_dashboard/routes/health_routes.py | 472 + src/mes_dashboard/routes/hold_routes.py | 152 + src/mes_dashboard/routes/job_query_routes.py | 165 + .../routes/resource_history_routes.py | 239 + src/mes_dashboard/routes/resource_routes.py | 339 + src/mes_dashboard/routes/wip_routes.py | 339 + src/mes_dashboard/services/__init__.py | 1 + src/mes_dashboard/services/auth_service.py | 124 + .../services/dashboard_service.py | 507 + .../services/excel_query_service.py | 557 + src/mes_dashboard/services/filter_cache.py | 397 + .../services/job_query_service.py | 386 + src/mes_dashboard/services/page_registry.py | 143 + .../services/realtime_equipment_cache.py | 753 + src/mes_dashboard/services/resource_cache.py | 858 ++ .../services/resource_history_service.py | 953 ++ .../services/resource_service.py | 599 + src/mes_dashboard/services/wip_service.py | 2535 ++++ src/mes_dashboard/sql/__init__.py | 90 + src/mes_dashboard/sql/builder.py | 263 + src/mes_dashboard/sql/dashboard/heatmap.sql | 31 + src/mes_dashboard/sql/dashboard/kpi.sql | 26 + .../sql/dashboard/kpi_standalone.sql | 64 + src/mes_dashboard/sql/dashboard/ou_trend.sql | 29 + .../dashboard/resource_detail_with_job.sql | 101 + .../sql/dashboard/workcenter_cards.sql | 17 + src/mes_dashboard/sql/filters.py | 287 + src/mes_dashboard/sql/job_query/job_list.sql | 33 + .../sql/job_query/job_txn_detail.sql | 27 + .../sql/job_query/job_txn_export.sql | 35 + src/mes_dashboard/sql/loader.py | 66 + src/mes_dashboard/sql/resource/by_status.sql | 11 + .../sql/resource/by_workcenter.sql | 12 + src/mes_dashboard/sql/resource/detail.sql | 30 + .../sql/resource/distinct_statuses.sql | 19 + .../sql/resource/latest_status.sql | 52 + .../sql/resource/workcenter_status_matrix.sql | 33 + .../sql/resource_history/detail.sql | 27 + .../sql/resource_history/heatmap.sql | 27 + .../sql/resource_history/kpi.sql | 24 + .../sql/resource_history/trend.sql | 28 + src/mes_dashboard/sql/wip/detail.sql | 30 
+ src/mes_dashboard/sql/wip/matrix.sql | 18 + src/mes_dashboard/sql/wip/summary.sql | 48 + src/mes_dashboard/static/js/echarts.min.js | 45 + src/mes_dashboard/static/js/mes-api.js | 364 + src/mes_dashboard/static/js/toast.js | 240 + src/mes_dashboard/templates/403.html | 87 + src/mes_dashboard/templates/404.html | 81 + src/mes_dashboard/templates/500.html | 101 + src/mes_dashboard/templates/_base.html | 122 + src/mes_dashboard/templates/admin/pages.html | 296 + .../templates/admin/performance.html | 1207 ++ src/mes_dashboard/templates/excel_query.html | 1181 ++ src/mes_dashboard/templates/hold_detail.html | 1013 ++ src/mes_dashboard/templates/index.html | 589 + src/mes_dashboard/templates/job_query.html | 923 ++ src/mes_dashboard/templates/login.html | 150 + src/mes_dashboard/templates/portal.html | 629 + .../templates/resource_history.html | 1531 ++ .../templates/resource_status.html | 1669 +++ src/mes_dashboard/templates/wip_detail.html | 1794 +++ src/mes_dashboard/templates/wip_overview.html | 1825 +++ tests/conftest.py | 77 + tests/e2e/conftest.py | 50 + tests/e2e/test_admin_auth_e2e.py | 350 + tests/e2e/test_cache_e2e.py | 281 + tests/e2e/test_global_connection.py | 362 + tests/e2e/test_realtime_equipment_e2e.py | 216 + tests/e2e/test_resource_cache_e2e.py | 250 + tests/e2e/test_resource_history_e2e.py | 319 + tests/fixtures/frontend_compute_parity.json | 46 + tests/stress/__init__.py | 2 + tests/stress/conftest.py | 118 + tests/stress/test_api_load.py | 327 + tests/stress/test_frontend_stress.py | 367 + tests/test_api_integration.py | 288 + tests/test_app_factory.py | 56 + tests/test_auth_integration.py | 301 + tests/test_auth_service.py | 159 + tests/test_cache.py | 313 + tests/test_cache_integration.py | 400 + tests/test_cache_updater.py | 222 + tests/test_circuit_breaker.py | 223 + tests/test_common_filters.py | 186 + tests/test_degraded_responses.py | 86 + tests/test_excel_query_e2e.py | 506 + tests/test_excel_query_routes.py | 474 + 
tests/test_excel_query_service.py | 261 + tests/test_field_contracts.py | 127 + tests/test_frontend_compute_parity.py | 74 + tests/test_health_routes.py | 80 + tests/test_hold_routes.py | 317 + tests/test_job_query_routes.py | 320 + tests/test_job_query_service.py | 170 + tests/test_log_store.py | 277 + tests/test_metrics.py | 203 + tests/test_page_registry.py | 194 + tests/test_performance_integration.py | 307 + tests/test_permissions.py | 102 + tests/test_realtime_equipment_cache.py | 494 + tests/test_redis_client.py | 162 + tests/test_resilience.py | 58 + tests/test_resource_cache.py | 579 + tests/test_resource_history_routes.py | 297 + tests/test_resource_history_service.py | 446 + tests/test_resource_service.py | 396 + tests/test_sql_builder.py | 238 + tests/test_sql_loader.py | 109 + tests/test_template_integration.py | 249 + tests/test_wip_routes.py | 337 + tests/test_wip_service.py | 767 + tests/test_workcenter_mapping.py | 349 + tools/generate_documentation.py | 344 + tools/query_table_schema.py | 261 + tools/test_oracle_connection.py | 152 + tools/update_oracle_authorized_objects.py | 194 + 264 files changed, 75752 insertions(+) create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 README.md create mode 100644 README.mdj create mode 100644 data/page_status.json create mode 100644 data/table_schema_info.json create mode 100644 deploy/mes-dashboard-watchdog.service create mode 100644 deploy/mes-dashboard.service create mode 100644 docs/DW_PJ_LOT_V_POWERBI_SQL.txt create mode 100644 docs/MES_Core_Tables_Analysis_Report.md create mode 100644 docs/MES_Database_Reference.md create mode 100644 docs/Oracle_Authorized_Objects.md create mode 100644 docs/architecture_findings.md create mode 100644 docs/environment_gaps_and_mitigation.md create mode 100644 docs/frontend_compute_shift_plan.md create mode 100644 docs/migration_gates_and_runbook.md create mode 100644 docs/migration_validation_evidence.md create mode 100644 
docs/page_architecture_map.md create mode 100644 docs/root_cutover_inventory.md create mode 100644 docs/root_refactor_validation_notes.md create mode 100644 environment.yml create mode 100644 frontend/.gitignore create mode 100644 frontend/package-lock.json create mode 100644 frontend/package.json create mode 100644 frontend/src/core/api.js create mode 100644 frontend/src/core/autocomplete.js create mode 100644 frontend/src/core/compute.js create mode 100644 frontend/src/core/field-contracts.js create mode 100644 frontend/src/core/table-tree.js create mode 100644 frontend/src/excel-query/main.js create mode 100644 frontend/src/hold-detail/main.js create mode 100644 frontend/src/job-query/main.js create mode 100644 frontend/src/portal/main.js create mode 100644 frontend/src/portal/portal.css create mode 100644 frontend/src/resource-history/main.js create mode 100644 frontend/src/resource-status/main.js create mode 100644 frontend/src/tables/main.js create mode 100644 frontend/src/wip-detail/main.js create mode 100644 frontend/src/wip-overview/main.js create mode 100644 frontend/tests/autocomplete.test.js create mode 100644 frontend/vite.config.js create mode 100644 frontend_design/Hold_detail.pen create mode 100644 frontend_design/WIP_main.pen create mode 100644 gunicorn.conf.py create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/.openspec.yaml create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/design.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/proposal.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/cache-observability-hardening/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/field-contract-governance/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/frontend-compute-shift/spec.md create 
mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/full-vite-page-modularization/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/migration-gates-and-rollout/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/root-cutover-finalization/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/tasks.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/.openspec.yaml create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/design.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/proposal.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/field-name-consistency/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/layered-route-cache/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/portal-drawer-navigation/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/root-project-restructure/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/vite-single-port-integration/spec.md create mode 100644 openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/tasks.md create mode 100644 openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/.openspec.yaml create mode 100644 openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/design.md create mode 100644 openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/proposal.md create mode 100644 openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/field-contract-governance/spec.md create mode 100644 
openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/full-vite-page-modularization/spec.md create mode 100644 openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/report-effects-parity/spec.md create mode 100644 openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/tasks.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/.openspec.yaml create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/design.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/proposal.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/cache-observability-hardening/spec.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/conda-systemd-runtime-alignment/spec.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/frontend-compute-shift/spec.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/full-vite-page-modularization/spec.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/layered-route-cache/spec.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/migration-gates-and-rollout/spec.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/runtime-resilience-recovery/spec.md create mode 100644 openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/tasks.md create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/.openspec.yaml create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/design.md create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/proposal.md create mode 100644 
openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/field-contract-governance/spec.md create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/frontend-compute-shift/spec.md create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/full-vite-page-modularization/spec.md create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/report-effects-parity/spec.md create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/runtime-resilience-recovery/spec.md create mode 100644 openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/tasks.md create mode 100644 openspec/changes/archive/2026-02-08-post-migration-resilience-governance/.openspec.yaml create mode 100644 openspec/changes/archive/2026-02-08-post-migration-resilience-governance/design.md create mode 100644 openspec/changes/archive/2026-02-08-post-migration-resilience-governance/proposal.md create mode 100644 openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/full-vite-page-modularization/spec.md create mode 100644 openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/migration-gates-and-rollout/spec.md create mode 100644 openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/runtime-resilience-recovery/spec.md create mode 100644 openspec/changes/archive/2026-02-08-post-migration-resilience-governance/tasks.md create mode 100644 openspec/config.yaml create mode 100644 openspec/specs/cache-observability-hardening/spec.md create mode 100644 openspec/specs/conda-systemd-runtime-alignment/spec.md create mode 100644 openspec/specs/field-contract-governance/spec.md create mode 100644 openspec/specs/field-name-consistency/spec.md create mode 100644 openspec/specs/frontend-compute-shift/spec.md create mode 100644 
openspec/specs/full-vite-page-modularization/spec.md create mode 100644 openspec/specs/layered-route-cache/spec.md create mode 100644 openspec/specs/migration-gates-and-rollout/spec.md create mode 100644 openspec/specs/portal-drawer-navigation/spec.md create mode 100644 openspec/specs/report-effects-parity/spec.md create mode 100644 openspec/specs/root-cutover-finalization/spec.md create mode 100644 openspec/specs/root-project-restructure/spec.md create mode 100644 openspec/specs/runtime-resilience-recovery/spec.md create mode 100644 openspec/specs/vite-single-port-integration/spec.md create mode 100644 pyproject.toml create mode 100644 pytest.ini create mode 100644 requirements.txt create mode 100644 scripts/deploy.sh create mode 100644 scripts/run_stress_tests.py create mode 100644 scripts/start_server.sh create mode 100644 scripts/worker_watchdog.py create mode 100644 shared/field_contracts.json create mode 100644 src/mes_dashboard/__init__.py create mode 100644 src/mes_dashboard/__main__.py create mode 100644 src/mes_dashboard/app.py create mode 100644 src/mes_dashboard/config/__init__.py create mode 100644 src/mes_dashboard/config/constants.py create mode 100644 src/mes_dashboard/config/database.py create mode 100644 src/mes_dashboard/config/field_contracts.py create mode 100644 src/mes_dashboard/config/settings.py create mode 100644 src/mes_dashboard/config/tables.py create mode 100644 src/mes_dashboard/config/workcenter_groups.py create mode 100644 src/mes_dashboard/core/__init__.py create mode 100644 src/mes_dashboard/core/cache.py create mode 100644 src/mes_dashboard/core/cache_updater.py create mode 100644 src/mes_dashboard/core/circuit_breaker.py create mode 100644 src/mes_dashboard/core/database.py create mode 100644 src/mes_dashboard/core/log_store.py create mode 100644 src/mes_dashboard/core/metrics.py create mode 100644 src/mes_dashboard/core/permissions.py create mode 100644 src/mes_dashboard/core/redis_client.py create mode 100644 
src/mes_dashboard/core/resilience.py create mode 100644 src/mes_dashboard/core/response.py create mode 100644 src/mes_dashboard/core/utils.py create mode 100644 src/mes_dashboard/routes/__init__.py create mode 100644 src/mes_dashboard/routes/admin_routes.py create mode 100644 src/mes_dashboard/routes/auth_routes.py create mode 100644 src/mes_dashboard/routes/dashboard_routes.py create mode 100644 src/mes_dashboard/routes/excel_query_routes.py create mode 100644 src/mes_dashboard/routes/health_routes.py create mode 100644 src/mes_dashboard/routes/hold_routes.py create mode 100644 src/mes_dashboard/routes/job_query_routes.py create mode 100644 src/mes_dashboard/routes/resource_history_routes.py create mode 100644 src/mes_dashboard/routes/resource_routes.py create mode 100644 src/mes_dashboard/routes/wip_routes.py create mode 100644 src/mes_dashboard/services/__init__.py create mode 100644 src/mes_dashboard/services/auth_service.py create mode 100644 src/mes_dashboard/services/dashboard_service.py create mode 100644 src/mes_dashboard/services/excel_query_service.py create mode 100644 src/mes_dashboard/services/filter_cache.py create mode 100644 src/mes_dashboard/services/job_query_service.py create mode 100644 src/mes_dashboard/services/page_registry.py create mode 100644 src/mes_dashboard/services/realtime_equipment_cache.py create mode 100644 src/mes_dashboard/services/resource_cache.py create mode 100644 src/mes_dashboard/services/resource_history_service.py create mode 100644 src/mes_dashboard/services/resource_service.py create mode 100644 src/mes_dashboard/services/wip_service.py create mode 100644 src/mes_dashboard/sql/__init__.py create mode 100644 src/mes_dashboard/sql/builder.py create mode 100644 src/mes_dashboard/sql/dashboard/heatmap.sql create mode 100644 src/mes_dashboard/sql/dashboard/kpi.sql create mode 100644 src/mes_dashboard/sql/dashboard/kpi_standalone.sql create mode 100644 src/mes_dashboard/sql/dashboard/ou_trend.sql create mode 100644 
src/mes_dashboard/sql/dashboard/resource_detail_with_job.sql create mode 100644 src/mes_dashboard/sql/dashboard/workcenter_cards.sql create mode 100644 src/mes_dashboard/sql/filters.py create mode 100644 src/mes_dashboard/sql/job_query/job_list.sql create mode 100644 src/mes_dashboard/sql/job_query/job_txn_detail.sql create mode 100644 src/mes_dashboard/sql/job_query/job_txn_export.sql create mode 100644 src/mes_dashboard/sql/loader.py create mode 100644 src/mes_dashboard/sql/resource/by_status.sql create mode 100644 src/mes_dashboard/sql/resource/by_workcenter.sql create mode 100644 src/mes_dashboard/sql/resource/detail.sql create mode 100644 src/mes_dashboard/sql/resource/distinct_statuses.sql create mode 100644 src/mes_dashboard/sql/resource/latest_status.sql create mode 100644 src/mes_dashboard/sql/resource/workcenter_status_matrix.sql create mode 100644 src/mes_dashboard/sql/resource_history/detail.sql create mode 100644 src/mes_dashboard/sql/resource_history/heatmap.sql create mode 100644 src/mes_dashboard/sql/resource_history/kpi.sql create mode 100644 src/mes_dashboard/sql/resource_history/trend.sql create mode 100644 src/mes_dashboard/sql/wip/detail.sql create mode 100644 src/mes_dashboard/sql/wip/matrix.sql create mode 100644 src/mes_dashboard/sql/wip/summary.sql create mode 100644 src/mes_dashboard/static/js/echarts.min.js create mode 100644 src/mes_dashboard/static/js/mes-api.js create mode 100644 src/mes_dashboard/static/js/toast.js create mode 100644 src/mes_dashboard/templates/403.html create mode 100644 src/mes_dashboard/templates/404.html create mode 100644 src/mes_dashboard/templates/500.html create mode 100644 src/mes_dashboard/templates/_base.html create mode 100644 src/mes_dashboard/templates/admin/pages.html create mode 100644 src/mes_dashboard/templates/admin/performance.html create mode 100644 src/mes_dashboard/templates/excel_query.html create mode 100644 src/mes_dashboard/templates/hold_detail.html create mode 100644 
src/mes_dashboard/templates/index.html create mode 100644 src/mes_dashboard/templates/job_query.html create mode 100644 src/mes_dashboard/templates/login.html create mode 100644 src/mes_dashboard/templates/portal.html create mode 100644 src/mes_dashboard/templates/resource_history.html create mode 100644 src/mes_dashboard/templates/resource_status.html create mode 100644 src/mes_dashboard/templates/wip_detail.html create mode 100644 src/mes_dashboard/templates/wip_overview.html create mode 100644 tests/conftest.py create mode 100644 tests/e2e/conftest.py create mode 100644 tests/e2e/test_admin_auth_e2e.py create mode 100644 tests/e2e/test_cache_e2e.py create mode 100644 tests/e2e/test_global_connection.py create mode 100644 tests/e2e/test_realtime_equipment_e2e.py create mode 100644 tests/e2e/test_resource_cache_e2e.py create mode 100644 tests/e2e/test_resource_history_e2e.py create mode 100644 tests/fixtures/frontend_compute_parity.json create mode 100644 tests/stress/__init__.py create mode 100644 tests/stress/conftest.py create mode 100644 tests/stress/test_api_load.py create mode 100644 tests/stress/test_frontend_stress.py create mode 100644 tests/test_api_integration.py create mode 100644 tests/test_app_factory.py create mode 100644 tests/test_auth_integration.py create mode 100644 tests/test_auth_service.py create mode 100644 tests/test_cache.py create mode 100644 tests/test_cache_integration.py create mode 100644 tests/test_cache_updater.py create mode 100644 tests/test_circuit_breaker.py create mode 100644 tests/test_common_filters.py create mode 100644 tests/test_degraded_responses.py create mode 100644 tests/test_excel_query_e2e.py create mode 100644 tests/test_excel_query_routes.py create mode 100644 tests/test_excel_query_service.py create mode 100644 tests/test_field_contracts.py create mode 100644 tests/test_frontend_compute_parity.py create mode 100644 tests/test_health_routes.py create mode 100644 tests/test_hold_routes.py create mode 100644 
tests/test_job_query_routes.py create mode 100644 tests/test_job_query_service.py create mode 100644 tests/test_log_store.py create mode 100644 tests/test_metrics.py create mode 100644 tests/test_page_registry.py create mode 100644 tests/test_performance_integration.py create mode 100644 tests/test_permissions.py create mode 100644 tests/test_realtime_equipment_cache.py create mode 100644 tests/test_redis_client.py create mode 100644 tests/test_resilience.py create mode 100644 tests/test_resource_cache.py create mode 100644 tests/test_resource_history_routes.py create mode 100644 tests/test_resource_history_service.py create mode 100644 tests/test_resource_service.py create mode 100644 tests/test_sql_builder.py create mode 100644 tests/test_sql_loader.py create mode 100644 tests/test_template_integration.py create mode 100644 tests/test_wip_routes.py create mode 100644 tests/test_wip_service.py create mode 100644 tests/test_workcenter_mapping.py create mode 100644 tools/generate_documentation.py create mode 100644 tools/query_table_schema.py create mode 100644 tools/test_oracle_connection.py create mode 100644 tools/update_oracle_authorized_objects.py diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..0541199 --- /dev/null +++ b/.env.example @@ -0,0 +1,181 @@ +# ============================================================ +# MES Dashboard Environment Configuration +# ============================================================ +# Copy this file to .env and fill in your actual values: +# cp .env.example .env +# nano .env +# ============================================================ + +# ============================================================ +# Database Configuration (REQUIRED) +# ============================================================ +# Oracle Database connection settings +DB_HOST=your_database_host +DB_PORT=1521 +DB_SERVICE=your_service_name +DB_USER=your_username +DB_PASSWORD=your_password + +# Database Pool Settings 
(optional, has defaults) +# Adjust based on expected load +DB_POOL_SIZE=5 # Default: 5 (dev: 2, prod: 10) +DB_MAX_OVERFLOW=10 # Default: 10 (dev: 3, prod: 20) +DB_POOL_TIMEOUT=30 # Seconds to wait when pool is exhausted +DB_POOL_RECYCLE=1800 # Recycle connection after N seconds +DB_TCP_CONNECT_TIMEOUT=10 +DB_CONNECT_RETRY_COUNT=1 +DB_CONNECT_RETRY_DELAY=1.0 +DB_CALL_TIMEOUT_MS=55000 # Must stay below worker timeout + +# ============================================================ +# Flask Configuration +# ============================================================ +# Environment mode: development | production | testing +FLASK_ENV=development + +# Debug mode: 0 for production, 1 for development +FLASK_DEBUG=0 + +# Session Security (REQUIRED for production!) +# Generate with: python -c "import secrets; print(secrets.token_hex(32))" +SECRET_KEY=your-secret-key-change-in-production + +# Session timeout in seconds (default: 28800 = 8 hours) +SESSION_LIFETIME=28800 + +# ============================================================ +# Authentication Configuration +# ============================================================ +# LDAP API endpoint for user authentication +LDAP_API_URL=https://your-ldap-api-endpoint.example.com + +# Admin email addresses (comma-separated for multiple) +ADMIN_EMAILS=admin@example.com + +# Local Authentication (for development/testing) +# When enabled, uses local credentials instead of LDAP +# Set LOCAL_AUTH_ENABLED=true to bypass LDAP authentication +LOCAL_AUTH_ENABLED=false +LOCAL_AUTH_USERNAME= +LOCAL_AUTH_PASSWORD= + +# ============================================================ +# Gunicorn Configuration +# ============================================================ +# Server bind address and port +GUNICORN_BIND=0.0.0.0:8080 + +# Number of worker processes (recommend: 2 * CPU cores + 1) +GUNICORN_WORKERS=2 + +# Threads per worker +GUNICORN_THREADS=4 + +# ============================================================ +# Redis Configuration 
(for WIP cache) +# ============================================================ +# Redis connection URL +REDIS_URL=redis://localhost:6379/0 + +# Enable/disable Redis cache (set to false to fallback to Oracle) +REDIS_ENABLED=true + +# Redis key prefix (to separate from other applications) +REDIS_KEY_PREFIX=mes_wip + +# Cache check interval in seconds (default: 600 = 10 minutes) +CACHE_CHECK_INTERVAL=600 + +# ============================================================ +# Resource Cache Configuration +# ============================================================ +# Enable/disable Resource cache (DW_MES_RESOURCE) +# When disabled, queries will fallback to Oracle directly +RESOURCE_CACHE_ENABLED=true + +# Resource cache sync interval in seconds (default: 14400 = 4 hours) +# The cache will check for updates at this interval using MAX(LASTCHANGEDATE) +RESOURCE_SYNC_INTERVAL=14400 + +# ============================================================ +# Circuit Breaker Configuration +# ============================================================ +# Enable/disable circuit breaker for database protection +CIRCUIT_BREAKER_ENABLED=true + +# Minimum failures before circuit can open +CIRCUIT_BREAKER_FAILURE_THRESHOLD=5 + +# Failure rate threshold (0.0 - 1.0) +CIRCUIT_BREAKER_FAILURE_RATE=0.5 + +# Seconds to wait in OPEN state before trying HALF_OPEN +CIRCUIT_BREAKER_RECOVERY_TIMEOUT=30 + +# Sliding window size for counting successes/failures +CIRCUIT_BREAKER_WINDOW_SIZE=10 + +# ============================================================ +# Performance Metrics Configuration +# ============================================================ +# Slow query threshold in seconds (default: 5.0) +# Note: Real-time Oracle views may take 2-5s per query, set threshold accordingly +SLOW_QUERY_THRESHOLD=5.0 + +# ============================================================ +# SQLite Log Store Configuration +# ============================================================ +# Enable/disable SQLite log 
store for admin dashboard +LOG_STORE_ENABLED=true + +# SQLite database path +LOG_SQLITE_PATH=logs/admin_logs.sqlite + +# Log retention period in days (default: 7) +LOG_SQLITE_RETENTION_DAYS=7 + +# Maximum log rows (default: 100000) +LOG_SQLITE_MAX_ROWS=100000 + +# ============================================================ +# Worker Watchdog Configuration +# ============================================================ +# Runtime directory for restart flag/pid/state files +WATCHDOG_RUNTIME_DIR=./tmp + +# Path to restart flag file (watchdog monitors this file) +WATCHDOG_RESTART_FLAG=./tmp/mes_dashboard_restart.flag + +# Gunicorn PID file path (must match start script / systemd config) +WATCHDOG_PID_FILE=./tmp/gunicorn.pid + +# Path to restart state file (stores last restart info) +WATCHDOG_STATE_FILE=./tmp/mes_dashboard_restart_state.json + +# Max entries persisted in restart history (bounded to avoid state growth) +WATCHDOG_RESTART_HISTORY_MAX=50 + +# Cooldown period between restart requests in seconds (default: 60) +WORKER_RESTART_COOLDOWN=60 + +# ============================================================ +# Runtime Resilience Diagnostics Thresholds +# ============================================================ +# Alert window for sustained degraded state (seconds) +RESILIENCE_DEGRADED_ALERT_SECONDS=300 + +# Pool saturation warning / critical levels +RESILIENCE_POOL_SATURATION_WARNING=0.90 +RESILIENCE_POOL_SATURATION_CRITICAL=1.0 + +# Restart churn threshold: N restarts within window triggers churn warning +RESILIENCE_RESTART_CHURN_WINDOW_SECONDS=600 +RESILIENCE_RESTART_CHURN_THRESHOLD=3 + +# ============================================================ +# CORS Configuration +# ============================================================ +# Comma-separated list of allowed origins for CORS +# Example: https://example.com,https://app.example.com +# Set to * for development (not recommended for production) +CORS_ALLOWED_ORIGINS= diff --git a/.gitignore b/.gitignore 
new file mode 100644 index 0000000..b59ffd7 --- /dev/null +++ b/.gitignore @@ -0,0 +1,56 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python + +# Virtual environments +venv/ +ENV/ +env/ +.venv/ + +# Package build artifacts +*.egg-info/ +*.egg +dist/ +build/ +*.whl +frontend/node_modules/ + +# IDE +.idea/ +.vscode/ +*.swp +*.swo +*.sublime-* + +# OS +.DS_Store +Thumbs.db +nul + +# Logs +*.log +logs/ + +# Local config (credentials) +.env + +# AI/LLM tools +.claude/ +.codex/ +CLAUDE.md + +# Test artifacts +.pytest_cache/ +.coverage +htmlcov/ +.tox/ + +# Jupyter +.ipynb_checkpoints/ + +# Note: openspec/ is tracked (not ignored) +tmp/ diff --git a/README.md b/README.md new file mode 100644 index 0000000..b699d23 --- /dev/null +++ b/README.md @@ -0,0 +1,658 @@ +# MES Dashboard 報表系統 + +基於 Flask + Gunicorn + Redis + Vite 的 MES 數據報表查詢與可視化系統 + +> 專案主執行根目錄:`DashBoard_vite/` +> 目前已移除舊版 `DashBoard/` 代碼,僅保留新架構。 + +--- + +## 專案狀態 + +| 功能 | 狀態 | +|------|------| +| WIP 即時概況 | ✅ 已完成 | +| WIP 明細查詢 | ✅ 已完成 | +| Hold 狀態分析 | ✅ 已完成 | +| 數據表查詢工具 | ✅ 已完成 | +| 設備狀態監控 | ✅ 已完成 | +| 設備歷史查詢 | ✅ 已完成 | +| 管理員認證系統 | ✅ 已完成 | +| 頁面狀態管理 | ✅ 已完成 | +| Redis 快取系統 | ✅ 已完成 | +| SQL 查詢安全架構 | ✅ 已完成 | +| 效能監控儀表板 | ✅ 已完成 | +| 熔斷器保護機制 | ✅ 已完成 | +| Worker 重啟控制 | ✅ 已完成 | +| Runtime 韌性診斷(threshold/churn/recommendation) | ✅ 已完成 | +| WIP 共用 autocomplete core 模組 | ✅ 已完成 | +| 前端核心模組測試(Node test) | ✅ 已完成 | +| 部署自動化 | ✅ 已完成 | + +--- + +## 遷移與驗收文件 + +- Root cutover 盤點:`docs/root_cutover_inventory.md` +- 頁面架構與抽屜分類:`docs/page_architecture_map.md` +- 前端計算前移與 parity 規則:`docs/frontend_compute_shift_plan.md` +- Cutover gates / rollout / rollback:`docs/migration_gates_and_runbook.md` +- 環境依賴缺口與對策:`docs/environment_gaps_and_mitigation.md` + +--- + +## 最新架構重點 + +1. 單一 port 契約維持不變 +- Flask + Gunicorn + Vite dist 由同一服務提供(`GUNICORN_BIND`),前後端同源。 + +2. 
Runtime 韌性採「降級 + 可操作建議」 +- `/health`、`/health/deep`、`/admin/api/system-status`、`/admin/api/worker/status` 皆提供: + - 門檻(thresholds) + - 重啟 churn 摘要 + - recovery recommendation(值班建議動作) + +3. Watchdog 維持手動觸發重啟模型 +- 仍以 admin API 觸發 reload,不預設啟用自動重啟風暴風險。 +- state 檔新增 bounded restart history,方便追蹤 churn。 + +4. 前端治理:WIP autocomplete/filter 共用化 +- `frontend/src/core/autocomplete.js` 作為 WIP overview/detail 共用邏輯來源。 +- 維持既有頁面流程與 drill-down 語意,不變更操作習慣。 + +--- + +## 快速開始 + +### 首次部署 + +```bash +# 1. 執行部署腳本 +./scripts/deploy.sh + +# 2. 編輯環境設定 +nano .env + +# 3. 啟動服務 +./scripts/start_server.sh start +``` + +### 日常操作 + +```bash +# 啟動服務(背景執行) +./scripts/start_server.sh start + +# 停止服務 +./scripts/start_server.sh stop + +# 重啟服務 +./scripts/start_server.sh restart + +# 查看狀態 +./scripts/start_server.sh status + +# 查看日誌 +./scripts/start_server.sh logs follow +``` + +訪問網址: **http://localhost:8080** (可在 .env 中配置) + +--- + +## 部署指南 + +### 環境需求 + +- Python 3.11+ +- Conda (Miniconda/Anaconda) +- Node.js 22+(建議由 Conda `environment.yml` 管理) +- Oracle Database 連線 +- Redis Server 7.x+ (設備狀態快取) + +### 部署步驟 + +#### 1. 自動部署(推薦) + +```bash +./scripts/deploy.sh +``` + +此腳本會自動: +- 檢查 Conda 環境 +- 建立 `mes-dashboard` 虛擬環境 +- 安裝依賴套件 +- 複製 `.env.example` 到 `.env` +- 驗證資料庫連線 + +#### 2. 
手動部署 + +```bash +# 建立 Conda 環境 +conda create -n mes-dashboard python=3.11 -y +conda activate mes-dashboard + +# 安裝依賴 +pip install -r requirements.txt + +# 安裝前端依賴並建置(Vite) +npm --prefix frontend install +npm --prefix frontend test +npm --prefix frontend run build + +# 設定環境變數 +cp .env.example .env +nano .env # 編輯資料庫連線等設定 + +# 啟動服務 +./scripts/start_server.sh start +``` + +### 環境變數設定 + +編輯 `.env` 檔案: + +```bash +# 資料庫設定(必填) +DB_HOST=your_database_host +DB_PORT=1521 +DB_SERVICE=your_service_name +DB_USER=your_username +DB_PASSWORD=your_password + +# Flask 設定 +FLASK_ENV=production # production | development +SECRET_KEY=your-secret-key # 生產環境請更換 + +# Gunicorn 設定 +GUNICORN_BIND=0.0.0.0:8080 # 服務監聽位址 +GUNICORN_WORKERS=2 # Worker 數量 +GUNICORN_THREADS=4 # 每個 Worker 的執行緒數 + +# DB 韌性設定 +DB_POOL_SIZE=10 +DB_MAX_OVERFLOW=20 +DB_POOL_TIMEOUT=30 +DB_POOL_RECYCLE=1800 +DB_CALL_TIMEOUT_MS=55000 + +# Circuit Breaker +CIRCUIT_BREAKER_ENABLED=true +CIRCUIT_BREAKER_FAILURE_THRESHOLD=5 +CIRCUIT_BREAKER_FAILURE_RATE=0.5 +CIRCUIT_BREAKER_RECOVERY_TIMEOUT=30 + +# Redis 設定 +REDIS_URL=redis://localhost:6379/0 +REDIS_ENABLED=true + +# Watchdog runtime contract +WATCHDOG_RUNTIME_DIR=./tmp +WATCHDOG_RESTART_FLAG=./tmp/mes_dashboard_restart.flag +WATCHDOG_PID_FILE=./tmp/gunicorn.pid +WATCHDOG_STATE_FILE=./tmp/mes_dashboard_restart_state.json +WATCHDOG_RESTART_HISTORY_MAX=50 + +# Runtime resilience thresholds +RESILIENCE_DEGRADED_ALERT_SECONDS=300 +RESILIENCE_POOL_SATURATION_WARNING=0.90 +RESILIENCE_POOL_SATURATION_CRITICAL=1.0 +RESILIENCE_RESTART_CHURN_WINDOW_SECONDS=600 +RESILIENCE_RESTART_CHURN_THRESHOLD=3 + +# 管理員設定 +ADMIN_EMAILS=admin@example.com # 管理員郵件(逗號分隔) +``` + +### 生產環境注意事項 + +1. **SECRET_KEY**: 必須設定為隨機字串 + ```bash + python -c "import secrets; print(secrets.token_hex(32))" + ``` + +2. **FLASK_ENV**: 設定為 `production` + +3. **防火牆**: 開放服務端口(預設 8080) + +### Conda + systemd 服務配置 + +建議在生產環境使用同一份 conda runtime contract 啟動 App 與 Watchdog: + +```bash +# 1. 
複製 systemd 服務檔案 +sudo cp deploy/mes-dashboard.service /etc/systemd/system/ +sudo cp deploy/mes-dashboard-watchdog.service /etc/systemd/system/ + +# 2. 準備環境設定檔 +sudo mkdir -p /etc/mes-dashboard +sudo cp .env /etc/mes-dashboard/mes-dashboard.env + +# 3. 重新載入 systemd +sudo systemctl daemon-reload + +# 4. 啟動並設定開機自動啟動 +sudo systemctl enable --now mes-dashboard mes-dashboard-watchdog + +# 5. 查看狀態 +sudo systemctl status mes-dashboard +sudo systemctl status mes-dashboard-watchdog +``` + +### Rollback 步驟 + +如需回滾到先前版本: + +```bash +# 1. 停止服務 +./scripts/start_server.sh stop +sudo systemctl stop mes-dashboard mes-dashboard-watchdog + +# 2. 回滾程式碼(請以實際的版本標籤或 commit 取代) +git checkout <previous-version-tag> + +# 3. 重新安裝依賴(如有變更) +pip install -r requirements.txt + +# 4. 清理新版本資料(可選) +rm -f logs/admin_logs.sqlite # 清理 SQLite 日誌 + +# 5. 重啟服務 +./scripts/start_server.sh start +sudo systemctl start mes-dashboard mes-dashboard-watchdog +``` + +--- + +## 使用者操作指南 + +本節提供一般使用者的操作說明。 + +### 存取系統 + +1. 開啟瀏覽器,輸入系統網址(預設為 `http://localhost:8080`) +2. 進入 Portal 首頁,可透過上方 Tab 切換各功能模組 + +### 基本操作 + +#### WIP 即時概況 +- 顯示生產線 WIP(在製品)的即時統計 +- 可透過下拉選單篩選特定工作中心或產品線 +- 點擊統計卡片可展開查看詳細明細 +- 支援匯出 Excel 報表 + +#### WIP 明細查詢 +1. 選擇篩選條件(工作中心、Package、Hold 狀態、製程站點) +2. 點擊「查詢」按鈕執行查詢 +3. 查詢結果顯示於下方表格 +4. 點擊「匯出 Excel」下載報表 + +#### 設備狀態監控 +- 顯示所有設備的即時狀態(PRD/SBY/UDT/SDT/EGT/NST) +- 使用階層篩選功能: + - **生產設備**:僅顯示列入生產統計的設備 + - **重點設備**:僅顯示標記為重點監控的設備 + - **監控設備**:僅顯示需特別監控的設備 +- 設備狀態每 30 秒自動更新 + +#### 設備歷史查詢 +1. 選擇查詢日期範圍 +2. 可選擇特定設備或工作中心 +3. 查看歷史趨勢圖表和稼動率熱力圖 +4. 支援 CSV 匯出 + +### 管理員登入 + +1. 點擊右上角「登入」按鈕 +2. 輸入工號和密碼(使用 LDAP 認證) +3. 登入後可存取開發中功能頁面 +4. 
管理員可使用效能監控儀表板(`/admin/performance`) + +### 常見問題 + +**Q: 頁面顯示「資料載入中」很久沒反應?** +A: 請檢查網路連線,或重新整理頁面。如持續發生請通知系統管理員。 + +**Q: 查詢結果與預期不符?** +A: 請確認篩選條件是否正確設定。資料來源為 MES 系統,約有 30 秒延遲。 + +**Q: 無法匯出 Excel?** +A: 請確認瀏覽器允許下載檔案,並檢查查詢結果是否有資料。 + +--- + +## 功能說明 + +### Portal 入口頁面 + +透過 Tab 切換各功能模組: +- WIP 即時概況 +- WIP 明細查詢 +- Hold 狀態分析 +- 設備狀態監控 +- 設備歷史查詢 +- 數據表查詢工具 +- 抽屜分組導覽(報表類/查詢類/開發工具類) + +### WIP 即時概況 + +- 總覽統計(總 LOT 數、總數量、總片數) +- 按 SPEC 和 WORKCENTER 統計 +- 按產品線統計(匯總 + 明細) +- Hold 狀態分類(品質異常/非品質異常) +- 柏拉圖視覺化圖表 + +### WIP 明細查詢 + +- 依工作中心篩選 +- 依 Package 篩選 +- 依 Hold 狀態篩選 +- 依製程站點篩選 +- 支援 Excel 匯出 + +### Hold 狀態分析 + +- Hold 批次總覽 +- 按 Hold 原因分類 +- Hold 明細查詢 +- 品質異常分類統計 + +### 設備狀態監控 + +- 即時設備狀態總覽(PRD/SBY/UDT/SDT/EGT/NST) +- 按工作中心群組統計 +- 設備稼動率(OU%)與運轉率(RUN%) +- 階層篩選(廠區/產線/重點設備/監控設備) +- Redis 快取自動更新(30 秒間隔) + +### 設備歷史查詢 + +- 歷史狀態趨勢分析 +- 稼動率熱力圖視覺化 +- 設備狀態明細查詢 +- 支援 CSV 匯出 + +### 管理員功能 + +- LDAP 認證登入(支援本地測試模式) +- 頁面狀態管理(released/dev) +- Dev 頁面僅管理員可見 + +### 效能監控儀表板 + +管理員專用的系統監控介面(`/admin/performance`): + +- **系統狀態總覽**:Database、Redis、Circuit Breaker、Worker 狀態 +- **查詢效能指標**:P50/P95/P99 延遲、慢查詢統計、延遲分布圖 +- **系統日誌檢視**:即時日誌查詢、等級篩選、關鍵字搜尋 +- **日誌管理**:儲存統計、手動清理功能 +- **Worker 控制**:優雅重啟(透過 Watchdog 機制) +- 自動更新(30 秒間隔) + +### 熔斷器保護機制 + +Circuit Breaker 模式保護資料庫免於雪崩效應: + +- **CLOSED**:正常運作,請求通過 +- **OPEN**:失敗過多,請求立即拒絕 +- **HALF_OPEN**:測試恢復,允許有限請求 + +配置方式: +```bash +CIRCUIT_BREAKER_ENABLED=true +CIRCUIT_BREAKER_FAILURE_THRESHOLD=5 +CIRCUIT_BREAKER_FAILURE_RATE=0.5 +CIRCUIT_BREAKER_RECOVERY_TIMEOUT=30 +``` + +--- + +## 技術架構 + +### 後端技術棧 + +| 技術 | 版本 | 用途 | +|------|------|------| +| Python | 3.11+ | 程式語言 | +| Flask | 3.x | Web 框架 | +| Gunicorn | 23.x | WSGI 伺服器 | +| SQLAlchemy | 2.x | ORM | +| oracledb | 2.x | Oracle 驅動 | +| Redis | 7.x | 快取伺服器 | +| Pandas | 2.x | 資料處理 | + +### 前端技術棧 + +| 技術 | 用途 | +|------|------| +| Jinja2 | 模板引擎 | +| Vite 6 | 前端多頁模組打包 | +| ECharts | 圖表庫 | +| Vanilla JS Modules | 互動功能與頁面邏輯 | + +### 資料庫 + +- Oracle Database 19c Enterprise Edition +- 主機: 詳見 .env 檔案 (DB_HOST:DB_PORT) +- 
服務名: 詳見 .env 檔案 (DB_SERVICE) + +--- + +## 專案結構 + +``` +DashBoard_vite/ +├── src/mes_dashboard/ # 主程式 +│ ├── app.py # Flask 應用 +│ ├── config/ # 設定 +│ │ ├── settings.py # 環境設定 +│ │ ├── constants.py # 常數定義 +│ │ ├── field_contracts.py # UI/API/Export 欄位契約 +│ │ └── workcenter_groups.py # 工作中心群組設定 +│ ├── core/ # 核心模組 +│ │ ├── database.py # 資料庫連線 +│ │ ├── redis_client.py # Redis 客戶端 +│ │ ├── cache.py # 快取管理 +│ │ ├── cache_updater.py # 快取自動更新 +│ │ ├── circuit_breaker.py # 熔斷器 +│ │ ├── metrics.py # 效能指標收集 +│ │ ├── log_store.py # SQLite 日誌儲存 +│ │ ├── response.py # API 回應格式 +│ │ └── permissions.py # 權限管理 +│ ├── routes/ # 路由 +│ │ ├── wip_routes.py # WIP 相關 API +│ │ ├── resource_routes.py # 設備狀態 API +│ │ ├── dashboard_routes.py # 儀表板 API +│ │ └── ... # 其他路由 +│ ├── services/ # 服務層 +│ │ ├── wip_service.py # WIP 業務邏輯 +│ │ ├── resource_service.py # 設備狀態邏輯 +│ │ ├── resource_cache.py # 設備快取服務 +│ │ └── ... # 其他服務 +│ ├── sql/ # SQL 查詢管理 +│ │ ├── loader.py # SQLLoader (LRU 快取) +│ │ ├── builder.py # QueryBuilder (參數化) +│ │ ├── filters.py # CommonFilters +│ │ ├── dashboard/ # 儀表板查詢 +│ │ ├── resource/ # 設備查詢 +│ │ ├── wip/ # WIP 查詢 +│ │ └── resource_history/ # 設備歷史查詢 +│ └── templates/ # HTML 模板 +├── frontend/ # Vite 前端專案 +│ ├── src/core/ # 共用 API/欄位契約/計算 helper +│ ├── src/portal/ # Portal entry +│ ├── src/resource-status/ # 設備即時概況 entry +│ ├── src/resource-history/ # 設備歷史績效 entry +│ ├── src/job-query/ # 設備維修查詢 entry +│ ├── src/excel-query/ # Excel 批次查詢 entry +│ └── src/tables/ # 數據表查詢 entry +├── shared/ +│ └── field_contracts.json # 前後端共用欄位契約 +├── scripts/ # 腳本 +│ ├── deploy.sh # 部署腳本 +│ ├── start_server.sh # 服務管理腳本 +│ └── worker_watchdog.py # Worker 監控程式 +├── deploy/ # 部署設定 +│ ├── mes-dashboard.service # Gunicorn systemd 服務 (Conda) +│ └── mes-dashboard-watchdog.service # Watchdog systemd 服務 (Conda) +├── tests/ # 測試 +├── data/ # 資料檔案 +├── logs/ # 日誌 +├── docs/ # 文檔 +├── openspec/ # 變更管理 +├── .env.example # 環境變數範例 +├── requirements.txt # Python 依賴 +└── gunicorn.conf.py # Gunicorn 設定 +``` + 
+--- + +## 測試 + +```bash +# 執行所有測試 +pytest tests/ -v + +# 執行單元測試 +pytest tests/test_*.py -v --ignore=tests/e2e --ignore=tests/stress + +# 執行整合測試 +pytest tests/test_*_integration.py -v + +# 執行 E2E 測試 +pytest tests/e2e/ -v + +# 執行壓力測試 +pytest tests/stress/ -v +``` + +--- + +## 故障排除 + +### 服務無法啟動 + +1. 檢查 Conda 環境: + ```bash + conda activate mes-dashboard + ``` + +2. 檢查依賴: + ```bash + pip install -r requirements.txt + ``` + +3. 檢查日誌: + ```bash + ./scripts/start_server.sh logs error + ``` + +### 資料庫連線失敗 + +1. 確認 `.env` 中的資料庫設定正確 +2. 確認網路可連線到資料庫伺服器 +3. 確認資料庫帳號密碼正確 + +### Port 被占用 + +1. 檢查 port 使用狀況: + ```bash + lsof -i :8080 + ``` + +2. 修改 `.env` 中的 `GUNICORN_BIND` 設定 + +--- + +## 變更日誌 + +### 2026-02-08 + +- 完成並封存提案 `post-migration-resilience-governance` +- 新增 runtime 韌性診斷核心(thresholds / restart churn / recovery recommendation) +- health 與 admin API 新增可操作韌性欄位: + - `/health`、`/health/deep` + - `/admin/api/system-status`、`/admin/api/worker/status` +- watchdog restart state 支援 bounded history(`WATCHDOG_RESTART_HISTORY_MAX`) +- WIP overview/detail 抽離共用 autocomplete/filter 模組(`frontend/src/core/autocomplete.js`) +- 新增前端 Node 測試流程(`npm --prefix frontend test`) +- 更新 `README.mdj` 與 migration runbook 文件對齊 gate + +### 2026-02-07 + +- 完成並封存提案 `dashboard-vite-root-refactor` +- 完成並封存提案 `dashboard-vite-complete-migration` +- 完成並封存提案 `vite-jinja-report-parity-hardening` +- 完成並封存提案 `hold-detail-vite-hardening` +- 完成單一 port Vite 架構切換,根目錄成為唯一執行與部署主體 +- 完成 portal 抽屜分類導航、獨立頁與 drill-down 路徑對齊 +- 完成欄位契約治理與下載欄位一致性驗證 +- 完成 runtime resilience(pool/circuit/degraded contract)與 migration gates/runbook 建立 + +### 2026-02-04 + +- 新增效能監控儀表板(`/admin/performance`) +- 新增熔斷器保護機制(Circuit Breaker) +- 新增效能指標收集(P50/P95/P99 延遲、慢查詢統計) +- 新增 SQLite 日誌儲存與管理功能 +- 新增 Worker Watchdog 重啟機制 +- 新增統一 API 回應格式(success_response/error_response) +- 新增 404/500 錯誤頁面模板 +- 修復熔斷器 get_status() 死鎖問題 +- 修復 health_routes.py 模組匯入錯誤 +- 新增 psutil 依賴用於 Worker 狀態監控 +- 新增完整測試套件(59 個效能相關測試) + +### 2026-02-03 + +- 重構 SQL 查詢管理架構,提升安全性與效能 +- 
新增 SQLLoader (LRU 快取)、QueryBuilder (參數化)、CommonFilters 模組 +- 抽取 20 個 SQL 檔案至 `src/mes_dashboard/sql/` 目錄 +- 修復所有 SQL 注入風險(LIKE 萬用字元跳脫、IN 條件參數化) +- 優化 workcenter_cards API 回應時間(55s → 0.1s) + +### 2026-02-02 + +- 新增 Hold Summary 柏拉圖視覺化圖表 +- 設備頁面統一排序、階層篩選與標籤優化 + +### 2026-01-30 + +- 新增本地認證模式支援開發測試環境 + +### 2026-01-29 + +- 新增設備狀態監控頁面 +- 新增設備歷史查詢頁面 +- 整合 Redis 快取系統(30 秒自動更新) + +### 2026-01-28 + +- 新增管理員認證系統(LDAP 整合) +- 新增頁面狀態管理(released/dev) +- 新增部署腳本 `deploy.sh` +- 更新啟動腳本自動載入 `.env` +- 新增完整測試套件(57 個測試) + +### 2026-01-27 + +- 新增 Hold Detail 頁面 +- WIP 查詢排除原物料 +- Hold 狀態分類(品質異常/非品質異常) + +### 2026-01-26 + +- 重構為 Flask App Factory 模式 +- 新增全域連線管理 +- 新增 WIP 篩選增強功能 + +--- + +## 聯絡方式 + +如有技術問題或需求變更,請聯繫系統管理員。 + +--- + +**文檔版本**: 4.1 +**最後更新**: 2026-02-08 diff --git a/README.mdj b/README.mdj new file mode 100644 index 0000000..17c1f1b --- /dev/null +++ b/README.mdj @@ -0,0 +1,61 @@ +# MES Dashboard Architecture Snapshot (README.mdj) + +本檔案為 `README.md` 的架構摘要鏡像,重點反映目前已完成的 Vite + 單一 port 運行契約與韌性治理策略。 + +## Runtime Contract + +- 單一服務單一 port:`GUNICORN_BIND`(預設 `0.0.0.0:8080`) +- 前端資產由 Vite build 到 `src/mes_dashboard/static/dist/`,由 Flask/Gunicorn 同源提供 +- Watchdog 透過 restart flag + `SIGHUP` 進行 graceful worker reload + +## Resilience Contract + +- 降級回應:`DB_POOL_EXHAUSTED`、`CIRCUIT_BREAKER_OPEN` + `Retry-After` +- health/admin 診斷輸出包含: + - thresholds + - restart churn summary + - recovery recommendation +- 不預設啟用自動重啟;維持受控人工觸發,避免重啟風暴 + +## Frontend Governance + +- WIP overview/detail 的 autocomplete/filter 查詢邏輯共用 `frontend/src/core/autocomplete.js` +- 目標:維持既有操作語意,同時降低重複邏輯與維護成本 +- 前端核心模組測試:`npm --prefix frontend test` + +## 開發歷史(摘要) + +### 2026-02-08 +- 封存 `post-migration-resilience-governance` +- 新增韌性診斷欄位(thresholds/churn/recommendation) +- 完成 WIP autocomplete 共用模組化與前端測試腳本 + +### 2026-02-07 +- 封存完整 Vite 遷移相關提案群組 +- 單一 port 架構、抽屜導航、欄位契約治理與 migration gates 就位 + +## Key Configs + +```bash +WATCHDOG_RUNTIME_DIR=./tmp +WATCHDOG_RESTART_FLAG=./tmp/mes_dashboard_restart.flag 
+WATCHDOG_PID_FILE=./tmp/gunicorn.pid +WATCHDOG_STATE_FILE=./tmp/mes_dashboard_restart_state.json +WATCHDOG_RESTART_HISTORY_MAX=50 + +RESILIENCE_DEGRADED_ALERT_SECONDS=300 +RESILIENCE_POOL_SATURATION_WARNING=0.90 +RESILIENCE_POOL_SATURATION_CRITICAL=1.0 +RESILIENCE_RESTART_CHURN_WINDOW_SECONDS=600 +RESILIENCE_RESTART_CHURN_THRESHOLD=3 +``` + +## Validation Quick Commands + +```bash +npm --prefix frontend test +npm --prefix frontend run build +python -m pytest -q tests/test_resilience.py tests/test_health_routes.py tests/test_performance_integration.py +``` + +> 詳細部署、使用說明與完整環境配置請參考 `README.md`。 diff --git a/data/page_status.json b/data/page_status.json new file mode 100644 index 0000000..427a548 --- /dev/null +++ b/data/page_status.json @@ -0,0 +1,56 @@ +{ + "pages": [ + { + "route": "/", + "name": "首頁", + "status": "released" + }, + { + "route": "/wip-overview", + "name": "WIP 即時概況", + "status": "released" + }, + { + "route": "/wip-detail", + "name": "WIP 明細", + "status": "released" + }, + { + "route": "/hold-detail", + "name": "Hold 明細", + "status": "released" + }, + { + "route": "/resource-history", + "name": "設備歷史績效", + "status": "released" + }, + { + "route": "/tables", + "name": "表格總覽", + "status": "dev" + }, + { + "route": "/resource", + "name": "機台狀態", + "status": "released" + }, + { + "route": "/excel-query", + "name": "Excel 批次查詢", + "status": "dev" + }, + { + "route": "/job-query", + "name": "設備維修查詢", + "status": "released" + } + ], + "api_public": true, + "db_scan": { + "schema": "DWH", + "updated_at": "2026-01-29 13:49:59", + "object_count": 19, + "source": "tools/query_table_schema.py" + } +} \ No newline at end of file diff --git a/data/table_schema_info.json b/data/table_schema_info.json new file mode 100644 index 0000000..41b3c55 --- /dev/null +++ b/data/table_schema_info.json @@ -0,0 +1,12093 @@ +{ + "DW_MES_CONTAINER": { + "owner": "DWH", + "table_comment": null, + "row_count": 5218406, + "schema": [ + { + "column_name": "CONTAINERCOMMENTS", + 
"data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "CONTAINERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "CURRENTHOLDCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "CURRENTSTATUSID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "CURRENTREWORKCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "CURRENTWIPLOTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "CUSTOMERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "DOCUMENTSETID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "EQUIPMENTCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "EQUIPMENTLOADINGCOUNT", + "data_type": 
"NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "EXPIRATIONDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "FACTORYSTARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "FACTORYSTARTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "FIRSTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "FUTURECOMBINECOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "FUTURECOMBINEPARENTLOTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "FUTURECOMBINESPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "FUTUREHOLDCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "HOLDLOCATIONDURATION", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "HOLDLOCATIONID", + "data_type": 
"CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "HOLDLOCATIONSTARTTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "HOLDREASONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "LASTACTIVITYDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "LASTCOMPLETIONDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "LASTMOVEOUTTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "LASTMOVEOUTUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "LOTATTRIBUTESID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "MFGORDERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "MOVEINQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "MOVEINQTY2", + "data_type": 
"NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "MOVEINTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "MOVEINUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "OBJECTTYPE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "ONHOLDDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "ORIGINALCONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "ORIGINALQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "ORIGINALQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "ORIGINALSTARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + }, + { + "column_name": "OWNERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + }, + { + "column_name": "PARENTCONTAINERID", + "data_type": "CHAR", + 
"data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 41 + }, + { + "column_name": "PLANNEDSTARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 42 + }, + { + "column_name": "PRIORITYCODEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 43 + }, + { + "column_name": "PROCESSSPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 44 + }, + { + "column_name": "PRODUCTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 45 + }, + { + "column_name": "QTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 46 + }, + { + "column_name": "QTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 47 + }, + { + "column_name": "QTYSCHEDULED", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 48 + }, + { + "column_name": "SCHEDULECOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 49 + }, + { + "column_name": "SCHEDULEDATAID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 50 + }, + { + "column_name": "SPLITCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + 
"data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 51 + }, + { + "column_name": "SPLITFROMID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 52 + }, + { + "column_name": "STARTREASONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 53 + }, + { + "column_name": "STATUS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 54 + }, + { + "column_name": "UNITCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 55 + }, + { + "column_name": "UOM2ID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 56 + }, + { + "column_name": "UOMID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 57 + }, + { + "column_name": "PJ_ERPPRODUCTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 58 + }, + { + "column_name": "LASTMOVEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 59 + }, + { + "column_name": "LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 60 + }, + { + "column_name": "WORKFLOWSTEPNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + 
"default_value": null, + "column_id": 61 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 62 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 63 + }, + { + "column_name": "HOLDLOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 64 + }, + { + "column_name": "HOLDREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 65 + }, + { + "column_name": "MFGORDERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 66 + }, + { + "column_name": "PJ_BOP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 67 + }, + { + "column_name": "PJ_PRODUCEREGION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 68 + }, + { + "column_name": "PRODUCTBOMBASEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 69 + }, + { + "column_name": "OWNERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 70 + }, + { + "column_name": "PRIORITYCODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": 
"Y", + "default_value": null, + "column_id": 71 + }, + { + "column_name": "PJ_TYPE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 72 + }, + { + "column_name": "PJ_FUNCTION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 73 + }, + { + "column_name": "PRODUCTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 74 + }, + { + "column_name": "PRODUCTLINENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 75 + }, + { + "column_name": "STARTREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 76 + }, + { + "column_name": "PRODUCTDESC", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 77 + }, + { + "column_name": "UTS", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 78 + }, + { + "column_name": "LEADFRAMENAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 79 + }, + { + "column_name": "LEADFRAMEDESC", + "data_type": "VARCHAR2", + "data_length": 200, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 80 + }, + { + "column_name": "LEADFRAMEOPTION", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + 
"default_value": null, + "column_id": 81 + }, + { + "column_name": "LAST_SYNC_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 82 + } + ], + "column_comments": { + "CONTAINERCOMMENTS": null, + "CONTAINERID": null, + "CONTAINERNAME": null, + "CURRENTHOLDCOUNT": null, + "CURRENTREWORKCOUNT": null, + "CURRENTSTATUSID": null, + "CURRENTWIPLOTID": null, + "CUSTOMERID": null, + "DOCUMENTSETID": null, + "EQUIPMENTCOUNT": null, + "EQUIPMENTLOADINGCOUNT": null, + "EXPIRATIONDATE": null, + "FACTORYSTARTDATE": null, + "FACTORYSTARTQTY": null, + "FIRSTNAME": null, + "FUTURECOMBINECOUNT": null, + "FUTURECOMBINEPARENTLOTID": null, + "FUTURECOMBINESPECID": null, + "FUTUREHOLDCOUNT": null, + "HOLDLOCATIONDURATION": null, + "HOLDLOCATIONID": null, + "HOLDLOCATIONNAME": "HOLDLOCATIONID", + "HOLDLOCATIONSTARTTIMESTAMP": null, + "HOLDREASONID": null, + "HOLDREASONNAME": "HOLDREASONID", + "LASTACTIVITYDATE": null, + "LASTCOMPLETIONDATE": null, + "LASTMOVEDATE": "CURRENTSTATUSID", + "LASTMOVEOUTTIMESTAMP": null, + "LASTMOVEOUTUSERNAME": null, + "LAST_SYNC_DATE": null, + "LEADFRAMEDESC": null, + "LEADFRAMENAME": null, + "LEADFRAMEOPTION": null, + "LOCATIONNAME": "CURRENTSTATUSID", + "LOTATTRIBUTESID": null, + "MFGORDERID": null, + "MFGORDERNAME": "MFGORDERID", + "MOVEINQTY": null, + "MOVEINQTY2": null, + "MOVEINTIMESTAMP": null, + "MOVEINUSERNAME": null, + "OBJECTTYPE": null, + "ONHOLDDATE": null, + "ORIGINALCONTAINERID": null, + "ORIGINALQTY": null, + "ORIGINALQTY2": null, + "ORIGINALSTARTDATE": null, + "OWNERID": null, + "OWNERNAME": "OWNERID", + "PARENTCONTAINERID": null, + "PJ_BOP": "MFGORDERID", + "PJ_ERPPRODUCTID": null, + "PJ_FUNCTION": "PRODUCTID", + "PJ_PRODUCEREGION": "MFGORDERID", + "PJ_TYPE": "PRODUCTID", + "PLANNEDSTARTDATE": null, + "PRIORITYCODEID": null, + "PRIORITYCODENAME": "PRIORITYCODEID", + "PROCESSSPECID": null, + "PRODUCTBOMBASEID": "MFGORDERID", + 
"PRODUCTDESC": "PRODUCTID", + "PRODUCTID": null, + "PRODUCTLINENAME": "PRODUCTID", + "PRODUCTNAME": "PRODUCTID", + "QTY": null, + "QTY2": null, + "QTYSCHEDULED": null, + "SCHEDULECOUNT": null, + "SCHEDULEDATAID": null, + "SPECNAME": "CURRENTSTATUSID", + "SPLITCOUNT": null, + "SPLITFROMID": null, + "STARTREASONID": null, + "STARTREASONNAME": "STARTREASONID", + "STATUS": null, + "UNITCOUNT": null, + "UOM2ID": null, + "UOMID": null, + "UTS": null, + "WORKCENTERNAME": "CURRENTSTATUSID", + "WORKFLOWSTEPNAME": "CURRENTSTATUSID" + }, + "indexes": [ + [ + "DW_C_CONTAINERID", + "UNIQUE", + "CONTAINERID" + ], + [ + "DW_C_CONTAINERNAME", + "UNIQUE", + "CONTAINERNAME" + ], + [ + "DW_C_MFGORDERNAME", + "NONUNIQUE", + "MFGORDERNAME" + ], + [ + "DW_C_PRODUCTBOMBASEID", + "NONUNIQUE", + "PRODUCTBOMBASEID" + ], + [ + "DW_C_SCHEDULEDATAID", + "NONUNIQUE", + "SCHEDULEDATAID" + ], + [ + "DW_MES_CONTAINER_PRODUCTLINENAME", + "NONUNIQUE", + "PRODUCTLINENAME" + ] + ], + "sample_columns": [ + "CONTAINERCOMMENTS", + "CONTAINERID", + "CONTAINERNAME", + "CURRENTHOLDCOUNT", + "CURRENTSTATUSID", + "CURRENTREWORKCOUNT", + "CURRENTWIPLOTID", + "CUSTOMERID", + "DOCUMENTSETID", + "EQUIPMENTCOUNT", + "EQUIPMENTLOADINGCOUNT", + "EXPIRATIONDATE", + "FACTORYSTARTDATE", + "FACTORYSTARTQTY", + "FIRSTNAME", + "FUTURECOMBINECOUNT", + "FUTURECOMBINEPARENTLOTID", + "FUTURECOMBINESPECID", + "FUTUREHOLDCOUNT", + "HOLDLOCATIONDURATION", + "HOLDLOCATIONID", + "HOLDLOCATIONSTARTTIMESTAMP", + "HOLDREASONID", + "LASTACTIVITYDATE", + "LASTCOMPLETIONDATE", + "LASTMOVEOUTTIMESTAMP", + "LASTMOVEOUTUSERNAME", + "LOTATTRIBUTESID", + "MFGORDERID", + "MOVEINQTY", + "MOVEINQTY2", + "MOVEINTIMESTAMP", + "MOVEINUSERNAME", + "OBJECTTYPE", + "ONHOLDDATE", + "ORIGINALCONTAINERID", + "ORIGINALQTY", + "ORIGINALQTY2", + "ORIGINALSTARTDATE", + "OWNERID", + "PARENTCONTAINERID", + "PLANNEDSTARTDATE", + "PRIORITYCODEID", + "PROCESSSPECID", + "PRODUCTID", + "QTY", + "QTY2", + "QTYSCHEDULED", + "SCHEDULECOUNT", + "SCHEDULEDATAID", + 
"SPLITCOUNT", + "SPLITFROMID", + "STARTREASONID", + "STATUS", + "UNITCOUNT", + "UOM2ID", + "UOMID", + "PJ_ERPPRODUCTID", + "LASTMOVEDATE", + "LOCATIONNAME", + "WORKFLOWSTEPNAME", + "SPECNAME", + "WORKCENTERNAME", + "HOLDLOCATIONNAME", + "HOLDREASONNAME", + "MFGORDERNAME", + "PJ_BOP", + "PJ_PRODUCEREGION", + "PRODUCTBOMBASEID", + "OWNERNAME", + "PRIORITYCODENAME", + "PJ_TYPE", + "PJ_FUNCTION", + "PRODUCTNAME", + "PRODUCTLINENAME", + "STARTREASONNAME", + "PRODUCTDESC", + "UTS", + "LEADFRAMENAME", + "LEADFRAMEDESC", + "LEADFRAMEOPTION", + "LAST_SYNC_DATE" + ], + "sample_data": [ + [ + null, + "48810380001cba48", + "GA23100020-A00-011", + 0, + "48850b80003a2278", + null, + null, + null, + null, + 0, + 0, + null, + "2023-10-03 13:10:07", + 155520, + "TXS-14873#3ACU-3734P", + 0, + null, + null, + 0, + 0, + null, + null, + null, + "2023-10-24 14:09:17", + null, + "2023-10-24 08:25:26", + "FT1", + "48823780005c4d3f", + "48803d80000778de", + 76449, + 0, + "2023-10-24 08:25:26", + "FT1", + "LOT", + null, + "48810380001c8f12", + 155520, + 2, + "2023-10-15 18:44:50", + "0005468000000066", + null, + null, + "00051e8000000050", + "48813980000271e9", + "48811680000003fc", + 0, + 0, + 0, + 0, + null, + 0, + "48810380001c8f1a", + "0007808000000002", + 2, + 1, + "0008de8000000034", + "0008de8000000003", + "48811680000003fc", + "2023-10-24 08:25:26", + null, + "LT", + "LT", + "TMTT", + null, + null, + "GA23100020", + "UCC10-DW", + "D區", + "4880ee800002ba05", + "量產", + "4.一般", + "BAV199-AU", + "SWITCHING", + "BAV199-AU_R2_000A1", + "SOT-23", + "NORMAL", + "/PB/TR/13\"/HF/12K/SOT-23/SWI/SOT/USM-03TCS/USM03-QI24/PJ///", + "2023-10-20 00:00:00", + "LEF000024", + "腳架/SOT-23/OPTION 5/REEL/Cu", + "OPTION 5", + "2025-07-29 14:48:23" + ], + [ + null, + "48810380001cddd2", + "GA23101928-A00-002", + 0, + "48850b80003a5f55", + null, + null, + null, + null, + 0, + 0, + null, + "2023-10-24 13:59:36", + 99840, + "GMSN-24178#SK232098-01P", + 0, + null, + null, + 0, + 0, + null, + null, + null, + 
"2023-11-01 18:05:30", + null, + "2023-11-01 06:16:30", + "14647", + "48823780005c9b4c", + "48803d8000078791", + 98582, + 0, + "2023-11-01 06:16:30", + "14647", + "LOT", + null, + "48810380001cd997", + 99840, + 2, + "2023-10-25 15:13:37", + "0005468000000066", + null, + null, + "00051e8000000050", + "48813980000005c5", + "4881168000000d9f", + 0, + 0, + 0, + 0, + null, + 0, + "48810380001cd9b3", + "0007808000000002", + 2, + 1, + "0008de8000000034", + "0008de8000000003", + "4881168000000d9f", + "2023-11-01 06:16:30", + null, + "鈦昇", + "鈦昇", + "TMTT", + null, + null, + "GA23101928", + "UAC10", + "A棟", + "4880ee8000003a73", + "量產", + "4.一般", + "RB551V-30-AU", + "SKY", + "RB551V-30-AU_R1_000A1", + "SOD-323", + "NORMAL", + "/551/TR/7\"/HF/5K/SOD-323/SKY/SOD/SSM-05A/SSM05-QI24/PJ///", + "2023-11-02 00:00:00", + "LEF000016", + "腳架/SOD-323/OPTION 1/REEL/A42", + "OPTION 1", + "2025-07-29 14:48:23" + ], + [ + null, + "4881048000201245", + "GA23112101-A01", + 0, + "48850b80003b19b1", + null, + null, + null, + null, + 0, + 0, + null, + "2023-11-22 19:56:00", + 357576, + "RO-4043#2336H-P002C", + 0, + null, + null, + 0, + 0, + null, + null, + null, + "2023-11-24 19:55:41", + null, + "2023-11-24 17:32:20", + "DS1B", + "48823780005d8d41", + "48803d800007a0ce", + 357576, + 24, + "2023-11-24 17:32:20", + "DS1B", + "LOT", + null, + "4881048000201153", + 357576, + 24, + "2023-11-23 00:38:33", + "000546800000006a", + null, + null, + "00051e8000000050", + "488135800002b513", + "4881168000023df0", + 0, + 0, + 0, + 0, + null, + 10, + "4881048000201153", + "0007808000000002", + 2, + 1, + "0008de8000000034", + "0008de8000000003", + "4881168000023df0", + "2023-11-24 17:32:20", + null, + "A線邊倉", + "A線邊倉", + "焊_DB_料", + null, + null, + "GA23112101", + "PCUAB", + "E區", + "4880ee8000032cca", + "代工", + "4.一般", + "RF081MP2STR", + "FAST", + "RF081MP2STR_R1_000A1", + "SOD-123FL", + "NORMAL", + "/66/TR/7\"/HF/3K/SOD-123FL/FR/SOD/RSM-08AAFL/RSM08FL-QI01/L021///", + "2023-12-01 00:00:00", + 
"LEF000097,LEF000098", + "腳架/SOD-123FL/OPTION 1 A/STRIP/Cu,腳架/SOD-123FL/OPTION 1 B/STRIP/Cu", + "OPTION 1 A,OPTION 1 B", + "2025-07-29 14:48:23" + ] + ] + }, + "DW_MES_EQUIPMENTSTATUS_WIP_V": { + "owner": "DWH", + "table_comment": null, + "row_count": 2631, + "schema": [ + { + "column_name": "RESOURCEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "N", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "EQUIPMENTID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "OBJECTCATEGORY", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "EQUIPMENTASSETSSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "EQUIPMENTASSETSSTATUSREASON", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "JOBORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "JOBMODEL", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "JOBSTAGE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "JOBID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + 
"nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "JOBSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "CREATEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "CREATEUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "CREATEUSER", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "SYMPTOMCODE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "CAUSECODE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "REPAIRCODE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "RUNCARDLOTID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "Package", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "PACKAGE_LF", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + 
"default_value": null, + "column_id": 19 + }, + { + "column_name": "Function", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "TYPE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "BOP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "WAFERLOTID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "WAFERPN", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "WAFERLOTID_PREFIX", + "data_type": "VARCHAR2", + "data_length": 160, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "SPEC", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "LFOPTIONID", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "WIREDESCRIPTION", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "WAFERMIL", + "data_type": "VARCHAR2", + "data_length": 3062, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 29 + }, + { + "column_name": "LOTTRACKINQTY_PCS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "LOTTRACKINTIME", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "LOTTRACKINEMPLOYEE", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + } + ], + "column_comments": { + "BOP": null, + "CAUSECODE": null, + "CREATEDATE": null, + "CREATEUSER": null, + "CREATEUSERNAME": null, + "EQUIPMENTASSETSSTATUS": null, + "EQUIPMENTASSETSSTATUSREASON": null, + "EQUIPMENTID": null, + "Function": null, + "JOBID": null, + "JOBMODEL": null, + "JOBORDER": null, + "JOBSTAGE": null, + "JOBSTATUS": null, + "LFOPTIONID": null, + "LOTTRACKINEMPLOYEE": null, + "LOTTRACKINQTY_PCS": null, + "LOTTRACKINTIME": null, + "OBJECTCATEGORY": null, + "PACKAGE_LF": null, + "Package": null, + "REPAIRCODE": null, + "RESOURCEID": null, + "RUNCARDLOTID": null, + "SPEC": null, + "SYMPTOMCODE": null, + "TYPE": null, + "WAFERLOTID": null, + "WAFERLOTID_PREFIX": null, + "WAFERMIL": null, + "WAFERPN": null, + "WIREDESCRIPTION": null + }, + "indexes": [], + "sample_columns": [ + "RESOURCEID", + "EQUIPMENTID", + "OBJECTCATEGORY", + "EQUIPMENTASSETSSTATUS", + "EQUIPMENTASSETSSTATUSREASON", + "JOBORDER", + "JOBMODEL", + "JOBSTAGE", + "JOBID", + "JOBSTATUS", + "CREATEDATE", + "CREATEUSERNAME", + "CREATEUSER", + "SYMPTOMCODE", + "CAUSECODE", + "REPAIRCODE", + "RUNCARDLOTID", + "Package", + "PACKAGE_LF", + "Function", + "TYPE", + "BOP", + "WAFERLOTID", + "WAFERPN", + "WAFERLOTID_PREFIX", + "SPEC", + "LFOPTIONID", + "WIREDESCRIPTION", + "WAFERMIL", + "LOTTRACKINQTY_PCS", + "LOTTRACKINTIME", + "LOTTRACKINEMPLOYEE" + ], + "sample_data": [ + [ 
+ "4880168000000433", + "GTMA-0124", + "ASSEMBLY", + "UDT", + "EE Repair", + "2026 033-0001251106", + "GTMA_Machine Repair", + "設備維修", + "4882ca80001530d1", + "CREATED", + "2026-01-29 13:45:43", + "CamstarAdmin", + "Camstar Administrator", + null, + null, + null, + "GA26011480-A00-006", + "DFN2510-10L", + "DFN2510-10L", + "TVS/ESD", + "PE1605M4AQ", + "ECA08", + "MSASM-0082#EB03904P", + "WAF006075_CP", + "MSASM", + "3M", + "OPTION 2", + "GOLD WIRE/φ0.8mil", + "8.1/*8.1mil", + 66094, + "2026-01-29 10:14:42", + "妮莎" + ], + [ + "4880168000000016", + "GDBA-0122", + "ASSEMBLY", + "PRD", + "Production RUN", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "GA26011960-A00-019", + "DFN2510-10L", + "DFN2510-10L", + "TVS/ESD", + "PJE5UFN10A", + "ECA10", + "MSASM-0084#EA08258P", + "WAF004625_CP", + "MSASM", + "Epoxy D/B", + "OPTION 2", + "GOLD WIRE/φ1.0mil", + "26/*20.5mil", + 5842, + "2026-01-29 01:35:16", + "尤拉" + ], + [ + "4880168000000188", + "GWBK-0267", + "ASSEMBLY", + "PRD", + "Production RUN", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "GA26011960-A00-011", + "DFN2510-10L", + "DFN2510-10L", + "TVS/ESD", + "PJE5UFN10A", + "ECA10", + "MSASM-0084#EA08258P", + "WAF004625_CP", + "MSASM", + "金線製程", + "OPTION 2", + "GOLD WIRE/φ1.0mil", + "26/*20.5mil", + 59904, + "2026-01-29 12:23:25", + "楊月珍" + ] + ] + }, + "DW_MES_HM_LOTMOVEOUT": { + "owner": "DWH", + "table_comment": null, + "row_count": 48645692, + "schema": [ + { + "column_name": "CALLBYCDONAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "CARRIERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "CARRIERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "CDONAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "CDOTXNSEQUENCE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "COMMENTS", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "COMPUTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "CONTAINERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "EMPLOYEEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "EMPLOYEENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "FACTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "FROMCONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + 
"data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "FROMCONTAINERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "FROMQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "FROMQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "FROMSPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "FROMSPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "FROMWORKCENTER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "FROMSTATUS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "FROMUOM2NAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "FROMUOMNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "FROMWORKFLOWNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": 
null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "HISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "HISTORYMAINLINEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "N", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "HISTORYSUMMARYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "LASTLOTCARRIERSSETUPHISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "LASTMOVEOUTTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "LASTMOVEOUTUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "MFGDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "MOVEINQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "MOVEINQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "MOVEINTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": 
null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "MOVEINUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "OPERATIONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "OWNERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "OWNERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "PARAMETRICDETAILID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "PROCESSSPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + }, + { + "column_name": "PRODUCTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + }, + { + "column_name": "PRODUCTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 41 + }, + { + "column_name": "QTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 42 + }, + { + "column_name": "QTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": 
null, + "column_id": 43 + }, + { + "column_name": "RESOURCEAVAILABILITY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 44 + }, + { + "column_name": "RESOURCEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 45 + }, + { + "column_name": "RESOURCENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 46 + }, + { + "column_name": "RESOURCEOBJECTCATEGORY", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 47 + }, + { + "column_name": "RESOURCEOBJECTTYPE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 48 + }, + { + "column_name": "RESOURCESTATUSCODEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 49 + }, + { + "column_name": "RESOURCESTATUSREASONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 50 + }, + { + "column_name": "SERVERNAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 51 + }, + { + "column_name": "SHIFTNAME", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 52 + }, + { + "column_name": "SPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + 
"default_value": null, + "column_id": 53 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 54 + }, + { + "column_name": "WORKCENTER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 55 + }, + { + "column_name": "STATUS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 56 + }, + { + "column_name": "SYSTEMDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 57 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 58 + }, + { + "column_name": "TXNID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 59 + }, + { + "column_name": "TXNTYPE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 60 + }, + { + "column_name": "UOM2NAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 61 + }, + { + "column_name": "UOMNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 62 + }, + { + "column_name": "USERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 63 + }, + { + "column_name": 
"USERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 64 + }, + { + "column_name": "WIPTRACKINGGROUPKEYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 65 + }, + { + "column_name": "WORKFLOWNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 66 + }, + { + "column_name": "WORKFLOWSTEPID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 67 + }, + { + "column_name": "UPDATETIME", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 68 + }, + { + "column_name": "USERFULLNAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 69 + }, + { + "column_name": "EMPZONE", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 70 + }, + { + "column_name": "WAFERPRODUCT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 71 + }, + { + "column_name": "CONSUMEFACTOR", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 72 + } + ], + "column_comments": { + "CALLBYCDONAME": null, + "CARRIERID": null, + "CARRIERNAME": null, + "CDONAME": null, + "CDOTXNSEQUENCE": null, + "COMMENTS": null, + "COMPUTERNAME": null, + "CONSUMEFACTOR": null, + "CONTAINERID": null, + 
"CONTAINERNAME": null, + "EMPLOYEEID": null, + "EMPLOYEENAME": null, + "EMPZONE": null, + "FACTORYID": null, + "FROMCONTAINERID": null, + "FROMCONTAINERNAME": null, + "FROMQTY": null, + "FROMQTY2": null, + "FROMSPECID": null, + "FROMSPECNAME": null, + "FROMSTATUS": null, + "FROMUOM2NAME": null, + "FROMUOMNAME": null, + "FROMWORKCENTER": null, + "FROMWORKFLOWNAME": null, + "HISTORYID": null, + "HISTORYMAINLINEID": null, + "HISTORYSUMMARYID": null, + "LASTLOTCARRIERSSETUPHISTORYID": null, + "LASTMOVEOUTTIMESTAMP": null, + "LASTMOVEOUTUSERNAME": null, + "MFGDATE": null, + "MOVEINQTY": null, + "MOVEINQTY2": null, + "MOVEINTIMESTAMP": null, + "MOVEINUSERNAME": null, + "OPERATIONID": null, + "OWNERID": null, + "OWNERNAME": null, + "PARAMETRICDETAILID": null, + "PROCESSSPECID": null, + "PRODUCTID": null, + "PRODUCTNAME": null, + "QTY": null, + "QTY2": null, + "RESOURCEAVAILABILITY": null, + "RESOURCEID": null, + "RESOURCENAME": null, + "RESOURCEOBJECTCATEGORY": null, + "RESOURCEOBJECTTYPE": null, + "RESOURCESTATUSCODEID": null, + "RESOURCESTATUSREASONID": null, + "SERVERNAME": null, + "SHIFTNAME": null, + "SPECID": null, + "SPECNAME": null, + "STATUS": null, + "SYSTEMDATE": null, + "TXNDATE": null, + "TXNID": null, + "TXNTYPE": null, + "UOM2NAME": null, + "UOMNAME": null, + "UPDATETIME": null, + "USERFULLNAME": null, + "USERID": null, + "USERNAME": null, + "WAFERPRODUCT": null, + "WIPTRACKINGGROUPKEYID": null, + "WORKCENTER": null, + "WORKFLOWNAME": null, + "WORKFLOWSTEPID": null + }, + "indexes": [ + [ + "DW_MES_HM_LMO_CALLBYCDONAME", + "NONUNIQUE", + "CALLBYCDONAME" + ], + [ + "DW_MES_HM_LMO_CDONAME", + "NONUNIQUE", + "CDONAME" + ], + [ + "DW_MES_HM_LMO_HISTORYID", + "NONUNIQUE", + "HISTORYID" + ], + [ + "DW_MES_HM_LMO_HISTORYID_TID_TDATE", + "NONUNIQUE", + "HISTORYID, TXNID, TXNDATE" + ], + [ + "DW_MES_HM_LMO_TXNDATE", + "NONUNIQUE", + "TXNDATE" + ], + [ + "DW_MES_HM_LMO__HID_TID_DATE_ID", + "NONUNIQUE", + "HISTORYID, TXNID, TXNDATE, HISTORYMAINLINEID" + ], + [ + 
"DW_MES_HM_LMO__HISTORYMAINLINEID", + "UNIQUE", + "HISTORYMAINLINEID" + ] + ], + "sample_columns": [ + "CALLBYCDONAME", + "CARRIERID", + "CARRIERNAME", + "CDONAME", + "CDOTXNSEQUENCE", + "COMMENTS", + "COMPUTERNAME", + "CONTAINERID", + "CONTAINERNAME", + "EMPLOYEEID", + "EMPLOYEENAME", + "FACTORYID", + "FROMCONTAINERID", + "FROMCONTAINERNAME", + "FROMQTY", + "FROMQTY2", + "FROMSPECID", + "FROMSPECNAME", + "FROMWORKCENTER", + "FROMSTATUS", + "FROMUOM2NAME", + "FROMUOMNAME", + "FROMWORKFLOWNAME", + "HISTORYID", + "HISTORYMAINLINEID", + "HISTORYSUMMARYID", + "LASTLOTCARRIERSSETUPHISTORYID", + "LASTMOVEOUTTIMESTAMP", + "LASTMOVEOUTUSERNAME", + "MFGDATE", + "MOVEINQTY", + "MOVEINQTY2", + "MOVEINTIMESTAMP", + "MOVEINUSERNAME", + "OPERATIONID", + "OWNERID", + "OWNERNAME", + "PARAMETRICDETAILID", + "PROCESSSPECID", + "PRODUCTID", + "PRODUCTNAME", + "QTY", + "QTY2", + "RESOURCEAVAILABILITY", + "RESOURCEID", + "RESOURCENAME", + "RESOURCEOBJECTCATEGORY", + "RESOURCEOBJECTTYPE", + "RESOURCESTATUSCODEID", + "RESOURCESTATUSREASONID", + "SERVERNAME", + "SHIFTNAME", + "SPECID", + "SPECNAME", + "WORKCENTER", + "STATUS", + "SYSTEMDATE", + "TXNDATE", + "TXNID", + "TXNTYPE", + "UOM2NAME", + "UOMNAME", + "USERID", + "USERNAME", + "WIPTRACKINGGROUPKEYID", + "WORKFLOWNAME", + "WORKFLOWSTEPID", + "UPDATETIME", + "USERFULLNAME", + "EMPZONE", + "WAFERPRODUCT", + "CONSUMEFACTOR" + ], + "sample_data": [ + [ + "LotMoveOut", + null, + null, + "MoveLot", + 7, + null, + null, + "4881048000000011", + "GA18030001-A01-05", + "0004748000002de2", + "21201", + "0004e28000000002", + "4881048000000011", + "GA18030001-A01-05", + 39000, + 1, + "48812e8000000006", + "晶片貼合", + "切割", + 1, + "PCS", + "UNIT", + "切割", + "4881048000000011", + "0009888000000581", + "48825f800000000e", + null, + "2018-03-01 10:46:38", + "CamstarAdmin", + "2018-03-01 11:39:56", + 39000, + 1, + "2018-03-01 11:39:22", + "21201", + "48803b8000000007", + "0005468000000002", + "PROD", + null, + "4881358000000004", + "48811680000004c9", + 
"PJA3415AE_R1_00001", + 39000, + 1, + null, + null, + null, + null, + null, + null, + null, + "MESAP1", + "D", + "48812e8000000007", + "切割烘烤", + "切割", + 1, + "2018-03-01 11:39:56", + "2018-03-01 11:39:56", + "4000000000026989", + 4752083, + "PCS", + "UNIT", + "0004748000002de2", + "21201", + "48825f800000000e", + "切割", + "00074e8000000023", + "2024-05-28 13:09:14", + "沈玉華", + "點測切割(D)", + "WAF007955", + 1 + ], + [ + "LotMoveOut", + null, + null, + "MoveLot", + 7, + null, + null, + "4881048000000011", + "GA18030001-A01-05", + "0004748000002de2", + "21201", + "0004e28000000002", + "4881048000000011", + "GA18030001-A01-05", + 39000, + 1, + "48812e8000000007", + "切割烘烤", + "切割", + 1, + "PCS", + "UNIT", + "切割", + "4881048000000011", + "000988800000058c", + "48825f800000000f", + null, + "2018-03-01 11:39:56", + "21201", + "2018-03-01 11:40:28", + 39000, + 1, + "2018-03-01 11:39:56", + "21201", + "48803b8000000008", + "0005468000000002", + "PROD", + null, + "4881358000000004", + "48811680000004c9", + "PJA3415AE_R1_00001", + 39000, + 1, + null, + null, + null, + null, + null, + null, + null, + "MESAP1", + "D", + "48812e8000000008", + "晶片切割", + null, + 1, + "2018-03-01 11:40:28", + "2018-03-01 11:40:28", + "400000000002698d", + 4752083, + "PCS", + "UNIT", + "0004748000002de2", + "21201", + "48825f800000000f", + "切割", + "00074e8000000024", + "2024-05-28 13:09:14", + "沈玉華", + "點測切割(D)", + "WAF007955", + 1 + ], + [ + "LotMoveOut", + null, + null, + "MoveLot", + 7, + null, + null, + "488104800000000d", + "GA18030001-A01-01", + "0004748000002de2", + "21201", + "0004e28000000002", + "488104800000000d", + "GA18030001-A01-01", + 39000, + 1, + "48812e8000000008", + "晶片切割", + null, + 1, + "PCS", + "UNIT", + "切割", + "488104800000000d", + "000988800000059b", + "48825f8000000007", + null, + "2018-03-01 11:30:10", + "21201", + "2018-03-01 11:51:34", + 39000, + 1, + "2018-03-01 11:30:10", + "21201", + "48803b800000000a", + "0005468000000002", + "PROD", + null, + "4881358000000004", + 
"48811680000004c9", + "PJA3415AE_R1_00001", + 39000, + 1, + null, + null, + null, + null, + null, + null, + null, + "MESAP1", + "D", + "48812e800000000a", + "晶片切割-END", + "切割", + 1, + "2018-03-01 11:51:34", + "2018-03-01 11:51:34", + "4000000000026e81", + 4752083, + "PCS", + "UNIT", + "0004748000002de2", + "21201", + "48825f8000000007", + "切割", + "00074e8000000025", + "2024-05-28 13:09:14", + "沈玉華", + "點測切割(D)", + "WAF007955", + 1 + ] + ] + }, + "DW_MES_HOLDRELEASEHISTORY": { + "owner": "DWH", + "table_comment": null, + "row_count": 310737, + "schema": [ + { + "column_name": "RN", + "data_type": "VARCHAR2", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "HISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "HISTORYMAINLINEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "FINISHEDRUNCARD", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "PJ_WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "WORKCENTERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", 
+ "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "FROMSPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "FROMSPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "QTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "QTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "PJ_CHIPREMARK1", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "PJ_CHIPREMARK2", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "PJ_CHIPREMARK3", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "HOLDTXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "RELEASETXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "HOLDEMP", + "data_type": "VARCHAR2", + "data_length": 40, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "HOLDEMPDEPTNAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "RELEASEEMP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "RELEASEEMPDEPTNAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "HOLDCOMMENTS", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "RELEASECOMMENTS", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "HOLDREASONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "HOLDREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "RELEASEREASONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "RELEASEREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "NCRID", + "data_type": "VARCHAR2", + 
"data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "LAST_UPDATED_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "HOLDUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "FUTUREHOLDCOMMENTS", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + } + ], + "column_comments": { + "CONTAINERID": null, + "FINISHEDRUNCARD": null, + "FROMSPECID": null, + "FROMSPECNAME": null, + "FUTUREHOLDCOMMENTS": null, + "HISTORYID": null, + "HISTORYMAINLINEID": null, + "HOLDCOMMENTS": null, + "HOLDEMP": null, + "HOLDEMPDEPTNAME": null, + "HOLDREASONID": null, + "HOLDREASONNAME": null, + "HOLDTXNDATE": null, + "HOLDUSERNAME": null, + "LAST_UPDATED_DATE": null, + "NCRID": null, + "PJ_CHIPREMARK1": null, + "PJ_CHIPREMARK2": null, + "PJ_CHIPREMARK3": null, + "PJ_WORKORDER": null, + "QTY": null, + "QTY2": null, + "RELEASECOMMENTS": null, + "RELEASEEMP": null, + "RELEASEEMPDEPTNAME": null, + "RELEASEREASONID": null, + "RELEASEREASONNAME": null, + "RELEASETXNDATE": null, + "RN": null, + "WORKCENTERID": null, + "WORKCENTERNAME": null + }, + "indexes": [ + [ + "DW_MES_HOLDRELEASEHISTORY_IDX1", + "NONUNIQUE", + "HISTORYMAINLINEID" + ], + [ + "DW_MES_HOLDRELEASEHISTORY_IDX2", + "NONUNIQUE", + "CONTAINERID" + ] + ], + "sample_columns": [ + "RN", + "CONTAINERID", + "HISTORYID", + "HISTORYMAINLINEID", + "FINISHEDRUNCARD", + "PJ_WORKORDER", + "WORKCENTERID", + "WORKCENTERNAME", + "FROMSPECID", + "FROMSPECNAME", + "QTY", + "QTY2", + "PJ_CHIPREMARK1", + "PJ_CHIPREMARK2", + "PJ_CHIPREMARK3", + "HOLDTXNDATE", + 
"RELEASETXNDATE", + "HOLDEMP", + "HOLDEMPDEPTNAME", + "RELEASEEMP", + "RELEASEEMPDEPTNAME", + "HOLDCOMMENTS", + "RELEASECOMMENTS", + "HOLDREASONID", + "HOLDREASONNAME", + "RELEASEREASONID", + "RELEASEREASONNAME", + "NCRID", + "LAST_UPDATED_DATE", + "HOLDUSERNAME", + "FUTUREHOLDCOMMENTS" + ], + "sample_data": [ + [ + "1", + "4881048000071663", + "4881048000071663", + "0009888003d51539", + "930843177;930843187", + "GA19021440", + "0005dc8000001e30", + "焊_WB_料", + "48812c800000001e", + "銀線製程", + 115200, + 0, + "IYTN-3125#P84721400A", + null, + null, + "2019-02-28 18:55:33", + "2019-02-28 18:57:03", + "鄭嫈潔", + "吹砂(D)", + "陳昭利", + "製造一部-製造一課", + null, + null, + "00078a8000000ba1", + "Q-Time Fail", + "001bef80000007b9", + "電漿清洗", + null, + "2023-12-02 21:53:11", + null, + null + ], + [ + "1", + "488103800002aba7", + "488103800002aba7", + "000988800430ff43", + "94034K977", + "GA19032568", + "0005dc8000001e30", + "焊_WB_料", + "48812c8000000020", + "銅線製程", + 69120, + 0, + "GMSN-15151#A191026-01P", + null, + null, + "2019-04-01 13:44:24", + "2019-04-01 19:29:31", + "謝嫦如", + "銲接A區(D)", + "郭登益", + "生產二課", + null, + "Release\n", + "00078a8000000ba1", + "Q-Time Fail", + "001bef80000007b9", + "電漿清洗", + null, + "2023-12-02 21:53:11", + null, + null + ], + [ + "1", + "488103800003359b", + "488103800003359b", + "00098880048d3c13", + "9504CE117;9504CE157", + "GA19042735", + "0005dc8000000006", + "焊接_WB", + "48812c8000000019", + "W/B氮氣迴焊爐固化", + 123600, + 0, + "GMZS-1114#TZ19202801P-3P", + null, + null, + "2019-05-03 06:47:46", + "2019-05-03 06:54:28", + "微萊朋", + "銲接E區(N)", + "許嘉倪", + "製造一部-製造一課", + null, + "半成品停滯時間過久", + "00078a8000000ba1", + "Q-Time Fail", + "001bef8000000004", + "Others", + null, + "2023-12-02 21:53:11", + null, + null + ] + ] + }, + "DW_MES_JOB": { + "owner": "DWH", + "table_comment": null, + "row_count": 1248622, + "schema": [ + { + "column_name": "ACKNOWLEDGECOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + 
"nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "ACTIVECLOCKONCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "ASSIGNCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "CANCELDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "CANCELUSERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "CAUSECODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "CLOCKONCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "COMPLETEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "COMPLETEUSERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "CREATEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "CREATEUSERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 11 + }, + { + "column_name": "ESTIMATEDDURATION", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "EXPECTEDSTARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "FIRSTCLOCKONDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "ISSIMPLEMODE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "JOBID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "JOBMODELNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "JOBORDERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "JOBORDERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "JOBSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "LASTCLOCKOFFDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { 
+ "column_name": "REPAIRCODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "RESOURCEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "STAGENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "STAGESEQUENCE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "SYMPTOMCODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "PJ_CAUSECODE2NAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "PJ_REPAIRCODE2NAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "PJ_SYMPTOMCODE2NAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "CANCEL_EMPNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "CANCEL_FULLNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 31 + }, + { + "column_name": "COMPLETE_EMPNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "COMPLETE_FULLNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "CREATE_EMPNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "CREATE_FULLNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "RESOURCENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "CONTAINERIDS", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "CONTAINERNAMES", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "PARTREQUESTORDERNAME", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + }, + { + "column_name": "RESOURCE_PKG_GROUP", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + } + ], + "column_comments": { + "ACKNOWLEDGECOUNT": null, + "ACTIVECLOCKONCOUNT": null, + "ASSIGNCOUNT": null, + "CANCELDATE": null, + 
"CANCELUSERID": null, + "CANCEL_EMPNAME": "CANCELUSERID", + "CANCEL_FULLNAME": "CANCELUSERID", + "CAUSECODENAME": null, + "CLOCKONCOUNT": null, + "COMPLETEDATE": null, + "COMPLETEUSERID": null, + "COMPLETE_EMPNAME": "COMPLETEUSERID", + "COMPLETE_FULLNAME": "COMPLETEUSERID", + "CONTAINERIDS": null, + "CONTAINERNAMES": null, + "CREATEDATE": null, + "CREATEUSERID": null, + "CREATE_EMPNAME": "CREATEUSERID", + "CREATE_FULLNAME": "CREATEUSERID", + "ESTIMATEDDURATION": null, + "EXPECTEDSTARTDATE": null, + "FIRSTCLOCKONDATE": null, + "ISSIMPLEMODE": null, + "JOBID": null, + "JOBMODELNAME": null, + "JOBORDERID": null, + "JOBORDERNAME": null, + "JOBSTATUS": null, + "LASTCLOCKOFFDATE": null, + "PARTREQUESTORDERNAME": "DW_MES_PARTREQUESTORDER", + "PJ_CAUSECODE2NAME": null, + "PJ_REPAIRCODE2NAME": null, + "PJ_SYMPTOMCODE2NAME": null, + "REPAIRCODENAME": null, + "RESOURCEID": null, + "RESOURCENAME": "RESOURCEID", + "RESOURCE_PKG_GROUP": null, + "STAGENAME": null, + "STAGESEQUENCE": null, + "SYMPTOMCODENAME": null + }, + "indexes": [ + [ + "DW_MES_JOB_COMPLETEDATE", + "NONUNIQUE", + "COMPLETEDATE" + ], + [ + "DW_MES_JOB_CREATEDATE", + "NONUNIQUE", + "CREATEDATE" + ], + [ + "DW_MES_JOB_RESOURCEID", + "NONUNIQUE", + "RESOURCEID" + ], + [ + "DW_MES_JOB_RESOURCENAME", + "NONUNIQUE", + "RESOURCENAME" + ] + ], + "sample_columns": [ + "ACKNOWLEDGECOUNT", + "ACTIVECLOCKONCOUNT", + "ASSIGNCOUNT", + "CANCELDATE", + "CANCELUSERID", + "CAUSECODENAME", + "CLOCKONCOUNT", + "COMPLETEDATE", + "COMPLETEUSERID", + "CREATEDATE", + "CREATEUSERID", + "ESTIMATEDDURATION", + "EXPECTEDSTARTDATE", + "FIRSTCLOCKONDATE", + "ISSIMPLEMODE", + "JOBID", + "JOBMODELNAME", + "JOBORDERID", + "JOBORDERNAME", + "JOBSTATUS", + "LASTCLOCKOFFDATE", + "REPAIRCODENAME", + "RESOURCEID", + "STAGENAME", + "STAGESEQUENCE", + "SYMPTOMCODENAME", + "PJ_CAUSECODE2NAME", + "PJ_REPAIRCODE2NAME", + "PJ_SYMPTOMCODE2NAME", + "CANCEL_EMPNAME", + "CANCEL_FULLNAME", + "COMPLETE_EMPNAME", + "COMPLETE_FULLNAME", + "CREATE_EMPNAME", + 
"CREATE_FULLNAME", + "RESOURCENAME", + "CONTAINERIDS", + "CONTAINERNAMES", + "PARTREQUESTORDERNAME", + "RESOURCE_PKG_GROUP" + ], + "sample_data": [ + [ + 0, + 0, + 0, + "2018-04-02 15:57:50", + "0004748000002e73", + null, + 0, + null, + null, + "2018-04-02 15:53:37", + "0004748000000004", + null, + null, + null, + 0, + "4882ca8000000131", + "GWBA-Machine Repair", + "4880bf8000000131", + "2018 093-0000000033", + "CANCELLED", + null, + null, + "48801680000000fd", + "維修單確認", + 1, + null, + null, + null, + null, + "21707", + "陳佳芬", + null, + null, + "CamstarAdmin", + "Camstar Administrator", + "GWBA-0131", + null, + null, + null, + null + ], + [ + 0, + 0, + 0, + "2018-04-02 16:18:31", + "0004748000002e73", + null, + 0, + null, + null, + "2018-04-02 16:12:39", + "0004748000000004", + null, + null, + null, + 0, + "4882ca8000000132", + "GWBA-Machine Repair", + "4880bf8000000132", + "2018 093-0000000034", + "CANCELLED", + null, + null, + "48801680000000fd", + "維修單確認", + 1, + null, + null, + null, + null, + "21707", + "陳佳芬", + null, + null, + "CamstarAdmin", + "Camstar Administrator", + "GWBA-0131", + "488104800000a6d2", + "GA18032153-A00-001", + null, + null + ], + [ + 0, + 0, + 0, + "2018-04-02 21:17:10", + "0004748000002e8b", + null, + 0, + null, + null, + "2018-04-02 19:29:41", + "0004748000000004", + null, + null, + null, + 0, + "4882ca8000000133", + "GWBA-Machine Repair", + "4880bf8000000133", + "2018 093-0000000035", + "CANCELLED", + null, + null, + "48801680000000fd", + "維修單確認", + 1, + null, + null, + null, + null, + "90978", + "涂仲遠", + null, + null, + "CamstarAdmin", + "Camstar Administrator", + "GWBA-0131", + "488104800000a6d2", + "GA18032153-A00-001", + null, + null + ] + ] + }, + "DW_MES_JOBTXNHISTORY": { + "owner": "DWH", + "table_comment": null, + "row_count": 9554723, + "schema": [ + { + "column_name": "ACKNOWLEDGECOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + 
"column_id": 1 + }, + { + "column_name": "ASSIGNCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "CAUSECODEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "CAUSECODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "CHECKLISTONLY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "CLOCKONCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "ESTIMATEDDURATION", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "EXPECTEDSTARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "FROMJOBSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "HISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "HISTORYMAINLINEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + 
"column_name": "JOBID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "JOBMODELID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "JOBMODELNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "JOBORDERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "JOBORDERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "JOBSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "JOBTXNHISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "REPAIRCODEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "REPAIRCODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "STAGEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "STAGENAME", + 
"data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "STAGESEQUENCE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "SYMPTOMCODEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "SYMPTOMCODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "TOSTAGEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "TOSTAGENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "TOSTAGESEQUENCE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "TXNID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "USERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "EMPLOYEEID", + "data_type": "CHAR", + "data_length": 16, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "USER_EMPNO", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "USER_NAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "EMP_EMPNO", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "EMP_NAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "COMMENTS", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "CDONAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "CALLBYCDONAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + } + ], + "column_comments": { + "ACKNOWLEDGECOUNT": null, + "ASSIGNCOUNT": null, + "CALLBYCDONAME": "HistoryMainline", + "CAUSECODEID": null, + "CAUSECODENAME": null, + "CDONAME": "HistoryMainline", + "CHECKLISTONLY": null, + "CLOCKONCOUNT": null, + "COMMENTS": "HistoryMainline", + "EMPLOYEEID": null, + "EMP_EMPNO": "工號", + "EMP_NAME": "姓名", + "ESTIMATEDDURATION": null, + "EXPECTEDSTARTDATE": null, + "FROMJOBSTATUS": null, + "HISTORYID": null, + "HISTORYMAINLINEID": null, + "JOBID": null, + 
"JOBMODELID": null, + "JOBMODELNAME": null, + "JOBORDERID": null, + "JOBORDERNAME": null, + "JOBSTATUS": null, + "JOBTXNHISTORYID": null, + "REPAIRCODEID": null, + "REPAIRCODENAME": null, + "STAGEID": null, + "STAGENAME": null, + "STAGESEQUENCE": null, + "SYMPTOMCODEID": null, + "SYMPTOMCODENAME": null, + "TOSTAGEID": null, + "TOSTAGENAME": null, + "TOSTAGESEQUENCE": null, + "TXNDATE": null, + "TXNID": null, + "USERID": null, + "USER_EMPNO": "工號", + "USER_NAME": "姓名" + }, + "indexes": [ + [ + "JOBTXN0_HISTORYMAINLINEID", + "NONUNIQUE", + "HISTORYMAINLINEID" + ], + [ + "JOBTXN0_JOBID", + "NONUNIQUE", + "JOBID" + ], + [ + "JOBTXN0_JOBTXNHISTORYID", + "NONUNIQUE", + "JOBTXNHISTORYID" + ], + [ + "JOBTXN0_TXNDATE", + "NONUNIQUE", + "TXNDATE" + ] + ], + "sample_columns": [ + "ACKNOWLEDGECOUNT", + "ASSIGNCOUNT", + "CAUSECODEID", + "CAUSECODENAME", + "CHECKLISTONLY", + "CLOCKONCOUNT", + "ESTIMATEDDURATION", + "EXPECTEDSTARTDATE", + "FROMJOBSTATUS", + "HISTORYID", + "HISTORYMAINLINEID", + "JOBID", + "JOBMODELID", + "JOBMODELNAME", + "JOBORDERID", + "JOBORDERNAME", + "JOBSTATUS", + "JOBTXNHISTORYID", + "REPAIRCODEID", + "REPAIRCODENAME", + "STAGEID", + "STAGENAME", + "STAGESEQUENCE", + "SYMPTOMCODEID", + "SYMPTOMCODENAME", + "TOSTAGEID", + "TOSTAGENAME", + "TOSTAGESEQUENCE", + "TXNID", + "TXNDATE", + "USERID", + "EMPLOYEEID", + "USER_EMPNO", + "USER_NAME", + "EMP_EMPNO", + "EMP_NAME", + "COMMENTS", + "CDONAME", + "CALLBYCDONAME" + ], + "sample_data": [ + [ + 1, + 1, + "4880c080000004cb", + "EAP", + 0, + 2, + null, + null, + "INPROGRESS", + "4880168000000115", + "0009888000336d11", + "4882ca80000000ca", + "4880be800000000d", + "GWBA-Machine Repair", + "4880bf80000000ca", + "2018 086-0000000024", + "INPROGRESS", + "4881e280000000ec", + "4880c180000005de", + "EAP-CHECK", + "4880bd8000000004", + "PD-產品檢驗", + 3, + "4880c280000004cb", + "EAP", + "4880bd8000000006", + "結單", + 5, + "40000000001b64cf", + "2018-03-27 09:14:32", + "0004748000002e7a", + "0004748000002e7a", + "90351", + 
"謝文介", + "90351", + "謝文介", + null, + "JobProgress", + "JobProgress" + ], + [ + 1, + 1, + "4880c080000004cb", + "EAP", + 0, + 2, + null, + null, + "INPROGRESS", + "4880168000000115", + "0009888000336d12", + "4882ca80000000ca", + "4880be800000000d", + "GWBA-Machine Repair", + "4880bf80000000ca", + "2018 086-0000000024", + "ACTIVE", + "4881e280000000ed", + "4880c180000005de", + "EAP-CHECK", + "4880bd8000000006", + "結單", + 5, + "4880c280000004cb", + "EAP", + "4880bd8000000006", + "結單", + 5, + "40000000001b64cf", + "2018-03-27 09:14:32", + "0004748000002e7a", + "0004748000002e7a", + "90351", + "謝文介", + "90351", + "謝文介", + null, + "JobClockOff", + "JobProgress" + ], + [ + 0, + 1, + null, + null, + 0, + 0, + null, + null, + "CREATED", + "48801680000000fd", + "0009888000347fa3", + "4882ca80000000fc", + "4880be800000000d", + "GWBA-Machine Repair", + "4880bf80000000fc", + "2018 087-0000000027", + "ASSIGNED", + "4881e280000000fd", + null, + null, + "4880bd8000000002", + "維修單確認", + 1, + null, + null, + "4880bd8000000002", + "維修單確認", + 1, + "40000000001be1ee", + "2018-03-27 17:05:11", + "0004748000002e7a", + "0004748000002e7a", + "90351", + "謝文介", + "90351", + "謝文介", + null, + "JobAssign", + "JobAssign" + ] + ] + }, + "DW_MES_LOTMATERIALSHISTORY": { + "owner": "DWH", + "table_comment": null, + "row_count": 17829931, + "schema": [ + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "FINISHEDRUNCARD", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "PJ_WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "WORKCENTERID", + "data_type": "CHAR", + 
"data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "SPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "MATERIALPARTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "DESCRIPTION", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "MATERIALLOTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "EQUIPMENTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "EQUIPMENTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "QTYREQUIRED", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "CONSUMEFACTOR", + "data_type": "NUMBER", + "data_length": 22, 
+ "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "QTYCONSUMED", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "VENDORLOTNUMBER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "MANUFACTUREREXPIRYDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "WITHDRAWALTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "THAWINGTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "EXPIRYTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "CONSUMEMATERIALSHISTORYDETAIID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "LAST_UPDATED_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "PRIMARY_CATEGORY", + "data_type": "VARCHAR2", + 
"data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "SECONDARY_CATEGORY", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "UOMNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + } + ], + "column_comments": { + "CONSUMEFACTOR": null, + "CONSUMEMATERIALSHISTORYDETAIID": null, + "CONTAINERID": null, + "DESCRIPTION": null, + "EQUIPMENTID": null, + "EQUIPMENTNAME": null, + "EXPIRYTIMESTAMP": null, + "FINISHEDRUNCARD": null, + "LAST_UPDATED_DATE": null, + "MANUFACTUREREXPIRYDATE": null, + "MATERIALLOTNAME": null, + "MATERIALPARTNAME": null, + "PJ_WORKORDER": null, + "PRIMARY_CATEGORY": null, + "QTYCONSUMED": null, + "QTYREQUIRED": null, + "SECONDARY_CATEGORY": null, + "SPECID": null, + "SPECNAME": null, + "THAWINGTIMESTAMP": null, + "TXNDATE": null, + "UOMNAME": null, + "VENDORLOTNUMBER": null, + "WITHDRAWALTIMESTAMP": null, + "WORKCENTERID": null, + "WORKCENTERNAME": null + }, + "indexes": [ + [ + "DW_MES_LOTMATERIALSHISTORY_IDX1", + "NONUNIQUE", + "CONTAINERID" + ], + [ + "DW_MES_LOTMATERIALSHISTORY_IDX2", + "NONUNIQUE", + "PJ_WORKORDER" + ], + [ + "DW_MES_LOTMATERIALSHISTORY_IDX3", + "NONUNIQUE", + "MATERIALPARTNAME" + ], + [ + "DW_MES_LOTMATERIALSHISTORY_IDX4", + "NONUNIQUE", + "MATERIALLOTNAME" + ] + ], + "sample_columns": [ + "CONTAINERID", + "FINISHEDRUNCARD", + "PJ_WORKORDER", + "WORKCENTERID", + "WORKCENTERNAME", + "SPECID", + "SPECNAME", + "MATERIALPARTNAME", + "DESCRIPTION", + "MATERIALLOTNAME", + "EQUIPMENTID", + "EQUIPMENTNAME", + "QTYREQUIRED", + "CONSUMEFACTOR", + "QTYCONSUMED", + "TXNDATE", + "VENDORLOTNUMBER", + "MANUFACTUREREXPIRYDATE", + "WITHDRAWALTIMESTAMP", + "THAWINGTIMESTAMP", + "EXPIRYTIMESTAMP", + 
"CONSUMEMATERIALSHISTORYDETAIID", + "LAST_UPDATED_DATE", + "PRIMARY_CATEGORY", + "SECONDARY_CATEGORY", + "UOMNAME" + ], + "sample_data": [ + [ + "48810380000a7f6b", + null, + "GA20080006", + "0005dc8000001e2f", + "焊_DB_料", + "48812c8000000004", + "Eutectic D/B", + "LEF000016", + "腳架/SOD-323/OPTION 1/REEL/A42", + "207010-10-001", + "4880168000000072", + "GDBS-0202", + 1381.417, + 1.02784, + 1379.417, + "2020-08-13 13:40:54", + "207010-10", + "2021-08-02 00:00:00", + "2020-08-10 22:48:14", + null, + null, + "4881b880003b95cb", + null, + null, + null, + null + ], + [ + "4881048000104251", + "0N03C4077", + "GA20101311", + "0005dc8000001de4", + "成型_料", + "48812c8000000026", + "M.G.P Mold", + "COM000182", + "成型膠/ELER-8-130PJ9/φ13/4.5g/", + "E008F03-003", + "488016800000033f", + "GPRK-0067", + 408.4992, + 0.01576, + 408.4992, + "2020-11-02 10:08:15", + "E008F03", + "2021-02-17 00:00:00", + "2020-11-01 02:32:49", + "2020-11-02 02:32:49", + "2020-11-03 02:32:49", + "4881b8800041e09d", + null, + null, + null, + null + ], + [ + "48810380000670d6", + "01015L017;01095K247", + "GA19121762", + "0005dc8000000010", + "TMTT", + "48812c8000000055", + "3M", + "PAC000056", + "CARRIER TAPE/SOD-123/#3000/985M/8/*4mm/", + "0006536574-1", + "48801680000003d2", + "GTMU-0021", + 3273100.132, + 42.6407, + 3273100.132, + "2020-01-01 19:29:07", + null, + null, + null, + null, + null, + "4881b880002aeb7e", + null, + null, + null, + null + ] + ] + }, + "DW_MES_LOTREJECTHISTORY": { + "owner": "DWH", + "table_comment": null, + "row_count": 15786025, + "schema": [ + { + "column_name": "HISTORYMAINLINEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "FINISHEDRUNCARD", + 
"data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "PJ_WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "WORKCENTERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "SPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "EQUIPMENTNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "MOVEINTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "MOVEINQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "MOVEINQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "EMPLOYEENAME", + "data_type": "VARCHAR2", + 
"data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "SHIFTNAME", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "COMMENTS", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "LOSSREASONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "LOSSREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "WAFERSCRIBENUMBER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "REJECTCATEGORYNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "REJECTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "STANDBYQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "QTYTOPROCESS", + "data_type": "NUMBER", + "data_length": 
22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "INPROCESSQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "PROCESSEDQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "DEFECTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "WAFERREJECTSQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "REJECTCAUSE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "REJECTCOMMENT", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "PJ_WAFERID1", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "PJ_WAFERID2", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "PJ_WAFERID3", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "LAST_UPDATED_DATE", + "data_type": "DATE", + "data_length": 7, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "EMPZONE", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "WIPTRACKINGGROUPKEYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "FROMQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "FROMQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "QTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "QTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + }, + { + "column_name": "NOWSPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + }, + { + "column_name": "NOWSPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 41 + }, + { + "column_name": "NOWWORKCENTERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 42 + }, + { + "column_name": "NOWWORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + 
"data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 43 + } + ], + "column_comments": { + "COMMENTS": null, + "CONTAINERID": null, + "DEFECTQTY": null, + "EMPLOYEENAME": null, + "EMPZONE": null, + "EQUIPMENTNAME": null, + "FINISHEDRUNCARD": null, + "FROMQTY": null, + "FROMQTY2": null, + "HISTORYMAINLINEID": null, + "INPROCESSQTY": null, + "LAST_UPDATED_DATE": null, + "LOSSREASONID": null, + "LOSSREASONNAME": null, + "MOVEINQTY": null, + "MOVEINQTY2": null, + "MOVEINTIMESTAMP": null, + "NOWSPECID": null, + "NOWSPECNAME": null, + "NOWWORKCENTERID": null, + "NOWWORKCENTERNAME": null, + "PJ_WAFERID1": null, + "PJ_WAFERID2": null, + "PJ_WAFERID3": null, + "PJ_WORKORDER": null, + "PROCESSEDQTY": null, + "QTY": null, + "QTY2": null, + "QTYTOPROCESS": null, + "REJECTCATEGORYNAME": null, + "REJECTCAUSE": null, + "REJECTCOMMENT": null, + "REJECTQTY": null, + "SHIFTNAME": null, + "SPECID": null, + "SPECNAME": null, + "STANDBYQTY": null, + "TXNDATE": null, + "WAFERREJECTSQTY": null, + "WAFERSCRIBENUMBER": null, + "WIPTRACKINGGROUPKEYID": null, + "WORKCENTERID": null, + "WORKCENTERNAME": null + }, + "indexes": [ + [ + "DW_MES_LOTREJECTHISTORY_IDX1", + "NONUNIQUE", + "CONTAINERID" + ], + [ + "DW_MES_LOTREJECTHISTORY_IDX2", + "NONUNIQUE", + "SPECID" + ], + [ + "DW_MES_LOTREJECTHISTORY_IDX3", + "NONUNIQUE", + "HISTORYMAINLINEID" + ], + [ + "DW_MES_LOTREJECTHISTORY_IDX4", + "NONUNIQUE", + "TXNDATE" + ], + [ + "DW_MES_LOTREJECTHISTORY_IDX5", + "NONUNIQUE", + "WIPTRACKINGGROUPKEYID" + ] + ], + "sample_columns": [ + "HISTORYMAINLINEID", + "CONTAINERID", + "FINISHEDRUNCARD", + "PJ_WORKORDER", + "WORKCENTERID", + "WORKCENTERNAME", + "SPECID", + "SPECNAME", + "EQUIPMENTNAME", + "MOVEINTIMESTAMP", + "MOVEINQTY", + "MOVEINQTY2", + "EMPLOYEENAME", + "SHIFTNAME", + "TXNDATE", + "COMMENTS", + "LOSSREASONID", + "LOSSREASONNAME", + "WAFERSCRIBENUMBER", + "REJECTCATEGORYNAME", + "REJECTQTY", + "STANDBYQTY", + "QTYTOPROCESS", + "INPROCESSQTY", + "PROCESSEDQTY", + 
"DEFECTQTY", + "WAFERREJECTSQTY", + "REJECTCAUSE", + "REJECTCOMMENT", + "PJ_WAFERID1", + "PJ_WAFERID2", + "PJ_WAFERID3", + "LAST_UPDATED_DATE", + "EMPZONE", + "WIPTRACKINGGROUPKEYID", + "FROMQTY", + "FROMQTY2", + "QTY", + "QTY2", + "NOWSPECID", + "NOWSPECNAME", + "NOWWORKCENTERID", + "NOWWORKCENTERNAME" + ], + "sample_data": [ + [ + "00098880007217f6", + "488104800001011c", + "84185K527", + "GA18040871", + "0005dc8000000010", + "TMTT", + "48812c8000000052", + "LT", + "GTMT-0103", + "2018-04-18 17:12:02", + 56305, + 0, + "阮清福", + "N", + "2018-04-19 02:09:37", + null, + "00102c8000000179", + "CONT", + null, + null, + 0, + 0, + 0, + 0, + 290, + 0, + 0, + null, + null, + null, + null, + null, + null, + null, + "48825f80000a2c41", + 56305, + 0, + 54831, + 0, + "48812c8000000052", + "LT", + "0005dc8000000010", + "TMTT" + ], + [ + "000988800075c1f1", + "488104800001178c", + "84195L117;84205K107", + "GA18041197", + "0005dc8000000010", + "TMTT", + "48812c8000000052", + "LT", + "GTMU-0020", + "2018-04-20 05:44:26", + 76800, + 0, + "陳蒕茵", + "D", + "2018-04-20 09:17:18", + null, + "00102c800000001e", + "IR PAT", + null, + null, + 0, + 0, + 0, + 0, + 10, + 0, + 0, + null, + null, + null, + null, + null, + null, + null, + "48825f80000a9092", + 64442, + 0, + 63709, + 0, + "48812c8000000052", + "LT", + "0005dc8000000010", + "TMTT" + ], + [ + "0009888001695793", + "4881048000034cf9", + "8717CG057", + "GA18070797", + "0005dc8000000010", + "TMTT", + "48812c8000000052", + "LT", + "GTMA-0132", + "2018-07-17 07:00:04", + 212232, + 0, + "張秀如", + "D", + "2018-07-17 14:17:53", + null, + "00102c8000001082", + "250_VF", + null, + null, + 0, + 0, + 0, + 0, + 3, + 0, + 0, + null, + null, + null, + null, + null, + null, + null, + "48825f80002120c1", + 212232, + 0, + 211057, + 0, + "48812c8000000052", + "LT", + "0005dc8000000010", + "TMTT" + ] + ] + }, + "DW_MES_LOTWIPDATAHISTORY": { + "owner": "DWH", + "table_comment": null, + "row_count": 77960216, + "schema": [ + { + "column_name": 
"CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "FINISHEDRUNCARD", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "PJ_WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "WORKCENTERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "SPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "EQUIPMENTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "EQUIPMENTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "EMPLOYEENAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "SERVICENAME", + 
"data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "TXNTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "WIPDATANAMEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "WIPDATANAMENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "WIPDATAVALUE", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "PJ_SPCDATARESULT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "WIPLOTHISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "LAST_UPDATED_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "PROCESSTYPENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "WAFERSCRIBENUMBER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + } + ], + "column_comments": { + 
"CONTAINERID": null, + "EMPLOYEENAME": null, + "EQUIPMENTID": null, + "EQUIPMENTNAME": null, + "FINISHEDRUNCARD": null, + "LAST_UPDATED_DATE": null, + "PJ_SPCDATARESULT": null, + "PJ_WORKORDER": null, + "PROCESSTYPENAME": null, + "SERVICENAME": null, + "SPECID": null, + "SPECNAME": null, + "TXNTIMESTAMP": null, + "WAFERSCRIBENUMBER": null, + "WIPDATANAMEID": null, + "WIPDATANAMENAME": null, + "WIPDATAVALUE": null, + "WIPLOTHISTORYID": null, + "WORKCENTERID": null, + "WORKCENTERNAME": null + }, + "indexes": [ + [ + "DW_MES_LOTWIPDATAHISTORY_IDX1", + "NONUNIQUE", + "CONTAINERID" + ], + [ + "DW_MES_LOTWIPDATAHISTORY_IDX2", + "NONUNIQUE", + "WIPLOTHISTORYID" + ], + [ + "DW_MES_LOTWIPDATAHISTORY_IDX3", + "NONUNIQUE", + "PJ_WORKORDER" + ], + [ + "DW_MES_LOTWIPDATAHISTORY_IDX4", + "NONUNIQUE", + "TXNTIMESTAMP" + ] + ], + "sample_columns": [ + "CONTAINERID", + "FINISHEDRUNCARD", + "PJ_WORKORDER", + "WORKCENTERID", + "WORKCENTERNAME", + "SPECID", + "SPECNAME", + "EQUIPMENTID", + "EQUIPMENTNAME", + "EMPLOYEENAME", + "SERVICENAME", + "TXNTIMESTAMP", + "WIPDATANAMEID", + "WIPDATANAMENAME", + "WIPDATAVALUE", + "PJ_SPCDATARESULT", + "WIPLOTHISTORYID", + "LAST_UPDATED_DATE", + "PROCESSTYPENAME", + "WAFERSCRIBENUMBER" + ], + "sample_data": [ + [ + "488104800006a5d5", + "911857707", + "GA19011132", + "0005dc8000000007", + "成型", + "48812c8000000029", + "成型-END", + null, + null, + "DF2", + "LotMoveOut", + "2019-01-16 22:43:45", + "4880578000000035", + "Leadframe", + "240", + null, + "48827380004faeb8", + null, + null, + null + ], + [ + "4881048000069913", + null, + "GA18122339", + "0005dc8000000007", + "成型", + "48812c8000000029", + "成型-END", + null, + null, + "楊勝雄", + "LotMoveOut", + "2019-01-10 13:01:35", + "4880578000000035", + "Leadframe", + "0", + null, + "48827380004df503", + null, + null, + null + ], + [ + "488103800001c6d8", + "91235L017", + "GA19011770", + "0005dc8000000008", + "去膠", + "48812c8000000031", + "去膠-END", + null, + null, + "DG2", + "LotMoveOut", + "2019-01-22 
20:57:27", + "4880578000000035", + "Leadframe", + "240", + null, + "4882738000513357", + null, + null, + null + ] + ] + }, + "DW_MES_LOTWIPHISTORY": { + "owner": "DWH", + "table_comment": null, + "row_count": 53454213, + "schema": [ + { + "column_name": "WIPLOTHISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "WIPEQUIPMENTHISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "FINISHEDRUNCARD", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "PJ_WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "WORKCENTERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "SPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 9 + }, + { + "column_name": "PJ_WAFERID1", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "PJ_WAFERID2", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "PJ_WAFERID3", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "WORKFLOWNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "PRODUCTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "DESCRIPTION", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "DATECODE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "MOVEINTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "MOVEINQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "MOVEOUTTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 
19 + }, + { + "column_name": "MOVEOUTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "EQUIPMENTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "EQUIPMENTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "TRACKINTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "TRACKINQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "TRACKINEMPLOYEENAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "TRACKOUTTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "TRACKOUTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "TRACKOUTEMPLOYEENAME", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "FLAGNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 
29 + }, + { + "column_name": "CARRIERNAME", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "LAST_UPDATED_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": "SYSDATE\n", + "column_id": 31 + }, + { + "column_name": "LAST_SYNC_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "PROCESSTYPENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "PACKAGE_LF", + "data_type": "VARCHAR2", + "data_length": 60, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "PROCESSSPECNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "TRACKINEMPZONE", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "TRACKOUTEMPZONE", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "MOVEINQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "MOVEOUTQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 39 + }, + { + "column_name": "TRACKINQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + }, + { + "column_name": "TRACKOUTQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 41 + }, + { + "column_name": "WIPTRACKINGGROUPKEYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 42 + } + ], + "column_comments": { + "CARRIERNAME": null, + "CONTAINERID": null, + "DATECODE": null, + "DESCRIPTION": null, + "EQUIPMENTID": null, + "EQUIPMENTNAME": null, + "FINISHEDRUNCARD": null, + "FLAGNAME": null, + "LAST_SYNC_DATE": null, + "LAST_UPDATED_DATE": null, + "MOVEINQTY": null, + "MOVEINQTY2": null, + "MOVEINTIMESTAMP": null, + "MOVEOUTQTY": null, + "MOVEOUTQTY2": null, + "MOVEOUTTIMESTAMP": null, + "PACKAGE_LF": null, + "PJ_WAFERID1": null, + "PJ_WAFERID2": null, + "PJ_WAFERID3": null, + "PJ_WORKORDER": null, + "PROCESSSPECNAME": null, + "PROCESSTYPENAME": null, + "PRODUCTNAME": null, + "SPECID": null, + "SPECNAME": null, + "TRACKINEMPLOYEENAME": null, + "TRACKINEMPZONE": null, + "TRACKINQTY": null, + "TRACKINQTY2": null, + "TRACKINTIMESTAMP": null, + "TRACKOUTEMPLOYEENAME": null, + "TRACKOUTEMPZONE": null, + "TRACKOUTQTY": null, + "TRACKOUTQTY2": null, + "TRACKOUTTIMESTAMP": null, + "WIPEQUIPMENTHISTORYID": null, + "WIPLOTHISTORYID": null, + "WIPTRACKINGGROUPKEYID": null, + "WORKCENTERID": null, + "WORKCENTERNAME": null, + "WORKFLOWNAME": null + }, + "indexes": [ + [ + "DW_MES_LOTWIPHISTORY_IDX1", + "NONUNIQUE", + "CONTAINERID" + ], + [ + "DW_MES_LOTWIPHISTORY_IDX2", + "NONUNIQUE", + "WIPLOTHISTORYID" + ], + [ + "DW_MES_LOTWIPHISTORY_IDX3", + "NONUNIQUE", + "TRACKINTIMESTAMP" + ], + [ + "DW_MES_LOTWIPHISTORY_IDX4", + "NONUNIQUE", + "PJ_WORKORDER" 
+ ], + [ + "DW_MES_LOTWIPHISTORY_IDX5", + "NONUNIQUE", + "DATECODE" + ], + [ + "DW_MES_LOTWIPHISTORY_IDX6", + "NONUNIQUE", + "WORKCENTERID" + ], + [ + "DW_MES_LOTWIPHISTORY_IDX7", + "NONUNIQUE", + "WIPEQUIPMENTHISTORYID" + ], + [ + "DW_MES_LOTWIPHISTORY_IDX8", + "NONUNIQUE", + "MOVEINTIMESTAMP" + ], + [ + "DW_MES_LOTWIPHISTORY_IDX9", + "NONUNIQUE", + "WIPTRACKINGGROUPKEYID" + ] + ], + "sample_columns": [ + "WIPLOTHISTORYID", + "WIPEQUIPMENTHISTORYID", + "CONTAINERID", + "FINISHEDRUNCARD", + "PJ_WORKORDER", + "WORKCENTERID", + "WORKCENTERNAME", + "SPECID", + "SPECNAME", + "PJ_WAFERID1", + "PJ_WAFERID2", + "PJ_WAFERID3", + "WORKFLOWNAME", + "PRODUCTNAME", + "DESCRIPTION", + "DATECODE", + "MOVEINTIMESTAMP", + "MOVEINQTY", + "MOVEOUTTIMESTAMP", + "MOVEOUTQTY", + "EQUIPMENTID", + "EQUIPMENTNAME", + "TRACKINTIMESTAMP", + "TRACKINQTY", + "TRACKINEMPLOYEENAME", + "TRACKOUTTIMESTAMP", + "TRACKOUTQTY", + "TRACKOUTEMPLOYEENAME", + "FLAGNAME", + "CARRIERNAME", + "LAST_UPDATED_DATE", + "LAST_SYNC_DATE", + "PROCESSTYPENAME", + "PACKAGE_LF", + "PROCESSSPECNAME", + "TRACKINEMPZONE", + "TRACKOUTEMPZONE", + "MOVEINQTY2", + "MOVEOUTQTY2", + "TRACKINQTY2", + "TRACKOUTQTY2", + "WIPTRACKINGGROUPKEYID" + ], + "sample_data": [ + [ + "48827380008d8a90", + "48827880004ade8e", + "488104800009e390", + null, + "GA19100121", + "0005dc8000000005", + "焊接_DB", + "48812c800000000a", + "Solder Paste D/B", + null, + null, + null, + "PCH_SOD-123FL", + "ES1002FL_R1_00001", + "/E1D/TR/7\"/HF/3K/SOD-123FL/ER/SOD/ESM-10AAFL/ESM10FL-QI01/PJ///", + null, + "2019-10-09 01:03:34", + 53760, + "2019-10-09 04:27:54", + 53760, + "4880168000000022", + "GDBA-0134", + "2019-10-09 01:37:10", + 26880, + "80733/倪帕", + "2019-10-09 04:27:21", + 26880, + "23815/林侑潔", + null, + null, + "2024-01-01 00:00:00", + "2024-01-01 00:00:00", + "NORMAL", + "SOD-123FL", + "PCH_SOD-123FL", + "製造一部-製造二課-焊接E(N)", + "銲接E區(N)", + null, + null, + null, + null, + "48825f80008d8c1d" + ], + [ + "4882738001737b47", + null, + 
"488104800016c2b8", + "1D16C5287", + "GA21112361", + "0005dc800000000a", + "水吹砂", + "48812c800000003c", + "吹砂-END", + "NA", + null, + null, + "PCU(片狀Reflow)_SMAF", + "SXM54ALF_R1_000A1", + "PJ/SXM54ALF/TR/7\"/HF/3K/SMAF/SKY/SMD/SSMMF-50AA/SSMMF50-QI03/1480(群創光電)///", + "1507", + "2021-12-14 20:21:49", + 41600, + "2021-12-14 20:31:55", + 41600, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "2024-01-01 00:00:00", + "2024-01-01 00:00:00", + null, + "SMAF", + "PCU(片狀Reflow)_SMAF", + null, + null, + null, + null, + null, + null, + "48825f800173a3c6" + ], + [ + "488273800174b5b7", + "4882788000c75723", + "4881038000135870", + "1D204K537;1D204K867", + "GA21121088", + "0005dc8000001e30", + "焊_WB_料", + "48812c8000000020", + "銅線製程", + "NA", + null, + null, + "UAC_SOT-323", + "PE-BAT54AW-L_R1_000A1", + "/L42/TR/7\"/HF/3K/SOT-323/SKY/SOT/SSM-02TACA/SSM02-QI59/0474///", + "2E", + "2021-12-18 14:24:01", + 84480, + "2021-12-18 18:47:05", + 84480, + "4880168000000198", + "GWBA-0033", + "2021-12-18 14:25:21", + 84480, + "80890/替妲拉", + "2021-12-18 18:47:05", + 84396, + "80890/替妲拉", + null, + null, + "2024-01-01 00:00:00", + "2024-01-01 00:00:00", + "NORMAL", + "SOT-323", + "UAC_SOT-323", + "銲接A區(D)", + "銲接A區(D)", + null, + null, + null, + null, + "48825f800174cfb4" + ] + ] + }, + "DW_MES_LOT_V": { + "owner": "DWH", + "table_comment": null, + "row_count": 9468, + "schema": [ + { + "column_name": "NO", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "N", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "LOTID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + 
"column_name": "QTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "QTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "STATUS", + "data_type": "VARCHAR2", + "data_length": 10, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "HOLDREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "CURRENTHOLDCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "STARTREASON", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "OWNER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "STARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "UTS", + "data_type": "VARCHAR2", + "data_length": 10, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "STARTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "STARTQTY2", + "data_type": "NUMBER", + 
"data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "FIRSTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "PRODUCT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "STEP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "WORKCENTERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "HOLDLOCATION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "AGEBYDAYS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "REMAINTIME", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "MOVEINQTY", + "data_type": "NUMBER", + "data_length": 22, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "MOVEINQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "MOVEINTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "MOVEINUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "EQUIPMENTCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "EQUIPMENTS", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "JOBCREATEDATE", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "JOBCOMMENTS", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "MATERIALTYPE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "PRODUCTLINENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "PACKAGE_LEF", + "data_type": "VARCHAR2", + "data_length": 4000, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "PB_FUNCTION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "WORKFLOWNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "BOP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "DATECODE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "LEADFRAMENAME", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + }, + { + "column_name": "LEADFRAMEOPTION", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + }, + { + "column_name": "COMNAME", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 41 + }, + { + "column_name": "LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 42 + }, + { + "column_name": "PJ_FUNCTION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 43 + }, + { + "column_name": "PJ_TYPE", + "data_type": "VARCHAR2", + "data_length": 40, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 44 + }, + { + "column_name": "WAFERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 45 + }, + { + "column_name": "WAFERLOT", + "data_type": "VARCHAR2", + "data_length": 160, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 46 + }, + { + "column_name": "EVENTNAME", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 47 + }, + { + "column_name": "OCCURRENCEDATE", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 48 + }, + { + "column_name": "RELEASETIME", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 49 + }, + { + "column_name": "RELEASEEMP", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 50 + }, + { + "column_name": "RELEASEREASON", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 51 + }, + { + "column_name": "COMMENT_HOLD", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 52 + }, + { + "column_name": "CONTAINERCOMMENTS", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 53 + }, + { + "column_name": "COMMENT_DATE", + "data_type": "DATE", + "data_length": 
7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 54 + }, + { + "column_name": "COMMENT_EMP", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 55 + }, + { + "column_name": "COMMENT_FUTURE", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 56 + }, + { + "column_name": "HOLDEMP", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 57 + }, + { + "column_name": "DEPTNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 58 + }, + { + "column_name": "PJ_PRODUCEREGION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 59 + }, + { + "column_name": "WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 60 + }, + { + "column_name": "PRIORITYCODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 61 + }, + { + "column_name": "SPECSEQUENCE", + "data_type": "VARCHAR2", + "data_length": 10, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 62 + }, + { + "column_name": "WORKCENTERSEQUENCE", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 63 + }, + { + "column_name": "TMTT_R", + "data_type": "CHAR", + "data_length": 1, 
+ "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 64 + }, + { + "column_name": "WAFER_FACTOR", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 65 + }, + { + "column_name": "WORKCENTER_GROUP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 66 + }, + { + "column_name": "WORKCENTERSEQUENCE_GROUP", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 67 + }, + { + "column_name": "WORKCENTER_SHORT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 68 + }, + { + "column_name": "EQUIPMENTNAME", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 69 + }, + { + "column_name": "SYS_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 70 + } + ], + "column_comments": { + "AGEBYDAYS": null, + "BOP": null, + "COMMENT_DATE": null, + "COMMENT_EMP": null, + "COMMENT_FUTURE": null, + "COMMENT_HOLD": null, + "COMNAME": null, + "CONTAINERCOMMENTS": null, + "CONTAINERID": null, + "CURRENTHOLDCOUNT": null, + "DATECODE": null, + "DEPTNAME": null, + "EQUIPMENTCOUNT": null, + "EQUIPMENTNAME": null, + "EQUIPMENTS": null, + "EVENTNAME": null, + "FIRSTNAME": null, + "HOLDEMP": null, + "HOLDLOCATION": null, + "HOLDREASONNAME": null, + "JOBCOMMENTS": null, + "JOBCREATEDATE": null, + "LEADFRAMENAME": null, + "LEADFRAMEOPTION": null, + "LOCATIONNAME": null, + "LOTID": null, + "MATERIALTYPE": null, + "MOVEINQTY": null, + 
"MOVEINQTY2": null, + "MOVEINTIMESTAMP": null, + "MOVEINUSERNAME": null, + "NO": null, + "OCCURRENCEDATE": null, + "OWNER": null, + "PACKAGE_LEF": null, + "PB_FUNCTION": null, + "PJ_FUNCTION": null, + "PJ_PRODUCEREGION": null, + "PJ_TYPE": null, + "PRIORITYCODENAME": null, + "PRODUCT": null, + "PRODUCTLINENAME": null, + "QTY": null, + "QTY2": null, + "RELEASEEMP": null, + "RELEASEREASON": null, + "RELEASETIME": null, + "REMAINTIME": null, + "SPECNAME": null, + "SPECSEQUENCE": null, + "STARTDATE": null, + "STARTQTY": null, + "STARTQTY2": null, + "STARTREASON": null, + "STATUS": null, + "STEP": null, + "SYS_DATE": null, + "TMTT_R": null, + "UTS": null, + "WAFERLOT": null, + "WAFERNAME": null, + "WAFER_FACTOR": null, + "WORKCENTERID": null, + "WORKCENTERNAME": null, + "WORKCENTERSEQUENCE": null, + "WORKCENTERSEQUENCE_GROUP": null, + "WORKCENTER_GROUP": null, + "WORKCENTER_SHORT": null, + "WORKFLOWNAME": null, + "WORKORDER": null + }, + "indexes": [], + "sample_columns": [ + "NO", + "CONTAINERID", + "LOTID", + "QTY", + "QTY2", + "STATUS", + "HOLDREASONNAME", + "CURRENTHOLDCOUNT", + "STARTREASON", + "OWNER", + "STARTDATE", + "UTS", + "STARTQTY", + "STARTQTY2", + "FIRSTNAME", + "PRODUCT", + "STEP", + "SPECNAME", + "WORKCENTERID", + "WORKCENTERNAME", + "HOLDLOCATION", + "AGEBYDAYS", + "REMAINTIME", + "MOVEINQTY", + "MOVEINQTY2", + "MOVEINTIMESTAMP", + "MOVEINUSERNAME", + "EQUIPMENTCOUNT", + "EQUIPMENTS", + "JOBCREATEDATE", + "JOBCOMMENTS", + "MATERIALTYPE", + "PRODUCTLINENAME", + "PACKAGE_LEF", + "PB_FUNCTION", + "WORKFLOWNAME", + "BOP", + "DATECODE", + "LEADFRAMENAME", + "LEADFRAMEOPTION", + "COMNAME", + "LOCATIONNAME", + "PJ_FUNCTION", + "PJ_TYPE", + "WAFERNAME", + "WAFERLOT", + "EVENTNAME", + "OCCURRENCEDATE", + "RELEASETIME", + "RELEASEEMP", + "RELEASEREASON", + "COMMENT_HOLD", + "CONTAINERCOMMENTS", + "COMMENT_DATE", + "COMMENT_EMP", + "COMMENT_FUTURE", + "HOLDEMP", + "DEPTNAME", + "PJ_PRODUCEREGION", + "WORKORDER", + "PRIORITYCODENAME", + "SPECSEQUENCE", + 
"WORKCENTERSEQUENCE", + "TMTT_R", + "WAFER_FACTOR", + "WORKCENTER_GROUP", + "WORKCENTERSEQUENCE_GROUP", + "WORKCENTER_SHORT", + "EQUIPMENTNAME", + "SYS_DATE" + ], + "sample_data": [ + [ + 8530, + "488103800028d1b0", + "GA25120081-A00-001", + 2650, + 0, + "ACTIVE", + null, + 0, + "NORMAL", + "量產", + "2025-12-08 18:51:20", + "2025/12/19", + 36000, + 3, + "MSKS-0808#EK57358P", + "SBM260VAL-AU_R1_000A1", + "鈦昇", + "鈦昇", + "0005dc8000000010", + "TMTT", + null, + 45.11, + 3.54, + 107988, + 0, + "2025-12-15 11:03:52", + "CamstarAdmin", + 0, + null, + null, + null, + "成品", + "SOD-123FL", + "SOD-123FL OP1", + null, + "PCU(AU)_SOD-123FL OP1", + "PCUAC", + "5O", + "LEF000097,LEF000112", + "OPTION 1 A,OPTION 1 C", + "COM000163", + "TMTT-B2", + "SKY", + "SBM260VAL-AU", + "WAF006027_CP", + "MSKS", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "E區", + "GA25120081", + "4.一般", + "2060", + "300", + "1", + 1, + "TMTT", + "300", + "TMTT", + "GSMP-0037", + "2026-01-29 13:39:41" + ], + [ + 8531, + "488103800028d278", + "GA25120025-A00-009", + 1500, + 0, + "ACTIVE", + null, + 0, + "NORMAL", + "量產", + "2025-12-08 18:06:09", + "2025/12/17", + 155520, + 1, + "TXS-16297#5ACX-3268P", + "BAV199-AU_R1_000A1", + "鈦昇", + "鈦昇", + "0005dc8000000010", + "TMTT", + null, + 44.86, + 1.29, + 76824, + 0, + "2025-12-15 16:59:52", + "15545", + 0, + null, + null, + null, + "成品", + "SOT-23", + "SOT-23 CU", + null, + "UCC(AU)_SOT-23", + "UCC10", + "4N", + "LEF000024", + "OPTION 5", + "COM000175", + "TMTT-K2", + "SWITCHING", + "BAV199-AU", + "WAF912148_CP", + "TXS", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "A棟", + "GA25120025", + "4.一般", + "2060", + "300", + "1", + 2, + "TMTT", + "300", + "TMTT", + "GSMP-0042", + "2026-01-29 13:39:41" + ], + [ + 8532, + "488103800028cb78", + "GA25120306-A00-001", + 2400, + 0, + "ACTIVE", + null, + 0, + "NORMAL", + "量產", + "2025-12-06 15:50:45", + "2025/12/16", + 77760, 
+ 1, + "TXZN-4547#5ADW-3083P", + "LVBZX84C3V3-AU_R1_007A4", + "鈦昇", + "鈦昇", + "0005dc8000000010", + "TMTT", + null, + 44.77, + 0.2, + 76894, + 0, + "2025-12-15 19:13:40", + "15545", + 0, + null, + null, + null, + "成品", + "SOT-23", + "SOT-23 CU", + null, + "UCC(AU)_SOT-23(LG)", + "UCC10", + "5O", + "LEF000023", + "OPTION 4", + "COM000175", + "TMTT-L5", + "ZENER", + "LVBZX84C3V3-AU", + "WAF911877_CP", + "TXZN", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "A棟", + "GA25120306", + "4.一般", + "2060", + "300", + "1", + 1, + "TMTT", + "300", + "TMTT", + null, + "2026-01-29 13:39:41" + ] + ] + }, + "DW_MES_MAINTENANCE": { + "owner": "DWH", + "table_comment": null, + "row_count": 52060026, + "schema": [ + { + "column_name": "RESOURCEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "HISTORYMAINLINEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "RESOURCENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "SHIFTNAME", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "LASTDATEDUE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "LASTTHRUPUTQTYDUE", + "data_type": "NUMBER", + 
"data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "LASTTHRUPUTQTYLIMIT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "LASTTHRUPUTQTYWARNING", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "MAINTENANCEREQID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "MAINTENANCEREQNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "CDOTYPEID", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "THRUPUTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "CHECKLISTACTION", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "INSTRUCTION", + "data_type": "VARCHAR2", + "data_length": 4000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "DATANAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "DATAVALUE", + "data_type": "VARCHAR2", + 
"data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "USERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "EMPLOYEENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "FULLNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "PJ_INSPECTIONLOT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "DATAPOINTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + } + ], + "column_comments": { + "CDOTYPEID": null, + "CHECKLISTACTION": null, + "DATANAME": null, + "DATAPOINTID": null, + "DATAVALUE": null, + "EMPLOYEENAME": null, + "FULLNAME": null, + "HISTORYMAINLINEID": null, + "INSTRUCTION": null, + "LASTDATEDUE": null, + "LASTTHRUPUTQTYDUE": null, + "LASTTHRUPUTQTYLIMIT": null, + "LASTTHRUPUTQTYWARNING": null, + "LOCATIONNAME": null, + "MAINTENANCEREQID": null, + "MAINTENANCEREQNAME": null, + "PJ_INSPECTIONLOT": null, + "RESOURCEID": null, + "RESOURCENAME": null, + "SHIFTNAME": null, + "THRUPUTQTY": null, + "TXNDATE": null, + "USERNAME": null + }, + "indexes": [ + [ + "DW_MES_MAINTENANCE_IDX1", + "NONUNIQUE", + 
"HISTORYMAINLINEID" + ], + [ + "DW_MES_MAINTENANCE_IDX2", + "NONUNIQUE", + "TXNDATE" + ], + [ + "DW_MES_MAINTENANCE_IDX3", + "NONUNIQUE", + "MAINTENANCEREQNAME" + ], + [ + "DW_MES_MAINTENANCE_IDX4", + "NONUNIQUE", + "RESOURCEID" + ], + [ + "DW_MES_MAINTENANCE_IDX5", + "NONUNIQUE", + "MAINTENANCEREQID" + ], + [ + "DW_MES_MAINTENANCE_IDX6", + "NONUNIQUE", + "RESOURCENAME" + ], + [ + "DW_MES_MAINTENANCE_IDX7", + "NONUNIQUE", + "CDOTYPEID" + ] + ], + "sample_columns": [ + "RESOURCEID", + "HISTORYMAINLINEID", + "RESOURCENAME", + "SHIFTNAME", + "TXNDATE", + "LASTDATEDUE", + "LASTTHRUPUTQTYDUE", + "LASTTHRUPUTQTYLIMIT", + "LASTTHRUPUTQTYWARNING", + "MAINTENANCEREQID", + "MAINTENANCEREQNAME", + "CDOTYPEID", + "THRUPUTQTY", + "CHECKLISTACTION", + "INSTRUCTION", + "DATANAME", + "DATAVALUE", + "LOCATIONNAME", + "USERNAME", + "EMPLOYEENAME", + "FULLNAME", + "PJ_INSPECTIONLOT", + "DATAPOINTID" + ], + "sample_data": [ + [ + "48801680000000a2", + "000988801b109e12", + "GDBJ-0062", + "D", + "2023-11-09 11:54:39", + "2023-11-10 10:43:04", + null, + null, + null, + "001c9f8000000815", + "GDBJ-週保養", + 7361, + null, + null, + null, + "1_溫度量測_1ST(共晶)", + "420/421", + "焊接C區", + "90805", + "90805", + "呂志賢", + null, + null + ], + [ + "48801680000000a2", + "000988801b109e12", + "GDBJ-0062", + "D", + "2023-11-09 11:54:39", + "2023-11-10 10:43:04", + null, + null, + null, + "001c9f8000000815", + "GDBJ-週保養", + 7361, + null, + null, + null, + "2_溫度量測_1ST(共晶)", + "420/423", + "焊接C區", + "90805", + "90805", + "呂志賢", + null, + null + ], + [ + "48801680000000a2", + "000988801b109e12", + "GDBJ-0062", + "D", + "2023-11-09 11:54:39", + "2023-11-10 10:43:04", + null, + null, + null, + "001c9f8000000815", + "GDBJ-週保養", + 7361, + null, + null, + null, + "3_1底板磨耗量測≦0.03mm", + "0", + "焊接C區", + "90805", + "90805", + "呂志賢", + null, + null + ] + ] + }, + "DW_MES_PARTREQUESTORDER": { + "owner": "DWH", + "table_comment": null, + "row_count": 61396, + "schema": [ + { + "column_name": "DESCRIPTION", + 
"data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "ISDONE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "JOBID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "PARTREQUESTORDERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "PARTREQUESTORDERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "REQUESTSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "REQUESTTYPE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "REQUIREACKNOWLEDGEEMAIL", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "RESOURCEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "CREATIONDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "CREATIONUSERNAME", + "data_type": "VARCHAR2", + 
"data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "LASTCHANGEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "USERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "RESOURCENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "USER_EMPNO", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "USER_NAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + } + ], + "column_comments": { + "CREATIONDATE": null, + "CREATIONUSERNAME": null, + "DESCRIPTION": null, + "ISDONE": null, + "JOBID": null, + "LASTCHANGEDATE": null, + "PARTREQUESTORDERID": null, + "PARTREQUESTORDERNAME": null, + "REQUESTSTATUS": null, + "REQUESTTYPE": null, + "REQUIREACKNOWLEDGEEMAIL": null, + "RESOURCEID": null, + "RESOURCENAME": null, + "USERID": null, + "USER_EMPNO": "工號", + "USER_NAME": "姓名" + }, + "indexes": [ + [ + "DW_MES_PARTREQUESTORDER_JOBID", + "NONUNIQUE", + "JOBID" + ], + [ + "DW_MES_PARTREQUESTORDER_RESOURCEID", + "NONUNIQUE", + "RESOURCEID" + ] + ], + "sample_columns": [ + "DESCRIPTION", + "ISDONE", + "JOBID", + "PARTREQUESTORDERID", + "PARTREQUESTORDERNAME", + "REQUESTSTATUS", + "REQUESTTYPE", + "REQUIREACKNOWLEDGEEMAIL", + "RESOURCEID", + "CREATIONDATE", + "CREATIONUSERNAME", + "LASTCHANGEDATE", + "USERID", + "RESOURCENAME", + 
"USER_EMPNO", + "USER_NAME" + ], + "sample_data": [ + [ + null, + 1, + null, + "4880718000000066", + "2018 318-0000000012", + "ISSUED", + 1, + 1, + "4880168000000039", + "2018-11-13 14:27:31", + "90351", + "2018-11-13 14:27:31", + "0004748000002e7a", + "GDBA-0157", + "90351", + "謝文介" + ], + [ + null, + 1, + null, + "4880718000000067", + "2018 318-0000000013", + "RETURNED", + 2, + 1, + "4880168000000039", + "2018-11-13 14:30:29", + "90351", + "2018-11-13 14:30:29", + "0004748000002e7a", + "GDBA-0157", + "90351", + "謝文介" + ], + [ + null, + 1, + null, + "4880718000000068", + "2018 318-0000000014", + "ISSUED", + 1, + 1, + "4880168000000039", + "2018-11-13 14:33:04", + "90351", + "2018-11-13 14:33:04", + "0004748000002e7a", + "GDBA-0157", + "90351", + "謝文介" + ] + ] + }, + "DW_MES_PJ_COMBINEDASSYLOTS": { + "owner": "DWH", + "table_comment": null, + "row_count": 1965425, + "schema": [ + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "CONTAINERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "PJ_WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "PJ_COMBINEDASSEMBLYLOTSID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "LOTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "FINISHEDNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + 
"data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "PJ_EXCESSLOTQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "PJ_GOODDIEQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "PJ_COMBINEDRATIO", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "PJ_ORIGINALGOODDIEQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "ORIGINALSTARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + } + ], + "column_comments": { + "CONTAINERID": null, + "CONTAINERNAME": null, + "FINISHEDNAME": null, + "LOTID": null, + "ORIGINALSTARTDATE": null, + "PJ_COMBINEDASSEMBLYLOTSID": null, + "PJ_COMBINEDRATIO": null, + "PJ_EXCESSLOTQTY": null, + "PJ_GOODDIEQTY": null, + "PJ_ORIGINALGOODDIEQTY": null, + "PJ_WORKORDER": null + }, + "indexes": [ + [ + "DW_MES_PJ_COMBINEDASSYLOTS_IDX1", + "NONUNIQUE", + "CONTAINERID" + ], + [ + "DW_MES_PJ_COMBINEDASSYLOTS_IDX2", + "NONUNIQUE", + "FINISHEDNAME" + ], + [ + "DW_MES_PJ_COMBINEDASSYLOTS_IDX3", + "NONUNIQUE", + "PJ_WORKORDER" + ] + ], + "sample_columns": [ + "CONTAINERID", + "CONTAINERNAME", + "PJ_WORKORDER", + "PJ_COMBINEDASSEMBLYLOTSID", + "LOTID", + "FINISHEDNAME", + "PJ_EXCESSLOTQTY", + "PJ_GOODDIEQTY", + "PJ_COMBINEDRATIO", + "PJ_ORIGINALGOODDIEQTY", + "ORIGINALSTARTDATE" + ], + "sample_data": [ + [ + "4881038000158506", + "GA22041611-A00-004", + 
"GA22041611", + "48001a80001200ea", + "4881038000158fd1", + "24255U067", + 0, + 76225, + 1, + 76225, + "2022-04-26 10:07:47" + ], + [ + "4881038000158507", + "GA22041611-A00-005", + "GA22041611", + "48001a80001200e8", + "4881038000158fd1", + "24255U067", + 0, + 65286, + 1, + 65286, + "2022-04-26 10:07:47" + ], + [ + "4881038000158509", + "GA22041611-A00-007", + "GA22041611", + "48001a80001200eb", + "4881038000158fd1", + "24255U067", + 53562, + 22919, + 0.2985, + 22919, + "2022-04-26 10:07:47" + ] + ] + }, + "DW_MES_RESOURCE": { + "owner": "DWH", + "table_comment": null, + "row_count": 91329, + "schema": [ + { + "column_name": "AUTOMATIONPLANID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "BOMBASEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "BOMID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "DESCRIPTION", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "DOCUMENTSETID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "EQUIPMENTTYPE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + 
"column_name": "FACTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "LOCATIONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "LOTCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "MACHINEGROUPID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "MAINTENANCECLASSID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "MAXLOTS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "MAXUNITS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "MULTILOTSFLAG", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "NOTES", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "OBJECTCATEGORY", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "OBJECTTYPE", + "data_type": 
"VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "PACKAGEGROUPID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "PARAMLISTID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "PARENTRESOURCEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "PRODUCTIONSTATUSID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "RECIPEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "RESOURCECOMMENTS", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "RESOURCEFAMILYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "RESOURCEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "N", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "RESOURCENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + "column_name": "SETUPACCESSID", + "data_type": "CHAR", + 
"data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "SPCSETUPID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "STATUSMODELID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "SUBEQUIPMENTLOGICALID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "TOOLPLANID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "TRAININGREQGROUPID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "UOMID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "USESPCMATRIX", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "VENDORID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "VENDORMODEL", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 37 + }, + { + "column_name": "VENDORSERIALNUMBER", + "data_type": "VARCHAR2", + "data_length": 30, + 
"data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "WIPMSGDEFMGRID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + }, + { + "column_name": "PJ_DATECODE1", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + }, + { + "column_name": "PJ_DATECODE2", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 41 + }, + { + "column_name": "PJ_FINISHEDPRODUCT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 42 + }, + { + "column_name": "PJ_OWNER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 43 + }, + { + "column_name": "PJ_PROCESSSPEC", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 44 + }, + { + "column_name": "PJ_WAFERPRODUCT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 45 + }, + { + "column_name": "PJ_WORKORDER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 46 + }, + { + "column_name": "PJ_CHECKBYHOUR", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 47 + }, + { + "column_name": "PJ_CHECKBYIDLETIME", + "data_type": "NUMBER", + 
"data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 48 + }, + { + "column_name": "PJ_CHECKBYLOT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 49 + }, + { + "column_name": "PJ_CHECKBYPRODUCT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 50 + }, + { + "column_name": "PJ_CHECKBYTYPE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 51 + }, + { + "column_name": "PJ_CHECKBYWORKORDER", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 52 + }, + { + "column_name": "PJ_VERIFYSPCRESULT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 53 + }, + { + "column_name": "PJ_ASSETSSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 54 + }, + { + "column_name": "PJ_WORKCENTER_ID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 55 + }, + { + "column_name": "PJ_AUEQUIPMENTGROUPID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 56 + }, + { + "column_name": "PJ_CONTROLLENGTH", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 57 + }, + { + "column_name": "PJ_DEPARTMENT", + "data_type": "VARCHAR2", + 
"data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 58 + }, + { + "column_name": "PJ_EMPLOYEE", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 59 + }, + { + "column_name": "PJ_ISAUEQUIPMENT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 60 + }, + { + "column_name": "PJ_LOTID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 61 + }, + { + "column_name": "PJ_SETUPACCESSID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 62 + }, + { + "column_name": "PJ_SPCSETUP", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 63 + }, + { + "column_name": "PJ_WORKCENTERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 64 + }, + { + "column_name": "PJ_AUTOMATIONLEVEL", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 65 + }, + { + "column_name": "CREATIONDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 66 + }, + { + "column_name": "CREATIONUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 67 + }, + { + "column_name": "LASTCHANGEDATE", + "data_type": "DATE", + "data_length": 
7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 68 + }, + { + "column_name": "USERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 69 + }, + { + "column_name": "AUTOMATIONPLANNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 70 + }, + { + "column_name": "LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 71 + }, + { + "column_name": "RESOURCEFAMILYNAME", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 72 + }, + { + "column_name": "VENDORNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 73 + }, + { + "column_name": "PJ_ERPVENDORID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 74 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 75 + }, + { + "column_name": "PJ_ISPRODUCTION", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 76 + }, + { + "column_name": "PJ_ISKEY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 77 + }, + { + "column_name": "PJ_ISMONITOR", + "data_type": "NUMBER", + "data_length": 22, 
+ "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 78 + } + ], + "column_comments": { + "AUTOMATIONPLANID": null, + "AUTOMATIONPLANNAME": "AUTOMATIONPLANID", + "BOMBASEID": null, + "BOMID": null, + "CONTAINERID": null, + "CREATIONDATE": null, + "CREATIONUSERNAME": null, + "DESCRIPTION": null, + "DOCUMENTSETID": null, + "EQUIPMENTTYPE": null, + "FACTORYID": null, + "LASTCHANGEDATE": null, + "LOCATIONID": null, + "LOCATIONNAME": "LOCATIONID", + "LOTCOUNT": null, + "MACHINEGROUPID": null, + "MAINTENANCECLASSID": null, + "MAXLOTS": null, + "MAXUNITS": null, + "MULTILOTSFLAG": null, + "NOTES": null, + "OBJECTCATEGORY": null, + "OBJECTTYPE": null, + "PACKAGEGROUPID": null, + "PARAMLISTID": null, + "PARENTRESOURCEID": null, + "PJ_ASSETSSTATUS": null, + "PJ_AUEQUIPMENTGROUPID": null, + "PJ_AUTOMATIONLEVEL": null, + "PJ_CHECKBYHOUR": null, + "PJ_CHECKBYIDLETIME": null, + "PJ_CHECKBYLOT": null, + "PJ_CHECKBYPRODUCT": null, + "PJ_CHECKBYTYPE": null, + "PJ_CHECKBYWORKORDER": null, + "PJ_CONTROLLENGTH": null, + "PJ_DATECODE1": null, + "PJ_DATECODE2": null, + "PJ_DEPARTMENT": null, + "PJ_EMPLOYEE": null, + "PJ_ERPVENDORID": "VENDORID", + "PJ_FINISHEDPRODUCT": null, + "PJ_ISAUEQUIPMENT": null, + "PJ_ISKEY": "20251217 add:關鍵設備", + "PJ_ISMONITOR": "20251217 add:監控設備", + "PJ_ISPRODUCTION": "20251217 add:生產設備", + "PJ_LOTID": null, + "PJ_OWNER": null, + "PJ_PROCESSSPEC": null, + "PJ_SETUPACCESSID": null, + "PJ_SPCSETUP": null, + "PJ_VERIFYSPCRESULT": null, + "PJ_WAFERPRODUCT": null, + "PJ_WORKCENTERID": null, + "PJ_WORKCENTER_ID": null, + "PJ_WORKORDER": null, + "PRODUCTIONSTATUSID": null, + "RECIPEID": null, + "RESOURCECOMMENTS": null, + "RESOURCEFAMILYID": null, + "RESOURCEFAMILYNAME": "RESOURCEFAMILYID", + "RESOURCEID": null, + "RESOURCENAME": null, + "SETUPACCESSID": null, + "SPCSETUPID": null, + "STATUSMODELID": null, + "SUBEQUIPMENTLOGICALID": null, + "TOOLPLANID": null, + "TRAININGREQGROUPID": null, + "UOMID": null, + 
"USERID": null, + "USESPCMATRIX": null, + "VENDORID": null, + "VENDORMODEL": null, + "VENDORNAME": "VENDORID", + "VENDORSERIALNUMBER": null, + "WIPMSGDEFMGRID": null, + "WORKCENTERNAME": "PJ_WORKCENTERID" + }, + "indexes": [ + [ + "OBJECT", + "NONUNIQUE", + "OBJECTCATEGORY, OBJECTTYPE" + ], + [ + "RESOURCEID", + "NONUNIQUE", + "RESOURCEID" + ] + ], + "sample_columns": [ + "AUTOMATIONPLANID", + "BOMBASEID", + "BOMID", + "CONTAINERID", + "DESCRIPTION", + "DOCUMENTSETID", + "EQUIPMENTTYPE", + "FACTORYID", + "LOCATIONID", + "LOTCOUNT", + "MACHINEGROUPID", + "MAINTENANCECLASSID", + "MAXLOTS", + "MAXUNITS", + "MULTILOTSFLAG", + "NOTES", + "OBJECTCATEGORY", + "OBJECTTYPE", + "PACKAGEGROUPID", + "PARAMLISTID", + "PARENTRESOURCEID", + "PRODUCTIONSTATUSID", + "RECIPEID", + "RESOURCECOMMENTS", + "RESOURCEFAMILYID", + "RESOURCEID", + "RESOURCENAME", + "SETUPACCESSID", + "SPCSETUPID", + "STATUSMODELID", + "SUBEQUIPMENTLOGICALID", + "TOOLPLANID", + "TRAININGREQGROUPID", + "UOMID", + "USESPCMATRIX", + "VENDORID", + "VENDORMODEL", + "VENDORSERIALNUMBER", + "WIPMSGDEFMGRID", + "PJ_DATECODE1", + "PJ_DATECODE2", + "PJ_FINISHEDPRODUCT", + "PJ_OWNER", + "PJ_PROCESSSPEC", + "PJ_WAFERPRODUCT", + "PJ_WORKORDER", + "PJ_CHECKBYHOUR", + "PJ_CHECKBYIDLETIME", + "PJ_CHECKBYLOT", + "PJ_CHECKBYPRODUCT", + "PJ_CHECKBYTYPE", + "PJ_CHECKBYWORKORDER", + "PJ_VERIFYSPCRESULT", + "PJ_ASSETSSTATUS", + "PJ_WORKCENTER_ID", + "PJ_AUEQUIPMENTGROUPID", + "PJ_CONTROLLENGTH", + "PJ_DEPARTMENT", + "PJ_EMPLOYEE", + "PJ_ISAUEQUIPMENT", + "PJ_LOTID", + "PJ_SETUPACCESSID", + "PJ_SPCSETUP", + "PJ_WORKCENTERID", + "PJ_AUTOMATIONLEVEL", + "CREATIONDATE", + "CREATIONUSERNAME", + "LASTCHANGEDATE", + "USERID", + "AUTOMATIONPLANNAME", + "LOCATIONNAME", + "RESOURCEFAMILYNAME", + "VENDORNAME", + "PJ_ERPVENDORID", + "WORKCENTERNAME", + "PJ_ISPRODUCTION", + "PJ_ISKEY", + "PJ_ISMONITOR" + ], + "sample_data": [ + [ + null, + null, + null, + null, + "SOT-23/SOD-323/SOT-323/SOT-363", + null, + null, + "0004e28000000002", + null, 
+ 0, + null, + null, + null, + null, + null, + null, + "CARRIER", + "CARRIER", + null, + null, + null, + "00069080000000b7", + null, + null, + "001edf8000000002", + "001ede80000000b7", + "G30M01-0182", + null, + null, + "48806e8000000002", + null, + null, + null, + null, + null, + null, + null, + null, + "00049c80000000d0", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "2018-02-13 04:42:32", + "91417", + "2018-02-13 04:42:32", + "0004748000000cfb", + null, + null, + "G30M01焊接彈匣", + null, + null, + null, + 0, + 0, + 0 + ], + [ + null, + null, + null, + null, + "SOT-23/SOD-323/SOT-323/SOT-363", + null, + null, + "0004e28000000002", + null, + 0, + null, + null, + null, + null, + null, + null, + "CARRIER", + "CARRIER", + null, + null, + null, + "00069080000000b8", + null, + null, + "001edf8000000002", + "001ede80000000b8", + "G30M01-0183", + null, + null, + "48806e8000000002", + null, + null, + null, + null, + null, + null, + null, + null, + "00049c80000000d1", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "2018-02-13 04:42:32", + "91417", + "2018-02-13 04:42:32", + "0004748000000cfb", + null, + null, + "G30M01焊接彈匣", + null, + null, + null, + 0, + 0, + 0 + ], + [ + null, + null, + null, + null, + "SOT-23/SOD-323/SOT-323/SOT-363", + null, + null, + "0004e28000000002", + null, + 0, + null, + null, + null, + null, + null, + null, + "CARRIER", + "CARRIER", + null, + null, + null, + "00069080000000b9", + null, + null, + "001edf8000000002", + "001ede80000000b9", + "G30M01-0184", + null, + null, + "48806e8000000002", + null, + null, + null, + null, + null, + null, + null, + null, + "00049c80000000d2", + null, + null, + null, + null, + null, + null, + 
null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "2018-02-13 04:42:32", + "91417", + "2018-02-13 04:42:32", + "0004748000000cfb", + null, + null, + "G30M01焊接彈匣", + null, + null, + null, + 0, + 0, + 0 + ] + ] + }, + "DW_MES_RESOURCESTATUS": { + "owner": "DWH", + "table_comment": null, + "row_count": 65742614, + "schema": [ + { + "column_name": "HISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "HISTORYMAINLINEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "RESOURCESTATUSHISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "AVAILABILITY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "LASTSTATUSCHANGEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "NEWREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "NEWSTATUSNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "UPDATELASTSTATUSCHANGEDATE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + 
"default_value": null, + "column_id": 8 + }, + { + "column_name": "OLDAVAILABILITY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "OLDLASTACTIVITYDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "OLDLASTSTATUSCHANGEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "OLDREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "OLDSTATUSNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "OLDUPDATELASTSTATUSCHANGEDATE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "SS_ISDOWNVIAPARENT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "JOBID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + "column_name": "DESCRIPTION", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + 
"default_value": null, + "column_id": 18 + }, + { + "column_name": "RESOURCEFAMILYNAME", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "VENDORNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "VENDORMODEL", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "PJ_ERPVENDORID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "PJ_ASSETSSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "PJ_DEPARTMENT", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "AUTOMATIONPLANNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + } + ], + "column_comments": { + "AUTOMATIONPLANNAME": "AUTOMATIONPLANID", + "AVAILABILITY": null, + "DESCRIPTION": null, + "HISTORYID": 
"RESOURCEID", + "HISTORYMAINLINEID": null, + "JOBID": null, + "LASTSTATUSCHANGEDATE": null, + "LOCATIONNAME": "LOCATIONID", + "NEWREASONNAME": null, + "NEWSTATUSNAME": null, + "OLDAVAILABILITY": null, + "OLDLASTACTIVITYDATE": null, + "OLDLASTSTATUSCHANGEDATE": null, + "OLDREASONNAME": null, + "OLDSTATUSNAME": null, + "OLDUPDATELASTSTATUSCHANGEDATE": null, + "PJ_ASSETSSTATUS": null, + "PJ_DEPARTMENT": null, + "PJ_ERPVENDORID": "VENDORID", + "RESOURCEFAMILYNAME": "RESOURCEFAMILYID", + "RESOURCESTATUSHISTORYID": null, + "SS_ISDOWNVIAPARENT": null, + "TXNDATE": "資料更新時間(做差異同步用)", + "UPDATELASTSTATUSCHANGEDATE": null, + "VENDORMODEL": null, + "VENDORNAME": "VENDORID", + "WORKCENTERNAME": "PJ_WORKCENTERID" + }, + "indexes": [ + [ + "HISTORYID", + "NONUNIQUE", + "HISTORYID" + ], + [ + "OLDLASTSTATUSCHANGEDATE", + "NONUNIQUE", + "OLDLASTSTATUSCHANGEDATE" + ] + ], + "sample_columns": [ + "HISTORYID", + "HISTORYMAINLINEID", + "RESOURCESTATUSHISTORYID", + "AVAILABILITY", + "LASTSTATUSCHANGEDATE", + "NEWREASONNAME", + "NEWSTATUSNAME", + "UPDATELASTSTATUSCHANGEDATE", + "OLDAVAILABILITY", + "OLDLASTACTIVITYDATE", + "OLDLASTSTATUSCHANGEDATE", + "OLDREASONNAME", + "OLDSTATUSNAME", + "OLDUPDATELASTSTATUSCHANGEDATE", + "SS_ISDOWNVIAPARENT", + "JOBID", + "TXNDATE", + "DESCRIPTION", + "RESOURCEFAMILYNAME", + "VENDORNAME", + "VENDORMODEL", + "PJ_ERPVENDORID", + "LOCATIONNAME", + "WORKCENTERNAME", + "PJ_ASSETSSTATUS", + "PJ_DEPARTMENT", + "AUTOMATIONPLANNAME" + ], + "sample_data": [ + [ + "488016800002d432", + "0009888017a7a7a1", + "000c948001cbe5c7", + 1, + "2023-04-08 14:51:31", + "Wait For Instructions", + "SBY", + 1, + 1, + "2023-04-08 12:34:11", + "2023-04-08 12:34:11", + "Production RUN", + "PRD", + 1, + 0, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + [ + "488016800002ddbb", + "0009888017a7a7a2", + "000c948001cbe5c8", + 1, + "2023-04-08 14:51:39", + "Wait For Instructions", + "SBY", + 1, + 1, + "2023-04-08 14:51:00", + 
"2023-04-08 14:51:00", + "Production RUN", + "PRD", + 1, + 0, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + [ + "488016800002ddbb", + "0009888017a7a7f9", + "000c948001cbe5ca", + 1, + "2023-04-08 14:51:59", + "Production RUN", + "PRD", + 1, + 1, + "2023-04-08 14:51:39", + "2023-04-08 14:51:39", + "Wait For Instructions", + "SBY", + 1, + 0, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + ] + }, + "DW_MES_RESOURCESTATUS_SHIFT": { + "owner": "DWH", + "table_comment": null, + "row_count": 74820134, + "schema": [ + { + "column_name": "HISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "HISTORYMAINLINEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "RESOURCESTATUSHISTORYID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "AVAILABILITY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "LASTSTATUSCHANGEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "NEWREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "NEWSTATUSNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 7 + }, + { + "column_name": "UPDATELASTSTATUSCHANGEDATE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "OLDAVAILABILITY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "OLDLASTACTIVITYDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "OLDLASTSTATUSCHANGEDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "OLDREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "OLDSTATUSNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "OLDUPDATELASTSTATUSCHANGEDATE", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "SS_ISDOWNVIAPARENT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "HOURS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 12, + "data_scale": 6, + "nullable": "Y", + "default_value": null, + 
"column_id": 17 + }, + { + "column_name": "JOBID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "DATADATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "SN", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "DESCRIPTION", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "RESOURCEFAMILYNAME", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "VENDORNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "VENDORMODEL", + "data_type": "VARCHAR2", + "data_length": 30, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "PJ_ERPVENDORID", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { 
+ "column_name": "PJ_ASSETSSTATUS", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "PJ_DEPARTMENT", + "data_type": "VARCHAR2", + "data_length": 100, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "AUTOMATIONPLANNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + } + ], + "column_comments": { + "AUTOMATIONPLANNAME": "AUTOMATIONPLANID", + "AVAILABILITY": null, + "DATADATE": null, + "DESCRIPTION": null, + "HISTORYID": "RESOURCEID", + "HISTORYMAINLINEID": null, + "HOURS": null, + "JOBID": null, + "LASTSTATUSCHANGEDATE": null, + "LOCATIONNAME": "LOCATIONID", + "NEWREASONNAME": null, + "NEWSTATUSNAME": null, + "OLDAVAILABILITY": null, + "OLDLASTACTIVITYDATE": null, + "OLDLASTSTATUSCHANGEDATE": null, + "OLDREASONNAME": null, + "OLDSTATUSNAME": null, + "OLDUPDATELASTSTATUSCHANGEDATE": null, + "PJ_ASSETSSTATUS": null, + "PJ_DEPARTMENT": null, + "PJ_ERPVENDORID": "VENDORID", + "RESOURCEFAMILYNAME": "RESOURCEFAMILYID", + "RESOURCESTATUSHISTORYID": null, + "SN": null, + "SS_ISDOWNVIAPARENT": null, + "TXNDATE": null, + "UPDATELASTSTATUSCHANGEDATE": null, + "VENDORMODEL": null, + "VENDORNAME": "VENDORID", + "WORKCENTERNAME": "PJ_WORKCENTERID" + }, + "indexes": [ + [ + "DW_MES_RESOURCESTATUS_SHIFT_DATADATE", + "NONUNIQUE", + "DATADATE" + ], + [ + "DW_MES_RESOURCESTATUS_SHIFT_HISTORYID", + "NONUNIQUE", + "HISTORYID" + ], + [ + "DW_MES_RESOURCESTATUS_SHIFT_JOBID", + "NONUNIQUE", + "JOBID" + ], + [ + "DW_MES_RESOURCESTATUS_SHIFT_OLDLASTSTATUSCHANGEDATE", + "NONUNIQUE", + "OLDLASTSTATUSCHANGEDATE" + ], + [ + "DW_MES_RESOURCESTATUS_SHIFT_TXNDATE", + "NONUNIQUE", + "TXNDATE" + ] + ], + "sample_columns": [ + "HISTORYID", + "HISTORYMAINLINEID", + 
"RESOURCESTATUSHISTORYID", + "AVAILABILITY", + "LASTSTATUSCHANGEDATE", + "NEWREASONNAME", + "NEWSTATUSNAME", + "UPDATELASTSTATUSCHANGEDATE", + "OLDAVAILABILITY", + "OLDLASTACTIVITYDATE", + "OLDLASTSTATUSCHANGEDATE", + "OLDREASONNAME", + "OLDSTATUSNAME", + "OLDUPDATELASTSTATUSCHANGEDATE", + "SS_ISDOWNVIAPARENT", + "TXNDATE", + "HOURS", + "JOBID", + "DATADATE", + "SN", + "DESCRIPTION", + "RESOURCEFAMILYNAME", + "VENDORNAME", + "VENDORMODEL", + "PJ_ERPVENDORID", + "LOCATIONNAME", + "WORKCENTERNAME", + "PJ_ASSETSSTATUS", + "PJ_DEPARTMENT", + "AUTOMATIONPLANNAME" + ], + "sample_data": [ + [ + "4880168000000003", + "000988800000001d", + "000c948000000004", + 1, + "2018-02-26 19:30:00", + "設備-LOST_NULL", + "設備-LOST", + 1, + 1, + null, + "2018-02-26 07:30:00", + "載具-IDLE_NULL", + "載具-IDLE", + null, + 0, + "2018-02-26 00:00:00", + 12.0, + null, + "2018-02-26 00:00:00", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + [ + "4880168000000003", + "000988800000001d", + "000c948000000004", + 1, + "2018-02-27 07:30:00", + "設備-LOST_NULL", + "設備-LOST", + 1, + 1, + null, + "2018-02-26 19:30:00", + "載具-IDLE_NULL", + "載具-IDLE", + null, + 0, + "2018-02-26 00:00:00", + 12.0, + null, + "2018-02-26 00:00:00", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + [ + "4880168000000003", + "000988800000001d", + "000c948000000004", + 1, + "2018-02-27 17:40:02", + "設備-LOST_NULL", + "設備-LOST", + 1, + 1, + null, + "2018-02-27 07:30:00", + "載具-IDLE_NULL", + "載具-IDLE", + null, + 0, + "2018-02-27 00:00:00", + 10.167222, + null, + "2018-02-27 00:00:00", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + ] + }, + "DW_MES_SPEC_WORKCENTER_V": { + "owner": "DWH", + "table_comment": null, + "row_count": 230, + "schema": [ + { + "column_name": "SPEC", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + 
"default_value": null, + "column_id": 1 + }, + { + "column_name": "SPECSEQUENCE", + "data_type": "VARCHAR2", + "data_length": 10, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "SPEC_ORDER", + "data_type": "VARCHAR2", + "data_length": 51, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "WORK_CENTER", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "WORK_CENTER_SEQUENCE", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "WORK_CENTER_GROUP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "WORKCENTERSEQUENCE_GROUP", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 7 + }, + { + "column_name": "WORKCENTERGROUP_ORDER", + "data_type": "VARCHAR2", + "data_length": 296, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "WORK_CENTER_SHORT", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 9 + } + ], + "column_comments": { + "SPEC": null, + "SPECSEQUENCE": null, + "SPEC_ORDER": null, + "WORKCENTERGROUP_ORDER": null, + "WORKCENTERSEQUENCE_GROUP": null, + "WORK_CENTER": null, + "WORK_CENTER_GROUP": null, + "WORK_CENTER_SEQUENCE": null, + "WORK_CENTER_SHORT": null + }, + "indexes": [], + "sample_columns": [ + "SPEC", 
+ "SPECSEQUENCE", + "SPEC_ORDER", + "WORK_CENTER", + "WORK_CENTER_SEQUENCE", + "WORK_CENTER_GROUP", + "WORKCENTERSEQUENCE_GROUP", + "WORKCENTERGROUP_ORDER", + "WORK_CENTER_SHORT" + ], + "sample_data": [ + [ + "X-Ray_WB_20", + null, + "_X-Ray_WB_20", + "焊接_WB", + "090", + "焊接_WB", + "090", + "090_焊接_WB", + "WB" + ], + [ + "X-Ray_WB_30", + null, + "_X-Ray_WB_30", + "焊接_WB", + "090", + "焊接_WB", + "090", + "090_焊接_WB", + "WB" + ], + [ + "X-Ray_WB_7", + null, + "_X-Ray_WB_7", + "焊接_WB", + "090", + "焊接_WB", + "090", + "090_焊接_WB", + "WB" + ] + ] + }, + "DW_MES_WIP": { + "owner": "DWH", + "table_comment": null, + "row_count": 79058085, + "schema": [ + { + "column_name": "CONTAINERID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 1 + }, + { + "column_name": "CONTAINERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 2 + }, + { + "column_name": "GA_CONTAINERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 3 + }, + { + "column_name": "QTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 4 + }, + { + "column_name": "QTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 5 + }, + { + "column_name": "CURRENTHOLDCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 6 + }, + { + "column_name": "HOLDREASONID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 7 + }, + { + "column_name": "ORIGINALSTARTDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 8 + }, + { + "column_name": "STATUS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 9 + }, + { + "column_name": "ORIGINALQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 10 + }, + { + "column_name": "ORIGINALQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 11 + }, + { + "column_name": "SPECID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 12 + }, + { + "column_name": "MOVEINTIMESTAMP", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 13 + }, + { + "column_name": "MOVEINUSERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 14 + }, + { + "column_name": "MOVEINQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 15 + }, + { + "column_name": "MOVEINQTY2", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 16 + }, + { + "column_name": "STARTREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 17 + }, + { + 
"column_name": "EXPECTEDENDDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 18 + }, + { + "column_name": "WORKFLOWNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 19 + }, + { + "column_name": "WORKFLOWSTEPNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 20 + }, + { + "column_name": "LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 21 + }, + { + "column_name": "DATECODE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 22 + }, + { + "column_name": "CONTAINERCOMMENTS", + "data_type": "VARCHAR2", + "data_length": 2000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 23 + }, + { + "column_name": "COMMENT_DATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 24 + }, + { + "column_name": "COMMENT_EMP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 25 + }, + { + "column_name": "EQUIPMENTCOUNT", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 26 + }, + { + "column_name": "EQUIPMENTS", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 27 + }, + { + 
"column_name": "EQP_LOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 28 + }, + { + "column_name": "HOLDEMP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 29 + }, + { + "column_name": "HOLDDEPTNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 30 + }, + { + "column_name": "HOLDLOCATIONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 31 + }, + { + "column_name": "HOLDCOMMENT_FUTURE", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 32 + }, + { + "column_name": "HOLDREASONNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 33 + }, + { + "column_name": "EVENTNAME", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 34 + }, + { + "column_name": "OCCURRENCEDATE", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 35 + }, + { + "column_name": "RELEASETIME", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 36 + }, + { + "column_name": "RELEASEEMP", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 37 + }, + { + "column_name": "RELEASEREASON", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 38 + }, + { + "column_name": "SPECNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 39 + }, + { + "column_name": "WORKCENTERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 40 + }, + { + "column_name": "MFGORDERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 41 + }, + { + "column_name": "PJ_BOP", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 42 + }, + { + "column_name": "PJ_PRODUCEREGION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 43 + }, + { + "column_name": "PRODUCTBOMBASEID", + "data_type": "CHAR", + "data_length": 16, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 44 + }, + { + "column_name": "OWNERNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 45 + }, + { + "column_name": "PRIORITYCODENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 46 + }, + { + "column_name": "WOQTY", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + 
"column_id": 47 + }, + { + "column_name": "WOPLANNEDCOMPLETIONDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 48 + }, + { + "column_name": "PJ_TYPE", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 49 + }, + { + "column_name": "PJ_FUNCTION", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 50 + }, + { + "column_name": "PRODUCTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 51 + }, + { + "column_name": "PRODUCTLINENAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 52 + }, + { + "column_name": "PRODUCTLINENAME_LEF", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 53 + }, + { + "column_name": "PRODUCTDESC", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 54 + }, + { + "column_name": "FIRSTNAME", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 55 + }, + { + "column_name": "WAFERLOTS1", + "data_type": "VARCHAR2", + "data_length": 40, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 56 + }, + { + "column_name": "WAFERLOT", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, 
+ "column_id": 57 + }, + { + "column_name": "WAFERNAME", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 58 + }, + { + "column_name": "WAFERDESC", + "data_type": "VARCHAR2", + "data_length": 255, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 59 + }, + { + "column_name": "NUMBEROFROWS", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": 10, + "data_scale": 0, + "nullable": "Y", + "default_value": null, + "column_id": 60 + }, + { + "column_name": "LEADFRAMENAME", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 61 + }, + { + "column_name": "LEADFRAMEDESC", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 62 + }, + { + "column_name": "LEADFRAMEOPTION", + "data_type": "VARCHAR2", + "data_length": 1000, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 63 + }, + { + "column_name": "CONSUMEFACTOR", + "data_type": "NUMBER", + "data_length": 22, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 64 + }, + { + "column_name": "TXNDATE", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 65 + }, + { + "column_name": "HOLDTIME", + "data_type": "DATE", + "data_length": 7, + "data_precision": null, + "data_scale": null, + "nullable": "Y", + "default_value": null, + "column_id": 66 + } + ], + "column_comments": { + "COMMENT_DATE": null, + "COMMENT_EMP": null, + "CONSUMEFACTOR": "CF", + "CONTAINERCOMMENTS": null, + "CONTAINERID": null, + "CONTAINERNAME": null, + 
"CURRENTHOLDCOUNT": null, + "DATECODE": null, + "EQP_LOCATIONNAME": "EM", + "EQUIPMENTCOUNT": null, + "EQUIPMENTS": "EM", + "EVENTNAME": "NCR", + "EXPECTEDENDDATE": "SD", + "FIRSTNAME": "CONTAINERID", + "GA_CONTAINERNAME": null, + "HOLDCOMMENT_FUTURE": null, + "HOLDDEPTNAME": null, + "HOLDEMP": null, + "HOLDLOCATIONNAME": null, + "HOLDREASONID": null, + "HOLDREASONNAME": "HOLDREASONID", + "HOLDTIME": null, + "LEADFRAMEDESC": null, + "LEADFRAMENAME": null, + "LEADFRAMEOPTION": null, + "LOCATIONNAME": null, + "MFGORDERNAME": "CONTAINERID", + "MOVEINQTY": null, + "MOVEINQTY2": null, + "MOVEINTIMESTAMP": null, + "MOVEINUSERNAME": null, + "NUMBEROFROWS": "CONTAINERID->PRODUCTID", + "OCCURRENCEDATE": "NCR", + "ORIGINALQTY": null, + "ORIGINALQTY2": null, + "ORIGINALSTARTDATE": null, + "OWNERNAME": "CONTAINERID", + "PJ_BOP": "CONTAINERID", + "PJ_FUNCTION": "CONTAINERID", + "PJ_PRODUCEREGION": "CONTAINERID", + "PJ_TYPE": "CONTAINERID", + "PRIORITYCODENAME": "CONTAINERID", + "PRODUCTBOMBASEID": "CONTAINERID", + "PRODUCTDESC": null, + "PRODUCTLINENAME": "CONTAINERID", + "PRODUCTLINENAME_LEF": null, + "PRODUCTNAME": "CONTAINERID", + "QTY": null, + "QTY2": null, + "RELEASEEMP": "DW_MES_HOLDRELEASEHISTORY", + "RELEASEREASON": "DW_MES_HOLDRELEASEHISTORY", + "RELEASETIME": "DW_MES_HOLDRELEASEHISTORY", + "SPECID": null, + "SPECNAME": "SPECID", + "STARTREASONNAME": "CONTAINERID", + "STATUS": null, + "TXNDATE": null, + "WAFERDESC": "3個加起來", + "WAFERLOT": "3個加起來", + "WAFERLOTS1": null, + "WAFERNAME": "3個加起來", + "WOPLANNEDCOMPLETIONDATE": "CONTAINERID->MFGORDERID", + "WOQTY": "CONTAINERID->MFGORDERID", + "WORKCENTERNAME": "SPECID", + "WORKFLOWNAME": "SD", + "WORKFLOWSTEPNAME": null + }, + "indexes": [ + [ + "DW_MES_WIP_CONTAINERNAME", + "NONUNIQUE", + "CONTAINERNAME" + ], + [ + "DW_MES_WIP_TXNDATE", + "NONUNIQUE", + "TXNDATE" + ] + ], + "sample_columns": [ + "CONTAINERID", + "CONTAINERNAME", + "GA_CONTAINERNAME", + "QTY", + "QTY2", + "CURRENTHOLDCOUNT", + "HOLDREASONID", + 
"ORIGINALSTARTDATE", + "STATUS", + "ORIGINALQTY", + "ORIGINALQTY2", + "SPECID", + "MOVEINTIMESTAMP", + "MOVEINUSERNAME", + "MOVEINQTY", + "MOVEINQTY2", + "STARTREASONNAME", + "EXPECTEDENDDATE", + "WORKFLOWNAME", + "WORKFLOWSTEPNAME", + "LOCATIONNAME", + "DATECODE", + "CONTAINERCOMMENTS", + "COMMENT_DATE", + "COMMENT_EMP", + "EQUIPMENTCOUNT", + "EQUIPMENTS", + "EQP_LOCATIONNAME", + "HOLDEMP", + "HOLDDEPTNAME", + "HOLDLOCATIONNAME", + "HOLDCOMMENT_FUTURE", + "HOLDREASONNAME", + "EVENTNAME", + "OCCURRENCEDATE", + "RELEASETIME", + "RELEASEEMP", + "RELEASEREASON", + "SPECNAME", + "WORKCENTERNAME", + "MFGORDERNAME", + "PJ_BOP", + "PJ_PRODUCEREGION", + "PRODUCTBOMBASEID", + "OWNERNAME", + "PRIORITYCODENAME", + "WOQTY", + "WOPLANNEDCOMPLETIONDATE", + "PJ_TYPE", + "PJ_FUNCTION", + "PRODUCTNAME", + "PRODUCTLINENAME", + "PRODUCTLINENAME_LEF", + "PRODUCTDESC", + "FIRSTNAME", + "WAFERLOTS1", + "WAFERLOT", + "WAFERNAME", + "WAFERDESC", + "NUMBEROFROWS", + "LEADFRAMENAME", + "LEADFRAMEDESC", + "LEADFRAMEOPTION", + "CONSUMEFACTOR", + "TXNDATE", + "HOLDTIME" + ], + "sample_data": [ + [ + "48810380001ae18a", + "GA23051025-A00-001", + "GA23051025-A00-001", + 99424, + 0, + 0, + null, + "2023-05-25 02:47:35", + 1, + 99840, + 1, + "48812c8000000020", + "2023-05-26 11:31:13", + "DW1A", + 99840, + 0, + "NORMAL", + "2023-06-01 00:00:00", + "UAC_SOD-323", + "銅線製程", + null, + null, + null, + null, + null, + 1, + "GWBK-0258", + "焊接A區", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "銅線製程", + "焊_WB_料", + "GA23051025", + "UAC10", + "A棟", + "4880ee80000038c4", + "已驗證", + "4.一般", + 201000, + "2023-06-01 00:00:00", + "BZT52-C22S", + "ZENER", + "BZT52-C22S_R1_00001", + "SOD-323", + "SOD-323", + "/WP/TR/7\"/HF/5K/SOD-323/ZEN/SOD/ZSM-02A/ZSM02-QI02/PJ///", + "AUGZNT-0062#P508721A-5%P", + "AUGZNT", + "AUGZNT-0062#P508721A-5%P", + "WAF003135_CP", + "ZEN/4\"/ZM13N0220/12.8/*12.8mil/230um/ALSN/", + 1, + "LEF000016", + "腳架/SOD-323/OPTION 1/REEL/A42", + "OPTION 1", + 1, + 
"2023-05-26 14:49:11", + null + ], + [ + "48810380001ae21e", + "GA23051250-A00-001", + "GA23051250-A00-001", + 99424, + 0, + 0, + null, + "2023-05-25 06:21:49", + 1, + 99840, + 0, + "48812c8000000020", + "2023-05-26 12:23:15", + "DW1A", + 99840, + 0, + "NORMAL", + "2023-06-01 00:00:00", + "UAC_SOD-323", + "銅線製程", + null, + null, + null, + null, + null, + 1, + "GWBK-0238", + "焊接A區", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "銅線製程", + "焊_WB_料", + "GA23051250", + "UAC10", + "A棟", + "4880ee80000039d8", + "量產", + "4.一般", + 124544, + "2023-06-01 00:00:00", + "PDZ30B", + "ZENER", + "PDZ30B_R1_00001", + "SOD-323", + "SOD-323", + "/ZS/TR/7\"/HF/5K/SOD-323/ZEN/SOD/ZSM-04A/ZSM04-QI21/PJ///", + "GMZN-3450#SZ229046-01P", + "GMZN", + "GMZN-3450#SZ229046-01P", + "WAF910518_CP", + "ZEN/6\"/6SZPDZ014030ATB2-K/13.8/*13.8mil/150um/ALSN/", + 1, + "LEF000016", + "腳架/SOD-323/OPTION 1/REEL/A42", + "OPTION 1", + 1, + "2023-05-26 14:49:11", + null + ], + [ + "48810380001ae197", + "GA23050804-A00-007", + "GA23050804-A00-007", + 69120, + 0, + 0, + null, + "2023-05-25 02:51:10", + 1, + 138240, + 1, + "48812c8000000020", + "2023-05-26 11:41:28", + "DW1A", + 69120, + 0, + "NORMAL", + "2023-06-03 00:00:00", + "UAC_SOT-23", + "銅線製程", + null, + null, + null, + null, + null, + 1, + "GWBA-0219", + "焊接B區", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + "銅線製程", + "焊_WB_料", + "GA23050804", + "UAC10", + "A棟", + "4880ee8000004480", + "量產", + "4.一般", + 2550780, + "2023-06-02 00:00:00", + "MMBD6100", + "SWITCHING", + "MMBD6100_R1_00001", + "SOT-23", + "SOT-23", + "/T4/TR/7\"/HF/3K/SOT-23/SWI/SOT/USM-03TCC/USM03-QI29/PJ///", + "TXS-14572#2AHU-8921P", + "TXS", + "TXS-14572#2AHU-8921P", + "WAF000099_CP", + "SWI/5\"/5DS01MH/11/*11mil/230um/ALAU/", + 2, + "LEF000027", + "腳架/SOT-23/OPTION 2/REEL/A42", + "OPTION 2", + 2, + "2023-05-26 14:49:11", + null + ] + ] + } +} \ No newline at end of file diff --git 
a/deploy/mes-dashboard-watchdog.service b/deploy/mes-dashboard-watchdog.service new file mode 100644 index 0000000..c291d9e --- /dev/null +++ b/deploy/mes-dashboard-watchdog.service @@ -0,0 +1,40 @@ +[Unit] +Description=MES Dashboard Worker Watchdog (Conda Runtime) +Documentation=https://github.com/your-org/mes-dashboard +After=network.target mes-dashboard.service +Requires=mes-dashboard.service + +[Service] +Type=simple +User=www-data +Group=www-data +WorkingDirectory=/opt/mes-dashboard +EnvironmentFile=-/etc/mes-dashboard/mes-dashboard.env +Environment="PYTHONPATH=/opt/mes-dashboard/src" +Environment="CONDA_BIN=/opt/miniconda3/bin/conda" +Environment="CONDA_ENV_NAME=mes-dashboard" +Environment="WATCHDOG_RUNTIME_DIR=/run/mes-dashboard" +Environment="WATCHDOG_RESTART_FLAG=/run/mes-dashboard/mes_dashboard_restart.flag" +Environment="WATCHDOG_PID_FILE=/run/mes-dashboard/gunicorn.pid" +Environment="WATCHDOG_STATE_FILE=/var/lib/mes-dashboard/restart_state.json" +Environment="WATCHDOG_CHECK_INTERVAL=5" + +RuntimeDirectory=mes-dashboard +StateDirectory=mes-dashboard + +ExecStart=/usr/bin/env bash -lc 'exec "${CONDA_BIN}" run --no-capture-output -n "${CONDA_ENV_NAME}" python scripts/worker_watchdog.py' + +Restart=always +RestartSec=5 + +StandardOutput=journal +StandardError=journal +SyslogIdentifier=mes-watchdog + +NoNewPrivileges=yes +PrivateTmp=yes +ProtectSystem=strict +ReadWritePaths=/run/mes-dashboard /var/lib/mes-dashboard + +[Install] +WantedBy=multi-user.target diff --git a/deploy/mes-dashboard.service b/deploy/mes-dashboard.service new file mode 100644 index 0000000..630577d --- /dev/null +++ b/deploy/mes-dashboard.service @@ -0,0 +1,43 @@ +[Unit] +Description=MES Dashboard Gunicorn Service (Conda Runtime) +Documentation=https://github.com/your-org/mes-dashboard +After=network.target redis-server.service +Wants=redis-server.service + +[Service] +Type=simple +User=www-data +Group=www-data +WorkingDirectory=/opt/mes-dashboard 
+EnvironmentFile=-/etc/mes-dashboard/mes-dashboard.env +Environment="PYTHONPATH=/opt/mes-dashboard/src" +Environment="CONDA_BIN=/opt/miniconda3/bin/conda" +Environment="CONDA_ENV_NAME=mes-dashboard" +Environment="GUNICORN_BIND=0.0.0.0:8080" +Environment="WATCHDOG_RUNTIME_DIR=/run/mes-dashboard" +Environment="WATCHDOG_RESTART_FLAG=/run/mes-dashboard/mes_dashboard_restart.flag" +Environment="WATCHDOG_PID_FILE=/run/mes-dashboard/gunicorn.pid" +Environment="WATCHDOG_STATE_FILE=/var/lib/mes-dashboard/restart_state.json" + +RuntimeDirectory=mes-dashboard +StateDirectory=mes-dashboard +PIDFile=/run/mes-dashboard/gunicorn.pid + +ExecStart=/usr/bin/env bash -lc 'exec "${CONDA_BIN}" run --no-capture-output -n "${CONDA_ENV_NAME}" gunicorn --config gunicorn.conf.py --pid "${WATCHDOG_PID_FILE}" --capture-output "mes_dashboard:create_app()"' + +KillSignal=SIGTERM +TimeoutStopSec=30 +Restart=always +RestartSec=5 + +StandardOutput=journal +StandardError=journal +SyslogIdentifier=mes-dashboard + +NoNewPrivileges=yes +PrivateTmp=yes +ProtectSystem=strict +ReadWritePaths=/run/mes-dashboard /var/lib/mes-dashboard /opt/mes-dashboard/logs + +[Install] +WantedBy=multi-user.target diff --git a/docs/DW_PJ_LOT_V_POWERBI_SQL.txt b/docs/DW_PJ_LOT_V_POWERBI_SQL.txt new file mode 100644 index 0000000..fc8d9cb --- /dev/null +++ b/docs/DW_PJ_LOT_V_POWERBI_SQL.txt @@ -0,0 +1,60 @@ +SELECT L.LOTID AS ""Run Card Lot ID"", + L.Workorder AS ""Work Order ID"", + L.Qty AS ""Lot Qty(pcs)"", + L.Qty2 AS ""Lot Qty(Wafer pcs)"", + L.Status AS ""Run Card Status"", + L.HOLDREASONNAME AS ""Hold Reason"", + L.CurrentHoldCount AS ""Hold Count"", + L.Owner AS ""Work Order Owner"", + L.StartDate AS ""Run Card Start Date"", + L.UTS, + L.Product AS ""Product P/N"", + L.Productlinename AS ""Package"", + L.Package_LEF as ""Package(LF)"", + L.PJ_FUNCTION AS ""Product Function"", + L.Pj_Type AS ""Product Type"", + L.BOP, + L.FirstName AS ""Wafer Lot ID"", + L.WAFERNAME AS ""Wafer P/N"", + L.WaferLot ""Wafer Lot 
ID(Prefix)"", + L.SpecName AS ""Spec"", + L.SPECSEQUENCE AS ""Spec Sequence"", + L.SPECSEQUENCE || '_' || L.SpecName AS ""Spec(Order)"", + L.Workcentername AS ""Work Center"", + L.WorkCenterSequence AS ""Work Center Sequence"", + L.WorkCenter_Group AS ""Work Center(Group)"", + L.WorkCenter_Short AS ""Work Center(Short)"", + L.WorkCenterSequence_Group AS ""Work Center Sequence(Group)"", + L.WorkCenterSequence_Group || '_' || L.WorkCenter_Group AS ""Work Center Group(Order)"", + L.AgeByDays AS ""Age By Days"", + L.Equipments AS ""Equipment ID"", + L.EquipmentCount AS ""Equipment Count"", + L.Workflowname AS ""Work Flow Name"", + L.Datecode AS ""Product Date Code"", + L.LEADFRAMENAME AS ""LF Material Part"", + L.LEADFRAMEOPTION AS ""LF Option ID"", + L.COMNAME AS ""Compound Material Part"", + L.LOCATIONNAME AS ""Run Card Location"", + L.Eventname AS ""NCR ID"", + L.Occurrencedate AS ""NCR-issued Time"", + L.ReleaseTime AS ""Release Time"", + L.ReleaseEmp AS ""Release Employee"", + L.ReleaseReason AS ""Release Comment"", + L.COMMENT_HOLD AS ""Hold Comment"", + L.CONTAINERCOMMENTS AS ""Comment"", + L.COMMENT_DATE AS ""Run Card Comment"", + L.COMMENT_EMP AS ""Run Card Comment Employee"", + L.COMMENT_FUTURE AS ""Future Hold Comment"", + L.HOLDEMP AS ""Hold Employee"", + L.DEPTNAME AS ""Hold Employee Dept"", + L.PJ_PRODUCEREGION AS ""Produce Region"", + L.Prioritycodename AS ""Work Order Priority"", + L.TMTT_R AS ""TMTT Remaining"", + L.wafer_factor AS ""Die Consumption Qty"", + Case When (L.EquipmentCount>0) Then 'RUN' + When (L.CurrentHoldCount>0) Then 'HOLD' + ELSE 'QUENE' End AS ""WIP Status"", + Case When (L.EquipmentCount>0) Then 1 + When (L.CurrentHoldCount>0) Then 3 + ELSE 2 End AS ""WIP Status Sequence"", + sys_date AS ""Data Update Date"" \ No newline at end of file diff --git a/docs/MES_Core_Tables_Analysis_Report.md b/docs/MES_Core_Tables_Analysis_Report.md new file mode 100644 index 0000000..4c89cf4 --- /dev/null +++ b/docs/MES_Core_Tables_Analysis_Report.md 
@@ -0,0 +1,2334 @@ +# MES 核心表詳細分析報告 + +**生成時間**: 2026-01-14(最後更新: 2026-01-29) +**分析範圍**: 19 張 MES 核心表(含 2 張 DWH 即時視圖 + 1 張工站對照視圖) +**資料來源**: MES_Database_Reference.md, DW_MES_LOT_V 實際數據分析, DW_MES_EQUIPMENTSTATUS_WIP_V 實際數據分析, DW_MES_SPEC_WORKCENTER_V 實際數據分析 + +--- + +## 目錄 + +1. [表性質分類總覽](#表性質分類總覽) +2. [即時數據表分析](#即時數據表分析) +3. [現況快照表分析](#現況快照表分析) +4. [歷史累積表分析](#歷史累積表分析) +5. [表間關聯關係圖](#表間關聯關係圖) +6. [關鍵業務場景查詢策略](#關鍵業務場景查詢策略) + +--- + +## 表性質分類總覽 + +### 即時數據表(Real-time Views) +透過 DB Link 從 DWH 取得的即時 WIP / 設備狀態視圖,依來源更新頻率提供 + +| 表名 | 數據量 | 主要用途 | 更新方式 | +|------|--------|---------|---------| +| **DW_MES_LOT_V** | ~9,468 | 即時 WIP 分布(70欄位) | DB Link 即時查詢(依 PJ_LOT_MV 更新頻率) | +| **DW_MES_EQUIPMENTSTATUS_WIP_V** | ~2,631 | 設備資產狀態 + WIP 追蹤(32欄位) | DB Link 即時查詢(真正即時表) | + +### 現況快照表(Snapshot Tables) +存儲當前狀態的數據,數據會被更新或覆蓋 + +| 表名 | 數據量 | 主要用途 | 更新方式 | +|------|--------|---------|---------| +| **DW_MES_WIP** | 79,058,085 | 在制品現況(含歷史累積) | 隨生產流程更新 | +| **DW_MES_RESOURCE** | 91,329 | 資源主檔(設備/工位) | 異動時更新 | +| **DW_MES_CONTAINER** | 5,218,406 | 容器當前狀態 | 隨批次流轉更新 | +| **DW_MES_JOB** | 1,248,622 | 設備維修工單當前狀態 | 維修工單狀態變更時更新 | + +### 歷史累積表(Historical Tables) +只新增不修改,記錄完整的歷史軌跡 + +| 表名 | 數據量 | 主要用途 | 累積方式 | +|------|--------|---------|---------| +| **DW_MES_RESOURCESTATUS** | 65,742,614 | 資源狀態變更歷史 | 狀態變更時新增記錄 | +| **DW_MES_RESOURCESTATUS_SHIFT** | 74,820,134 | 資源班次狀態歷史 | 班次資料匯總新增 | +| **DW_MES_LOTWIPHISTORY** | 53,454,213 | 批次流轉歷史 | 每次移出/移入新增 | +| **DW_MES_LOTWIPDATAHISTORY** | 77,960,216 | 批次數據變更歷史 | 數據採集時新增 | +| **DW_MES_HM_LOTMOVEOUT** | 48,645,692 | 批次移出事件 | 移出操作時新增 | +| **DW_MES_JOBTXNHISTORY** | 9,554,723 | 維修工單交易歷史 | 維修工單狀態變更新增 | +| **DW_MES_LOTREJECTHISTORY** | 15,786,025 | 批次拒絕歷史 | 報廢操作時新增 | +| **DW_MES_LOTMATERIALSHISTORY** | 17,829,931 | 物料消耗歷史 | 物料使用時新增 | +| **DW_MES_HOLDRELEASEHISTORY** | 310,737 | 暫停/釋放歷史 | Hold/Release時新增 | +| **DW_MES_MAINTENANCE** | 52,060,026 | 設備維護歷史 | 維護活動時新增 | + +### 輔助表(Auxiliary Tables) + +| 表名 | 數據量 | 主要用途 | +|------|--------|---------| +| 
**DW_MES_PARTREQUESTORDER** | 61,396 | 物料請求訂單 | +| **DW_MES_PJ_COMBINEDASSYLOTS** | 1,965,425 | 組合裝配批次 | +| **DW_MES_SPEC_WORKCENTER_V** | 230 | 工站/工序對照視圖 | + +--- + +## 即時數據表分析 + +### DW_MES_LOT_V(即時 WIP 批次視圖)⭐⭐⭐ + +**表性質**: 即時數據視圖(Real-time View) + +**業務定義**: DWH 提供的即時 WIP 視圖,透過 DB Link 從 `PJ_LOT_MV@DWDB_MESDB` 取得,依 PJ_LOT_MV 更新頻率提供。包含完整的批次狀態、工站位置、設備資訊、Hold 原因等 70 個欄位,是 WIP Dashboard 的主要數據源。 + +**數據來源**: `PJ_LOT_MV@DWDB_MESDB`(DB Link 連線) + +**數據量**: 約 9,468 筆(2026-01-29 查詢) + +#### 欄位分類總覽(70 欄位) + +| 分類 | 欄位數 | 說明 | +|------|--------|------| +| 批次識別 | 5 | LOTID, CONTAINERID, WORKORDER, FIRSTNAME, NO | +| 數量相關 | 6 | QTY, QTY2, STARTQTY, STARTQTY2, MOVEINQTY, MOVEINQTY2 | +| 狀態相關 | 4 | STATUS, CURRENTHOLDCOUNT, STARTREASON, OWNER | +| 時間相關 | 7 | STARTDATE, UTS, MOVEINTIMESTAMP, SYS_DATE, AGEBYDAYS, REMAINTIME, OCCURRENCEDATE | +| 工站/流程 | 12 | WORKCENTER*, SPEC*, STEP, WORKFLOWNAME, LOCATIONNAME | +| 產品/封裝 | 8 | PRODUCT, PRODUCTLINENAME, PACKAGE_LEF, MATERIALTYPE, PJ_TYPE, PJ_FUNCTION, BOP | +| Hold 相關 | 8 | HOLDREASONNAME, HOLDEMP, HOLDLOCATION, RELEASETIME, RELEASEEMP, RELEASEREASON, COMMENT_HOLD | +| 設備相關 | 4 | EQUIPMENTNAME, EQUIPMENTS, EQUIPMENTCOUNT, DEPTNAME | +| 物料資訊 | 6 | LEADFRAMENAME, LEADFRAMEOPTION, WAFERNAME, WAFERLOT, COMNAME, DATECODE | +| 備註/其他 | 10 | CONTAINERCOMMENTS, COMMENT_*, PRIORITYCODENAME, JOB*, PB_FUNCTION, TMTT_R, WAFER_FACTOR | + +#### 關鍵時間欄位 + +| 欄位名 | 類型 | 用途 | 說明 | +|--------|------|------|------| +| `SYS_DATE` | TIMESTAMP | 數據更新時間 | 視圖同步時間戳,用於確認數據新鮮度 | +| `STARTDATE` | TIMESTAMP | 批次開始時間 | 批次投產的時間點 | +| `MOVEINTIMESTAMP` | TIMESTAMP | 移入當前工站時間 | 進入當前工序的時間 | +| `UTS` | VARCHAR2 | 預計完成日期 | 格式為 'YYYY/MM/DD' | +| `AGEBYDAYS` | NUMBER | 批次天數 | 從 STARTDATE 到現在的天數(含小數) | +| `REMAINTIME` | NUMBER | 剩餘時間 | 預計完成前的剩餘天數(含小數) | + +#### 關鍵業務欄位詳解 + +##### 批次識別欄位 + +| 欄位名 | 類型 | 說明 | 範例值 | +|--------|------|------|--------| +| `LOTID` | VARCHAR2(40) | 批次號(業務識別碼) | `GA26011704-A00-003` | +| `CONTAINERID` | VARCHAR2(40) | 容器 ID(系統識別碼) | 
`48810480002ab0b4` | +| `WORKORDER` | VARCHAR2(40) | 工單號 | `GA26011704` | +| `FIRSTNAME` | VARCHAR2(100) | 首片批號 | `PSMS-4473#RFTLD3` | +| `NO` | NUMBER | 序號(查詢結果排序用) | 1, 2, 3... | + +##### 狀態欄位 + +| 欄位名 | 類型 | 說明 | 實際值分布 | +|--------|------|------|-----------| +| `STATUS` | VARCHAR2(20) | 批次狀態 | `ACTIVE`(約 98.7%)、`HOLD`(約 1.3%) | +| `OWNER` | VARCHAR2(40) | 所有者/用途 | `量產`、`重工RW`、`代工`、`點測`、`樣品`、`餘晶`、`工程`、`久存`、`PROD`、`降規` | +| `MATERIALTYPE` | VARCHAR2(40) | 物料類型 | `成品`(約 99%)、`Wafer`(約 1%) | +| `STARTREASON` | VARCHAR2(40) | 開始原因 | `NORMAL`、`RW` 等 | + +##### 數量欄位 + +| 欄位名 | 類型 | 說明 | 數值範圍 | +|--------|------|------|---------| +| `QTY` | NUMBER | 當前數量(主單位) | 1 - 3,000,000+ | +| `QTY2` | NUMBER | 當前數量(輔單位) | 通常為 0 | +| `STARTQTY` | NUMBER | 起始數量 | 通常 ≥ QTY | +| `MOVEINQTY` | NUMBER | 移入數量 | 進站時的數量 | + +##### 工站/流程欄位 + +| 欄位名 | 類型 | 說明 | 範例值 | +|--------|------|------|--------| +| `WORKCENTERNAME` | VARCHAR2(40) | 工作中心名稱 | `成型`、`TMTT`、`電鍍`、`焊接` | +| `WORKCENTER_GROUP` | VARCHAR2(40) | 工作中心群組 | 與 WORKCENTERNAME 相同或分組 | +| `WORKCENTER_SHORT` | VARCHAR2(20) | 工站簡稱 | `Mold`、`TMTT`、`DB`、`WB` | +| `WORKCENTERSEQUENCE` | VARCHAR2(10) | 工站順序 | `130`、`300` 等(數值越大越後段) | +| `SPECNAME` | VARCHAR2(100) | 工序規格名稱 | `成型烘烤`、`PRE TMTT` | +| `STEP` | VARCHAR2(100) | 當前步驟 | 通常與 SPECNAME 相同 | +| `WORKFLOWNAME` | VARCHAR2(100) | 工藝流程名稱 | `PCC_SOT-223`、`UAC_SOD-523` | + +##### 產品/封裝欄位 + +| 欄位名 | 類型 | 說明 | 範例值 | +|--------|------|------|--------| +| `PRODUCT` | VARCHAR2(100) | 產品名稱(完整) | `PJW5P06A_R2_00701` | +| `PRODUCTLINENAME` | VARCHAR2(40) | 產品線/封裝類型 | `SOT-223`、`SOD-523` | +| `PACKAGE_LEF` | VARCHAR2(40) | 封裝型號 | `SOT-223`、`SOD-523` | +| `PJ_TYPE` | VARCHAR2(40) | 產品型號 | `PJW5P06A`、`RB521S30-NC` | +| `PJ_FUNCTION` | VARCHAR2(40) | 產品功能分類 | `MOSFET`、`SKY` | +| `BOP` | VARCHAR2(40) | BOP 代碼 | `PCC15`、`UAC10` | + +##### Hold 相關欄位 + +| 欄位名 | 類型 | 說明 | 範例值 | +|--------|------|------|--------| +| `HOLDREASONNAME` | VARCHAR2(100) | Hold 原因 | `S2品質異常單(PE)`、`特殊需求管控` | +| `CURRENTHOLDCOUNT` | 
NUMBER | 當前 Hold 次數 | 0 = 非 Hold,≥1 = Hold 中 | +| `HOLDEMP` | VARCHAR2(40) | Hold 操作人員 | 員工姓名 | +| `HOLDLOCATION` | VARCHAR2(40) | Hold 位置 | 通常為 NULL | +| `RELEASETIME` | TIMESTAMP | 預計解除時間 | NULL 表示未設定 | +| `RELEASEEMP` | VARCHAR2(40) | 解除人員 | NULL 表示尚未解除 | +| `RELEASEREASON` | VARCHAR2(200) | 解除原因 | NULL 表示尚未解除 | +| `COMMENT_HOLD` | VARCHAR2(4000) | Hold 備註 | 詳細說明 Hold 原因 | + +##### 設備欄位(重要說明) + +| 欄位名 | 類型 | 說明 | 使用工站 | +|--------|------|------|---------| +| `EQUIPMENTNAME` | VARCHAR2(40) | 設備名稱(單一設備) | TMTT(82%)、切彎腳(69%)、PKG_SAW | +| `EQUIPMENTS` | VARCHAR2(4000) | 設備清單(逗號分隔) | 成型、焊接、電鍍、打印等其他工站 | +| `EQUIPMENTCOUNT` | NUMBER | 設備數量 | 0 表示尚無設備綁定 | + +**⚠️ 重要**: `EQUIPMENTNAME` 與 `EQUIPMENTS` 為**互斥使用**: +- **TMTT、切彎腳、PKG_SAW** 工站使用 `EQUIPMENTNAME`(單一設備) +- **其他工站**(成型、焊接、電鍍、打印等)使用 `EQUIPMENTS`(設備清單) +- 僅約 100 筆同時有兩欄位數據(均為 TMTT 工站) +- **建議查詢**: 使用 `COALESCE(EQUIPMENTNAME, EQUIPMENTS)` 取得統一設備資訊 + +##### 優先度欄位 + +| 欄位名 | 值 | 說明 | +|--------|-----|------| +| `PRIORITYCODENAME` | `1.超特急` | 最高優先度 | +| | `2.特急` | 高優先度 | +| | `3.急件` | 中高優先度(約 3%) | +| | `4.一般` | 一般優先度(約 96%) | + +##### Hold 原因分布(參考數據) + +| HOLDREASONNAME | 說明 | 典型佔比 | +|----------------|------|---------| +| `特殊需求管控` | 特殊製程或客戶要求 | 最常見 | +| `S2品質異常單(PE)` | PE 開立的品質異常 | 常見 | +| `現場品質異常單(PQC)` | PQC 開立的品質異常 | 常見 | +| `自行暫停` | 自主暫停 | 偶爾 | +| `治具不足HOLD` | 治具問題 | 偶爾 | +| 其他 | 換線暫停、生管暫停等 | 少見 | + +#### 查詢策略 + +**1. WIP 即時分布統計(按工站)** +```sql +SELECT + WORKCENTER_GROUP, + WORKCENTER_SHORT, + COUNT(*) as LOT_COUNT, + SUM(QTY) as TOTAL_QTY, + SUM(CASE WHEN STATUS = 'HOLD' THEN 1 ELSE 0 END) as HOLD_LOTS, + SUM(CASE WHEN STATUS = 'HOLD' THEN QTY ELSE 0 END) as HOLD_QTY +FROM DW_MES_LOT_V +WHERE OWNER NOT IN ('DUMMY') -- 排除 DUMMY 批次 +GROUP BY WORKCENTER_GROUP, WORKCENTER_SHORT, WORKCENTERSEQUENCE_GROUP +ORDER BY TO_NUMBER(WORKCENTERSEQUENCE_GROUP); +``` + +**2. 
WIP 交叉分析(工站 x 封裝)** +```sql +SELECT + WORKCENTER_GROUP, + PRODUCTLINENAME, + COUNT(*) as LOT_COUNT, + SUM(QTY) as TOTAL_QTY +FROM DW_MES_LOT_V +WHERE OWNER NOT IN ('DUMMY') +GROUP BY WORKCENTER_GROUP, PRODUCTLINENAME +ORDER BY WORKCENTER_GROUP, LOT_COUNT DESC; +``` + +**3. Hold 批次清單** +```sql +SELECT + LOTID, + PRODUCT, + WORKCENTERNAME, + SPECNAME, + QTY, + HOLDREASONNAME, + HOLDEMP, + COMMENT_HOLD, + AGEBYDAYS +FROM DW_MES_LOT_V +WHERE STATUS = 'HOLD' +ORDER BY AGEBYDAYS DESC; +``` + +**4. 設備使用查詢(統一處理 EQUIPMENTNAME/EQUIPMENTS)** +```sql +SELECT + LOTID, + WORKCENTERNAME, + COALESCE(EQUIPMENTNAME, EQUIPMENTS) as EQUIPMENT_INFO, + EQUIPMENTCOUNT, + QTY +FROM DW_MES_LOT_V +WHERE COALESCE(EQUIPMENTNAME, EQUIPMENTS) IS NOT NULL +ORDER BY WORKCENTERNAME; +``` + +**5. 批次詳細查詢** +```sql +SELECT + LOTID, + CONTAINERID, + WORKORDER, + PRODUCT, + PJ_TYPE, + PJ_FUNCTION, + PRODUCTLINENAME, + WORKCENTERNAME, + SPECNAME, + STATUS, + QTY, + STARTQTY, + AGEBYDAYS, + REMAINTIME, + UTS, + PRIORITYCODENAME, + OWNER, + COALESCE(EQUIPMENTNAME, EQUIPMENTS) as EQUIPMENT, + SYS_DATE +FROM DW_MES_LOT_V +WHERE LOTID LIKE 'GA26011%' -- 工單篩選 +ORDER BY WORKCENTERSEQUENCE; +``` + +#### 與其他表的關聯 + +| 關聯表 | 關聯欄位 | 用途 | +|--------|---------|------| +| DW_MES_CONTAINER | CONTAINERID | 取得更詳細的容器資訊 | +| DW_MES_LOTWIPHISTORY | CONTAINERID | 查詢批次流轉歷史 | +| DW_MES_HOLDRELEASEHISTORY | CONTAINERID | 查詢 Hold/Release 歷史 | + +#### 重要注意事項 + +⚠️ **資料更新頻率**: 每 5 分鐘從 DWH 同步,查詢時注意 `SYS_DATE` 確認數據新鮮度 + +⚠️ **DUMMY 批次過濾**: 生產報表應排除 `OWNER IN ('DUMMY')` 的測試批次 + +⚠️ **設備欄位選擇**: 使用 `COALESCE(EQUIPMENTNAME, EQUIPMENTS)` 處理不同工站的設備資訊 + +⚠️ **時間欄位**: `UTS` 為 VARCHAR2 格式 'YYYY/MM/DD',需轉換後才能計算 + +⚠️ **無資料庫備註**: 此視圖無 Oracle 欄位備註(ALL_COL_COMMENTS 為空),欄位說明請參考本文件 + +--- + +### DW_MES_EQUIPMENTSTATUS_WIP_V(設備狀態 + WIP 追蹤視圖)⭐⭐ + +**表性質**: 即時數據視圖(Real-time View) + +**業務定義**: DWH 提供設備資產狀態與 WIP 追蹤資料的即時視圖,透過 DB Link 直接查詢 `PJ_EquipmentStatus_WIP_V@DWDB_MESDB`,屬於真正即時表(非同步快照)。整合設備狀態、維修工單與批次 Track-In 及 Wafer/封裝資訊,適合做設備狀態與當前 WIP 關聯分析。 + 
+**數據來源**: `PJ_EquipmentStatus_WIP_V@DWDB_MESDB`(DB Link 連線) + +**數據量**: 約 2,631 筆(2026-01-29 查詢) + +#### 欄位分類總覽(32 欄位) + +| 分類 | 欄位數 | 說明 | +|------|--------|------| +| 設備/資源識別 | 3 | RESOURCEID, EQUIPMENTID, OBJECTCATEGORY | +| 設備狀態 | 2 | EQUIPMENTASSETSSTATUS, EQUIPMENTASSETSSTATUSREASON | +| 維修工單 | 11 | JOBORDER, JOBMODEL, JOBSTAGE, JOBID, JOBSTATUS, CREATEDATE, CREATEUSERNAME, CREATEUSER, SYMPTOMCODE, CAUSECODE, REPAIRCODE | +| WIP/產品 | 7 | RUNCARDLOTID, "Package", PACKAGE_LF, "Function", TYPE, BOP, SPEC | +| Wafer/材料 | 6 | WAFERLOTID, WAFERPN, WAFERLOTID_PREFIX, LFOPTIONID, WIREDESCRIPTION, WAFERMIL | +| Track-In | 3 | LOTTRACKINQTY_PCS, LOTTRACKINTIME, LOTTRACKINEMPLOYEE | + +#### 關鍵欄位說明 + +#### 欄位清單與說明(32 欄位) + +| 欄位名 | 類型 | 欄位功能說明 | +|--------|------|--------------| +| `RESOURCEID` | CHAR(16) | 資源/設備資源 ID(資源主檔識別碼) | +| `EQUIPMENTID` | VARCHAR2(40) | 設備編號(機台代號) | +| `OBJECTCATEGORY` | VARCHAR2(40) | 類別/製程分類(如 ASSEMBLY) | +| `EQUIPMENTASSETSSTATUS` | VARCHAR2(40) | 設備資產狀態(如 PRD、IDLE) | +| `EQUIPMENTASSETSSTATUSREASON` | VARCHAR2(40) | 設備狀態原因/說明(如 Production RUN) | +| `JOBORDER` | VARCHAR2(40) | 維修工單號 | +| `JOBMODEL` | VARCHAR2(40) | 維修工單機型/型號 | +| `JOBSTAGE` | VARCHAR2(40) | 維修工單階段 | +| `JOBID` | CHAR(16) | 維修工單內部 ID | +| `JOBSTATUS` | VARCHAR2(40) | 維修工單狀態 | +| `CREATEDATE` | DATE | 工單建立時間 | +| `CREATEUSERNAME` | VARCHAR2(40) | 建立者帳號 | +| `CREATEUSER` | VARCHAR2(255) | 建立者姓名/顯示名稱 | +| `SYMPTOMCODE` | VARCHAR2(40) | 維修症狀代碼 | +| `CAUSECODE` | VARCHAR2(40) | 故障原因代碼 | +| `REPAIRCODE` | VARCHAR2(40) | 維修處置代碼 | +| `RUNCARDLOTID` | VARCHAR2(40) | 批次號(Run card lot id) | +| `"Package"` | VARCHAR2(40) | 封裝型號(需雙引號保留大小寫) | +| `PACKAGE_LF` | VARCHAR2(4000) | 封裝/Leadframe 類型或描述 | +| `"Function"` | VARCHAR2(40) | 產品功能分類(需雙引號保留大小寫) | +| `TYPE` | VARCHAR2(40) | 產品型號 | +| `BOP` | VARCHAR2(40) | BOP 代碼 | +| `WAFERLOTID` | VARCHAR2(40) | Wafer Lot 編號 | +| `WAFERPN` | VARCHAR2(40) | Wafer 料號 | +| `WAFERLOTID_PREFIX` | VARCHAR2(160) | Wafer Lot 前綴 | +| `SPEC` | VARCHAR2(40) | 
製程/工序規格 | +| `LFOPTIONID` | VARCHAR2(4000) | Leadframe Option | +| `WIREDESCRIPTION` | VARCHAR2(4000) | Wire 描述 | +| `WAFERMIL` | VARCHAR2(3062) | Wafer 規格/厚度 | +| `LOTTRACKINQTY_PCS` | NUMBER | Track-In 數量(PCS) | +| `LOTTRACKINTIME` | DATE | Track-In 時間 | +| `LOTTRACKINEMPLOYEE` | VARCHAR2(255) | Track-In 人員 | + +##### 設備狀態欄位 + +| 欄位名 | 類型 | 說明 | 範例值 | +|--------|------|------|--------| +| `EQUIPMENTASSETSSTATUS` | VARCHAR2(40) | 設備資產狀態 | `PRD` | +| `EQUIPMENTASSETSSTATUSREASON` | VARCHAR2(40) | 狀態原因 | `Production RUN` | +| `OBJECTCATEGORY` | VARCHAR2(40) | 類別/製程分類 | `ASSEMBLY` | + +##### 批次與產品欄位 + +| 欄位名 | 類型 | 說明 | 範例值 | +|--------|------|------|--------| +| `RUNCARDLOTID` | VARCHAR2(40) | 批次號(Run card lot id) | `GA26011480-A00-006` | +| `"Package"` | VARCHAR2(40) | 封裝型號 | `DFN2510-10L` | +| `"Function"` | VARCHAR2(40) | 產品功能分類 | `TVS/ESD` | +| `TYPE` | VARCHAR2(40) | 產品型號 | `PE1605M4AQ` | +| `BOP` | VARCHAR2(40) | BOP 代碼 | `ECA08` | +| `SPEC` | VARCHAR2(40) | 工序規格 | `元件切割` | + +##### Track-In 與 Wafer 欄位 + +| 欄位名 | 類型 | 說明 | +|--------|------|------| +| `LOTTRACKINQTY_PCS` | NUMBER | Track-In 數量(PCS) | +| `LOTTRACKINTIME` | DATE | Track-In 時間 | +| `LOTTRACKINEMPLOYEE` | VARCHAR2(255) | Track-In 人員 | +| `WAFERLOTID` | VARCHAR2(40) | Wafer Lot | +| `WAFERPN` | VARCHAR2(40) | Wafer 料號 | +| `WAFERLOTID_PREFIX` | VARCHAR2(160) | Wafer Lot 前綴 | +| `LFOPTIONID` | VARCHAR2(4000) | Leadframe Option | +| `WIREDESCRIPTION` | VARCHAR2(4000) | Wire 描述 | +| `WAFERMIL` | VARCHAR2(3062) | Wafer 厚度/規格 | + +#### 查詢策略 + +**1. 設備狀態分布** +```sql +SELECT + OBJECTCATEGORY, + EQUIPMENTASSETSSTATUS, + EQUIPMENTASSETSSTATUSREASON, + COUNT(*) as EQUIPMENT_COUNT +FROM DW_MES_EQUIPMENTSTATUS_WIP_V +GROUP BY OBJECTCATEGORY, EQUIPMENTASSETSSTATUS, EQUIPMENTASSETSSTATUSREASON +ORDER BY OBJECTCATEGORY, EQUIPMENT_COUNT DESC; +``` + +**2. 
設備對應 WIP 批次(含 Track-In)** +```sql +SELECT + EQUIPMENTID, + RUNCARDLOTID, + "Package" as PACKAGE, + "Function" as FUNCTION, + TYPE, + BOP, + SPEC, + LOTTRACKINQTY_PCS, + LOTTRACKINTIME +FROM DW_MES_EQUIPMENTSTATUS_WIP_V +WHERE RUNCARDLOTID IS NOT NULL +ORDER BY LOTTRACKINTIME DESC; +``` + +**3. 維修工單清單** +```sql +SELECT + EQUIPMENTID, + JOBORDER, + JOBMODEL, + JOBSTAGE, + JOBSTATUS, + CREATEDATE, + SYMPTOMCODE, + CAUSECODE, + REPAIRCODE +FROM DW_MES_EQUIPMENTSTATUS_WIP_V +WHERE JOBORDER IS NOT NULL +ORDER BY CREATEDATE DESC; +``` + +**4. Wafer/材料分布** +```sql +SELECT + WAFERPN, + WAFERLOTID_PREFIX, + COUNT(*) as LOT_COUNT +FROM DW_MES_EQUIPMENTSTATUS_WIP_V +WHERE WAFERPN IS NOT NULL +GROUP BY WAFERPN, WAFERLOTID_PREFIX +ORDER BY LOT_COUNT DESC; +``` + +#### 與其他表的關聯 + +| 關聯表 | 關聯欄位 | 用途 | +|--------|---------|------| +| DW_MES_LOT_V | RUNCARDLOTID ↔ LOTID | 對照批次狀態/工站資訊 | +| DW_MES_WIP | RUNCARDLOTID ↔ CONTAINERNAME | 取得批次現況與工單資訊 | +| DW_MES_RESOURCE | EQUIPMENTID / RESOURCEID | 取得設備主檔/資源資訊 | + +#### 重要注意事項 + +⚠️ **資料更新頻率**: DB Link 即時查詢,查詢時可搭配 `LOTTRACKINTIME` 判斷新鮮度 + +⚠️ **欄位大小寫**: `"Package"`、`"Function"` 為**引用欄位**,查詢需使用雙引號保留大小寫 + +⚠️ **欄位空值**: 維修工單與 Wafer/材料欄位常為 NULL,需依使用情境加條件 + +⚠️ **無資料庫備註**: 此視圖無 Oracle 欄位備註(ALL_COL_COMMENTS 為空),欄位說明請參考本文件 + +--- + +### DW_MES_SPEC_WORKCENTER_V(工站/工序對照視圖)⭐ + +**表性質**: 對照視圖(Mapping View) + +**業務定義**: 由 `MES_SPEC`、`MES_OPERATION`、`MES_WORKCENTER` 組合,提供 SPEC 與工站名稱、分組與排序欄位的對照表。可用於統一工站命名與排序規則,補足報表分群需求。 + +**數據來源**: `MES_SPEC`, `MES_OPERATION`, `MES_WORKCENTER`(DWH 本地表) + +**數據量**: 230 筆(2026-01-29 查詢) + +#### 欄位說明(9 欄位) + +| 欄位名 | 類型 | 說明 | +|--------|------|------| +| `SPEC` | VARCHAR2(40) | SPEC 名稱 | +| `SPECSEQUENCE` | NUMBER | SPEC 順序(PJ_SEQUENCE) | +| `SPEC_ORDER` | VARCHAR2(200) | 排序欄位(SPECSEQUENCE + '_' + SPEC) | +| `WORK_CENTER` | VARCHAR2(100) | 工站名稱 | +| `WORK_CENTER_SEQUENCE` | VARCHAR2(40) | 工站順序碼(取自 WORKCENTER.Description) | +| `WORK_CENTER_GROUP` | VARCHAR2(100) | 工站分組名稱(依規則合併,如焊接/成型/電鍍) | +| `WORKCENTERSEQUENCE_GROUP` | 
VARCHAR2(40) | 工站群組順序碼(依規則統一) | +| `WORKCENTERGROUP_ORDER` | VARCHAR2(200) | 群組排序欄位(序號 + '_' + 群組名) | +| `WORK_CENTER_SHORT` | VARCHAR2(40) | 工站簡稱(如 DB/WB/Mold) | + +#### 查詢策略 + +**1. SPEC 對應工站分組** +```sql +SELECT + SPEC, + WORK_CENTER, + WORK_CENTER_GROUP, + WORK_CENTER_SHORT, + WORKCENTERSEQUENCE_GROUP +FROM DWH.DW_MES_SPEC_WORKCENTER_V +ORDER BY WORKCENTERSEQUENCE_GROUP, SPEC; +``` + +**2. 與 WIP 視圖對照(補足工站分組)** +```sql +SELECT + l.LOTID, + l.SPECNAME, + l.WORKCENTERNAME, + s.WORK_CENTER_GROUP, + s.WORK_CENTER_SHORT +FROM DWH.DW_MES_LOT_V l +LEFT JOIN DWH.DW_MES_SPEC_WORKCENTER_V s + ON l.SPECNAME = s.SPEC +ORDER BY l.WORKCENTERSEQUENCE_GROUP, l.LOTID; +``` + +#### 重要注意事項 + +⚠️ **分組規則**: `WORK_CENTER_GROUP` 與 `WORKCENTERSEQUENCE_GROUP` 由 CASE 規則產生,若工站命名異動需同步檢查 + +--- + +## 現況快照表分析 + +### 1. DW_MES_WIP(在制品表)⭐⭐⭐ + +**表性質**: 現況快照表(含歷史累積) + +**業務定義**: 存儲在制品(WIP)的現況資料,但實際包含歷史累積,需搭配時間條件(如 `TXNDATE`)限制查詢範圍 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | 查詢建議 | +|--------|------|---------| +| `MOVEINTIMESTAMP` | 批次移入當前工序的時間 | 計算在站時間 (SYSDATE - MOVEINTIMESTAMP) | +| `ORIGINALSTARTDATE` | 批次原始開始生產日期 | 計算生產週期 (SYSDATE - ORIGINALSTARTDATE) | +| `EXPECTEDENDDATE` | 預計完成日期 | 監控交期風險 | +| `TXNDATE` | 資料最後更新時間 | 數據同步監控用 | +| `HOLDTIME` | 暫停時間 | Hold批次的暫停時間點 | +| `COMMENT_DATE` | 備註更新時間 | 追蹤最後異動時間 | + +#### 關鍵業務欄位 + +**數量相關** +- `QTY` / `QTY2`: 當前數量(主/輔單位) +- `MOVEINQTY` / `MOVEINQTY2`: 移入數量 +- `ORIGINALQTY` / `ORIGINALQTY2`: 原始開始數量 +- `WOQTY`: 工單總數量 + +**狀態與位置** +- `STATUS`: 批次狀態碼(數值) +- `LOCATIONNAME`: 當前所在位置 +- `WORKFLOWSTEPNAME`: 當前工序步驟名稱 +- `WORKCENTERNAME`: 當前工作中心 + +**Hold相關** +- `CURRENTHOLDCOUNT`: 當前Hold數量 +- `HOLDREASONID` / `HOLDREASONNAME`: Hold原因 +- `HOLDLOCATIONNAME`: Hold所在位置 +- `HOLDEMP`: Hold操作人員 +- `HOLDCOMMENT_FUTURE`: Hold備註(FutureHold) + +**產品與工單** +- `CONTAINERNAME`: 批次號(LOT號) +- `MFGORDERNAME`: 工單號 +- `PRODUCTNAME`: 產品名稱 +- `PRODUCTLINENAME`: 產品線 +- `SPECNAME`: 當前站點規格 + +**生產信息** +- `DATECODE`: 生產週期代碼 +- `FIRSTNAME`: 首片批號 +- `WAFERLOT` / `WAFERNAME`: Wafer資訊(3個欄位合併) +- 
`LEADFRAMENAME` / `LEADFRAMEOPTION`: 框架資訊 +- `CONSUMEFACTOR`: 消耗因子(CF值) + +#### 查詢策略 + +**1. 查詢在站時間過長的批次(停滯分析)** +```sql +SELECT + CONTAINERNAME, + PRODUCTNAME, + WORKFLOWSTEPNAME, + MOVEINTIMESTAMP, + ROUND((SYSDATE - MOVEINTIMESTAMP) * 24, 2) as HOURS_IN_STATION +FROM DW_MES_WIP +WHERE STATUS NOT IN (8, 128) -- 排除已完成或取消 + AND (SYSDATE - MOVEINTIMESTAMP) > 2 -- 在站超過2天 +ORDER BY HOURS_IN_STATION DESC; +``` + +**2. 查詢Hold批次清單** +```sql +SELECT + CONTAINERNAME, + PRODUCTNAME, + HOLDREASONNAME, + HOLDEMP, + HOLDTIME, + HOLDLOCATIONNAME, + CURRENTHOLDCOUNT +FROM DW_MES_WIP +WHERE CURRENTHOLDCOUNT > 0 + AND STATUS NOT IN (8, 128) +ORDER BY HOLDTIME; +``` + +**3. 查詢在制品數量統計(按產品線)** +```sql +SELECT + PRODUCTLINENAME, + COUNT(DISTINCT CONTAINERNAME) as LOT_COUNT, + SUM(QTY) as TOTAL_QTY, + SUM(CASE WHEN CURRENTHOLDCOUNT > 0 THEN 1 ELSE 0 END) as HOLD_LOT_COUNT +FROM DW_MES_WIP +WHERE STATUS NOT IN (8, 128) +GROUP BY PRODUCTLINENAME +ORDER BY LOT_COUNT DESC; +``` + +**4. 工單進度查詢** +```sql +SELECT + MFGORDERNAME, + PRODUCTNAME, + WOQTY as WO_TOTAL_QTY, + COUNT(DISTINCT CONTAINERNAME) as LOT_COUNT, + SUM(QTY) as CURRENT_QTY, + MIN(MOVEINTIMESTAMP) as EARLIEST_MOVEIN, + MAX(MOVEINTIMESTAMP) as LATEST_MOVEIN +FROM DW_MES_WIP +WHERE MFGORDERNAME = 'WO12345' -- 替換為實際工單號 + AND STATUS NOT IN (8, 128) +GROUP BY MFGORDERNAME, PRODUCTNAME, WOQTY; +``` + +--- + +### 2. 
DW_MES_RESOURCE(資源主表) + +**表性質**: 現況快照表(主檔表) + +**業務定義**: 存儲所有生產資源(設備、工位)的基本信息和配置 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | +|--------|------| +| `CREATIONDATE` | 資源創建日期 | +| `LASTCHANGEDATE` | 最後修改日期 | + +#### 關鍵業務欄位 + +**基本信息** +- `RESOURCEID` / `RESOURCENAME`: 資源唯一標識與名稱 +- `OBJECTCATEGORY` / `OBJECTTYPE`: 資源分類(設備/工位等) +- `DESCRIPTION`: 資源描述 +- `EQUIPMENTTYPE`: 設備類型 + +**位置與歸屬** +- `LOCATIONID` / `LOCATIONNAME`: 所在位置 +- `WORKCENTERNAME`: 所屬工作中心 +- `RESOURCEFAMILYNAME`: 資源家族 +- `PJ_DEPARTMENT`: 所屬部門 + +**設備狀態與能力** +- `PJ_ASSETSSTATUS`: 資產狀態 +- `MAXLOTS`: 最大批次容量 +- `MAXUNITS`: 最大單元容量 +- `MULTILOTSFLAG`: 是否支持多批次 + +**設備屬性標記(2025-12-17新增)** +- `PJ_ISPRODUCTION`: 是否為生產設備 +- `PJ_ISKEY`: 是否為關鍵設備 +- `PJ_ISMONITOR`: 是否為監控設備 + +**供應商信息** +- `VENDORID` / `VENDORNAME`: 供應商 +- `PJ_ERPVENDORID`: ERP供應商代碼 +- `VENDORMODEL`: 設備型號 +- `VENDORSERIALNUMBER`: 序列號 + +#### 查詢策略 + +**1. 查詢關鍵生產設備清單** +```sql +SELECT + RESOURCENAME, + WORKCENTERNAME, + LOCATIONNAME, + EQUIPMENTTYPE, + VENDORNAME, + VENDORMODEL, + PJ_ASSETSSTATUS +FROM DW_MES_RESOURCE +WHERE PJ_ISPRODUCTION = 1 + AND PJ_ISKEY = 1 + AND OBJECTTYPE = 'Equipment' +ORDER BY WORKCENTERNAME, RESOURCENAME; +``` + +**2. 查詢設備容量信息** +```sql +SELECT + RESOURCENAME, + WORKCENTERNAME, + MAXLOTS, + MAXUNITS, + MULTILOTSFLAG, + LOTCOUNT as CURRENT_LOT_COUNT +FROM DW_MES_RESOURCE +WHERE OBJECTTYPE = 'Equipment' + AND MAXLOTS > 0 +ORDER BY WORKCENTERNAME; +``` + +--- + +### 3. 
DW_MES_CONTAINER(容器信息表) + +**表性質**: 現況快照表 + +**業務定義**: 存儲生產容器(批次載體)的當前信息和狀態 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | +|--------|------| +| `LASTMOVEOUTTIMESTAMP` | 最後移出時間 | +| `MOVEINTIMESTAMP` | 最後移入時間 | +| `FACTORYSTARTDATE` | 工廠開始日期 | +| `ORIGINALSTARTDATE` | 原始開始日期 | +| `PLANNEDSTARTDATE` | 計劃開始日期 | +| `LASTACTIVITYDATE` | 最後活動日期 | +| `LASTCOMPLETIONDATE` | 最後完成日期 | +| `ONHOLDDATE` | Hold日期 | +| `EXPIRATIONDATE` | 過期日期 | +| `UTS` | 更新時間戳 | +| `LAST_SYNC_DATE` | 最後同步日期 | + +#### 關鍵業務欄位 + +**容器標識** +- `CONTAINERID` / `CONTAINERNAME`: 容器唯一標識 +- `FIRSTNAME`: 首片資訊 + +**當前狀態** +- `STATUS`: 狀態碼 +- `CURRENTSTATUSID`: 當前狀態ID +- `LOCATIONNAME`: 當前位置 +- `WORKFLOWSTEPNAME`: 當前工序 +- `SPECNAME`: 當前規格 +- `WORKCENTERNAME`: 當前工作中心 + +**數量信息** +- `QTY` / `QTY2`: 當前數量 +- `MOVEINQTY` / `MOVEINQTY2`: 移入數量 +- `ORIGINALQTY` / `ORIGINALQTY2`: 原始數量 +- `FACTORYSTARTQTY`: 工廠開始數量 + +**Hold狀態** +- `CURRENTHOLDCOUNT`: 當前Hold計數 +- `FUTUREHOLDCOUNT`: FutureHold計數 +- `HOLDREASONID` / `HOLDREASONNAME`: Hold原因 +- `HOLDLOCATIONNAME`: Hold位置 +- `HOLDLOCATIONSTARTTIMESTAMP`: Hold開始時間 +- `HOLDLOCATIONDURATION`: Hold持續時間 + +**工單與產品** +- `MFGORDERID` / `MFGORDERNAME`: 工單 +- `PRODUCTID` / `PRODUCTNAME`: 產品 +- `PRODUCTLINENAME`: 產品線 +- `PROCESSSPECID`: 工藝規格 +- `PJ_BOP`: BOP信息 +- `PJ_PRODUCEREGION`: 生產區域 + +**Lead Frame信息** +- `LEADFRAMENAME`: 框架名稱 +- `LEADFRAMEDESC`: 框架描述 +- `LEADFRAMEOPTION`: 框架選項 + +#### 查詢策略 + +**1. 查詢容器完整生命週期** +```sql +SELECT + CONTAINERNAME, + FACTORYSTARTDATE, + FACTORYSTARTQTY, + CURRENTSTATUSID, + QTY, + LASTMOVEOUTTIMESTAMP, + LASTMOVEOUTUSERNAME, + ROUND((SYSDATE - FACTORYSTARTDATE), 2) as DAYS_IN_PRODUCTION +FROM DW_MES_CONTAINER +WHERE CONTAINERNAME = 'LOT123456' -- 替換為實際批號 +ORDER BY LASTMOVEOUTTIMESTAMP DESC; +``` + +**2. 
查詢長時間Hold的容器** +```sql +SELECT + CONTAINERNAME, + PRODUCTNAME, + HOLDREASONNAME, + HOLDLOCATIONSTARTTIMESTAMP, + HOLDLOCATIONDURATION, + CURRENTHOLDCOUNT +FROM DW_MES_CONTAINER +WHERE CURRENTHOLDCOUNT > 0 + AND HOLDLOCATIONDURATION > 48 -- Hold超過48小時 +ORDER BY HOLDLOCATIONDURATION DESC; +``` + +--- + +### 4. DW_MES_JOB(工單表) + +**表性質**: 現況快照表 + +**業務定義**: 存儲維修/維護工單的當前狀態信息 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | +|--------|------| +| `CREATEDATE` | 工單創建日期 | +| `EXPECTEDSTARTDATE` | 預計開始日期 | +| `FIRSTCLOCKONDATE` | 首次簽到日期 | +| `LASTCLOCKOFFDATE` | 最後簽退日期 | +| `COMPLETEDATE` | 完成日期 | +| `CANCELDATE` | 取消日期 | + +#### 關鍵業務欄位 + +**工單基本信息** +- `JOBID`: 工單唯一標識 +- `JOBORDERNAME`: 工單名稱 +- `JOBSTATUS`: 工單狀態 +- `JOBMODELNAME`: 工單模型 +- `STAGENAME`: 階段名稱 +- `STAGESEQUENCE`: 階段順序 + +**資源與容器** +- `RESOURCEID` / `RESOURCENAME`: 關聯資源(設備) +- `CONTAINERIDS` / `CONTAINERNAMES`: 關聯容器(批次) +- `PARTREQUESTORDERNAME`: 物料請求訂單 + +**維修信息** +- `SYMPTOMCODENAME`: 症狀代碼 +- `CAUSECODENAME`: 原因代碼 +- `REPAIRCODENAME`: 維修代碼 +- `PJ_SYMPTOMCODE2NAME`: 症狀代碼2 +- `PJ_CAUSECODE2NAME`: 原因代碼2 +- `PJ_REPAIRCODE2NAME`: 維修代碼2 + +**工單統計** +- `ACKNOWLEDGECOUNT`: 確認計數 +- `ASSIGNCOUNT`: 分配計數 +- `CLOCKONCOUNT`: 簽到計數 +- `ACTIVECLOCKONCOUNT`: 活動簽到計數 +- `ESTIMATEDDURATION`: 預估工時 + +**操作人員** +- `CREATEUSERID` / `CREATE_EMPNAME` / `CREATE_FULLNAME`: 創建人 +- `COMPLETEUSERID` / `COMPLETE_EMPNAME` / `COMPLETE_FULLNAME`: 完成人 +- `CANCELUSERID` / `CANCEL_EMPNAME` / `CANCEL_FULLNAME`: 取消人 + +#### 查詢策略 + +**1. 查詢設備維修工單統計** +```sql +SELECT + RESOURCENAME, + JOBSTATUS, + COUNT(*) as JOB_COUNT, + AVG(COMPLETEDATE - CREATEDATE) as AVG_COMPLETION_DAYS +FROM DW_MES_JOB +WHERE CREATEDATE >= TRUNC(SYSDATE) - 30 +GROUP BY RESOURCENAME, JOBSTATUS +ORDER BY JOB_COUNT DESC; +``` + +**2. 查詢未完成工單清單** +```sql +SELECT + JOBORDERNAME, + RESOURCENAME, + JOBSTATUS, + CREATEDATE, + EXPECTEDSTARTDATE, + SYMPTOMCODENAME, + CREATE_FULLNAME +FROM DW_MES_JOB +WHERE JOBSTATUS NOT IN ('Completed', 'Cancelled') +ORDER BY CREATEDATE; +``` + +**3. 
查詢維修原因分析** +```sql +SELECT + SYMPTOMCODENAME, + CAUSECODENAME, + REPAIRCODENAME, + COUNT(*) as OCCURRENCE_COUNT +FROM DW_MES_JOB +WHERE COMPLETEDATE >= TRUNC(SYSDATE) - 90 + AND JOBSTATUS = 'Completed' +GROUP BY SYMPTOMCODENAME, CAUSECODENAME, REPAIRCODENAME +ORDER BY OCCURRENCE_COUNT DESC; +``` + +--- + +## 歷史累積表分析 + +### 5. DW_MES_RESOURCESTATUS(資源狀態表)⭐⭐⭐ + +**表性質**: 歷史累積表(關鍵核心表) + +**業務定義**: 記錄設備狀態的每一次變更,用於計算設備稼動率、停機時間等關鍵指標 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | 查詢建議 | +|--------|------|---------| +| `OLDLASTSTATUSCHANGEDATE` | 上一個狀態開始時間 | **狀態持續時間計算起點** | +| `LASTSTATUSCHANGEDATE` | 新狀態開始時間 | **狀態持續時間計算終點** | +| `OLDLASTACTIVITYDATE` | 上次活動日期 | 設備最後活動時間 | +| `TXNDATE` | 交易時間 | 資料同步時間(用於ETL) | + +**時間計算公式**: +```sql +狀態持續時間(小時) = (LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24 +``` + +#### 關鍵業務欄位 + +**狀態變更信息** +- `OLDSTATUSNAME` → `NEWSTATUSNAME`: 狀態變更(從→到) +- `OLDREASONNAME` → `NEWREASONNAME`: 原因變更 +- `OLDAVAILABILITY` → `AVAILABILITY`: 可用性變更 + +**可用性標記(AVAILABILITY)** +- `1`: Productive(生產中) +- `2`: Standby(待機) +- `3`: Non-Scheduled(非排程) +- `4`: Unscheduled Down(非計劃停機) +- `5`: Scheduled Down(計劃停機) + +**資源信息(來自RESOURCE表)** +- `HISTORYID`: 資源ID(關聯RESOURCEID) +- `DESCRIPTION`: 設備描述 +- `RESOURCEFAMILYNAME`: 設備家族 +- `WORKCENTERNAME`: 工作中心 +- `LOCATIONNAME`: 位置 +- `VENDORNAME` / `VENDORMODEL`: 供應商與型號 +- `PJ_ASSETSSTATUS`: 資產狀態 +- `PJ_DEPARTMENT`: 部門 + +**工單關聯** +- `JOBID`: 關聯的維修工單ID + +**特殊標記** +- `SS_ISDOWNVIAPARENT`: 是否因父設備Down而Down +- `UPDATELASTSTATUSCHANGEDATE` / `OLDUPDATELASTSTATUSCHANGEDATE`: 更新標記 + +#### 查詢策略 + +**1. 
計算設備稼動率(OEE基礎數據)** +```sql +SELECT + HISTORYID as RESOURCE_ID, + WORKCENTERNAME, + TRUNC(OLDLASTSTATUSCHANGEDATE) as DATE_KEY, + SUM(CASE + WHEN AVAILABILITY = 1 THEN + (LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24 + ELSE 0 + END) as PRODUCTIVE_HOURS, + SUM(CASE + WHEN AVAILABILITY = 2 THEN + (LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24 + ELSE 0 + END) as STANDBY_HOURS, + SUM(CASE + WHEN AVAILABILITY = 4 THEN + (LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24 + ELSE 0 + END) as UNSCHEDULED_DOWN_HOURS, + SUM((LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24) as TOTAL_HOURS +FROM DW_MES_RESOURCESTATUS +WHERE OLDLASTSTATUSCHANGEDATE >= TRUNC(SYSDATE) - 7 + AND LASTSTATUSCHANGEDATE <= SYSDATE +GROUP BY HISTORYID, WORKCENTERNAME, TRUNC(OLDLASTSTATUSCHANGEDATE) +ORDER BY DATE_KEY DESC, RESOURCE_ID; +``` + +**2. 查詢設備停機記錄(Down Time分析)** +```sql +SELECT + HISTORYID as RESOURCE_ID, + WORKCENTERNAME, + OLDLASTSTATUSCHANGEDATE as DOWN_START, + LASTSTATUSCHANGEDATE as DOWN_END, + ROUND((LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24, 2) as DOWN_HOURS, + NEWSTATUSNAME, + NEWREASONNAME, + AVAILABILITY +FROM DW_MES_RESOURCESTATUS +WHERE AVAILABILITY IN (4, 5) -- Unscheduled Down / Scheduled Down + AND OLDLASTSTATUSCHANGEDATE >= TRUNC(SYSDATE) - 7 +ORDER BY DOWN_HOURS DESC; +``` + +**3. 查詢設備狀態變更頻率** +```sql +SELECT + HISTORYID as RESOURCE_ID, + WORKCENTERNAME, + COUNT(*) as STATUS_CHANGE_COUNT, + MIN(OLDLASTSTATUSCHANGEDATE) as FIRST_CHANGE, + MAX(LASTSTATUSCHANGEDATE) as LAST_CHANGE +FROM DW_MES_RESOURCESTATUS +WHERE OLDLASTSTATUSCHANGEDATE >= TRUNC(SYSDATE) - 1 +GROUP BY HISTORYID, WORKCENTERNAME +ORDER BY STATUS_CHANGE_COUNT DESC; +``` + +**4. 
查詢特定時間段設備時間軸** +```sql +SELECT + OLDLASTSTATUSCHANGEDATE as START_TIME, + LASTSTATUSCHANGEDATE as END_TIME, + OLDSTATUSNAME as FROM_STATUS, + NEWSTATUSNAME as TO_STATUS, + NEWREASONNAME as REASON, + ROUND((LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24 * 60, 2) as DURATION_MINUTES +FROM DW_MES_RESOURCESTATUS +WHERE HISTORYID = 'RESOURCE_ID_HERE' -- 替換為實際設備ID + AND OLDLASTSTATUSCHANGEDATE >= TO_DATE('2026-01-14 08:00:00', 'YYYY-MM-DD HH24:MI:SS') + AND LASTSTATUSCHANGEDATE <= TO_DATE('2026-01-14 20:00:00', 'YYYY-MM-DD HH24:MI:SS') +ORDER BY OLDLASTSTATUSCHANGEDATE; +``` + +#### 重要注意事項 + +⚠️ **時間範圍必須限制**: 此表有 6500 萬筆資料,查詢時務必加上時間條件 + +⚠️ **狀態持續時間計算**: 使用 `LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE` + +⚠️ **索引使用**: 優先使用 `OLDLASTSTATUSCHANGEDATE` 和 `HISTORYID` 索引 + +--- + +### 6. DW_MES_RESOURCESTATUS_SHIFT(資源狀態班次表)⭐⭐⭐ + +**表性質**: 歷史累積表(彙總表) + +**業務定義**: 按班次彙總資源狀態資料,已計算好時長,是生產報表的首選數據源 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | +|--------|------| +| `DATADATE` | 資料日期(班次日期) | +| `OLDLASTSTATUSCHANGEDATE` | 狀態開始時間 | +| `LASTSTATUSCHANGEDATE` | 狀態結束時間 | +| `TXNDATE` | 交易時間 | + +#### 關鍵業務欄位 + +**時長計算(已彙總)** +- `HOURS`: **狀態持續時長(小時)** ⭐ 已計算好,直接使用 + +**班次信息** +- `SN`: 班次序號 + +**狀態信息(同RESOURCESTATUS)** +- `OLDSTATUSNAME` / `NEWSTATUSNAME`: 狀態變更 +- `OLDREASONNAME` / `NEWREASONNAME`: 原因 +- `OLDAVAILABILITY` / `AVAILABILITY`: 可用性 + +**資源信息** +- `HISTORYID`: 資源ID +- `WORKCENTERNAME`: 工作中心 +- `RESOURCEFAMILYNAME`: 設備家族 +- `LOCATIONNAME`: 位置 + +**工單關聯** +- `JOBID`: 維修工單ID + +#### 查詢策略 + +**1. 
日報表:設備稼動率統計(最佳實踐)** +```sql +SELECT + DATADATE, + HISTORYID as RESOURCE_ID, + WORKCENTERNAME, + SUM(CASE WHEN AVAILABILITY = 1 THEN HOURS ELSE 0 END) as PRODUCTIVE_HOURS, + SUM(CASE WHEN AVAILABILITY = 2 THEN HOURS ELSE 0 END) as STANDBY_HOURS, + SUM(CASE WHEN AVAILABILITY = 4 THEN HOURS ELSE 0 END) as DOWN_HOURS, + SUM(HOURS) as TOTAL_HOURS, + ROUND(SUM(CASE WHEN AVAILABILITY = 1 THEN HOURS ELSE 0 END) / NULLIF(SUM(HOURS), 0) * 100, 2) as UTILIZATION_PCT +FROM DW_MES_RESOURCESTATUS_SHIFT +WHERE DATADATE >= TRUNC(SYSDATE) - 7 +GROUP BY DATADATE, HISTORYID, WORKCENTERNAME +ORDER BY DATADATE DESC, UTILIZATION_PCT DESC; +``` + +**2. 月報表:設備停機時長排名** +```sql +SELECT + HISTORYID as RESOURCE_ID, + WORKCENTERNAME, + NEWREASONNAME as DOWN_REASON, + SUM(HOURS) as TOTAL_DOWN_HOURS, + COUNT(*) as DOWN_COUNT +FROM DW_MES_RESOURCESTATUS_SHIFT +WHERE DATADATE >= TRUNC(ADD_MONTHS(SYSDATE, -1), 'MM') + AND AVAILABILITY IN (4, 5) -- Down狀態 +GROUP BY HISTORYID, WORKCENTERNAME, NEWREASONNAME +ORDER BY TOTAL_DOWN_HOURS DESC; +``` + +**3. 趨勢分析:設備稼動率趨勢(按日)** +```sql +SELECT + DATADATE, + COUNT(DISTINCT HISTORYID) as EQUIPMENT_COUNT, + SUM(CASE WHEN AVAILABILITY = 1 THEN HOURS ELSE 0 END) as TOTAL_PRODUCTIVE_HOURS, + SUM(HOURS) as TOTAL_HOURS, + ROUND(SUM(CASE WHEN AVAILABILITY = 1 THEN HOURS ELSE 0 END) / NULLIF(SUM(HOURS), 0) * 100, 2) as AVG_UTILIZATION_PCT +FROM DW_MES_RESOURCESTATUS_SHIFT +WHERE DATADATE >= TRUNC(SYSDATE) - 30 + AND WORKCENTERNAME = 'WC001' -- 可選:指定工作中心 +GROUP BY DATADATE +ORDER BY DATADATE; +``` + +#### 優勢與使用建議 + +✅ **優勢**: +- 已彙總計算好時長(HOURS欄位),無需自行計算 +- 數據按DATADATE分區,查詢效率高 +- 適合做日報表、月報表 + +✅ **使用建議**: +- 優先使用此表做報表統計,而非RESOURCESTATUS +- 使用DATADATE作為主要時間篩選條件 +- 適合做時間序列分析和趨勢圖表 + +--- + +### 7. 
DW_MES_LOTWIPHISTORY(批次在制品歷史表)⭐⭐⭐ + +**表性質**: 歷史累積表(核心流程表) + +**業務定義**: 記錄批次在每個工序的完整流轉歷史,包含MoveIn/MoveOut和TrackIn/TrackOut信息 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | 業務含義 | +|--------|------|---------| +| `MOVEINTIMESTAMP` | 批次移入工序時間 | 批次到達工序的時間 | +| `MOVEOUTTIMESTAMP` | 批次移出工序時間 | 批次離開工序的時間 | +| `TRACKINTIMESTAMP` | 批次上機時間 | 批次開始在設備上加工 | +| `TRACKOUTTIMESTAMP` | 批次下機時間 | 批次完成加工離開設備 | +| `ORIGINALSTARTDATE` | 原始開始日期 | 批次首次開始生產日期 | +| `LAST_UPDATED_DATE` | 最後更新日期 | 記錄更新時間 | +| `LAST_SYNC_DATE` | 最後同步日期 | 資料同步時間 | + +**時間關係**: +``` +MoveIn → TrackIn → TrackOut → MoveOut + ↓ ↓ ↓ ↓ +到達工序 上機加工 完成加工 離開工序 +``` + +#### 關鍵業務欄位 + +**批次標識** +- `WIPLOTHISTORYID`: 歷史記錄唯一ID +- `WIPEQUIPMENTHISTORYID`: 設備歷史關聯ID +- `CONTAINERID` / `FINISHEDRUNCARD`: 批次容器ID與完成品號 +- `PJ_WORKORDER`: 工單號 + +**數量追蹤(4組數量)** +- `MOVEINQTY` / `MOVEINQTY2`: 移入數量(主/輔單位) +- `MOVEOUTQTY` / `MOVEOUTQTY2`: 移出數量 +- `TRACKINQTY` / `TRACKINQTY2`: 上機數量 +- `TRACKOUTQTY` / `TRACKOUTQTY2`: 下機數量 + +**工序與設備** +- `WORKCENTERID` / `WORKCENTERNAME`: 工作中心 +- `SPECID` / `SPECNAME`: 工序規格 +- `EQUIPMENTID` / `EQUIPMENTNAME`: 加工設備 +- `WORKFLOWNAME`: 工藝流程名稱 +- `PROCESSSPECNAME`: 工藝規格 +- `PROCESSTYPENAME`: 工序類型 + +**產品信息** +- `PRODUCTNAME`: 產品名稱 +- `DESCRIPTION`: 產品描述 +- `DATECODE`: 生產週期代碼 +- `PACKAGE_LF`: 封裝Lead Frame信息 + +**Wafer信息** +- `PJ_WAFERID1` / `PJ_WAFERID2` / `PJ_WAFERID3`: Wafer ID + +**人員信息** +- `TRACKINEMPLOYEENAME` / `TRACKINEMPZONE`: 上機人員與區域 +- `TRACKOUTEMPLOYEENAME` / `TRACKOUTEMPZONE`: 下機人員與區域 + +**其他** +- `FLAGNAME`: 標記名稱 +- `CARRIERNAME`: 載具名稱 +- `WIPTRACKINGGROUPKEYID`: WIP追蹤群組Key + +#### 查詢策略 + +**1. 
批次完整流轉軌跡查詢**
+```sql
+SELECT
+    WIPLOTHISTORYID,
+    WORKCENTERNAME,
+    SPECNAME,
+    EQUIPMENTNAME,
+    MOVEINTIMESTAMP,
+    TRACKINTIMESTAMP,
+    TRACKOUTTIMESTAMP,
+    MOVEOUTTIMESTAMP,
+    MOVEINQTY,
+    MOVEOUTQTY,
+    ROUND((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24, 2) as PROCESS_HOURS,
+    ROUND((MOVEOUTTIMESTAMP - MOVEINTIMESTAMP) * 24, 2) as STATION_HOURS,
+    TRACKINEMPLOYEENAME,
+    TRACKOUTEMPLOYEENAME
+FROM DW_MES_LOTWIPHISTORY
+WHERE CONTAINERID = 'CONTAINER_ID_HERE'  -- 或使用 PJ_WORKORDER = 'WO123'
+ORDER BY MOVEINTIMESTAMP;
+```
+
+**2. 工序加工時長分析(Cycle Time)**
+```sql
+SELECT
+    WORKCENTERNAME,
+    SPECNAME,
+    COUNT(*) as LOT_COUNT,
+    AVG((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24) as AVG_PROCESS_HOURS,
+    MIN((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24) as MIN_PROCESS_HOURS,
+    MAX((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24) as MAX_PROCESS_HOURS,
+    STDDEV((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24) as STDDEV_HOURS
+FROM DW_MES_LOTWIPHISTORY
+WHERE TRACKINTIMESTAMP >= TRUNC(SYSDATE) - 30
+    AND TRACKOUTTIMESTAMP IS NOT NULL
+GROUP BY WORKCENTERNAME, SPECNAME
+ORDER BY AVG_PROCESS_HOURS DESC;
+```
+
+**3. 設備產出統計(Throughput)**
+```sql
+SELECT
+    EQUIPMENTNAME,
+    WORKCENTERNAME,
+    TRUNC(TRACKINTIMESTAMP) as WORK_DATE,
+    COUNT(DISTINCT CONTAINERID) as LOT_COUNT,
+    SUM(TRACKINQTY) as TOTAL_QTY_IN,
+    SUM(TRACKOUTQTY) as TOTAL_QTY_OUT,
+    SUM(TRACKINQTY - TRACKOUTQTY) as QTY_LOSS  -- 損耗 = 投入 - 產出
+FROM DW_MES_LOTWIPHISTORY
+WHERE TRACKINTIMESTAMP >= TRUNC(SYSDATE) - 7
+    AND EQUIPMENTNAME IS NOT NULL
+GROUP BY EQUIPMENTNAME, WORKCENTERNAME, TRUNC(TRACKINTIMESTAMP)
+ORDER BY WORK_DATE DESC, TOTAL_QTY_OUT DESC;
+```
+
+**4. 
工序等待時間分析(Queue Time)** +```sql +SELECT + WORKCENTERNAME, + SPECNAME, + COUNT(*) as LOT_COUNT, + AVG((TRACKINTIMESTAMP - MOVEINTIMESTAMP) * 24) as AVG_QUEUE_HOURS, + MAX((TRACKINTIMESTAMP - MOVEINTIMESTAMP) * 24) as MAX_QUEUE_HOURS +FROM DW_MES_LOTWIPHISTORY +WHERE MOVEINTIMESTAMP >= TRUNC(SYSDATE) - 7 + AND TRACKINTIMESTAMP IS NOT NULL +GROUP BY WORKCENTERNAME, SPECNAME +ORDER BY AVG_QUEUE_HOURS DESC; +``` + +**5. 批次數量損耗追蹤** +```sql +SELECT + CONTAINERID, + PJ_WORKORDER, + WORKCENTERNAME, + SPECNAME, + MOVEINQTY, + MOVEOUTQTY, + (MOVEINQTY - MOVEOUTQTY) as QTY_LOSS, + ROUND((MOVEINQTY - MOVEOUTQTY) / NULLIF(MOVEINQTY, 0) * 100, 2) as LOSS_PCT, + MOVEINTIMESTAMP, + MOVEOUTTIMESTAMP +FROM DW_MES_LOTWIPHISTORY +WHERE MOVEINTIMESTAMP >= TRUNC(SYSDATE) - 7 + AND (MOVEINQTY - MOVEOUTQTY) > 0 -- 有損耗 +ORDER BY QTY_LOSS DESC; +``` + +#### 重要注意事項 + +⚠️ **時間範圍必須限制**: 此表有 5300 萬筆資料 + +⚠️ **時間計算**: +- 加工時間 = `TRACKOUTTIMESTAMP - TRACKINTIMESTAMP` +- 在站時間 = `MOVEOUTTIMESTAMP - MOVEINTIMESTAMP` +- 等待時間 = `TRACKINTIMESTAMP - MOVEINTIMESTAMP` + +⚠️ **索引優先使用**: `TRACKINTIMESTAMP`, `MOVEINTIMESTAMP`, `CONTAINERID`, `PJ_WORKORDER` + +--- + +### 8. DW_MES_LOTWIPDATAHISTORY(批次在制品數據歷史表) + +**表性質**: 歷史累積表(數據採集表) + +**業務定義**: 記錄批次在生產過程中採集的所有參數數據(如測試結果、SPC數據等) + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | +|--------|------| +| `TXNTIMESTAMP` | 數據採集時間 | +| `LAST_UPDATED_DATE` | 最後更新日期 | + +#### 關鍵業務欄位 + +**批次與工序** +- `CONTAINERID` / `FINISHEDRUNCARD`: 批次標識 +- `PJ_WORKORDER`: 工單號 +- `WORKCENTERID` / `WORKCENTERNAME`: 工作中心 +- `SPECID` / `SPECNAME`: 工序規格 +- `EQUIPMENTID` / `EQUIPMENTNAME`: 設備 + +**數據內容** +- `WIPDATANAMEID` / `WIPDATANAMENAME`: 數據項名稱 +- `WIPDATAVALUE`: 數據值(最長4000字元) +- `PJ_SPCDATARESULT`: SPC數據結果 + +**關聯信息** +- `WIPLOTHISTORYID`: 關聯LOTWIPHISTORY的ID +- `SERVICENAME`: 服務名稱 +- `PROCESSTYPENAME`: 工序類型 +- `EMPLOYEENAME`: 採集人員 +- `WAFERSCRIBENUMBER`: Wafer刻號 + +#### 查詢策略 + +**1. 
查詢批次採集的所有數據** +```sql +SELECT + WIPDATANAMENAME as DATA_NAME, + WIPDATAVALUE as DATA_VALUE, + TXNTIMESTAMP, + WORKCENTERNAME, + SPECNAME, + EQUIPMENTNAME, + EMPLOYEENAME +FROM DW_MES_LOTWIPDATAHISTORY +WHERE CONTAINERID = 'CONTAINER_ID_HERE' +ORDER BY TXNTIMESTAMP, WIPDATANAMENAME; +``` + +**2. 查詢特定參數的歷史趨勢** +```sql +SELECT + CONTAINERID, + TXNTIMESTAMP, + WIPDATAVALUE, + EQUIPMENTNAME, + PJ_SPCDATARESULT +FROM DW_MES_LOTWIPDATAHISTORY +WHERE WIPDATANAMENAME = 'PARAMETER_NAME' -- 如: 'Temperature' + AND TXNTIMESTAMP >= TRUNC(SYSDATE) - 7 +ORDER BY TXNTIMESTAMP; +``` + +**3. SPC異常數據查詢** +```sql +SELECT + CONTAINERID, + PJ_WORKORDER, + WORKCENTERNAME, + SPECNAME, + WIPDATANAMENAME, + WIPDATAVALUE, + PJ_SPCDATARESULT, + TXNTIMESTAMP +FROM DW_MES_LOTWIPDATAHISTORY +WHERE PJ_SPCDATARESULT IN ('Out of Control', 'Warning') -- 根據實際值調整 + AND TXNTIMESTAMP >= TRUNC(SYSDATE) - 7 +ORDER BY TXNTIMESTAMP DESC; +``` + +#### 重要注意事項 + +⚠️ **大數據量表**: 約 7,796 萬筆資料,務必加時間條件 + +⚠️ **與LOTWIPHISTORY關聯**: 通過`WIPLOTHISTORYID`關聯 + +⚠️ **數據值為文字**: `WIPDATAVALUE`是VARCHAR2,數值運算需轉換 + +--- + +### 9. 
DW_MES_HM_LOTMOVEOUT(批次移出表)⭐⭐ + +**表性質**: 歷史累積表(事件表) + +**業務定義**: 記錄每次批次從工序移出(MoveOut)的事件,是生產流程追蹤的重要數據源 + +#### 關鍵時間欄位 + +| 欄位名 | 用途 | +|--------|------| +| `TXNDATE` | 交易時間(MoveOut時間) | +| `MOVEINTIMESTAMP` | 移入時間 | +| `LASTMOVEOUTTIMESTAMP` | 最後移出時間 | +| `SYSTEMDATE` | 系統時間 | +| `MFGDATE` | 製造日期 | + +#### 關鍵業務欄位 + +**交易信息** +- `HISTORYID` / `HISTORYMAINLINEID`: 歷史記錄ID +- `HISTORYSUMMARYID`: 歷史匯總ID +- `TXNID`: 交易ID +- `TXNTYPE`: 交易類型 + +**批次與容器** +- `CONTAINERID` / `CONTAINERNAME`: 批次容器 +- `CARRIERID` / `CARRIERNAME`: 載具 + +**狀態變更(From → To)** +- `FROMSPECID` / `FROMSPECNAME`: 來源工序 +- `SPECID` / `SPECNAME`: 目標工序 +- `FROMWORKCENTER` / `WORKCENTER`: 工作中心變更 +- `FROMSTATUS` / `STATUS`: 狀態變更 +- `FROMQTY` / `QTY`: 數量變更 +- `FROMQTY2` / `QTY2`: 數量2變更 + +**數量信息** +- `MOVEINQTY` / `MOVEINQTY2`: 移入數量 +- `FROMUOMNAME` / `UOMNAME`: 單位 + +**設備與資源** +- `RESOURCEID` / `RESOURCENAME`: 資源(設備) +- `RESOURCEOBJECTCATEGORY` / `RESOURCEOBJECTTYPE`: 資源類型 +- `RESOURCESTATUSCODEID` / `RESOURCESTATUSREASONID`: 資源狀態 + +**產品與工單** +- `PRODUCTID` / `PRODUCTNAME`: 產品 +- `OWNERID` / `OWNERNAME`: 所有者 +- `WORKFLOWNAME`: 工藝流程 + +**人員信息** +- `EMPLOYEEID` / `EMPLOYEENAME`: 操作人員 +- `USERID` / `USERNAME`: 用戶 +- `USERFULLNAME`: 用戶全名 +- `EMPZONE`: 人員區域 + +**班次與時間** +- `SHIFTNAME`: 班次 +- `COMPUTERNAME`: 電腦名稱 +- `SERVERNAME`: 伺服器名稱 + +**MES CDC信息** +- `CDONAME`: CDO名稱 +- `CDOTXNSEQUENCE`: CDO交易序號 +- `CALLBYCDONAME`: 調用CDO名稱 + +**其他** +- `COMMENTS`: 備註 +- `CONSUMEFACTOR`: 消耗因子 +- `WAFERPRODUCT`: Wafer產品 + +#### 查詢策略 + +**1. 批次流轉記錄查詢** +```sql +SELECT + TXNDATE, + CONTAINERNAME, + FROMSPECNAME as FROM_STEP, + SPECNAME as TO_STEP, + FROMQTY, + QTY, + (FROMQTY - QTY) as QTY_LOSS, + RESOURCENAME, + EMPLOYEENAME, + SHIFTNAME +FROM DW_MES_HM_LOTMOVEOUT +WHERE CONTAINERID = 'CONTAINER_ID_HERE' +ORDER BY TXNDATE; +``` + +**2. 
每日產出統計** +```sql +SELECT + TRUNC(TXNDATE) as WORK_DATE, + SPECNAME, + WORKCENTER, + COUNT(DISTINCT CONTAINERID) as LOT_COUNT, + SUM(QTY) as TOTAL_QTY, + COUNT(DISTINCT RESOURCENAME) as EQUIPMENT_COUNT +FROM DW_MES_HM_LOTMOVEOUT +WHERE TXNDATE >= TRUNC(SYSDATE) - 7 +GROUP BY TRUNC(TXNDATE), SPECNAME, WORKCENTER +ORDER BY WORK_DATE DESC, TOTAL_QTY DESC; +``` + +**3. 人員產出績效** +```sql +SELECT + EMPLOYEENAME, + EMPZONE, + COUNT(DISTINCT CONTAINERID) as LOT_COUNT, + SUM(QTY) as TOTAL_OUTPUT, + COUNT(*) as OPERATION_COUNT +FROM DW_MES_HM_LOTMOVEOUT +WHERE TXNDATE >= TRUNC(SYSDATE) - 7 +GROUP BY EMPLOYEENAME, EMPZONE +ORDER BY TOTAL_OUTPUT DESC; +``` + +#### 重要注意事項 + +⚠️ **大數據量**: 約 4,865 萬筆,必須加時間條件 + +⚠️ **與LOTWIPHISTORY差異**: +- HM_LOTMOVEOUT: 只記錄MoveOut事件 +- LOTWIPHISTORY: 記錄完整的MoveIn/TrackIn/TrackOut/MoveOut + +--- + +### 10. 其他歷史表簡要說明 + +#### DW_MES_LOTREJECTHISTORY(批次拒絕歷史表) +- **用途**: 記錄批次報廢、損耗的歷史 +- **關鍵欄位**: + - `REJECTQTY`: 拒絕數量 + - `LOSSREASONNAME`: 損耗原因 + - `REJECTCATEGORYNAME`: 拒絕類別 +- **查詢場景**: 良率分析、損耗原因分析 + +#### DW_MES_LOTMATERIALSHISTORY(批次物料歷史表) +- **用途**: 記錄批次使用物料的歷史 +- **關鍵欄位**: + - `MATERIALPARTNAME`: 物料名稱 + - `MATERIALLOTNAME`: 物料批號 + - `QTYCONSUMED`: 消耗數量 + - `CONSUMEFACTOR`: 消耗因子 +- **查詢場景**: 物料追溯、消耗分析 + +#### DW_MES_HOLDRELEASEHISTORY(暫停/釋放歷史表) +- **用途**: 記錄批次Hold和Release的完整歷史 +- **關鍵欄位**: + - `HOLDTXNDATE` / `RELEASETXNDATE`: Hold/Release時間 + - `HOLDREASONNAME` / `RELEASEREASONNAME`: Hold/Release原因 + - `HOLDEMP` / `RELEASEEMP`: 操作人員 +- **查詢場景**: Hold原因分析、Hold時長統計 + +#### DW_MES_JOBTXNHISTORY(維修工單交易歷史表) +- **用途**: 記錄維修工單的狀態變更歷史 +- **關鍵欄位**: + - `FROMJOBSTATUS` / `JOBSTATUS`: 狀態變更 + - `TXNDATE`: 交易時間 +- **查詢場景**: 維修工單流程追蹤 + +#### DW_MES_MAINTENANCE(維護記錄表) +- **用途**: 記錄設備維護保養的詳細記錄 +- **關鍵欄位**: + - `MAINTENANCEREQNAME`: 維護需求名稱 + - `THRUPUTQTY`: 產出數量 + - `DATAVALUE`: 維護數據值 +- **查詢場景**: 設備保養追蹤、維護計劃執行狀況 + +--- + +## 表間關聯關係圖 + +### 核心實體關係 + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ 核心實體關係圖 │ 
+└─────────────────────────────────────────────────────────────────┘ + +1. 在制品流轉主線(核心業務流程) + + DW_MES_WIP (現況快照,含歷史累積) + ↓ CONTAINERID + DW_MES_CONTAINER (容器主檔) + ↓ CONTAINERID + DW_MES_LOTWIPHISTORY (流轉歷史) + ↓ WIPLOTHISTORYID + DW_MES_LOTWIPDATAHISTORY (數據採集歷史) + + DW_MES_LOTWIPHISTORY + ↓ CONTAINERID + DW_MES_HM_LOTMOVEOUT (移出事件) + + +2. 資源狀態主線(設備管理) + + DW_MES_RESOURCE (資源主檔) + ↓ RESOURCEID + DW_MES_RESOURCESTATUS (狀態變更歷史) + ↓ HISTORYID (= RESOURCEID) + DW_MES_RESOURCESTATUS_SHIFT (班次彙總) + + +3. 工單維修主線(維修管理) + + DW_MES_JOB (工單現況) + ↓ JOBID + DW_MES_JOBTXNHISTORY (維修工單交易歷史) + + DW_MES_JOB + ↓ RESOURCEID + DW_MES_RESOURCE (關聯設備) + + DW_MES_JOB + ↓ JOBID + DW_MES_PARTREQUESTORDER (物料請求) + + +4. 批次異常處理主線 + + DW_MES_WIP / DW_MES_CONTAINER + ↓ CONTAINERID + DW_MES_HOLDRELEASEHISTORY (Hold/Release歷史) + + DW_MES_LOTWIPHISTORY + ↓ HISTORYMAINLINEID + DW_MES_LOTREJECTHISTORY (拒絕歷史) + + +5. 物料消耗主線 + + DW_MES_LOTWIPHISTORY + ↓ CONTAINERID + DW_MES_LOTMATERIALSHISTORY (物料消耗歷史) + + +6. 設備維護主線 + + DW_MES_RESOURCE + ↓ RESOURCEID + DW_MES_MAINTENANCE (維護記錄) +``` + +### 詳細關聯鍵對照表 + +| 主表 | 關聯表 | 關聯欄位 | 關聯類型 | 說明 | +|------|--------|---------|---------|------| +| **DW_MES_WIP** | DW_MES_CONTAINER | CONTAINERID | 1:1 | 在制品關聯容器 | +| **DW_MES_CONTAINER** | DW_MES_LOTWIPHISTORY | CONTAINERID | 1:N | 容器的流轉歷史 | +| **DW_MES_LOTWIPHISTORY** | DW_MES_LOTWIPDATAHISTORY | WIPLOTHISTORYID | 1:N | 流轉記錄的數據採集 | +| **DW_MES_LOTWIPHISTORY** | DW_MES_HM_LOTMOVEOUT | CONTAINERID + HISTORYMAINLINEID | 1:N | 流轉的移出事件 | +| **DW_MES_LOTWIPHISTORY** | DW_MES_LOTREJECTHISTORY | HISTORYMAINLINEID | 1:N | 流轉的拒絕記錄 | +| **DW_MES_LOTWIPHISTORY** | DW_MES_LOTMATERIALSHISTORY | CONTAINERID | 1:N | 流轉的物料消耗 | +| **DW_MES_WIP** | DW_MES_HOLDRELEASEHISTORY | CONTAINERID | 1:N | 在制品的Hold歷史 | +| **DW_MES_RESOURCE** | DW_MES_RESOURCESTATUS | RESOURCEID = HISTORYID | 1:N | 資源的狀態歷史 | +| **DW_MES_RESOURCE** | DW_MES_RESOURCESTATUS_SHIFT | RESOURCEID = HISTORYID | 1:N | 資源的班次彙總 | +| **DW_MES_RESOURCE** | DW_MES_MAINTENANCE | 
RESOURCEID | 1:N | 資源的維護記錄 | +| **DW_MES_RESOURCE** | DW_MES_PARTREQUESTORDER | RESOURCEID | 1:N | 資源的維修用料請求 | +| **DW_MES_RESOURCE** | DW_MES_HM_LOTMOVEOUT | RESOURCEID | 1:N | 資源對應的移出事件 | +| **DW_MES_JOB** | DW_MES_JOBTXNHISTORY | JOBID | 1:N | 工單的交易歷史 | +| **DW_MES_JOB** | DW_MES_RESOURCE | RESOURCEID | N:1 | 工單關聯資源 | +| **DW_MES_JOB** | DW_MES_PARTREQUESTORDER | JOBID | 1:N | 工單的物料請求 | +| **DW_MES_CONTAINER** | DW_MES_PJ_COMBINEDASSYLOTS | CONTAINERID | 1:N | 容器的組合裝配 | + +### Reference 備註確認的關聯 + +以下關聯來自 `MES_Database_Reference.md` 的欄位備註(維護人註記): + +| 表 | 欄位 | 備註 | 可推得關聯/用途 | +|------|------|------|----------------| +| **DW_MES_RESOURCESTATUS** | HISTORYID | RESOURCEID | 關聯 `DW_MES_RESOURCE.RESOURCEID` | +| **DW_MES_RESOURCESTATUS_SHIFT** | HISTORYID | RESOURCEID | 關聯 `DW_MES_RESOURCE.RESOURCEID` | +| **DW_MES_JOB** | PARTREQUESTORDERNAME | DW_MES_PARTREQUESTORDER | 可由 `DW_MES_PARTREQUESTORDER` 取得工單請領資訊 | +| **DW_MES_WIP** | RELEASETIME / RELEASEEMP / RELEASEREASON | DW_MES_HOLDRELEASEHISTORY | WIP 的解除資訊來源於 Hold/Release 歷史 | + +### 欄位來源備註(同表內派生) + +以下備註顯示欄位來源於同表關鍵欄位(非跨表),建議查詢時以 ID 欄位為主: + +| 表 | 欄位 | 備註 | +|------|------|------| +| **DW_MES_CONTAINER** | MFGORDERNAME / PJ_BOP / PJ_PRODUCEREGION / PRODUCTBOMBASEID | MFGORDERID | +| **DW_MES_WIP** | STARTREASONNAME / MFGORDERNAME / FIRSTNAME / OWNERNAME / PRIORITYCODENAME / PRODUCTBOMBASEID / PRODUCTNAME / PRODUCTLINENAME / PJ_BOP / PJ_PRODUCEREGION / PJ_TYPE / PJ_FUNCTION | CONTAINERID | +| **DW_MES_WIP** | WOQTY / WOPLANNEDCOMPLETIONDATE | CONTAINERID -> MFGORDERID | + +### 關鍵關聯欄位說明 + +#### CONTAINERID +- 批次/容器的唯一標識(16位元CHAR) +- 貫穿所有與批次相關的表 +- 最重要的關聯欄位 + +#### RESOURCEID / HISTORYID +- RESOURCE表使用 `RESOURCEID` +- RESOURCESTATUS表使用 `HISTORYID`(實際上等於RESOURCEID) +- 關聯時注意欄位名稱差異 + +#### HISTORYMAINLINEID +- 歷史記錄的主線ID +- 用於關聯同一批次在不同歷史表的記錄 + +#### WIPLOTHISTORYID +- LOTWIPHISTORY的主鍵 +- LOTWIPDATAHISTORY用此欄位關聯 + +#### PJ_WORKORDER +- 工單號(業務鍵) +- 部分表使用此欄位追蹤批次 + +--- + +## 關鍵業務場景查詢策略 + +### 場景1: 在制品(WIP)看板 + +**需求**: 
顯示當前所有在制品的狀態、位置、停滯時間 + +**推薦表**: `DW_MES_WIP` + +**查詢邏輯**: +```sql +SELECT + CONTAINERNAME, + PRODUCTNAME, + PRODUCTLINENAME, + WORKCENTERNAME, + WORKFLOWSTEPNAME, + QTY, + MOVEINTIMESTAMP, + ROUND((SYSDATE - MOVEINTIMESTAMP) * 24, 2) as HOURS_IN_STATION, + CURRENTHOLDCOUNT, + HOLDREASONNAME, + LOCATIONNAME +FROM DW_MES_WIP +WHERE STATUS NOT IN (8, 128) -- 排除已完成或取消 +ORDER BY HOURS_IN_STATION DESC; +``` + +**效能優化**: +- 使用索引: `TXNDATE`, `CONTAINERNAME` +- 建議增加工作中心或產品線篩選 + +--- + +### 場景2: 設備稼動率(OEE)報表 + +**需求**: 計算每日設備的稼動率、停機時長 + +**推薦表**: `DW_MES_RESOURCESTATUS_SHIFT`(首選,已彙總) + +**查詢邏輯**: +```sql +SELECT + DATADATE, + HISTORYID as RESOURCE_ID, + WORKCENTERNAME, + -- 生產時間 + SUM(CASE WHEN AVAILABILITY = 1 THEN HOURS ELSE 0 END) as PRODUCTIVE_HOURS, + -- 待機時間 + SUM(CASE WHEN AVAILABILITY = 2 THEN HOURS ELSE 0 END) as STANDBY_HOURS, + -- 非計劃停機 + SUM(CASE WHEN AVAILABILITY = 4 THEN HOURS ELSE 0 END) as UNSCHEDULED_DOWN_HOURS, + -- 計劃停機 + SUM(CASE WHEN AVAILABILITY = 5 THEN HOURS ELSE 0 END) as SCHEDULED_DOWN_HOURS, + -- 總時間 + SUM(HOURS) as TOTAL_HOURS, + -- 稼動率 + ROUND(SUM(CASE WHEN AVAILABILITY = 1 THEN HOURS ELSE 0 END) / NULLIF(SUM(HOURS), 0) * 100, 2) as UTILIZATION_PCT +FROM DW_MES_RESOURCESTATUS_SHIFT +WHERE DATADATE >= TRUNC(SYSDATE) - 7 +GROUP BY DATADATE, HISTORYID, WORKCENTERNAME +ORDER BY DATADATE DESC, UTILIZATION_PCT DESC; +``` + +**替代方案**: 若需要更細緻的時間分析,使用 `DW_MES_RESOURCESTATUS` + +**效能優化**: +- 優先使用 `DATADATE` 索引 +- 班次表比狀態表效率高約10倍 + +--- + +### 場景3: 批次生產履歷追溯 + +**需求**: 追溯某批次的完整生產過程(每個工序的時間、設備、人員) + +**推薦表**: `DW_MES_LOTWIPHISTORY` + +**查詢邏輯**: +```sql +SELECT + WIPLOTHISTORYID, + WORKCENTERNAME, + SPECNAME, + EQUIPMENTNAME, + MOVEINTIMESTAMP, + TRACKINTIMESTAMP, + TRACKOUTTIMESTAMP, + MOVEOUTTIMESTAMP, + MOVEINQTY, + MOVEOUTQTY, + (MOVEINQTY - MOVEOUTQTY) as QTY_LOSS, + ROUND((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24, 2) as PROCESS_HOURS, + ROUND((MOVEOUTTIMESTAMP - MOVEINTIMESTAMP) * 24, 2) as STATION_HOURS, + ROUND((TRACKINTIMESTAMP - MOVEINTIMESTAMP) * 
24, 2) as QUEUE_HOURS, + TRACKINEMPLOYEENAME, + TRACKOUTEMPLOYEENAME, + FLAGNAME +FROM DW_MES_LOTWIPHISTORY +WHERE CONTAINERID = 'CONTAINER_ID_HERE' -- 或使用 PJ_WORKORDER +ORDER BY MOVEINTIMESTAMP; +``` + +**擴展查詢**: 加入採集數據 +```sql +SELECT + lwh.SPECNAME, + lwh.EQUIPMENTNAME, + lwh.TRACKINTIMESTAMP, + lwd.WIPDATANAMENAME, + lwd.WIPDATAVALUE, + lwd.PJ_SPCDATARESULT +FROM DW_MES_LOTWIPHISTORY lwh +LEFT JOIN DW_MES_LOTWIPDATAHISTORY lwd + ON lwh.WIPLOTHISTORYID = lwd.WIPLOTHISTORYID +WHERE lwh.CONTAINERID = 'CONTAINER_ID_HERE' +ORDER BY lwh.MOVEINTIMESTAMP, lwd.WIPDATANAMENAME; +``` + +--- + +### 場景4: 工序Cycle Time分析 + +**需求**: 分析各工序的平均加工時間、最大/最小時間 + +**推薦表**: `DW_MES_LOTWIPHISTORY` + +**查詢邏輯**: +```sql +SELECT + WORKCENTERNAME, + SPECNAME, + PROCESSTYPENAME, + COUNT(*) as LOT_COUNT, + -- 加工時間統計 + ROUND(AVG((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24), 2) as AVG_PROCESS_HOURS, + ROUND(MIN((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24), 2) as MIN_PROCESS_HOURS, + ROUND(MAX((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24), 2) as MAX_PROCESS_HOURS, + ROUND(STDDEV((TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) * 24), 2) as STDDEV_HOURS, + -- 在站時間統計 + ROUND(AVG((MOVEOUTTIMESTAMP - MOVEINTIMESTAMP) * 24), 2) as AVG_STATION_HOURS, + -- 等待時間統計 + ROUND(AVG((TRACKINTIMESTAMP - MOVEINTIMESTAMP) * 24), 2) as AVG_QUEUE_HOURS +FROM DW_MES_LOTWIPHISTORY +WHERE TRACKINTIMESTAMP >= TRUNC(SYSDATE) - 30 + AND TRACKOUTTIMESTAMP IS NOT NULL + AND (TRACKOUTTIMESTAMP - TRACKINTIMESTAMP) > 0 -- 排除異常數據 +GROUP BY WORKCENTERNAME, SPECNAME, PROCESSTYPENAME +ORDER BY AVG_PROCESS_HOURS DESC; +``` + +**瓶頸工序識別**: +```sql +SELECT + WORKCENTERNAME, + SPECNAME, + AVG((MOVEOUTTIMESTAMP - MOVEINTIMESTAMP) * 24) as AVG_STATION_HOURS, + COUNT(*) as LOT_COUNT +FROM DW_MES_LOTWIPHISTORY +WHERE MOVEINTIMESTAMP >= TRUNC(SYSDATE) - 7 +GROUP BY WORKCENTERNAME, SPECNAME +HAVING AVG((MOVEOUTTIMESTAMP - MOVEINTIMESTAMP) * 24) > 24 -- 在站超過24小時 +ORDER BY AVG_STATION_HOURS DESC; +``` + +--- + +### 場景5: 設備產出與效率分析 + +**需求**: 
統計各設備的產出數量、良率、稼動時間 + +**推薦表**: +- 產出數量: `DW_MES_LOTWIPHISTORY` +- 良率: `DW_MES_LOTREJECTHISTORY` +- 稼動: `DW_MES_RESOURCESTATUS_SHIFT` + +**查詢邏輯(產出)**: +```sql +SELECT + EQUIPMENTNAME, + WORKCENTERNAME, + TRUNC(TRACKINTIMESTAMP) as WORK_DATE, + COUNT(DISTINCT CONTAINERID) as LOT_COUNT, + SUM(TRACKINQTY) as TOTAL_INPUT_QTY, + SUM(TRACKOUTQTY) as TOTAL_OUTPUT_QTY, + SUM(TRACKINQTY - TRACKOUTQTY) as TOTAL_LOSS_QTY, + ROUND((1 - SUM(TRACKINQTY - TRACKOUTQTY) / NULLIF(SUM(TRACKINQTY), 0)) * 100, 2) as YIELD_PCT +FROM DW_MES_LOTWIPHISTORY +WHERE TRACKINTIMESTAMP >= TRUNC(SYSDATE) - 7 + AND EQUIPMENTNAME IS NOT NULL +GROUP BY EQUIPMENTNAME, WORKCENTERNAME, TRUNC(TRACKINTIMESTAMP) +ORDER BY WORK_DATE DESC, TOTAL_OUTPUT_QTY DESC; +``` + +**整合稼動率查詢**: +```sql +SELECT + r.HISTORYID as RESOURCE_ID, + r.WORKCENTERNAME, + r.DATADATE, + -- 稼動數據 + SUM(CASE WHEN r.AVAILABILITY = 1 THEN r.HOURS ELSE 0 END) as PRODUCTIVE_HOURS, + ROUND(SUM(CASE WHEN r.AVAILABILITY = 1 THEN r.HOURS ELSE 0 END) / NULLIF(SUM(r.HOURS), 0) * 100, 2) as UTILIZATION_PCT, + -- 產出數據 + COUNT(DISTINCT w.CONTAINERID) as LOT_COUNT, + SUM(w.TRACKOUTQTY) as TOTAL_OUTPUT_QTY +FROM DW_MES_RESOURCESTATUS_SHIFT r +LEFT JOIN DW_MES_LOTWIPHISTORY w + ON r.HISTORYID = w.EQUIPMENTID + AND TRUNC(w.TRACKINTIMESTAMP) = r.DATADATE +WHERE r.DATADATE >= TRUNC(SYSDATE) - 7 +GROUP BY r.HISTORYID, r.WORKCENTERNAME, r.DATADATE +ORDER BY r.DATADATE DESC, UTILIZATION_PCT DESC; +``` + +--- + +### 場景6: Hold批次分析 + +**需求**: 統計當前Hold批次、Hold原因、Hold時長 + +**推薦表**: +- 當前狀態: `DW_MES_WIP` +- 歷史記錄: `DW_MES_HOLDRELEASEHISTORY` + +**查詢邏輯(當前Hold)**: +```sql +SELECT + CONTAINERNAME, + PRODUCTNAME, + PRODUCTLINENAME, + WORKCENTERNAME, + WORKFLOWSTEPNAME, + HOLDREASONNAME, + HOLDTIME, + ROUND((SYSDATE - HOLDTIME) * 24, 2) as HOLD_HOURS, + HOLDEMP, + HOLDLOCATIONNAME, + CURRENTHOLDCOUNT, + HOLDCOMMENT_FUTURE +FROM DW_MES_WIP +WHERE CURRENTHOLDCOUNT > 0 + AND STATUS NOT IN (8, 128) +ORDER BY HOLD_HOURS DESC; +``` + +**查詢邏輯(Hold歷史分析)**: +```sql +SELECT + 
HOLDREASONNAME,
+    COUNT(*) as HOLD_COUNT,
+    AVG((RELEASETXNDATE - HOLDTXNDATE) * 24) as AVG_HOLD_HOURS,
+    MAX((RELEASETXNDATE - HOLDTXNDATE) * 24) as MAX_HOLD_HOURS,
+    SUM(QTY) as TOTAL_HOLD_QTY
+FROM DW_MES_HOLDRELEASEHISTORY
+WHERE HOLDTXNDATE >= TRUNC(SYSDATE) - 30
+    AND RELEASETXNDATE IS NOT NULL
+GROUP BY HOLDREASONNAME
+ORDER BY HOLD_COUNT DESC;
+```
+
+---
+
+### 場景7: 生產工單進度追蹤
+
+**需求**: 查詢工單的投入數量、完成數量、在制數量、預計完成時間
+
+**推薦表**: `DW_MES_WIP` + `DW_MES_CONTAINER`
+
+**查詢邏輯**:
+```sql
+WITH WO_SUMMARY AS (
+    SELECT
+        MFGORDERNAME,
+        PRODUCTNAME,
+        MAX(WOQTY) as WO_TOTAL_QTY,
+        MAX(WOPLANNEDCOMPLETIONDATE) as PLANNED_COMPLETION_DATE,
+        COUNT(DISTINCT CONTAINERNAME) as LOT_COUNT,
+        SUM(QTY) as CURRENT_WIP_QTY,
+        SUM(CASE WHEN CURRENTHOLDCOUNT > 0 THEN QTY ELSE 0 END) as HOLD_QTY,
+        MIN(MOVEINTIMESTAMP) as FIRST_MOVEIN,
+        MAX(MOVEINTIMESTAMP) as LAST_MOVEIN
+    FROM DW_MES_WIP
+    WHERE STATUS NOT IN (8, 128)
+    GROUP BY MFGORDERNAME, PRODUCTNAME
+)
+SELECT
+    MFGORDERNAME,
+    PRODUCTNAME,
+    WO_TOTAL_QTY,
+    CURRENT_WIP_QTY,
+    HOLD_QTY,
+    (WO_TOTAL_QTY - CURRENT_WIP_QTY) as COMPLETED_QTY,
+    ROUND((CURRENT_WIP_QTY / NULLIF(WO_TOTAL_QTY, 0)) * 100, 2) as WIP_PCT,
+    ROUND((HOLD_QTY / NULLIF(CURRENT_WIP_QTY, 0)) * 100, 2) as HOLD_PCT,
+    PLANNED_COMPLETION_DATE,
+    CASE
+        WHEN PLANNED_COMPLETION_DATE < SYSDATE THEN 'Overdue'
+        WHEN PLANNED_COMPLETION_DATE < SYSDATE + 3 THEN 'Critical'
+        ELSE 'On Track'
+    END as STATUS,
+    LOT_COUNT,
+    FIRST_MOVEIN,
+    LAST_MOVEIN
+FROM WO_SUMMARY
+ORDER BY PLANNED_COMPLETION_DATE;
+```
+
+---
+
+### 場景8: 良率分析
+
+**需求**: 分析各工序、產品的良率
+
+**推薦表**:
+- `DW_MES_LOTWIPHISTORY`(產出)
+- `DW_MES_LOTREJECTHISTORY`(報廢)
+
+**查詢邏輯**:
+```sql
+SELECT
+    w.WORKCENTERNAME,
+    w.SPECNAME,
+    w.PRODUCTNAME,
+    TRUNC(w.MOVEINTIMESTAMP) as WORK_DATE,
+    -- 產出統計
+    COUNT(DISTINCT w.CONTAINERID) as LOT_COUNT,
+    SUM(w.MOVEINQTY) as TOTAL_INPUT_QTY,
+    SUM(w.MOVEOUTQTY) as TOTAL_OUTPUT_QTY,
+    -- 報廢統計
+    SUM(NVL(r.REJECTQTY, 0)) as TOTAL_REJECT_QTY,
+    -- 良率計算
+    ROUND((1 - 
SUM(NVL(r.REJECTQTY, 0)) / NULLIF(SUM(w.MOVEINQTY), 0)) * 100, 2) as YIELD_PCT +FROM DW_MES_LOTWIPHISTORY w +LEFT JOIN DW_MES_LOTREJECTHISTORY r + ON w.CONTAINERID = r.CONTAINERID + AND w.SPECID = r.SPECID + AND TRUNC(w.MOVEINTIMESTAMP) = TRUNC(r.TXNDATE) +WHERE w.MOVEINTIMESTAMP >= TRUNC(SYSDATE) - 30 +GROUP BY w.WORKCENTERNAME, w.SPECNAME, w.PRODUCTNAME, TRUNC(w.MOVEINTIMESTAMP) +ORDER BY WORK_DATE DESC, YIELD_PCT ASC; +``` + +**報廢原因分析**: +```sql +SELECT + WORKCENTERNAME, + SPECNAME, + LOSSREASONNAME, + REJECTCATEGORYNAME, + COUNT(*) as OCCURRENCE_COUNT, + SUM(REJECTQTY) as TOTAL_REJECT_QTY, + AVG(REJECTQTY) as AVG_REJECT_QTY_PER_LOT +FROM DW_MES_LOTREJECTHISTORY +WHERE TXNDATE >= TRUNC(SYSDATE) - 30 +GROUP BY WORKCENTERNAME, SPECNAME, LOSSREASONNAME, REJECTCATEGORYNAME +ORDER BY TOTAL_REJECT_QTY DESC; +``` + +--- + +## 查詢效能最佳實踐 + +### 1. 大表查詢原則 + +#### 必須加時間範圍的表(>1000萬筆) +- `DW_MES_WIP`: 使用 `TXNDATE` +- `DW_MES_LOTWIPDATAHISTORY`: 使用 `TXNTIMESTAMP` +- `DW_MES_RESOURCESTATUS_SHIFT`: 使用 `DATADATE`(推薦) +- `DW_MES_RESOURCESTATUS`: 使用 `OLDLASTSTATUSCHANGEDATE` +- `DW_MES_LOTWIPHISTORY`: 使用 `TRACKINTIMESTAMP` 或 `MOVEINTIMESTAMP` +- `DW_MES_MAINTENANCE`: 使用 `TXNDATE` +- `DW_MES_HM_LOTMOVEOUT`: 使用 `TXNDATE` +- `DW_MES_LOTMATERIALSHISTORY`: 使用 `TXNDATE` +- `DW_MES_LOTREJECTHISTORY`: 使用 `TXNDATE` + +#### 推薦時間範圍 +```sql +-- 日報表 +WHERE DATADATE >= TRUNC(SYSDATE) - 7 + +-- 週報表 +WHERE DATADATE >= TRUNC(SYSDATE, 'IW') - 7 + +-- 月報表 +WHERE DATADATE >= TRUNC(SYSDATE, 'MM') +``` + +### 2. 
索引使用策略 + +#### 優先使用索引欄位 +```sql +-- 好的寫法(使用索引) +WHERE TXNDATE >= TRUNC(SYSDATE) - 7 + AND CONTAINERNAME = 'LOT123' + +-- 不好的寫法(破壞索引) +WHERE TO_CHAR(TXNDATE, 'YYYY-MM-DD') = '2026-01-14' + OR UPPER(CONTAINERNAME) = 'LOT123' +``` + +#### 各表主要索引 + +| 表名 | 推薦查詢索引 | +|------|------------| +| DW_MES_WIP | `CONTAINERNAME`, `TXNDATE` | +| DW_MES_RESOURCESTATUS_SHIFT | `DATADATE`, `HISTORYID` | +| DW_MES_LOTWIPHISTORY | `TRACKINTIMESTAMP`, `CONTAINERID`, `PJ_WORKORDER` | +| DW_MES_HM_LOTMOVEOUT | `TXNDATE`, `HISTORYID` | + +### 3. JOIN優化 + +#### 推薦JOIN順序 +```sql +-- 小表 JOIN 大表 +SELECT ... +FROM DW_MES_RESOURCE r -- 90K rows +INNER JOIN DW_MES_RESOURCESTATUS_SHIFT rs -- 74M rows + ON r.RESOURCEID = rs.HISTORYID +WHERE rs.DATADATE >= TRUNC(SYSDATE) - 7 -- 先過濾大表 +``` + +#### 避免笛卡爾積 +```sql +-- 使用 DISTINCT 或 GROUP BY 去重 +SELECT DISTINCT + w.CONTAINERNAME, + r.RESOURCENAME +FROM DW_MES_WIP w +INNER JOIN DW_MES_LOTWIPHISTORY h + ON w.CONTAINERID = h.CONTAINERID +``` + +### 4. 聚合查詢優化 + +#### 使用SHIFT表而非原始表 +```sql +-- 推薦:使用班次彙總表 +SELECT DATADATE, SUM(HOURS) +FROM DW_MES_RESOURCESTATUS_SHIFT +WHERE DATADATE >= TRUNC(SYSDATE) - 30 +GROUP BY DATADATE; + +-- 不推薦:使用原始狀態表 +SELECT TRUNC(OLDLASTSTATUSCHANGEDATE), + SUM((LASTSTATUSCHANGEDATE - OLDLASTSTATUSCHANGEDATE) * 24) +FROM DW_MES_RESOURCESTATUS +WHERE OLDLASTSTATUSCHANGEDATE >= TRUNC(SYSDATE) - 30 +GROUP BY TRUNC(OLDLASTSTATUSCHANGEDATE); +``` + +### 5. 
分頁查詢 + +```sql +-- Oracle 12c+ 使用 OFFSET FETCH +SELECT * +FROM ( + SELECT CONTAINERNAME, PRODUCTNAME, MOVEINTIMESTAMP + FROM DW_MES_WIP + WHERE STATUS NOT IN (8, 128) + ORDER BY MOVEINTIMESTAMP DESC +) +OFFSET 0 ROWS FETCH NEXT 100 ROWS ONLY; + +-- Oracle 11g 使用 ROWNUM +SELECT * +FROM ( + SELECT ROWNUM as RN, t.* + FROM ( + SELECT CONTAINERNAME, PRODUCTNAME, MOVEINTIMESTAMP + FROM DW_MES_WIP + WHERE STATUS NOT IN (8, 128) + ORDER BY MOVEINTIMESTAMP DESC + ) t + WHERE ROWNUM <= 100 +) +WHERE RN > 0; +``` + +--- + +## 附錄:常用代碼對照表 + +### STATUS 狀態碼 + +| STATUS值 | 含義 | +|---------|------| +| 1 | In Progress(進行中) | +| 2 | On Hold(暫停) | +| 4 | Released(已釋放) | +| 8 | Completed(已完成) | +| 16 | In Queue(排隊中) | +| 32 | Reserved(保留) | +| 64 | In Transit(運輸中) | +| 128 | Cancelled(已取消) | + +### AVAILABILITY 可用性代碼 + +| AVAILABILITY | 含義 | 用途 | +|-------------|------|------| +| 1 | Productive | 生產中(計入稼動時間) | +| 2 | Standby | 待機(不計入稼動) | +| 3 | Non-Scheduled | 非排程時間 | +| 4 | Unscheduled Down | 非計劃停機(故障) | +| 5 | Scheduled Down | 計劃停機(保養) | + +### TXNTYPE 交易類型 + +需根據實際系統定義 + +### OBJECTTYPE 資源類型 + +| OBJECTTYPE | 說明 | +|-----------|------| +| Equipment | 設備 | +| WorkStation | 工作站 | +| Location | 位置 | + +--- + +## 總結與建議 + +### 表選擇決策樹 + +``` +查詢需求分類: + +1. 要查詢"當前狀態" → 使用快照表 + - 在制品現況 → DW_MES_WIP + - 設備基本信息 → DW_MES_RESOURCE + - 容器當前狀態 → DW_MES_CONTAINER + - 設備維修工單當前狀態 → DW_MES_JOB + +2. 要查詢"歷史記錄" → 使用歷史表 + - 批次流轉歷史 → DW_MES_LOTWIPHISTORY + - 設備狀態歷史 → DW_MES_RESOURCESTATUS_SHIFT(推薦)或 RESOURCESTATUS + - 批次移出事件 → DW_MES_HM_LOTMOVEOUT + - 數據採集歷史 → DW_MES_LOTWIPDATAHISTORY + +3. 要做"統計分析" → 優先使用彙總表 + - 設備稼動率 → DW_MES_RESOURCESTATUS_SHIFT(已計算HOURS) + - 產出統計 → DW_MES_LOTWIPHISTORY + - 良率分析 → DW_MES_LOTWIPHISTORY + DW_MES_LOTREJECTHISTORY +``` + +### 開發優先級建議 + +#### 第一階段:基礎報表(必須) +1. 在制品看板(WIP Dashboard) +2. 設備稼動率報表(OEE Report) +3. 設備維修工單進度追蹤(WO Progress) + +#### 第二階段:分析報表(重要) +4. Cycle Time分析 +5. 設備產出分析 +6. Hold批次分析 + +#### 第三階段:深度分析(進階) +7. 良率分析 +8. 瓶頸工序分析 +9. 
批次履歷追溯 + +### 效能關鍵注意事項 + +⚠️ **關鍵警告**: +1. 所有大表(>1000萬筆)查詢必須加時間範圍 +2. 優先使用班次彙總表(SHIFT)而非原始表 +3. 避免在索引欄位使用函數 +4. JOIN時先過濾再關聯 +5. 使用EXPLAIN PLAN檢查執行計劃 + +--- + +**文檔版本**: v1.2 +**最後更新**: 2026-01-29 +**更新內容**: DWH 全表掃描更新數據量、補充 DW_MES_SPEC_WORKCENTER_V 工站對照視圖與查詢策略 +**建議更新週期**: 每季度或表結構變更時 + + + + diff --git a/docs/MES_Database_Reference.md b/docs/MES_Database_Reference.md new file mode 100644 index 0000000..c4899d0 --- /dev/null +++ b/docs/MES_Database_Reference.md @@ -0,0 +1,1379 @@ +# MES 数据库报表开发参考文档 + +**生成时间**: 2026-01-29 14:48:13 + +--- + +## 目录 + +1. [数据库连接信息](#数据库连接信息) +2. [数据库概览](#数据库概览) +3. [表结构详细说明](#表结构详细说明) +4. [报表开发注意事项](#报表开发注意事项) +5. [常用查询示例](#常用查询示例) + +--- + +## 数据库连接信息 + +### 连接参数 + +| 参数 | 值 | +|------|------| +| 数据库类型 | Oracle Database 19c Enterprise Edition | +| 主机地址 | 請參考 .env 檔案 (DB_HOST) | +| 端口 | 請參考 .env 檔案 (DB_PORT) | +| 服务名 | 請參考 .env 檔案 (DB_SERVICE) | +| 用户名 | 請參考 .env 檔案 (DB_USER) | +| 密码 | 請參考 .env 檔案 (DB_PASSWORD) | + +### Python 连接示例 + +```python +import os +import oracledb +from dotenv import load_dotenv + +# 載入環境變數 +load_dotenv() + +# 连接配置 (從環境變數讀取) +DB_CONFIG = { + 'user': os.getenv('DB_USER'), + 'password': os.getenv('DB_PASSWORD'), + 'dsn': f"(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST={os.getenv('DB_HOST')})(PORT={os.getenv('DB_PORT')})))(CONNECT_DATA=(SERVICE_NAME={os.getenv('DB_SERVICE')})))" +} + +# 建立连接 +connection = oracledb.connect(**DB_CONFIG) +cursor = connection.cursor() + +# 执行查询 +cursor.execute('SELECT * FROM DW_MES_WIP WHERE ROWNUM <= 10') +results = cursor.fetchall() + +# 关闭连接 +cursor.close() +connection.close() +``` + +### JDBC 连接字符串 + +``` +jdbc:oracle:thin:@//${DB_HOST}:${DB_PORT}/${DB_SERVICE} +``` + +--- + +## 数据库概览 + +### 表统计信息 + +| # | 表名 | 用途 | 数据量 | +|---|------|------|--------| +| 1 | `DW_MES_CONTAINER` | 容器/批次主檔 - 目前在製容器狀態、數量與流程資訊 | 5,218,406 | +| 2 | `DW_MES_EQUIPMENTSTATUS_WIP_V` | 待补充 | 2,631 | +| 3 | `DW_MES_HM_LOTMOVEOUT` | 批次出站事件歷史表 - 出站/移出交易 | 48,645,692 | +| 4 | `DW_MES_HOLDRELEASEHISTORY` 
| Hold/Release 歷史表 - 批次停工與解除紀錄 | 310,737 | +| 5 | `DW_MES_JOB` | 設備維修工單表 - 維修工單的當前狀態與流程 | 1,248,622 | +| 6 | `DW_MES_JOBTXNHISTORY` | 維修工單交易歷史表 - 工單狀態變更紀錄 | 9,554,723 | +| 7 | `DW_MES_LOTMATERIALSHISTORY` | 批次物料消耗歷史表 - 用料與批次關聯 | 17,829,931 | +| 8 | `DW_MES_LOTREJECTHISTORY` | 批次不良/報廢歷史表 - 不良原因與數量 | 15,786,025 | +| 9 | `DW_MES_LOTWIPDATAHISTORY` | 在製數據採集歷史表 - 製程量測/參數紀錄 | 77,960,216 | +| 10 | `DW_MES_LOTWIPHISTORY` | 在製流轉歷史表 - 批次進出站與流程軌跡 | 53,454,213 | +| 11 | `DW_MES_LOT_V` | 待补充 | 9,468 | +| 12 | `DW_MES_MAINTENANCE` | 設備保養/維護紀錄表 - 保養計畫與點檢數據 | 52,060,026 | +| 13 | `DW_MES_PARTREQUESTORDER` | 維修用料請求表 - 維修/設備零件請領 | 61,396 | +| 14 | `DW_MES_PJ_COMBINEDASSYLOTS` | 併批紀錄表 - 合批/合併批次關聯與數量資訊 | 1,965,425 | +| 15 | `DW_MES_RESOURCE` | 資源表 - 設備/載具等資源基本資料(OBJECTCATEGORY=ASSEMBLY 時,RESOURCENAME 為設備編號) | 91,329 | +| 16 | `DW_MES_RESOURCESTATUS` | 設備狀態變更歷史表 - 狀態切換與原因 | 65,742,614 | +| 17 | `DW_MES_RESOURCESTATUS_SHIFT` | 設備狀態班次彙總表 - 班次級狀態/工時 | 74,820,134 | +| 18 | `DW_MES_SPEC_WORKCENTER_V` | 待补充 | 230 | +| 19 | `DW_MES_WIP` | 在製品現況表(含歷史累積)- 當前 WIP 狀態/數量 | 79,058,085 | + +**总数据量**: 503,819,903 行 + +--- + +## 表结构详细说明 + +### DW_MES_CONTAINER + +**用途**: 容器/批次主檔 - 目前在製容器狀態、數量與流程資訊 + +**数据量**: 5,218,406 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `CONTAINERCOMMENTS` | VARCHAR2(2000) | 2000 | 是 | None | +| 2 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `CONTAINERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 4 | `CURRENTHOLDCOUNT` | NUMBER(10) | 22 | 是 | None | +| 5 | `CURRENTSTATUSID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 6 | `CURRENTREWORKCOUNT` | NUMBER(10) | 22 | 是 | None | +| 7 | `CURRENTWIPLOTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 8 | `CUSTOMERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 9 | `DOCUMENTSETID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 10 | `EQUIPMENTCOUNT` | NUMBER(10) | 22 | 是 | None | +| 11 | `EQUIPMENTLOADINGCOUNT` | NUMBER(10) | 22 | 是 | None | +| 12 | `EXPIRATIONDATE` | DATE | 7 | 是 | None | +| 13 | `FACTORYSTARTDATE` | 
DATE | 7 | 是 | None | +| 14 | `FACTORYSTARTQTY` | NUMBER(10) | 22 | 是 | 数量 | +| 15 | `FIRSTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 16 | `FUTURECOMBINECOUNT` | NUMBER(10) | 22 | 是 | None | +| 17 | `FUTURECOMBINEPARENTLOTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 18 | `FUTURECOMBINESPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 19 | `FUTUREHOLDCOUNT` | NUMBER(10) | 22 | 是 | None | +| 20 | `HOLDLOCATIONDURATION` | NUMBER | 22 | 是 | None | +| 21 | `HOLDLOCATIONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 22 | `HOLDLOCATIONSTARTTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 23 | `HOLDREASONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 24 | `LASTACTIVITYDATE` | DATE | 7 | 是 | None | +| 25 | `LASTCOMPLETIONDATE` | DATE | 7 | 是 | None | +| 26 | `LASTMOVEOUTTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 27 | `LASTMOVEOUTUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 28 | `LOTATTRIBUTESID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 29 | `MFGORDERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 30 | `MOVEINQTY` | NUMBER | 22 | 是 | 数量 | +| 31 | `MOVEINQTY2` | NUMBER | 22 | 是 | 数量 | +| 32 | `MOVEINTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 33 | `MOVEINUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 34 | `OBJECTTYPE` | VARCHAR2(40) | 40 | 是 | None | +| 35 | `ONHOLDDATE` | DATE | 7 | 是 | None | +| 36 | `ORIGINALCONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 37 | `ORIGINALQTY` | NUMBER | 22 | 是 | 数量 | +| 38 | `ORIGINALQTY2` | NUMBER | 22 | 是 | 数量 | +| 39 | `ORIGINALSTARTDATE` | DATE | 7 | 是 | None | +| 40 | `OWNERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 41 | `PARENTCONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 42 | `PLANNEDSTARTDATE` | DATE | 7 | 是 | None | +| 43 | `PRIORITYCODEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 44 | `PROCESSSPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 45 | `PRODUCTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 46 | `QTY` | NUMBER | 22 | 是 | 数量 | +| 47 | `QTY2` | NUMBER | 22 | 是 | 数量 | +| 48 | `QTYSCHEDULED` | NUMBER | 22 | 是 | 数量 | +| 49 | `SCHEDULECOUNT` | NUMBER(10) | 22 | 是 | None | +| 50 | `SCHEDULEDATAID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 51 | `SPLITCOUNT` | 
NUMBER(10) | 22 | 是 | None | +| 52 | `SPLITFROMID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 53 | `STARTREASONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 54 | `STATUS` | NUMBER(10) | 22 | 是 | 状态 | +| 55 | `UNITCOUNT` | NUMBER(10) | 22 | 是 | None | +| 56 | `UOM2ID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 57 | `UOMID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 58 | `PJ_ERPPRODUCTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 59 | `LASTMOVEDATE` | DATE | 7 | 是 | CURRENTSTATUSID | +| 60 | `LOCATIONNAME` | VARCHAR2(40) | 40 | 是 | CURRENTSTATUSID | +| 61 | `WORKFLOWSTEPNAME` | VARCHAR2(40) | 40 | 是 | CURRENTSTATUSID | +| 62 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | CURRENTSTATUSID | +| 63 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | CURRENTSTATUSID | +| 64 | `HOLDLOCATIONNAME` | VARCHAR2(40) | 40 | 是 | HOLDLOCATIONID | +| 65 | `HOLDREASONNAME` | VARCHAR2(40) | 40 | 是 | HOLDREASONID | +| 66 | `MFGORDERNAME` | VARCHAR2(40) | 40 | 是 | MFGORDERID | +| 67 | `PJ_BOP` | VARCHAR2(40) | 40 | 是 | MFGORDERID | +| 68 | `PJ_PRODUCEREGION` | VARCHAR2(40) | 40 | 是 | MFGORDERID | +| 69 | `PRODUCTBOMBASEID` | CHAR(16) | 16 | 是 | MFGORDERID | +| 70 | `OWNERNAME` | VARCHAR2(40) | 40 | 是 | OWNERID | +| 71 | `PRIORITYCODENAME` | VARCHAR2(40) | 40 | 是 | PRIORITYCODEID | +| 72 | `PJ_TYPE` | VARCHAR2(40) | 40 | 是 | PRODUCTID | +| 73 | `PJ_FUNCTION` | VARCHAR2(40) | 40 | 是 | PRODUCTID | +| 74 | `PRODUCTNAME` | VARCHAR2(40) | 40 | 是 | PRODUCTID | +| 75 | `PRODUCTLINENAME` | VARCHAR2(40) | 40 | 是 | PRODUCTID | +| 76 | `STARTREASONNAME` | VARCHAR2(40) | 40 | 是 | STARTREASONID | +| 77 | `PRODUCTDESC` | VARCHAR2(255) | 255 | 是 | PRODUCTID | +| 78 | `UTS` | DATE | 7 | 是 | None | +| 79 | `LEADFRAMENAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 80 | `LEADFRAMEDESC` | VARCHAR2(200) | 200 | 是 | None | +| 81 | `LEADFRAMEOPTION` | VARCHAR2(100) | 100 | 是 | None | +| 82 | `LAST_SYNC_DATE` | DATE | 7 | 是 | None | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_C_CONTAINERID` | 唯一索引 | CONTAINERID | +| `DW_C_CONTAINERNAME` | 唯一索引 | 
CONTAINERNAME | +| `DW_C_MFGORDERNAME` | 普通索引 | MFGORDERNAME | +| `DW_C_PRODUCTBOMBASEID` | 普通索引 | PRODUCTBOMBASEID | +| `DW_C_SCHEDULEDATAID` | 普通索引 | SCHEDULEDATAID | +| `DW_MES_CONTAINER_PRODUCTLINENAME` | 普通索引 | PRODUCTLINENAME | + +--- + +### DW_MES_EQUIPMENTSTATUS_WIP_V + +**用途**: 待补充 + +**数据量**: 2,631 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `RESOURCEID` | CHAR(16) | 16 | 否 | 唯一标识符 | +| 2 | `EQUIPMENTID` | VARCHAR2(40) | 40 | 是 | 唯一标识符 | +| 3 | `OBJECTCATEGORY` | VARCHAR2(40) | 40 | 是 | None | +| 4 | `EQUIPMENTASSETSSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 5 | `EQUIPMENTASSETSSTATUSREASON` | VARCHAR2(40) | 40 | 是 | 状态 | +| 6 | `JOBORDER` | VARCHAR2(40) | 40 | 是 | None | +| 7 | `JOBMODEL` | VARCHAR2(40) | 40 | 是 | None | +| 8 | `JOBSTAGE` | VARCHAR2(40) | 40 | 是 | None | +| 9 | `JOBID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 10 | `JOBSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 11 | `CREATEDATE` | DATE | 7 | 是 | 创建日期 | +| 12 | `CREATEUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 13 | `CREATEUSER` | VARCHAR2(255) | 255 | 是 | None | +| 14 | `SYMPTOMCODE` | VARCHAR2(40) | 40 | 是 | None | +| 15 | `CAUSECODE` | VARCHAR2(40) | 40 | 是 | None | +| 16 | `REPAIRCODE` | VARCHAR2(40) | 40 | 是 | None | +| 17 | `RUNCARDLOTID` | VARCHAR2(40) | 40 | 是 | 唯一标识符 | +| 18 | `Package` | VARCHAR2(40) | 40 | 是 | None | +| 19 | `PACKAGE_LF` | VARCHAR2(4000) | 4000 | 是 | None | +| 20 | `Function` | VARCHAR2(40) | 40 | 是 | None | +| 21 | `TYPE` | VARCHAR2(40) | 40 | 是 | None | +| 22 | `BOP` | VARCHAR2(40) | 40 | 是 | None | +| 23 | `WAFERLOTID` | VARCHAR2(40) | 40 | 是 | 唯一标识符 | +| 24 | `WAFERPN` | VARCHAR2(40) | 40 | 是 | None | +| 25 | `WAFERLOTID_PREFIX` | VARCHAR2(160) | 160 | 是 | 唯一标识符 | +| 26 | `SPEC` | VARCHAR2(40) | 40 | 是 | None | +| 27 | `LFOPTIONID` | VARCHAR2(4000) | 4000 | 是 | 唯一标识符 | +| 28 | `WIREDESCRIPTION` | VARCHAR2(4000) | 4000 | 是 | None | +| 29 | `WAFERMIL` | VARCHAR2(3062) | 3062 | 是 | None | +| 30 | 
`LOTTRACKINQTY_PCS` | NUMBER | 22 | 是 | 数量 | +| 31 | `LOTTRACKINTIME` | DATE | 7 | 是 | None | +| 32 | `LOTTRACKINEMPLOYEE` | VARCHAR2(255) | 255 | 是 | None | + +--- + +### DW_MES_HM_LOTMOVEOUT + +**用途**: 批次出站事件歷史表 - 出站/移出交易 + +**数据量**: 48,645,692 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `CALLBYCDONAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 2 | `CARRIERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `CARRIERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 4 | `CDONAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 5 | `CDOTXNSEQUENCE` | NUMBER(10) | 22 | 是 | None | +| 6 | `COMMENTS` | VARCHAR2(255) | 255 | 是 | None | +| 7 | `COMPUTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 8 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 9 | `CONTAINERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 10 | `EMPLOYEEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 11 | `EMPLOYEENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 12 | `FACTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 13 | `FROMCONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 14 | `FROMCONTAINERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 15 | `FROMQTY` | NUMBER | 22 | 是 | 数量 | +| 16 | `FROMQTY2` | NUMBER | 22 | 是 | 数量 | +| 17 | `FROMSPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 18 | `FROMSPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 19 | `FROMWORKCENTER` | VARCHAR2(40) | 40 | 是 | None | +| 20 | `FROMSTATUS` | NUMBER(10) | 22 | 是 | 状态 | +| 21 | `FROMUOM2NAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 22 | `FROMUOMNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 23 | `FROMWORKFLOWNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 24 | `HISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 25 | `HISTORYMAINLINEID` | CHAR(16) | 16 | 否 | 唯一标识符 | +| 26 | `HISTORYSUMMARYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 27 | `LASTLOTCARRIERSSETUPHISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 28 | `LASTMOVEOUTTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 29 | `LASTMOVEOUTUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 30 | `MFGDATE` | DATE | 7 | 是 | None | +| 31 | `MOVEINQTY` | NUMBER | 22 | 是 | 数量 | +| 32 | 
`MOVEINQTY2` | NUMBER | 22 | 是 | 数量 | +| 33 | `MOVEINTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 34 | `MOVEINUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 35 | `OPERATIONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 36 | `OWNERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 37 | `OWNERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 38 | `PARAMETRICDETAILID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 39 | `PROCESSSPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 40 | `PRODUCTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 41 | `PRODUCTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 42 | `QTY` | NUMBER | 22 | 是 | 数量 | +| 43 | `QTY2` | NUMBER | 22 | 是 | 数量 | +| 44 | `RESOURCEAVAILABILITY` | NUMBER(10) | 22 | 是 | None | +| 45 | `RESOURCEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 46 | `RESOURCENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 47 | `RESOURCEOBJECTCATEGORY` | VARCHAR2(40) | 40 | 是 | None | +| 48 | `RESOURCEOBJECTTYPE` | VARCHAR2(40) | 40 | 是 | None | +| 49 | `RESOURCESTATUSCODEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 50 | `RESOURCESTATUSREASONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 51 | `SERVERNAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 52 | `SHIFTNAME` | VARCHAR2(30) | 30 | 是 | 名称 | +| 53 | `SPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 54 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 55 | `WORKCENTER` | VARCHAR2(40) | 40 | 是 | None | +| 56 | `STATUS` | NUMBER(10) | 22 | 是 | 状态 | +| 57 | `SYSTEMDATE` | DATE | 7 | 是 | None | +| 58 | `TXNDATE` | DATE | 7 | 是 | None | +| 59 | `TXNID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 60 | `TXNTYPE` | NUMBER(10) | 22 | 是 | None | +| 61 | `UOM2NAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 62 | `UOMNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 63 | `USERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 64 | `USERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 65 | `WIPTRACKINGGROUPKEYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 66 | `WORKFLOWNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 67 | `WORKFLOWSTEPID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 68 | `UPDATETIME` | DATE | 7 | 是 | None | +| 69 | `USERFULLNAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 70 | `EMPZONE` | VARCHAR2(100) | 100 
| 是 | None | +| 71 | `WAFERPRODUCT` | VARCHAR2(40) | 40 | 是 | None | +| 72 | `CONSUMEFACTOR` | NUMBER(10) | 22 | 是 | None | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_HM_LMO_CALLBYCDONAME` | 普通索引 | CALLBYCDONAME | +| `DW_MES_HM_LMO_CDONAME` | 普通索引 | CDONAME | +| `DW_MES_HM_LMO_HISTORYID` | 普通索引 | HISTORYID | +| `DW_MES_HM_LMO_HISTORYID_TID_TDATE` | 普通索引 | HISTORYID, TXNID, TXNDATE | +| `DW_MES_HM_LMO_TXNDATE` | 普通索引 | TXNDATE | +| `DW_MES_HM_LMO__HID_TID_DATE_ID` | 普通索引 | HISTORYID, TXNID, TXNDATE, HISTORYMAINLINEID | +| `DW_MES_HM_LMO__HISTORYMAINLINEID` | 唯一索引 | HISTORYMAINLINEID | + +--- + +### DW_MES_HOLDRELEASEHISTORY + +**用途**: Hold/Release 歷史表 - 批次停工與解除紀錄 + +**数据量**: 310,737 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `RN` | VARCHAR2(16) | 16 | 是 | None | +| 2 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `HISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 4 | `HISTORYMAINLINEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 5 | `FINISHEDRUNCARD` | VARCHAR2(255) | 255 | 是 | None | +| 6 | `PJ_WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 7 | `WORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 8 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 9 | `FROMSPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 10 | `FROMSPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 11 | `QTY` | NUMBER | 22 | 是 | 数量 | +| 12 | `QTY2` | NUMBER | 22 | 是 | 数量 | +| 13 | `PJ_CHIPREMARK1` | VARCHAR2(255) | 255 | 是 | None | +| 14 | `PJ_CHIPREMARK2` | VARCHAR2(255) | 255 | 是 | None | +| 15 | `PJ_CHIPREMARK3` | VARCHAR2(255) | 255 | 是 | None | +| 16 | `HOLDTXNDATE` | DATE | 7 | 是 | None | +| 17 | `RELEASETXNDATE` | DATE | 7 | 是 | None | +| 18 | `HOLDEMP` | VARCHAR2(40) | 40 | 是 | None | +| 19 | `HOLDEMPDEPTNAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 20 | `RELEASEEMP` | VARCHAR2(40) | 40 | 是 | None | +| 21 | `RELEASEEMPDEPTNAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 22 | `HOLDCOMMENTS` | VARCHAR2(255) | 255 | 是 | None | +| 23 | 
`RELEASECOMMENTS` | VARCHAR2(255) | 255 | 是 | None | +| 24 | `HOLDREASONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 25 | `HOLDREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 26 | `RELEASEREASONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 27 | `RELEASEREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 28 | `NCRID` | VARCHAR2(255) | 255 | 是 | 唯一标识符 | +| 29 | `LAST_UPDATED_DATE` | DATE | 7 | 是 | None | +| 30 | `HOLDUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 31 | `FUTUREHOLDCOMMENTS` | VARCHAR2(1000) | 1000 | 是 | None | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_HOLDRELEASEHISTORY_IDX1` | 普通索引 | HISTORYMAINLINEID | +| `DW_MES_HOLDRELEASEHISTORY_IDX2` | 普通索引 | CONTAINERID | + +--- + +### DW_MES_JOB + +**用途**: 設備維修工單表 - 維修工單的當前狀態與流程 + +**数据量**: 1,248,622 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `ACKNOWLEDGECOUNT` | NUMBER(10) | 22 | 是 | None | +| 2 | `ACTIVECLOCKONCOUNT` | NUMBER(10) | 22 | 是 | None | +| 3 | `ASSIGNCOUNT` | NUMBER(10) | 22 | 是 | None | +| 4 | `CANCELDATE` | DATE | 7 | 是 | None | +| 5 | `CANCELUSERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 6 | `CAUSECODENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 7 | `CLOCKONCOUNT` | NUMBER(10) | 22 | 是 | None | +| 8 | `COMPLETEDATE` | DATE | 7 | 是 | None | +| 9 | `COMPLETEUSERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 10 | `CREATEDATE` | DATE | 7 | 是 | 创建日期 | +| 11 | `CREATEUSERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 12 | `ESTIMATEDDURATION` | NUMBER | 22 | 是 | None | +| 13 | `EXPECTEDSTARTDATE` | DATE | 7 | 是 | None | +| 14 | `FIRSTCLOCKONDATE` | DATE | 7 | 是 | None | +| 15 | `ISSIMPLEMODE` | NUMBER(10) | 22 | 是 | None | +| 16 | `JOBID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 17 | `JOBMODELNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 18 | `JOBORDERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 19 | `JOBORDERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 20 | `JOBSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 21 | `LASTCLOCKOFFDATE` | DATE | 7 | 是 | None | +| 22 | `REPAIRCODENAME` | VARCHAR2(40) | 40 | 
是 | 名称 | +| 23 | `RESOURCEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 24 | `STAGENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 25 | `STAGESEQUENCE` | NUMBER(10) | 22 | 是 | None | +| 26 | `SYMPTOMCODENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 27 | `PJ_CAUSECODE2NAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 28 | `PJ_REPAIRCODE2NAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 29 | `PJ_SYMPTOMCODE2NAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 30 | `CANCEL_EMPNAME` | VARCHAR2(40) | 40 | 是 | CANCELUSERID | +| 31 | `CANCEL_FULLNAME` | VARCHAR2(255) | 255 | 是 | CANCELUSERID | +| 32 | `COMPLETE_EMPNAME` | VARCHAR2(40) | 40 | 是 | COMPLETEUSERID | +| 33 | `COMPLETE_FULLNAME` | VARCHAR2(255) | 255 | 是 | COMPLETEUSERID | +| 34 | `CREATE_EMPNAME` | VARCHAR2(40) | 40 | 是 | CREATEUSERID | +| 35 | `CREATE_FULLNAME` | VARCHAR2(255) | 255 | 是 | CREATEUSERID | +| 36 | `RESOURCENAME` | VARCHAR2(40) | 40 | 是 | RESOURCEID | +| 37 | `CONTAINERIDS` | VARCHAR2(2000) | 2000 | 是 | 唯一标识符 | +| 38 | `CONTAINERNAMES` | VARCHAR2(2000) | 2000 | 是 | 名称 | +| 39 | `PARTREQUESTORDERNAME` | VARCHAR2(2000) | 2000 | 是 | DW_MES_PARTREQUESTORDER | +| 40 | `RESOURCE_PKG_GROUP` | VARCHAR2(255) | 255 | 是 | None | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_JOB_COMPLETEDATE` | 普通索引 | COMPLETEDATE | +| `DW_MES_JOB_CREATEDATE` | 普通索引 | CREATEDATE | +| `DW_MES_JOB_RESOURCEID` | 普通索引 | RESOURCEID | +| `DW_MES_JOB_RESOURCENAME` | 普通索引 | RESOURCENAME | + +--- + +### DW_MES_JOBTXNHISTORY + +**用途**: 維修工單交易歷史表 - 工單狀態變更紀錄 + +**数据量**: 9,554,723 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `ACKNOWLEDGECOUNT` | NUMBER(10) | 22 | 是 | None | +| 2 | `ASSIGNCOUNT` | NUMBER(10) | 22 | 是 | None | +| 3 | `CAUSECODEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 4 | `CAUSECODENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 5 | `CHECKLISTONLY` | NUMBER(10) | 22 | 是 | None | +| 6 | `CLOCKONCOUNT` | NUMBER(10) | 22 | 是 | None | +| 7 | `ESTIMATEDDURATION` | NUMBER | 22 | 是 | None | +| 8 | 
`EXPECTEDSTARTDATE` | DATE | 7 | 是 | None | +| 9 | `FROMJOBSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 10 | `HISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 11 | `HISTORYMAINLINEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 12 | `JOBID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 13 | `JOBMODELID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 14 | `JOBMODELNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 15 | `JOBORDERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 16 | `JOBORDERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 17 | `JOBSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 18 | `JOBTXNHISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 19 | `REPAIRCODEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 20 | `REPAIRCODENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 21 | `STAGEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 22 | `STAGENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 23 | `STAGESEQUENCE` | NUMBER(10) | 22 | 是 | None | +| 24 | `SYMPTOMCODEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 25 | `SYMPTOMCODENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 26 | `TOSTAGEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 27 | `TOSTAGENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 28 | `TOSTAGESEQUENCE` | NUMBER(10) | 22 | 是 | None | +| 29 | `TXNID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 30 | `TXNDATE` | DATE | 7 | 是 | None | +| 31 | `USERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 32 | `EMPLOYEEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 33 | `USER_EMPNO` | VARCHAR2(40) | 40 | 是 | 工號 | +| 34 | `USER_NAME` | VARCHAR2(255) | 255 | 是 | 姓名 | +| 35 | `EMP_EMPNO` | VARCHAR2(40) | 40 | 是 | 工號 | +| 36 | `EMP_NAME` | VARCHAR2(255) | 255 | 是 | 姓名 | +| 37 | `COMMENTS` | VARCHAR2(255) | 255 | 是 | HistoryMainline | +| 38 | `CDONAME` | VARCHAR2(40) | 40 | 是 | HistoryMainline | +| 39 | `CALLBYCDONAME` | VARCHAR2(40) | 40 | 是 | HistoryMainline | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `JOBTXN0_HISTORYMAINLINEID` | 普通索引 | HISTORYMAINLINEID | +| `JOBTXN0_JOBID` | 普通索引 | JOBID | +| `JOBTXN0_JOBTXNHISTORYID` | 普通索引 | JOBTXNHISTORYID | +| `JOBTXN0_TXNDATE` | 普通索引 | TXNDATE | + +--- + +### DW_MES_LOTMATERIALSHISTORY + +**用途**: 
批次物料消耗歷史表 - 用料與批次關聯 + +**数据量**: 17,829,931 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `FINISHEDRUNCARD` | VARCHAR2(255) | 255 | 是 | None | +| 3 | `PJ_WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 4 | `WORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 5 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 6 | `SPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 7 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 8 | `MATERIALPARTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 9 | `DESCRIPTION` | VARCHAR2(255) | 255 | 是 | None | +| 10 | `MATERIALLOTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 11 | `EQUIPMENTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 12 | `EQUIPMENTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 13 | `QTYREQUIRED` | NUMBER | 22 | 是 | 数量 | +| 14 | `CONSUMEFACTOR` | NUMBER | 22 | 是 | None | +| 15 | `QTYCONSUMED` | NUMBER | 22 | 是 | 数量 | +| 16 | `TXNDATE` | DATE | 7 | 是 | None | +| 17 | `VENDORLOTNUMBER` | VARCHAR2(40) | 40 | 是 | None | +| 18 | `MANUFACTUREREXPIRYDATE` | DATE | 7 | 是 | None | +| 19 | `WITHDRAWALTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 20 | `THAWINGTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 21 | `EXPIRYTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 22 | `CONSUMEMATERIALSHISTORYDETAIID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 23 | `LAST_UPDATED_DATE` | DATE | 7 | 是 | None | +| 24 | `PRIMARY_CATEGORY` | VARCHAR2(40) | 40 | 是 | None | +| 25 | `SECONDARY_CATEGORY` | VARCHAR2(40) | 40 | 是 | None | +| 26 | `UOMNAME` | VARCHAR2(40) | 40 | 是 | 名称 | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_LOTMATERIALSHISTORY_IDX1` | 普通索引 | CONTAINERID | +| `DW_MES_LOTMATERIALSHISTORY_IDX2` | 普通索引 | PJ_WORKORDER | +| `DW_MES_LOTMATERIALSHISTORY_IDX3` | 普通索引 | MATERIALPARTNAME | +| `DW_MES_LOTMATERIALSHISTORY_IDX4` | 普通索引 | MATERIALLOTNAME | + +--- + +### DW_MES_LOTREJECTHISTORY + +**用途**: 批次不良/報廢歷史表 - 不良原因與數量 + +**数据量**: 15,786,025 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | 
+|---|--------|----------|------|------|------| +| 1 | `HISTORYMAINLINEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `FINISHEDRUNCARD` | VARCHAR2(255) | 255 | 是 | None | +| 4 | `PJ_WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 5 | `WORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 6 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 7 | `SPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 8 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 9 | `EQUIPMENTNAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 10 | `MOVEINTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 11 | `MOVEINQTY` | NUMBER | 22 | 是 | 数量 | +| 12 | `MOVEINQTY2` | NUMBER | 22 | 是 | 数量 | +| 13 | `EMPLOYEENAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 14 | `SHIFTNAME` | VARCHAR2(30) | 30 | 是 | 名称 | +| 15 | `TXNDATE` | DATE | 7 | 是 | None | +| 16 | `COMMENTS` | VARCHAR2(255) | 255 | 是 | None | +| 17 | `LOSSREASONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 18 | `LOSSREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 19 | `WAFERSCRIBENUMBER` | VARCHAR2(40) | 40 | 是 | None | +| 20 | `REJECTCATEGORYNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 21 | `REJECTQTY` | NUMBER | 22 | 是 | 数量 | +| 22 | `STANDBYQTY` | NUMBER | 22 | 是 | 数量 | +| 23 | `QTYTOPROCESS` | NUMBER | 22 | 是 | 数量 | +| 24 | `INPROCESSQTY` | NUMBER | 22 | 是 | 数量 | +| 25 | `PROCESSEDQTY` | NUMBER | 22 | 是 | 数量 | +| 26 | `DEFECTQTY` | NUMBER | 22 | 是 | 数量 | +| 27 | `WAFERREJECTSQTY` | NUMBER | 22 | 是 | 数量 | +| 28 | `REJECTCAUSE` | VARCHAR2(40) | 40 | 是 | None | +| 29 | `REJECTCOMMENT` | VARCHAR2(255) | 255 | 是 | None | +| 30 | `PJ_WAFERID1` | VARCHAR2(255) | 255 | 是 | 唯一标识符 | +| 31 | `PJ_WAFERID2` | VARCHAR2(255) | 255 | 是 | 唯一标识符 | +| 32 | `PJ_WAFERID3` | VARCHAR2(255) | 255 | 是 | 唯一标识符 | +| 33 | `LAST_UPDATED_DATE` | DATE | 7 | 是 | None | +| 34 | `EMPZONE` | VARCHAR2(100) | 100 | 是 | None | +| 35 | `WIPTRACKINGGROUPKEYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 36 | `FROMQTY` | NUMBER | 22 | 是 | 数量 | +| 37 | `FROMQTY2` | NUMBER | 22 | 是 | 数量 | +| 38 | `QTY` | NUMBER | 
22 | 是 | 数量 | +| 39 | `QTY2` | NUMBER | 22 | 是 | 数量 | +| 40 | `NOWSPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 41 | `NOWSPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 42 | `NOWWORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 43 | `NOWWORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_LOTREJECTHISTORY_IDX1` | 普通索引 | CONTAINERID | +| `DW_MES_LOTREJECTHISTORY_IDX2` | 普通索引 | SPECID | +| `DW_MES_LOTREJECTHISTORY_IDX3` | 普通索引 | HISTORYMAINLINEID | +| `DW_MES_LOTREJECTHISTORY_IDX4` | 普通索引 | TXNDATE | +| `DW_MES_LOTREJECTHISTORY_IDX5` | 普通索引 | WIPTRACKINGGROUPKEYID | + +--- + +### DW_MES_LOTWIPDATAHISTORY + +**用途**: 在製數據採集歷史表 - 製程量測/參數紀錄 + +**数据量**: 77,960,216 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `FINISHEDRUNCARD` | VARCHAR2(255) | 255 | 是 | None | +| 3 | `PJ_WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 4 | `WORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 5 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 6 | `SPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 7 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 8 | `EQUIPMENTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 9 | `EQUIPMENTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 10 | `EMPLOYEENAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 11 | `SERVICENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 12 | `TXNTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 13 | `WIPDATANAMEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 14 | `WIPDATANAMENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 15 | `WIPDATAVALUE` | VARCHAR2(4000) | 4000 | 是 | None | +| 16 | `PJ_SPCDATARESULT` | VARCHAR2(40) | 40 | 是 | None | +| 17 | `WIPLOTHISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 18 | `LAST_UPDATED_DATE` | DATE | 7 | 是 | None | +| 19 | `PROCESSTYPENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 20 | `WAFERSCRIBENUMBER` | VARCHAR2(40) | 40 | 是 | None | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_LOTWIPDATAHISTORY_IDX1` | 
普通索引 | CONTAINERID | +| `DW_MES_LOTWIPDATAHISTORY_IDX2` | 普通索引 | WIPLOTHISTORYID | +| `DW_MES_LOTWIPDATAHISTORY_IDX3` | 普通索引 | PJ_WORKORDER | +| `DW_MES_LOTWIPDATAHISTORY_IDX4` | 普通索引 | TXNTIMESTAMP | + +--- + +### DW_MES_LOTWIPHISTORY + +**用途**: 在製流轉歷史表 - 批次進出站與流程軌跡 + +**数据量**: 53,454,213 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `WIPLOTHISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `WIPEQUIPMENTHISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 4 | `FINISHEDRUNCARD` | VARCHAR2(255) | 255 | 是 | None | +| 5 | `PJ_WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 6 | `WORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 7 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 8 | `SPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 9 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 10 | `PJ_WAFERID1` | VARCHAR2(255) | 255 | 是 | 唯一标识符 | +| 11 | `PJ_WAFERID2` | VARCHAR2(255) | 255 | 是 | 唯一标识符 | +| 12 | `PJ_WAFERID3` | VARCHAR2(255) | 255 | 是 | 唯一标识符 | +| 13 | `WORKFLOWNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 14 | `PRODUCTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 15 | `DESCRIPTION` | VARCHAR2(255) | 255 | 是 | None | +| 16 | `DATECODE` | VARCHAR2(40) | 40 | 是 | None | +| 17 | `MOVEINTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 18 | `MOVEINQTY` | NUMBER | 22 | 是 | 数量 | +| 19 | `MOVEOUTTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 20 | `MOVEOUTQTY` | NUMBER | 22 | 是 | 数量 | +| 21 | `EQUIPMENTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 22 | `EQUIPMENTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 23 | `TRACKINTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 24 | `TRACKINQTY` | NUMBER | 22 | 是 | 数量 | +| 25 | `TRACKINEMPLOYEENAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 26 | `TRACKOUTTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 27 | `TRACKOUTQTY` | NUMBER | 22 | 是 | 数量 | +| 28 | `TRACKOUTEMPLOYEENAME` | VARCHAR2(100) | 100 | 是 | 名称 | +| 29 | `FLAGNAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 30 | `CARRIERNAME` | VARCHAR2(2000) | 2000 | 是 | 名称 | +| 31 | 
`LAST_UPDATED_DATE` | DATE | 7 | 是 | None | +| 32 | `LAST_SYNC_DATE` | DATE | 7 | 是 | None | +| 33 | `PROCESSTYPENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 34 | `PACKAGE_LF` | VARCHAR2(60) | 60 | 是 | None | +| 35 | `PROCESSSPECNAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 36 | `TRACKINEMPZONE` | VARCHAR2(100) | 100 | 是 | None | +| 37 | `TRACKOUTEMPZONE` | VARCHAR2(100) | 100 | 是 | None | +| 38 | `MOVEINQTY2` | NUMBER | 22 | 是 | 数量 | +| 39 | `MOVEOUTQTY2` | NUMBER | 22 | 是 | 数量 | +| 40 | `TRACKINQTY2` | NUMBER | 22 | 是 | 数量 | +| 41 | `TRACKOUTQTY2` | NUMBER | 22 | 是 | 数量 | +| 42 | `WIPTRACKINGGROUPKEYID` | CHAR(16) | 16 | 是 | 唯一标识符 | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_LOTWIPHISTORY_IDX1` | 普通索引 | CONTAINERID | +| `DW_MES_LOTWIPHISTORY_IDX2` | 普通索引 | WIPLOTHISTORYID | +| `DW_MES_LOTWIPHISTORY_IDX3` | 普通索引 | TRACKINTIMESTAMP | +| `DW_MES_LOTWIPHISTORY_IDX4` | 普通索引 | PJ_WORKORDER | +| `DW_MES_LOTWIPHISTORY_IDX5` | 普通索引 | DATECODE | +| `DW_MES_LOTWIPHISTORY_IDX6` | 普通索引 | WORKCENTERID | +| `DW_MES_LOTWIPHISTORY_IDX7` | 普通索引 | WIPEQUIPMENTHISTORYID | +| `DW_MES_LOTWIPHISTORY_IDX8` | 普通索引 | MOVEINTIMESTAMP | +| `DW_MES_LOTWIPHISTORY_IDX9` | 普通索引 | WIPTRACKINGGROUPKEYID | + +--- + +### DW_MES_LOT_V + +**用途**: 待补充 + +**数据量**: 9,468 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `NO` | NUMBER | 22 | 是 | None | +| 2 | `CONTAINERID` | CHAR(16) | 16 | 否 | 唯一标识符 | +| 3 | `LOTID` | VARCHAR2(40) | 40 | 是 | 唯一标识符 | +| 4 | `QTY` | NUMBER | 22 | 是 | 数量 | +| 5 | `QTY2` | NUMBER | 22 | 是 | 数量 | +| 6 | `STATUS` | VARCHAR2(10) | 10 | 是 | 状态 | +| 7 | `HOLDREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 8 | `CURRENTHOLDCOUNT` | NUMBER(10) | 22 | 是 | None | +| 9 | `STARTREASON` | VARCHAR2(40) | 40 | 是 | None | +| 10 | `OWNER` | VARCHAR2(40) | 40 | 是 | None | +| 11 | `STARTDATE` | DATE | 7 | 是 | None | +| 12 | `UTS` | VARCHAR2(10) | 10 | 是 | None | +| 13 | `STARTQTY` | NUMBER | 22 | 是 | 数量 | +| 14 | 
`STARTQTY2` | NUMBER | 22 | 是 | 数量 | +| 15 | `FIRSTNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 16 | `PRODUCT` | VARCHAR2(40) | 40 | 是 | None | +| 17 | `STEP` | VARCHAR2(40) | 40 | 是 | None | +| 18 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 19 | `WORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 20 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 21 | `HOLDLOCATION` | VARCHAR2(40) | 40 | 是 | None | +| 22 | `AGEBYDAYS` | NUMBER | 22 | 是 | None | +| 23 | `REMAINTIME` | NUMBER | 22 | 是 | None | +| 24 | `MOVEINQTY` | NUMBER | 22 | 是 | 数量 | +| 25 | `MOVEINQTY2` | NUMBER | 22 | 是 | 数量 | +| 26 | `MOVEINTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 27 | `MOVEINUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 28 | `EQUIPMENTCOUNT` | NUMBER(10) | 22 | 是 | None | +| 29 | `EQUIPMENTS` | VARCHAR2(4000) | 4000 | 是 | None | +| 30 | `JOBCREATEDATE` | VARCHAR2(4000) | 4000 | 是 | 创建日期 | +| 31 | `JOBCOMMENTS` | VARCHAR2(4000) | 4000 | 是 | None | +| 32 | `MATERIALTYPE` | VARCHAR2(40) | 40 | 是 | None | +| 33 | `PRODUCTLINENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 34 | `PACKAGE_LEF` | VARCHAR2(4000) | 4000 | 是 | None | +| 35 | `PB_FUNCTION` | VARCHAR2(40) | 40 | 是 | None | +| 36 | `WORKFLOWNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 37 | `BOP` | VARCHAR2(40) | 40 | 是 | None | +| 38 | `DATECODE` | VARCHAR2(40) | 40 | 是 | None | +| 39 | `LEADFRAMENAME` | VARCHAR2(4000) | 4000 | 是 | 名称 | +| 40 | `LEADFRAMEOPTION` | VARCHAR2(4000) | 4000 | 是 | None | +| 41 | `COMNAME` | VARCHAR2(4000) | 4000 | 是 | 名称 | +| 42 | `LOCATIONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 43 | `PJ_FUNCTION` | VARCHAR2(40) | 40 | 是 | None | +| 44 | `PJ_TYPE` | VARCHAR2(40) | 40 | 是 | None | +| 45 | `WAFERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 46 | `WAFERLOT` | VARCHAR2(160) | 160 | 是 | None | +| 47 | `EVENTNAME` | VARCHAR2(4000) | 4000 | 是 | 名称 | +| 48 | `OCCURRENCEDATE` | VARCHAR2(4000) | 4000 | 是 | None | +| 49 | `RELEASETIME` | VARCHAR2(4000) | 4000 | 是 | None | +| 50 | `RELEASEEMP` | VARCHAR2(4000) | 4000 | 是 | None | +| 51 | 
`RELEASEREASON` | VARCHAR2(4000) | 4000 | 是 | None | +| 52 | `COMMENT_HOLD` | VARCHAR2(255) | 255 | 是 | None | +| 53 | `CONTAINERCOMMENTS` | VARCHAR2(2000) | 2000 | 是 | None | +| 54 | `COMMENT_DATE` | DATE | 7 | 是 | None | +| 55 | `COMMENT_EMP` | VARCHAR2(255) | 255 | 是 | None | +| 56 | `COMMENT_FUTURE` | VARCHAR2(255) | 255 | 是 | None | +| 57 | `HOLDEMP` | VARCHAR2(255) | 255 | 是 | None | +| 58 | `DEPTNAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 59 | `PJ_PRODUCEREGION` | VARCHAR2(40) | 40 | 是 | None | +| 60 | `WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 61 | `PRIORITYCODENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 62 | `SPECSEQUENCE` | VARCHAR2(10) | 10 | 是 | None | +| 63 | `WORKCENTERSEQUENCE` | VARCHAR2(255) | 255 | 是 | None | +| 64 | `TMTT_R` | CHAR(1) | 1 | 是 | None | +| 65 | `WAFER_FACTOR` | NUMBER | 22 | 是 | None | +| 66 | `WORKCENTER_GROUP` | VARCHAR2(40) | 40 | 是 | None | +| 67 | `WORKCENTERSEQUENCE_GROUP` | VARCHAR2(255) | 255 | 是 | None | +| 68 | `WORKCENTER_SHORT` | VARCHAR2(40) | 40 | 是 | None | +| 69 | `EQUIPMENTNAME` | VARCHAR2(4000) | 4000 | 是 | 名称 | +| 70 | `SYS_DATE` | DATE | 7 | 是 | None | + +--- + +### DW_MES_MAINTENANCE + +**用途**: 設備保養/維護紀錄表 - 保養計畫與點檢數據 + +**数据量**: 52,060,026 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `RESOURCEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `HISTORYMAINLINEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `RESOURCENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 4 | `SHIFTNAME` | VARCHAR2(30) | 30 | 是 | 名称 | +| 5 | `TXNDATE` | DATE | 7 | 是 | None | +| 6 | `LASTDATEDUE` | DATE | 7 | 是 | None | +| 7 | `LASTTHRUPUTQTYDUE` | NUMBER | 22 | 是 | 数量 | +| 8 | `LASTTHRUPUTQTYLIMIT` | NUMBER | 22 | 是 | 数量 | +| 9 | `LASTTHRUPUTQTYWARNING` | NUMBER | 22 | 是 | 数量 | +| 10 | `MAINTENANCEREQID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 11 | `MAINTENANCEREQNAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 12 | `CDOTYPEID` | NUMBER(10) | 22 | 是 | 唯一标识符 | +| 13 | `THRUPUTQTY` | NUMBER | 22 | 是 | 数量 | +| 14 | 
`CHECKLISTACTION` | NUMBER(10) | 22 | 是 | None | +| 15 | `INSTRUCTION` | VARCHAR2(4000) | 4000 | 是 | None | +| 16 | `DATANAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 17 | `DATAVALUE` | VARCHAR2(255) | 255 | 是 | None | +| 18 | `LOCATIONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 19 | `USERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 20 | `EMPLOYEENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 21 | `FULLNAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 22 | `PJ_INSPECTIONLOT` | VARCHAR2(40) | 40 | 是 | None | +| 23 | `DATAPOINTID` | CHAR(16) | 16 | 是 | 唯一标识符 | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_MAINTENANCE_IDX1` | 普通索引 | HISTORYMAINLINEID | +| `DW_MES_MAINTENANCE_IDX2` | 普通索引 | TXNDATE | +| `DW_MES_MAINTENANCE_IDX3` | 普通索引 | MAINTENANCEREQNAME | +| `DW_MES_MAINTENANCE_IDX4` | 普通索引 | RESOURCEID | +| `DW_MES_MAINTENANCE_IDX5` | 普通索引 | MAINTENANCEREQID | +| `DW_MES_MAINTENANCE_IDX6` | 普通索引 | RESOURCENAME | +| `DW_MES_MAINTENANCE_IDX7` | 普通索引 | CDOTYPEID | + +--- + +### DW_MES_PARTREQUESTORDER + +**用途**: 維修用料請求表 - 維修/設備零件請領 + +**数据量**: 61,396 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `DESCRIPTION` | VARCHAR2(255) | 255 | 是 | None | +| 2 | `ISDONE` | NUMBER(10) | 22 | 是 | None | +| 3 | `JOBID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 4 | `PARTREQUESTORDERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 5 | `PARTREQUESTORDERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 6 | `REQUESTSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 7 | `REQUESTTYPE` | NUMBER(10) | 22 | 是 | None | +| 8 | `REQUIREACKNOWLEDGEEMAIL` | NUMBER(10) | 22 | 是 | None | +| 9 | `RESOURCEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 10 | `CREATIONDATE` | DATE | 7 | 是 | None | +| 11 | `CREATIONUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 12 | `LASTCHANGEDATE` | DATE | 7 | 是 | None | +| 13 | `USERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 14 | `RESOURCENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 15 | `USER_EMPNO` | VARCHAR2(40) | 40 | 是 | 工號 | +| 16 | `USER_NAME` | VARCHAR2(255) | 255 | 是 
| 姓名 | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_PARTREQUESTORDER_JOBID` | 普通索引 | JOBID | +| `DW_MES_PARTREQUESTORDER_RESOURCEID` | 普通索引 | RESOURCEID | + +--- + +### DW_MES_PJ_COMBINEDASSYLOTS + +**用途**: 併批紀錄表 - 合批/合併批次關聯與數量資訊 + +**数据量**: 1,965,425 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `CONTAINERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 3 | `PJ_WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 4 | `PJ_COMBINEDASSEMBLYLOTSID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 5 | `LOTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 6 | `FINISHEDNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 7 | `PJ_EXCESSLOTQTY` | NUMBER | 22 | 是 | 数量 | +| 8 | `PJ_GOODDIEQTY` | NUMBER | 22 | 是 | 数量 | +| 9 | `PJ_COMBINEDRATIO` | NUMBER | 22 | 是 | None | +| 10 | `PJ_ORIGINALGOODDIEQTY` | NUMBER | 22 | 是 | 数量 | +| 11 | `ORIGINALSTARTDATE` | DATE | 7 | 是 | None | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_PJ_COMBINEDASSYLOTS_IDX1` | 普通索引 | CONTAINERID | +| `DW_MES_PJ_COMBINEDASSYLOTS_IDX2` | 普通索引 | FINISHEDNAME | +| `DW_MES_PJ_COMBINEDASSYLOTS_IDX3` | 普通索引 | PJ_WORKORDER | + +--- + +### DW_MES_RESOURCE + +**用途**: 資源表 - 設備/載具等資源基本資料(OBJECTCATEGORY=ASSEMBLY 時,RESOURCENAME 為設備編號) + +**数据量**: 91,329 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `AUTOMATIONPLANID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `BOMBASEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `BOMID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 4 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 5 | `DESCRIPTION` | VARCHAR2(255) | 255 | 是 | None | +| 6 | `DOCUMENTSETID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 7 | `EQUIPMENTTYPE` | VARCHAR2(40) | 40 | 是 | None | +| 8 | `FACTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 9 | `LOCATIONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 10 | `LOTCOUNT` | NUMBER(10) | 22 | 是 | None | +| 11 | `MACHINEGROUPID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 12 
| `MAINTENANCECLASSID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 13 | `MAXLOTS` | NUMBER(10) | 22 | 是 | None | +| 14 | `MAXUNITS` | NUMBER | 22 | 是 | None | +| 15 | `MULTILOTSFLAG` | NUMBER(10) | 22 | 是 | None | +| 16 | `NOTES` | VARCHAR2(2000) | 2000 | 是 | None | +| 17 | `OBJECTCATEGORY` | VARCHAR2(40) | 40 | 是 | None | +| 18 | `OBJECTTYPE` | VARCHAR2(40) | 40 | 是 | None | +| 19 | `PACKAGEGROUPID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 20 | `PARAMLISTID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 21 | `PARENTRESOURCEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 22 | `PRODUCTIONSTATUSID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 23 | `RECIPEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 24 | `RESOURCECOMMENTS` | VARCHAR2(2000) | 2000 | 是 | None | +| 25 | `RESOURCEFAMILYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 26 | `RESOURCEID` | CHAR(16) | 16 | 否 | 唯一标识符 | +| 27 | `RESOURCENAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 28 | `SETUPACCESSID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 29 | `SPCSETUPID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 30 | `STATUSMODELID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 31 | `SUBEQUIPMENTLOGICALID` | VARCHAR2(40) | 40 | 是 | 唯一标识符 | +| 32 | `TOOLPLANID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 33 | `TRAININGREQGROUPID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 34 | `UOMID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 35 | `USESPCMATRIX` | NUMBER(10) | 22 | 是 | None | +| 36 | `VENDORID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 37 | `VENDORMODEL` | VARCHAR2(30) | 30 | 是 | None | +| 38 | `VENDORSERIALNUMBER` | VARCHAR2(30) | 30 | 是 | None | +| 39 | `WIPMSGDEFMGRID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 40 | `PJ_DATECODE1` | VARCHAR2(40) | 40 | 是 | None | +| 41 | `PJ_DATECODE2` | VARCHAR2(40) | 40 | 是 | None | +| 42 | `PJ_FINISHEDPRODUCT` | VARCHAR2(40) | 40 | 是 | None | +| 43 | `PJ_OWNER` | VARCHAR2(40) | 40 | 是 | None | +| 44 | `PJ_PROCESSSPEC` | VARCHAR2(40) | 40 | 是 | None | +| 45 | `PJ_WAFERPRODUCT` | VARCHAR2(40) | 40 | 是 | None | +| 46 | `PJ_WORKORDER` | VARCHAR2(40) | 40 | 是 | None | +| 47 | `PJ_CHECKBYHOUR` | NUMBER | 22 | 是 | None | +| 48 | 
`PJ_CHECKBYIDLETIME` | NUMBER | 22 | 是 | 唯一标识符 | +| 49 | `PJ_CHECKBYLOT` | NUMBER(10) | 22 | 是 | None | +| 50 | `PJ_CHECKBYPRODUCT` | NUMBER(10) | 22 | 是 | None | +| 51 | `PJ_CHECKBYTYPE` | NUMBER(10) | 22 | 是 | None | +| 52 | `PJ_CHECKBYWORKORDER` | NUMBER(10) | 22 | 是 | None | +| 53 | `PJ_VERIFYSPCRESULT` | NUMBER(10) | 22 | 是 | None | +| 54 | `PJ_ASSETSSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 55 | `PJ_WORKCENTER_ID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 56 | `PJ_AUEQUIPMENTGROUPID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 57 | `PJ_CONTROLLENGTH` | NUMBER(10) | 22 | 是 | None | +| 58 | `PJ_DEPARTMENT` | VARCHAR2(100) | 100 | 是 | None | +| 59 | `PJ_EMPLOYEE` | VARCHAR2(100) | 100 | 是 | None | +| 60 | `PJ_ISAUEQUIPMENT` | NUMBER(10) | 22 | 是 | None | +| 61 | `PJ_LOTID` | VARCHAR2(40) | 40 | 是 | 唯一标识符 | +| 62 | `PJ_SETUPACCESSID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 63 | `PJ_SPCSETUP` | CHAR(16) | 16 | 是 | None | +| 64 | `PJ_WORKCENTERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 65 | `PJ_AUTOMATIONLEVEL` | NUMBER(10) | 22 | 是 | None | +| 66 | `CREATIONDATE` | DATE | 7 | 是 | None | +| 67 | `CREATIONUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 68 | `LASTCHANGEDATE` | DATE | 7 | 是 | None | +| 69 | `USERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 70 | `AUTOMATIONPLANNAME` | VARCHAR2(40) | 40 | 是 | AUTOMATIONPLANID | +| 71 | `LOCATIONNAME` | VARCHAR2(40) | 40 | 是 | LOCATIONID | +| 72 | `RESOURCEFAMILYNAME` | VARCHAR2(30) | 30 | 是 | RESOURCEFAMILYID | +| 73 | `VENDORNAME` | VARCHAR2(40) | 40 | 是 | VENDORID | +| 74 | `PJ_ERPVENDORID` | VARCHAR2(40) | 40 | 是 | VENDORID | +| 75 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | PJ_WORKCENTERID | +| 76 | `PJ_ISPRODUCTION` | NUMBER | 22 | 是 | 20251217 add:生產設備 | +| 77 | `PJ_ISKEY` | NUMBER | 22 | 是 | 20251217 add:關鍵設備 | +| 78 | `PJ_ISMONITOR` | NUMBER | 22 | 是 | 20251217 add:監控設備 | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `OBJECT` | 普通索引 | OBJECTCATEGORY, OBJECTTYPE | +| `RESOURCEID` | 普通索引 | RESOURCEID | + +--- + +### DW_MES_RESOURCESTATUS + 
+**用途**: 設備狀態變更歷史表 - 狀態切換與原因 + +**数据量**: 65,742,614 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `HISTORYID` | CHAR(16) | 16 | 是 | RESOURCEID | +| 2 | `HISTORYMAINLINEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `RESOURCESTATUSHISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 4 | `AVAILABILITY` | NUMBER(10) | 22 | 是 | None | +| 5 | `LASTSTATUSCHANGEDATE` | DATE | 7 | 是 | 状态 | +| 6 | `NEWREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 7 | `NEWSTATUSNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 8 | `UPDATELASTSTATUSCHANGEDATE` | NUMBER(10) | 22 | 是 | 状态 | +| 9 | `OLDAVAILABILITY` | NUMBER(10) | 22 | 是 | None | +| 10 | `OLDLASTACTIVITYDATE` | DATE | 7 | 是 | None | +| 11 | `OLDLASTSTATUSCHANGEDATE` | DATE | 7 | 是 | 状态 | +| 12 | `OLDREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 13 | `OLDSTATUSNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 14 | `OLDUPDATELASTSTATUSCHANGEDATE` | NUMBER(10) | 22 | 是 | 状态 | +| 15 | `SS_ISDOWNVIAPARENT` | NUMBER(10) | 22 | 是 | None | +| 16 | `JOBID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 17 | `TXNDATE` | DATE | 7 | 是 | 資料更新時間(做差異同步用) | +| 18 | `DESCRIPTION` | VARCHAR2(255) | 255 | 是 | None | +| 19 | `RESOURCEFAMILYNAME` | VARCHAR2(30) | 30 | 是 | RESOURCEFAMILYID | +| 20 | `VENDORNAME` | VARCHAR2(40) | 40 | 是 | VENDORID | +| 21 | `VENDORMODEL` | VARCHAR2(30) | 30 | 是 | None | +| 22 | `PJ_ERPVENDORID` | VARCHAR2(40) | 40 | 是 | VENDORID | +| 23 | `LOCATIONNAME` | VARCHAR2(40) | 40 | 是 | LOCATIONID | +| 24 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | PJ_WORKCENTERID | +| 25 | `PJ_ASSETSSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 26 | `PJ_DEPARTMENT` | VARCHAR2(100) | 100 | 是 | None | +| 27 | `AUTOMATIONPLANNAME` | VARCHAR2(40) | 40 | 是 | AUTOMATIONPLANID | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `HISTORYID` | 普通索引 | HISTORYID | +| `OLDLASTSTATUSCHANGEDATE` | 普通索引 | OLDLASTSTATUSCHANGEDATE | + +--- + +### DW_MES_RESOURCESTATUS_SHIFT + +**用途**: 設備狀態班次彙總表 - 班次級狀態/工時 + +**数据量**: 74,820,134 行 + +#### 
字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `HISTORYID` | CHAR(16) | 16 | 是 | RESOURCEID | +| 2 | `HISTORYMAINLINEID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 3 | `RESOURCESTATUSHISTORYID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 4 | `AVAILABILITY` | NUMBER(10) | 22 | 是 | None | +| 5 | `LASTSTATUSCHANGEDATE` | DATE | 7 | 是 | 状态 | +| 6 | `NEWREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 7 | `NEWSTATUSNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 8 | `UPDATELASTSTATUSCHANGEDATE` | NUMBER(10) | 22 | 是 | 状态 | +| 9 | `OLDAVAILABILITY` | NUMBER(10) | 22 | 是 | None | +| 10 | `OLDLASTACTIVITYDATE` | DATE | 7 | 是 | None | +| 11 | `OLDLASTSTATUSCHANGEDATE` | DATE | 7 | 是 | 状态 | +| 12 | `OLDREASONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 13 | `OLDSTATUSNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 14 | `OLDUPDATELASTSTATUSCHANGEDATE` | NUMBER(10) | 22 | 是 | 状态 | +| 15 | `SS_ISDOWNVIAPARENT` | NUMBER(10) | 22 | 是 | None | +| 16 | `TXNDATE` | DATE | 7 | 是 | None | +| 17 | `HOURS` | NUMBER(12,6) | 22 | 是 | None | +| 18 | `JOBID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 19 | `DATADATE` | DATE | 7 | 是 | None | +| 20 | `SN` | NUMBER | 22 | 是 | None | +| 21 | `DESCRIPTION` | VARCHAR2(255) | 255 | 是 | None | +| 22 | `RESOURCEFAMILYNAME` | VARCHAR2(30) | 30 | 是 | RESOURCEFAMILYID | +| 23 | `VENDORNAME` | VARCHAR2(40) | 40 | 是 | VENDORID | +| 24 | `VENDORMODEL` | VARCHAR2(30) | 30 | 是 | None | +| 25 | `PJ_ERPVENDORID` | VARCHAR2(40) | 40 | 是 | VENDORID | +| 26 | `LOCATIONNAME` | VARCHAR2(40) | 40 | 是 | LOCATIONID | +| 27 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | PJ_WORKCENTERID | +| 28 | `PJ_ASSETSSTATUS` | VARCHAR2(40) | 40 | 是 | 状态 | +| 29 | `PJ_DEPARTMENT` | VARCHAR2(100) | 100 | 是 | None | +| 30 | `AUTOMATIONPLANNAME` | VARCHAR2(40) | 40 | 是 | AUTOMATIONPLANID | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_RESOURCESTATUS_SHIFT_DATADATE` | 普通索引 | DATADATE | +| `DW_MES_RESOURCESTATUS_SHIFT_HISTORYID` | 普通索引 | HISTORYID | +| 
`DW_MES_RESOURCESTATUS_SHIFT_JOBID` | 普通索引 | JOBID | +| `DW_MES_RESOURCESTATUS_SHIFT_OLDLASTSTATUSCHANGEDATE` | 普通索引 | OLDLASTSTATUSCHANGEDATE | +| `DW_MES_RESOURCESTATUS_SHIFT_TXNDATE` | 普通索引 | TXNDATE | + +--- + +### DW_MES_SPEC_WORKCENTER_V + +**用途**: 待补充 + +**数据量**: 230 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `SPEC` | VARCHAR2(40) | 40 | 是 | None | +| 2 | `SPECSEQUENCE` | VARCHAR2(10) | 10 | 是 | None | +| 3 | `SPEC_ORDER` | VARCHAR2(51) | 51 | 是 | None | +| 4 | `WORK_CENTER` | VARCHAR2(40) | 40 | 是 | None | +| 5 | `WORK_CENTER_SEQUENCE` | VARCHAR2(255) | 255 | 是 | None | +| 6 | `WORK_CENTER_GROUP` | VARCHAR2(40) | 40 | 是 | None | +| 7 | `WORKCENTERSEQUENCE_GROUP` | VARCHAR2(255) | 255 | 是 | None | +| 8 | `WORKCENTERGROUP_ORDER` | VARCHAR2(296) | 296 | 是 | None | +| 9 | `WORK_CENTER_SHORT` | VARCHAR2(40) | 40 | 是 | None | + +--- + +### DW_MES_WIP + +**用途**: 在製品現況表(含歷史累積)- 當前 WIP 狀態/數量 + +**数据量**: 79,058,085 行 + +#### 字段列表 + +| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 | +|---|--------|----------|------|------|------| +| 1 | `CONTAINERID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 2 | `CONTAINERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 3 | `GA_CONTAINERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 4 | `QTY` | NUMBER | 22 | 是 | 数量 | +| 5 | `QTY2` | NUMBER | 22 | 是 | 数量 | +| 6 | `CURRENTHOLDCOUNT` | NUMBER(10) | 22 | 是 | None | +| 7 | `HOLDREASONID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 8 | `ORIGINALSTARTDATE` | DATE | 7 | 是 | None | +| 9 | `STATUS` | NUMBER(10) | 22 | 是 | 状态 | +| 10 | `ORIGINALQTY` | NUMBER | 22 | 是 | 数量 | +| 11 | `ORIGINALQTY2` | NUMBER | 22 | 是 | 数量 | +| 12 | `SPECID` | CHAR(16) | 16 | 是 | 唯一标识符 | +| 13 | `MOVEINTIMESTAMP` | DATE | 7 | 是 | 时间戳 | +| 14 | `MOVEINUSERNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 15 | `MOVEINQTY` | NUMBER | 22 | 是 | 数量 | +| 16 | `MOVEINQTY2` | NUMBER | 22 | 是 | 数量 | +| 17 | `STARTREASONNAME` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 18 | `EXPECTEDENDDATE` | DATE | 7 | 是 | SD | +| 19 | 
`WORKFLOWNAME` | VARCHAR2(40) | 40 | 是 | SD | +| 20 | `WORKFLOWSTEPNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 21 | `LOCATIONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 22 | `DATECODE` | VARCHAR2(40) | 40 | 是 | None | +| 23 | `CONTAINERCOMMENTS` | VARCHAR2(2000) | 2000 | 是 | None | +| 24 | `COMMENT_DATE` | DATE | 7 | 是 | None | +| 25 | `COMMENT_EMP` | VARCHAR2(40) | 40 | 是 | None | +| 26 | `EQUIPMENTCOUNT` | NUMBER(10) | 22 | 是 | None | +| 27 | `EQUIPMENTS` | VARCHAR2(1000) | 1000 | 是 | EM | +| 28 | `EQP_LOCATIONNAME` | VARCHAR2(1000) | 1000 | 是 | EM | +| 29 | `HOLDEMP` | VARCHAR2(40) | 40 | 是 | None | +| 30 | `HOLDDEPTNAME` | VARCHAR2(255) | 255 | 是 | 名称 | +| 31 | `HOLDLOCATIONNAME` | VARCHAR2(40) | 40 | 是 | 名称 | +| 32 | `HOLDCOMMENT_FUTURE` | VARCHAR2(255) | 255 | 是 | None | +| 33 | `HOLDREASONNAME` | VARCHAR2(40) | 40 | 是 | HOLDREASONID | +| 34 | `EVENTNAME` | VARCHAR2(1000) | 1000 | 是 | NCR | +| 35 | `OCCURRENCEDATE` | VARCHAR2(1000) | 1000 | 是 | NCR | +| 36 | `RELEASETIME` | VARCHAR2(1000) | 1000 | 是 | DW_MES_HOLDRELEASEHISTORY | +| 37 | `RELEASEEMP` | VARCHAR2(1000) | 1000 | 是 | DW_MES_HOLDRELEASEHISTORY | +| 38 | `RELEASEREASON` | VARCHAR2(1000) | 1000 | 是 | DW_MES_HOLDRELEASEHISTORY | +| 39 | `SPECNAME` | VARCHAR2(40) | 40 | 是 | SPECID | +| 40 | `WORKCENTERNAME` | VARCHAR2(40) | 40 | 是 | SPECID | +| 41 | `MFGORDERNAME` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 42 | `PJ_BOP` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 43 | `PJ_PRODUCEREGION` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 44 | `PRODUCTBOMBASEID` | CHAR(16) | 16 | 是 | CONTAINERID | +| 45 | `OWNERNAME` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 46 | `PRIORITYCODENAME` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 47 | `WOQTY` | NUMBER | 22 | 是 | CONTAINERID->MFGORDERID | +| 48 | `WOPLANNEDCOMPLETIONDATE` | DATE | 7 | 是 | CONTAINERID->MFGORDERID | +| 49 | `PJ_TYPE` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 50 | `PJ_FUNCTION` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 51 | `PRODUCTNAME` | VARCHAR2(40) | 40 | 是 
| CONTAINERID | +| 52 | `PRODUCTLINENAME` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 53 | `PRODUCTLINENAME_LEF` | VARCHAR2(40) | 40 | 是 | 名称 | +| 54 | `PRODUCTDESC` | VARCHAR2(255) | 255 | 是 | None | +| 55 | `FIRSTNAME` | VARCHAR2(40) | 40 | 是 | CONTAINERID | +| 56 | `WAFERLOTS1` | VARCHAR2(40) | 40 | 是 | None | +| 57 | `WAFERLOT` | VARCHAR2(255) | 255 | 是 | 3個加起來 | +| 58 | `WAFERNAME` | VARCHAR2(255) | 255 | 是 | 3個加起來 | +| 59 | `WAFERDESC` | VARCHAR2(255) | 255 | 是 | 3個加起來 | +| 60 | `NUMBEROFROWS` | NUMBER(10) | 22 | 是 | CONTAINERID->PRODUCTID | +| 61 | `LEADFRAMENAME` | VARCHAR2(1000) | 1000 | 是 | 名称 | +| 62 | `LEADFRAMEDESC` | VARCHAR2(1000) | 1000 | 是 | None | +| 63 | `LEADFRAMEOPTION` | VARCHAR2(1000) | 1000 | 是 | None | +| 64 | `CONSUMEFACTOR` | NUMBER | 22 | 是 | CF | +| 65 | `TXNDATE` | DATE | 7 | 是 | None | +| 66 | `HOLDTIME` | DATE | 7 | 是 | None | + +#### 索引 + +| 索引名 | 类型 | 字段 | +|--------|------|------| +| `DW_MES_WIP_CONTAINERNAME` | 普通索引 | CONTAINERNAME | +| `DW_MES_WIP_TXNDATE` | 普通索引 | TXNDATE | + +--- + +## 报表开发注意事项 + +### 性能优化建议 + +1. **大数据量表查询优化** + - 以下表数据量较大,查询时务必添加时间范围限制: + - `DW_MES_WIP`: 79,058,085 行 + - `DW_MES_LOTWIPDATAHISTORY`: 77,960,216 行 + - `DW_MES_RESOURCESTATUS_SHIFT`: 74,820,134 行 + - `DW_MES_RESOURCESTATUS`: 65,742,614 行 + - `DW_MES_LOTWIPHISTORY`: 53,454,213 行 + - `DW_MES_MAINTENANCE`: 52,060,026 行 + - `DW_MES_HM_LOTMOVEOUT`: 48,645,692 行 + - `DW_MES_LOTMATERIALSHISTORY`: 17,829,931 行 + - `DW_MES_LOTREJECTHISTORY`: 15,786,025 行 + +2. **索引使用** + - 查询时尽量使用已建立索引的字段作为查询条件 + - 避免在索引字段上使用函数,会导致索引失效 + +3. **连接池配置** + - 建议使用连接池管理数据库连接 + - 推荐连接池大小:5-10 个连接 + +4. 
**查询超时设置** + - 建议设置查询超时时间为 30-60 秒 + - 避免长时间运行的查询影响系统性能 + +### 数据时效性 + +- **实时数据表**: `DW_MES_WIP`(含歷史累積), `DW_MES_RESOURCESTATUS` +- **历史数据表**: 带有 `HISTORY` 后缀的表 +- **主数据表**: `DW_MES_RESOURCE`, `DW_MES_CONTAINER` + +### 常用时间字段 + +大多数历史表包含以下时间相关字段(以本文件各表字段列表为准): +- `TXNDATE`: 数据更新时间(用于差异同步) +- `MOVEINTIMESTAMP` / `MOVEOUTTIMESTAMP`: 进出站时间 +- `TRACKINTIMESTAMP` / `TRACKOUTTIMESTAMP`: 上下机时间 +- `LAST_UPDATED_DATE`: 记录最后更新时间 + +### 数据权限 + +- 當前帳號為唯讀帳號 (詳見 .env 中的 DB_USER) +- 仅可执行 SELECT 查询 +- 无法进行 INSERT, UPDATE, DELETE 操作 + +--- + +## 常用查询示例 + +### 1. 查询当前在制品数量 + +```sql +SELECT COUNT(*) as WIP_COUNT +FROM DW_MES_WIP +WHERE STATUS IS NOT NULL; +``` + +### 2. 查询设备状态统计 + +```sql +SELECT + NEWSTATUSNAME, + COUNT(*) as COUNT +FROM DW_MES_RESOURCESTATUS +GROUP BY NEWSTATUSNAME +ORDER BY COUNT DESC; +``` + +### 3. 查询最近 7 天的批次历史 + +```sql +SELECT * +FROM DW_MES_LOTWIPHISTORY +WHERE TRACKINTIMESTAMP >= SYSDATE - 7 +ORDER BY TRACKINTIMESTAMP DESC; +``` + +### 4. 查询工单完成情况 + +```sql +SELECT + JOBID, + JOBSTATUS, + COUNT(*) as COUNT +FROM DW_MES_JOB +GROUP BY JOBID, JOBSTATUS +ORDER BY JOBID; +``` + +### 5. 按日期统计生产数量 + +```sql +SELECT + TRUNC(CREATEDATE) as PRODUCTION_DATE, + COUNT(*) as LOT_COUNT +FROM DW_MES_HM_LOTMOVEOUT +WHERE CREATEDATE >= SYSDATE - 30 +GROUP BY TRUNC(CREATEDATE) +ORDER BY PRODUCTION_DATE DESC; +``` + +### 6. 
联表查询示例(批次与容器) + +```sql +SELECT + w.LOTID, + w.CONTAINERNAME, + c.CURRENTSTATUSID, + c.CUSTOMERID +FROM DW_MES_WIP w +LEFT JOIN DW_MES_CONTAINER c ON w.CONTAINERID = c.CONTAINERID +WHERE w.CREATEDATE >= SYSDATE - 1 +ORDER BY w.CREATEDATE DESC; +``` + +--- + +## 附录 + +### 文档更新记录 + +- 2026-01-29: 初始版本创建 + +### 联系方式 + +如有疑问或需要补充信息,请联系数据库管理员。 diff --git a/docs/Oracle_Authorized_Objects.md b/docs/Oracle_Authorized_Objects.md new file mode 100644 index 0000000..ef7e095 --- /dev/null +++ b/docs/Oracle_Authorized_Objects.md @@ -0,0 +1,36 @@ +# Oracle 可使用 TABLE/VIEW 清單(DWH) + +**產生時間**: 2026-01-29 13:34:22 +**使用者**: (詳見 .env 中的 DB_USER) +**Schema**: DWH + +## 摘要 + +- 可使用物件總數: 19 +- TABLE: 16 +- VIEW: 3 +- 來源 (去重後物件數): DIRECT 19, PUBLIC 0, ROLE 0, SYSTEM 0 + +## 物件清單 + +| 物件 | 類型 | 權限 | 授權來源 | +|------|------|------|----------| +| `DWH.DW_MES_CONTAINER` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_EQUIPMENTSTATUS_WIP_V` | VIEW | SELECT | DIRECT | +| `DWH.DW_MES_HM_LOTMOVEOUT` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_HOLDRELEASEHISTORY` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_JOB` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_JOBTXNHISTORY` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_LOTMATERIALSHISTORY` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_LOTREJECTHISTORY` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_LOTWIPDATAHISTORY` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_LOTWIPHISTORY` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_LOT_V` | VIEW | SELECT | DIRECT | +| `DWH.DW_MES_MAINTENANCE` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_PARTREQUESTORDER` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_PJ_COMBINEDASSYLOTS` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_RESOURCE` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_RESOURCESTATUS` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_RESOURCESTATUS_SHIFT` | TABLE | SELECT | DIRECT | +| `DWH.DW_MES_SPEC_WORKCENTER_V` | VIEW | SELECT | DIRECT | +| `DWH.DW_MES_WIP` | TABLE | SELECT | DIRECT | \ No newline at end of file diff --git 
a/docs/architecture_findings.md b/docs/architecture_findings.md new file mode 100644 index 0000000..e9349dd --- /dev/null +++ b/docs/architecture_findings.md @@ -0,0 +1,936 @@ +# MES Dashboard - Architecture Findings + +本文件記錄專案開發過程中確立的架構設計、全局規範與資料處理規則。 + +--- + +## 1. 資料庫連線管理 + +### 連線池統一使用 +所有資料庫操作必須透過 `mes_dashboard.core.database` 模組: + +```python +from mes_dashboard.core.database import read_sql_df, get_engine + +# 讀取資料 (推薦方式) +df = read_sql_df(sql, params) + +# 取得 engine(若需要直接操作) +engine = get_engine() +``` + +### 連線池配置 (位置: `core/database.py`) + +| 參數 | 開發環境 | 生產環境 | 說明 | +|------|---------|---------|------| +| pool_size | 2 | 10 | 基礎連線數 | +| max_overflow | 3 | 20 | 額外連線數 | +| pool_timeout | 30 | 30 | 等待超時 (秒) | +| pool_recycle | 1800 | 1800 | 回收週期 (30分鐘) | +| pool_pre_ping | True | True | 使用前驗證連線 | + +### Keep-Alive 機制 +- 背景執行緒每 5 分鐘執行 `SELECT 1 FROM DUAL` +- 防止 NAT/防火牆斷開閒置連線 +- 啟動: `start_keepalive()`,停止: `stop_keepalive()` + +### 注意事項 +- **禁止**在各 service 中自行建立連線 +- **禁止**直接使用 `oracledb.connect()` +- 連線池由 `database.py` 統一管理,避免連線洩漏 +- 測試環境需在 setUp 中重置:`db._ENGINE = None` + +--- + +## 2. 
SQL 集中管理 + +### 目錄結構 +所有 SQL 查詢放在 `src/mes_dashboard/sql/` 目錄: + +``` +sql/ +├── loader.py # SQL 檔案載入器 (LRU 快取) +├── builder.py # 參數化查詢構建器 +├── filters.py # 通用篩選條件 +├── dashboard/ # 儀表板 SQL +│ ├── kpi.sql +│ ├── heatmap.sql +│ └── workcenter_cards.sql +├── wip/ # WIP SQL +│ ├── summary.sql +│ └── detail.sql +├── resource/ # 設備 SQL +│ ├── by_status.sql +│ └── detail.sql +├── resource_history/ # 歷史 SQL +└── job_query/ # 維修工單 SQL +``` + +### SQLLoader 使用方式 + +```python +from mes_dashboard.sql.loader import SQLLoader + +# 載入 SQL 檔案 (自動 LRU 快取,最多 100 個) +sql = SQLLoader.load("wip/summary") + +# 結構性參數替換 (用於 SQL 片段) +sql = SQLLoader.load_with_params("dashboard/kpi", + LATEST_STATUS_SUBQUERY="...", + WHERE_CLAUSE="...") + +# 清除快取 +SQLLoader.clear_cache() +``` + +### QueryBuilder 使用方式 + +```python +from mes_dashboard.sql.builder import QueryBuilder + +builder = QueryBuilder() + +# 添加條件 (自動參數化,防 SQL 注入) +builder.add_param_condition("STATUS", "PRD") +builder.add_in_condition("STATUS", ["PRD", "SBY"]) +builder.add_not_in_condition("HOLD_REASON", exclude_list) +builder.add_like_condition("LOTID", user_input, position="both") +builder.add_or_like_conditions(["COL1", "COL2"], [val1, val2]) +builder.add_is_null("COLUMN") +builder.add_is_not_null("COLUMN") +builder.add_condition("FIXED_CONDITION = 1") # 固定條件 + +# 構建 WHERE 子句 +where_clause, params = builder.build_where_only() + +# 替換佔位符並執行 +sql = sql.replace("{{ WHERE_CLAUSE }}", where_clause) +df = read_sql_df(sql, params) +``` + +### 佔位符規範 + +| 類型 | 語法 | 用途 | 安全性 | +|------|------|------|--------| +| 結構性 | `{{ PLACEHOLDER }}` | 靜態 SQL 片段 | 僅限預定義值 | +| 參數 | `:param_name` | 動態用戶輸入 | Oracle bind variables | + +### Oracle IN 子句限制 +Oracle IN 子句上限 1000 個值,需分批處理: + +```python +BATCH_SIZE = 1000 + +# 參考 job_query_service.py 的 _build_resource_filter() +``` + +--- + +## 3. 
快取機制 + +### 多層快取架構 + +``` +請求 → 進程級快取 (30 秒 TTL) + → Redis 快取 (可配置 TTL) + → Oracle 資料庫 +``` + +### 全局快取 API +使用 `mes_dashboard.core.cache` 模組: + +```python +from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key + +# 建立快取 key(支援 filters dict) +cache_key = make_cache_key("resource_history_summary", filters={ + 'start_date': start_date, + 'workcenter_groups': sorted(groups) if groups else None, +}) + +# 讀取/寫入快取 +result = cache_get(cache_key) +if result is None: + result = query_data() + cache_set(cache_key, result, ttl=CACHE_TTL_TREND) +``` + +### 快取 TTL 常數 +定義於 `mes_dashboard.config.constants`: + +```python +CACHE_TTL_DEFAULT = 60 # 1 分鐘 +CACHE_TTL_FILTER_OPTIONS = 600 # 10 分鐘 +CACHE_TTL_PIVOT_COLUMNS = 300 # 5 分鐘 +CACHE_TTL_KPI = 60 # 1 分鐘 +CACHE_TTL_TREND = 300 # 5 分鐘 +``` + +### Redis 快取配置 +環境變數: +``` +REDIS_ENABLED=true +REDIS_URL=redis://localhost:6379/0 +REDIS_KEY_PREFIX=mes_wip +``` + +### 專用快取服務 + +| 服務 | 位置 | 用途 | +|------|------|------| +| WIP 快取更新器 | `core/cache_updater.py` | 背景線程自動更新 WIP 數據 | +| 資源快取 | `services/resource_cache.py` | DW_MES_RESOURCE 表快取 (4 小時同步) | +| 設備狀態快取 | `services/realtime_equipment_cache.py` | 設備實時狀態 (5 分鐘同步) | +| Filter 快取 | `services/filter_cache.py` | 篩選選項快取 | + +--- + +## 4. Filter Cache(篩選選項快取) + +### 位置 +`mes_dashboard.services.filter_cache` + +### 用途 +快取全站共用的篩選選項,避免重複查詢資料庫: + +```python +from mes_dashboard.services.filter_cache import ( + get_workcenter_groups, # 取得 workcenter group 列表 + get_workcenter_mapping, # 取得 workcentername → group 對應 + get_workcenters_for_groups, # 根據 group 取得 workcentername 列表 + get_resource_families, # 取得 resource family 列表 +) +``` + +### Workcenter 對應關係 +``` +WORKCENTERNAME (資料庫) → WORKCENTER_GROUP (顯示) +焊接_DB_1 → 焊接_DB +焊接_DB_2 → 焊接_DB +成型_1 → 成型 +``` + +### 資料來源 +- Workcenter Groups: `DW_PJ_LOT_V` (WORKCENTER_GROUP, WORKCENTERSEQUENCE_GROUP) +- Resource Families: `DW_MES_RESOURCE` (RESOURCEFAMILYNAME) + +--- + +## 5. 
熔斷器 (Circuit Breaker) + +### 位置 +`mes_dashboard.core.circuit_breaker` + +### 狀態機制 + +``` +CLOSED (正常) + ↓ 失敗達到閾值 +OPEN (故障,拒絕請求) + ↓ 等待 recovery_timeout +HALF_OPEN (測試恢復) + ↓ 成功 → CLOSED / 失敗 → OPEN +``` + +### 配置 (環境變數) + +``` +CIRCUIT_BREAKER_ENABLED=true +CIRCUIT_BREAKER_FAILURE_THRESHOLD=5 # 最少失敗次數 +CIRCUIT_BREAKER_FAILURE_RATE=0.5 # 失敗率閾值 (0.0-1.0) +CIRCUIT_BREAKER_RECOVERY_TIMEOUT=30 # OPEN 狀態等待秒數 +CIRCUIT_BREAKER_WINDOW_SIZE=10 # 滑動窗口大小 +``` + +### 使用方式 +熔斷器已整合在 `read_sql_df()` 中,自動: +- 檢查是否允許請求 +- 記錄成功/失敗 +- 狀態轉移 + +### 狀態查詢 +```python +from mes_dashboard.core.circuit_breaker import get_database_circuit_breaker + +cb = get_database_circuit_breaker() +status = cb.get_status() +# status.state, status.failure_count, status.success_count, status.failure_rate +``` + +--- + +## 6. 統一 API 響應格式 + +### 位置 +`mes_dashboard.core.response` + +### 響應格式 + +```python +# 成功響應 +{ + "success": True, + "data": {...}, + "meta": {"timestamp": "2024-02-04T10:30:45.123456"} +} + +# 錯誤響應 +{ + "success": False, + "error": { + "code": "DB_CONNECTION_FAILED", + "message": "資料庫連線失敗,請稍後再試", + "details": "ORA-12541" # 僅開發模式 + }, + "meta": {"timestamp": "..."} +} +``` + +### 錯誤代碼 + +| 代碼 | HTTP | 說明 | +|------|------|------| +| DB_CONNECTION_FAILED | 503 | 資料庫連線失敗 | +| DB_QUERY_TIMEOUT | 504 | 查詢逾時 | +| DB_QUERY_ERROR | 500 | 查詢執行錯誤 | +| SERVICE_UNAVAILABLE | 503 | 服務不可用 | +| CIRCUIT_BREAKER_OPEN | 503 | 熔斷器開啟 | +| VALIDATION_ERROR | 400 | 驗證失敗 | +| UNAUTHORIZED | 401 | 未授權 | +| FORBIDDEN | 403 | 禁止訪問 | +| NOT_FOUND | 404 | 不存在 | +| TOO_MANY_REQUESTS | 429 | 過多請求 | +| INTERNAL_ERROR | 500 | 內部錯誤 | + +### 便利函數 + +```python +from mes_dashboard.core.response import ( + success_response, + validation_error, # 400 + unauthorized_error, # 401 + forbidden_error, # 403 + not_found_error, # 404 + db_connection_error, # 503 + internal_error, # 500 +) +``` + +--- + +## 7. 
認證與授權機制 + +### 認證服務 +位置: `mes_dashboard.services.auth_service` + +#### LDAP 認證 (生產環境) +```python +from mes_dashboard.services.auth_service import authenticate + +user = authenticate(username, password) +# 返回: {username, displayName, mail, department} +``` + +#### 本地認證 (開發環境) +``` +LOCAL_AUTH_ENABLED=true +LOCAL_AUTH_USERNAME=admin +LOCAL_AUTH_PASSWORD=password +``` + +### Session 管理 +```python +# 登入後存入 session +session["admin"] = { + "username": user.get("username"), + "displayName": user.get("displayName"), + "mail": user.get("mail"), + "department": user.get("department"), + "login_time": datetime.now().isoformat() +} + +# Session 配置 +SESSION_COOKIE_SECURE = True # HTTPS only (生產) +SESSION_COOKIE_HTTPONLY = True # 防止 JS 訪問 +SESSION_COOKIE_SAMESITE = 'Lax' # CSRF 防護 +PERMANENT_SESSION_LIFETIME = 28800 # 8 小時 +``` + +### 權限檢查 +位置: `mes_dashboard.core.permissions` + +```python +from mes_dashboard.core.permissions import is_admin_logged_in, admin_required + +# 檢查登入狀態 +if is_admin_logged_in(): + ... + +# 裝飾器保護路由 +@admin_required +def admin_only_view(): + ... +``` + +### 登入速率限制 +- 單 IP 每 5 分鐘最多 5 次嘗試 +- 位置: `routes/auth_routes.py` + +--- + +## 8. 頁面狀態管理 + +### 位置 +- 服務: `mes_dashboard.services.page_registry` +- 數據: `data/page_status.json` + +### 狀態定義 + +| 狀態 | 說明 | +|------|------| +| `released` | 所有用戶可訪問 | +| `dev` | 僅管理員可訪問 | +| `None` | 未註冊,由 Flask 路由控制 | + +### 數據格式 +```json +{ + "pages": [ + {"route": "/wip-overview", "name": "WIP 即時概況", "status": "released"}, + {"route": "/tables", "name": "表格總覽", "status": "dev"} + ], + "api_public": true +} +``` + +### API + +```python +from mes_dashboard.services.page_registry import ( + get_page_status, # 取得頁面狀態 + set_page_status, # 設定頁面狀態 + is_api_public, # API 是否公開 + get_all_pages, # 取得所有頁面 +) +``` + +### 權限檢查 (自動) +在 `app.py` 的 `@app.before_request` 中自動執行: +- dev 頁面 + 非管理員 → 403 + +--- + +## 9. 
日誌系統 + +### 雙層日誌架構 + +| 層級 | 目標 | 用途 | +|------|------|------| +| 控制台 (stderr) | Gunicorn 捕獲 | 即時監控 | +| SQLite | 管理員儀表板 | 歷史查詢 | + +### 配置 (環境變數) +``` +LOG_STORE_ENABLED=true +LOG_SQLITE_PATH=logs/admin_logs.sqlite +LOG_SQLITE_RETENTION_DAYS=7 +LOG_SQLITE_MAX_ROWS=100000 +``` + +### 日誌記錄規範 + +```python +import logging +logger = logging.getLogger('mes_dashboard') + +logger.debug("詳細調試資訊") +logger.info("一般操作記錄") +logger.warning("警告但可繼續") +logger.error("錯誤需要關注", exc_info=True) # 包含堆棧 +``` + +### SQLite 日誌查詢 +位置: `mes_dashboard.core.log_store` + +```python +from mes_dashboard.core.log_store import get_log_store + +store = get_log_store() +logs = store.query_logs( + level="ERROR", + limit=100, + offset=0, + search="keyword" +) +``` + +--- + +## 10. 健康檢查 + +### 端點 + +| 端點 | 認證 | 說明 | +|------|------|------| +| `/health` | 無需 | 基本健康檢查 | +| `/health/deep` | 需管理員 | 詳細指標 | + +### 基本檢查項目 +- 資料庫連線 (`SELECT 1 FROM DUAL`) +- Redis 連線 (`PING`) +- 各快取狀態 + +### 詳細檢查項目 (deep) +- 資料庫延遲 (毫秒) +- 連線池狀態 (size, checked_out, overflow) +- 快取新鮮度 +- 熔斷器狀態 +- 查詢性能指標 (P50/P95/P99) + +### 狀態判定 +- `200 OK` (healthy/degraded): DB 正常 +- `503 Unavailable` (unhealthy): DB 故障 + +--- + +## 11. API 路由結構 (Blueprint) + +### Blueprint 列表 + +| Blueprint | URL 前綴 | 檔案 | +|-----------|---------|------| +| wip | `/api/wip` | `wip_routes.py` | +| resource | `/api/resource` | `resource_routes.py` | +| dashboard | `/api/dashboard` | `dashboard_routes.py` | +| excel_query | `/api/excel-query` | `excel_query_routes.py` | +| hold | `/api/hold` | `hold_routes.py` | +| resource_history | `/api/resource-history` | `resource_history_routes.py` | +| job_query | `/api/job-query` | `job_query_routes.py` | +| admin | `/admin` | `admin_routes.py` | +| auth | `/admin` | `auth_routes.py` | +| health | `/` | `health_routes.py` | + +### 路由註冊 +位置: `routes/__init__.py` 的 `register_routes(app)` + +--- + +## 12. 
前端全局組件 + +### Toast 通知 +定義於 `static/js/toast.js`,透過 `_base.html` 載入: + +```javascript +// 正確用法 +Toast.info('訊息'); +Toast.success('成功'); +Toast.warning('警告'); +Toast.error('錯誤', { retry: () => loadData() }); + +const id = Toast.loading('載入中...'); +Toast.update(id, { message: '完成!' }); +Toast.dismiss(id); + +// 錯誤用法(不存在) +MESToast.warning('...'); // ❌ 錯誤 +``` + +### 自動消失時間 +- info: 3000ms +- success: 2000ms +- warning: 5000ms +- error: 永久(需手動關閉) +- loading: 永久 + +### MesApi(HTTP 請求) +定義於 `static/js/mes-api.js`: + +```javascript +// GET 請求 +const data = await MesApi.get('/api/wip/summary', { + params: { page: 1 }, + timeout: 60000, + retries: 5, + signal: abortController.signal, + silent: true // 禁用 toast 通知 +}); + +// POST 請求 +const data = await MesApi.post('/api/query_table', { + table_name: 'TABLE_A', + filters: {...} +}); +``` + +### MesApi 特性 +- 自動重試 (3 次,指數退避: 1s, 2s, 4s) +- 自動 Toast 通知 +- 請求 ID 追蹤 +- AbortSignal 支援 +- 4xx 不重試,5xx 重試 + +--- + +## 13. 資料表預篩選規則 + +### 設備類型篩選 +定義於 `mes_dashboard.config.constants.EQUIPMENT_TYPE_FILTER`: + +```sql +((OBJECTCATEGORY = 'ASSEMBLY' AND OBJECTTYPE = 'ASSEMBLY') + OR (OBJECTCATEGORY = 'WAFERSORT' AND OBJECTTYPE = 'WAFERSORT')) +``` + +### 排除條件 +```python +# 排除的地點 +EXCLUDED_LOCATIONS = [ + 'ATEC', 'F區', 'F區焊接站', '報廢', '實驗室', + '山東', '成型站_F區', '焊接F區', '無錫', '熒茂' +] + +# 排除的資產狀態 +EXCLUDED_ASSET_STATUSES = ['Disapproved'] +``` + +### CommonFilters 使用 +位置: `mes_dashboard.sql.filters` + +```python +from mes_dashboard.sql.filters import CommonFilters + +# 添加標準篩選 +CommonFilters.add_location_exclusion(builder, 'r') +CommonFilters.add_asset_status_exclusion(builder, 'r') +CommonFilters.add_wip_base_filters(builder, filters) +CommonFilters.add_equipment_filter(builder, filters) +``` + +--- + +## 14. 
資料庫欄位對應 + +### DW_MES_RESOURCE +| 常見錯誤 | 正確欄位名 | +|---------|-----------| +| ASSETSTATUS | PJ_ASSETSSTATUS(雙 S)| +| LOCATION | LOCATIONNAME | +| ISPRODUCTION | PJ_ISPRODUCTION | +| ISKEY | PJ_ISKEY | +| ISMONITOR | PJ_ISMONITOR | + +### DW_MES_RESOURCESTATUS_SHIFT +| 欄位 | 說明 | +|-----|------| +| HISTORYID | 對應 DW_MES_RESOURCE.RESOURCEID | +| TXNDATE | 交易日期 | +| OLDSTATUSNAME | E10 狀態 (PRD, SBY, UDT, SDT, EGT, NST) | +| HOURS | 該狀態時數 | + +### DW_PJ_LOT_V +| 欄位 | 說明 | +|-----|------| +| WORKCENTERNAME | 站點名稱(細分)| +| WORKCENTER_GROUP | 站點群組(顯示用)| +| WORKCENTERSEQUENCE_GROUP | 群組排序 | + +--- + +## 15. E10 狀態定義 + +| 狀態 | 說明 | 計入 OU% | +|-----|------|---------| +| PRD | Production(生產)| 是(分子)| +| SBY | Standby(待機)| 是(分母)| +| UDT | Unscheduled Downtime(非計畫停機)| 是(分母)| +| SDT | Scheduled Downtime(計畫停機)| 是(分母)| +| EGT | Engineering Time(工程時間)| 是(分母)| +| NST | Non-Scheduled Time(非排程時間)| 否 | + +### OU% 計算公式 +``` +OU% = PRD / (PRD + SBY + UDT + SDT + EGT) × 100 +``` + +### 狀態顯示名稱 +```python +STATUS_DISPLAY_NAMES = { + 'PRD': '生產中', + 'SBY': '待機', + 'UDT': '非計畫停機', + 'SDT': '計畫停機', + 'EGT': '工程時間', + 'NST': '未排單', +} +``` + +--- + +## 16. 
配置管理 + +### 環境變數 (.env) + +#### 資料庫 +``` +DB_HOST= +DB_PORT=1521 +DB_SERVICE= +DB_USER= +DB_PASSWORD= +DB_POOL_SIZE=5 +DB_MAX_OVERFLOW=10 +``` + +> 實際值請參考 `.env` 或 `.env.example` + +#### Flask +``` +FLASK_ENV=production +FLASK_DEBUG=0 +SECRET_KEY=your_secret_key +SESSION_LIFETIME=28800 +``` + +#### 認證 +``` +LDAP_API_URL= +ADMIN_EMAILS= +LOCAL_AUTH_ENABLED=false +``` + +#### Gunicorn +``` +GUNICORN_BIND=0.0.0.0:8080 +GUNICORN_WORKERS=4 +GUNICORN_THREADS=8 +``` + +#### 快取 +``` +REDIS_ENABLED=true +REDIS_URL=redis://localhost:6379/0 +CACHE_CHECK_INTERVAL=600 +RESOURCE_CACHE_ENABLED=true +RESOURCE_SYNC_INTERVAL=14400 +``` + +#### 熔斷器 +``` +CIRCUIT_BREAKER_ENABLED=true +CIRCUIT_BREAKER_FAILURE_THRESHOLD=5 +CIRCUIT_BREAKER_FAILURE_RATE=0.5 +CIRCUIT_BREAKER_RECOVERY_TIMEOUT=30 +``` + +#### 日誌 +``` +LOG_STORE_ENABLED=true +LOG_SQLITE_PATH=logs/admin_logs.sqlite +LOG_SQLITE_RETENTION_DAYS=7 +``` + +### 環境配置類 +位置: `mes_dashboard.config.settings` + +```python +class DevelopmentConfig(Config): + DEBUG = True + DB_POOL_SIZE = 2 + +class ProductionConfig(Config): + DEBUG = False + DB_POOL_SIZE = 10 + +class TestingConfig(Config): + TESTING = True + DB_POOL_SIZE = 1 +``` + +--- + +## 17. 平行查詢 + +### ThreadPoolExecutor +對於多個獨立查詢,使用平行執行提升效能: + +```python +from concurrent.futures import ThreadPoolExecutor, as_completed + +with ThreadPoolExecutor(max_workers=4) as executor: + futures = { + executor.submit(read_sql_df, kpi_sql): 'kpi', + executor.submit(read_sql_df, trend_sql): 'trend', + executor.submit(read_sql_df, heatmap_sql): 'heatmap', + } + for future in as_completed(futures): + query_name = futures[future] + results[query_name] = future.result() +``` + +### 注意事項 +- Mock 測試時不能使用 `side_effect` 列表(順序不可預測) +- 應使用函式判斷 SQL 內容來回傳對應的 mock 資料 + +--- + +## 18. 
Oracle SQL 優化 + +### CTE MATERIALIZE Hint +防止 Oracle 優化器將 CTE inline 多次執行: + +```sql +WITH shift_data AS ( + SELECT /*+ MATERIALIZE */ HISTORYID, TXNDATE, OLDSTATUSNAME, HOURS + FROM DW_MES_RESOURCESTATUS_SHIFT + WHERE TXNDATE >= TO_DATE('2024-01-01', 'YYYY-MM-DD') + AND TXNDATE < TO_DATE('2024-01-07', 'YYYY-MM-DD') + 1 +) +SELECT ... +``` + +### 日期範圍查詢 +```sql +-- 包含 end_date 當天 +WHERE TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD') + AND TXNDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1 +``` + +### 慢查詢警告 +- 閾值: 1 秒 (警告),5 秒 (`SLOW_QUERY_THRESHOLD`) +- 自動記錄到日誌 + +--- + +## 19. 前端資料限制 + +### 明細資料上限 +為避免瀏覽器記憶體問題,明細查詢有筆數限制: + +```python +MAX_DETAIL_RECORDS = 5000 + +if total > MAX_DETAIL_RECORDS: + df = df.head(MAX_DETAIL_RECORDS) + truncated = True +``` + +前端顯示警告: +```javascript +if (result.truncated) { + Toast.warning(`資料超過 ${result.max_records} 筆,請使用篩選條件縮小範圍。`); +} +``` + +--- + +## 20. JavaScript 注意事項 + +### Array.reverse() 原地修改 +```javascript +// 錯誤 - 原地修改陣列 +const arr = [1, 2, 3]; +arr.reverse(); // arr 被修改為 [3, 2, 1] + +// 正確 - 建立新陣列 +const reversed = arr.slice().reverse(); // arr 不變 +// 或 +const reversed = [...arr].reverse(); +``` + +--- + +## 21. 測試規範 + +### 測試檔案結構 +``` +tests/ +├── conftest.py # pytest fixtures +├── test_*_service.py # 單元測試(service layer) +├── test_*_routes.py # 整合測試(API endpoints) +├── e2e/ +│ └── test_*_e2e.py # 端對端測試(完整流程) +└── stress/ + └── test_*.py # 壓力測試 +``` + +### 測試前重置 +```python +def setUp(self): + db._ENGINE = None # 重置連線池 + self.app = create_app('testing') +``` + +### 執行測試 +```bash +# 單一模組 +pytest tests/test_resource_history_service.py -v + +# 全部相關測試 +pytest tests/test_resource_history_*.py tests/e2e/test_resource_history_e2e.py -v + +# 覆蓋率報告 +pytest tests/ --cov=mes_dashboard +``` + +--- + +## 22. 錯誤處理模式 + +### 三層錯誤處理 + +```python +# 1. 路由層 - 驗證錯誤 +@bp.route('/api/query') +def query(): + if not request.json.get('table_name'): + return validation_error("table_name 為必填") + +# 2. 
服務層 - 業務錯誤 (優雅降級) +def get_wip_summary(filters): + try: + df = query_wip(filters) + if df.empty: + return None + return process_data(df) + except Exception as exc: + logger.error(f"WIP query failed: {exc}") + return None + +# 3. 核心層 - 基礎設施錯誤 +def read_sql_df(sql, params): + if not circuit_breaker.allow_request(): + raise RuntimeError("Circuit breaker open") +``` + +### 全局錯誤處理 +位置: `app.py` 的 `_register_error_handlers()` + +- 401 → `unauthorized_error()` +- 403 → `forbidden_error()` +- 404 → JSON (API) 或 HTML (頁面) +- 500 → `internal_error()` +- Exception → 通用處理 + +--- + +## 參考檔案索引 + +| 功能 | 檔案位置 | +|------|---------| +| SQL 載入 | `src/mes_dashboard/sql/loader.py` | +| 查詢構建 | `src/mes_dashboard/sql/builder.py` | +| 通用篩選 | `src/mes_dashboard/sql/filters.py` | +| 資料庫操作 | `src/mes_dashboard/core/database.py` | +| 快取 | `src/mes_dashboard/core/cache.py` | +| 熔斷器 | `src/mes_dashboard/core/circuit_breaker.py` | +| API 響應 | `src/mes_dashboard/core/response.py` | +| 權限檢查 | `src/mes_dashboard/core/permissions.py` | +| 日誌存儲 | `src/mes_dashboard/core/log_store.py` | +| 配置類 | `src/mes_dashboard/config/settings.py` | +| 常量定義 | `src/mes_dashboard/config/constants.py` | +| 認證服務 | `src/mes_dashboard/services/auth_service.py` | +| 頁面狀態 | `src/mes_dashboard/services/page_registry.py` | +| Filter 快取 | `src/mes_dashboard/services/filter_cache.py` | +| 資源快取 | `src/mes_dashboard/services/resource_cache.py` | +| API 客戶端 | `src/mes_dashboard/static/js/mes-api.js` | +| Toast 系統 | `src/mes_dashboard/static/js/toast.js` | diff --git a/docs/environment_gaps_and_mitigation.md b/docs/environment_gaps_and_mitigation.md new file mode 100644 index 0000000..2898af5 --- /dev/null +++ b/docs/environment_gaps_and_mitigation.md @@ -0,0 +1,34 @@ +# Environment-dependent Gaps and Mitigation + +## Oracle-dependent checks + +### Gap +- Service/integration paths that execute Oracle SQL require live DB credentials and network reachability. +- Local CI-like runs may not have Oracle connectivity. 
+- In this environment, `tests/test_cache_integration.py` has Oracle-dependent fallback failures when cache fixtures are insufficient. + +### Mitigation +- Keep unit tests isolated with mocks for SQL entry points. +- Reserve Oracle-connected tests for gated environments. +- Use `testing` config for app factory tests where possible. + +## Redis-dependent checks + +### Gap +- Redis availability differs across environments. +- Health/caching behavior differs between `L1+L2` and `L1-only degraded` modes. + +### Mitigation +- Expose route-cache telemetry in `/health` and `/health/deep`. +- Keep degraded mode visible and non-fatal where DB remains healthy. +- Validate both modes in unit tests (`tests/test_cache.py`, `tests/test_health_routes.py`). + +## Frontend build availability + +### Gap +- Node/npm may be absent on constrained runtime nodes. + +### Mitigation +- Keep inline script fallback in templates when dist assets are missing. +- Build artifacts in deployment pipeline where Node is available. +- Startup script logs fallback mode explicitly on build failure. diff --git a/docs/frontend_compute_shift_plan.md b/docs/frontend_compute_shift_plan.md new file mode 100644 index 0000000..cb8101f --- /dev/null +++ b/docs/frontend_compute_shift_plan.md @@ -0,0 +1,42 @@ +# Frontend Compute Shift Plan + +## Targeted Calculations + +## Resource History (migrated to frontend helpers) +- `ou_pct` +- `availability_pct` +- status percentages: + - `prd_pct` + - `sby_pct` + - `udt_pct` + - `sdt_pct` + - `egt_pct` + - `nst_pct` + +These are now computed by `frontend/src/core/compute.js` via: +- `buildResourceKpiFromHours` +- `calcOuPct` +- `calcAvailabilityPct` +- `calcStatusPct` + +## Parity Rules + +1. Rounding rule +- one decimal place, identical to backend (`round(..., 1)`) + +2. Formula rule +- OU%: `PRD / (PRD + SBY + UDT + SDT + EGT)` +- Availability%: `(PRD + SBY + EGT) / (PRD + SBY + EGT + SDT + UDT + NST)` +- Status%: `status_hours / total_hours` + +3. 
Zero denominator rule +- all percentages return `0` + +4. Data compatibility rule +- backend keeps existing fields to preserve API compatibility +- frontend recomputes display values from hours for deterministic parity + +## Validation + +- Python backend formula baseline: `mes_dashboard.services.resource_history_service` +- Frontend parity check: `tests/test_frontend_compute_parity.py` diff --git a/docs/migration_gates_and_runbook.md b/docs/migration_gates_and_runbook.md new file mode 100644 index 0000000..838032d --- /dev/null +++ b/docs/migration_gates_and_runbook.md @@ -0,0 +1,113 @@ +# Migration Gates and Runbook + +## Gate Checklist (Cutover Readiness) + +A release is cutover-ready only when all gates pass: + +1. Frontend build gate +- `npm --prefix frontend run build` succeeds +- expected artifacts exist in `src/mes_dashboard/static/dist/` + +2. Root execution gate +- startup and deploy scripts run from repository root only +- no runtime dependency on any legacy subtree path + +3. Functional parity gate +- resource-history frontend compute parity checks pass +- job-query/resource-history export headers match shared field contracts + +4. Cache observability gate +- `/health` returns route cache telemetry and degraded flags +- `/health/deep` returns route cache telemetry for diagnostics +- `/health` includes `database_pool.runtime/state`, `degraded_reason` +- resource/wip derived index telemetry is visible (`resource_cache.derived_index`, `cache.derived_search_index`) + +5. Runtime resilience gate +- pool exhaustion path returns `503` + `DB_POOL_EXHAUSTED` and `Retry-After` +- circuit-open path returns `503` + `CIRCUIT_BREAKER_OPEN` and fail-fast semantics +- frontend client does not aggressively retry on degraded pool exhaustion responses + +6. 
Conda-systemd contract gate +- `deploy/mes-dashboard.service` and `deploy/mes-dashboard-watchdog.service` both run in the same conda runtime contract +- `WATCHDOG_RESTART_FLAG`, `WATCHDOG_PID_FILE`, `WATCHDOG_STATE_FILE` paths are consistent across app/admin/watchdog +- single-port bind (`GUNICORN_BIND`) remains stable during restart workflow + +7. Regression gate +- focused unit/integration test subset passes (see validation evidence) + +8. Documentation alignment gate +- `README.md` (and project-required mirror docs such as `README.mdj`) reflect current runtime architecture contract +- resilience diagnostics fields (thresholds/churn/recommendation) are documented for operators +- frontend shared-core governance updates are reflected in architecture notes + +## Rollout Procedure + +1. Prepare environment +- Activate conda env (`mes-dashboard`) +- install Python deps: `pip install -r requirements.txt` +- install frontend deps: `npm --prefix frontend install` + +2. Build frontend artifacts +- `npm --prefix frontend run build` + +3. Run migration gate tests +- execute focused pytest set covering templates/cache/contracts/health + +4. Deploy with single-port mode +- start app with root `scripts/start_server.sh` +- verify portal and module pages render on same origin/port + +5. Conda + systemd rehearsal (recommended before production cutover) +- `sudo cp deploy/mes-dashboard.service /etc/systemd/system/` +- `sudo cp deploy/mes-dashboard-watchdog.service /etc/systemd/system/` +- `sudo mkdir -p /etc/mes-dashboard && sudo cp .env /etc/mes-dashboard/mes-dashboard.env` +- `sudo systemctl daemon-reload` +- `sudo systemctl enable --now mes-dashboard mes-dashboard-watchdog` +- call `/admin/api/worker/status` and verify runtime contract paths exist + +6. 
Post-deploy checks +- call `/health` and `/health/deep` +- confirm route cache mode, degraded flags, and pool/runtime diagnostics align with environment (Redis on/off) +- trigger one controlled worker restart from admin API and verify single-port continuity +- verify README architecture section matches deployed runtime contract + +## Rollback Procedure + +1. Trigger rollback criteria +- any critical gate failure after deployment (page unusable, export mismatch, health degradation beyond acceptable limits) + +2. Operational rollback steps +- stop service: `scripts/start_server.sh stop` +- restore previously known-good build artifacts (or prior release package) +- restart service: `scripts/start_server.sh start` +- if using systemd: `sudo systemctl restart mes-dashboard mes-dashboard-watchdog` + +3. Validation after rollback +- verify `/health` status is at least expected baseline +- re-run focused smoke tests for portal + key pages +- confirm CSV export downloads and headers +- verify degraded reason is cleared or matches expected dependency outage only + +## Rollback Rehearsal Checklist + +1. Simulate failure condition (e.g. invalid dist artifact deployment) +2. Execute stop/restore/start sequence +3. Verify health and page smoke checks +4. Capture timings and any manual intervention points +5. Update this runbook if any step was unclear or missing + +## Alert Thresholds (Operational Contract) + +Use these initial thresholds for alerting/escalation: + +1. Sustained degraded state +- `degraded_reason` non-empty for >= 5 minutes + +2. Worker restart churn +- >= 3 watchdog-triggered restarts within 10 minutes + +3. Pool saturation pressure +- `database_pool.state.saturation >= 0.90` for >= 3 consecutive health probes + +4. 
Frontend/API retry pressure +- significant increase of client retries for `DB_POOL_EXHAUSTED` or `CIRCUIT_BREAKER_OPEN` responses over baseline diff --git a/docs/migration_validation_evidence.md b/docs/migration_validation_evidence.md new file mode 100644 index 0000000..9bd104b --- /dev/null +++ b/docs/migration_validation_evidence.md @@ -0,0 +1,60 @@ +# Migration Validation Evidence + +Date: 2026-02-07 + +## Build + +Command: +- `npm --prefix frontend run build` + +Result: +- PASS +- Generated page bundles: + - `portal.js` + - `resource-status.js` + - `resource-history.js` + - `job-query.js` + - `excel-query.js` + - `tables.js` + +## Root Startup Smoke + +Command: +- `PYTHONPATH=src python -c \"from mes_dashboard.app import create_app; app=create_app('testing'); print('routes', len(list(app.url_map.iter_rules())))\"` + +Result: +- PASS +- `routes 71` +- Redis/Oracle warnings observed in this local environment; app factory and route registration still completed. + +## Focused Test Gate (root project) + +Command: +- `python -m pytest -q tests/test_app_factory.py tests/test_template_integration.py tests/test_cache.py tests/test_health_routes.py tests/test_field_contracts.py tests/test_frontend_compute_parity.py tests/test_job_query_service.py tests/test_resource_history_service.py` + +Result: +- PASS +- `107 passed` + +## Extended Regression Spot-check + +Command: +- `python -m pytest -q tests/test_job_query_routes.py tests/test_resource_history_routes.py tests/test_cache_integration.py` + +Result: +- PARTIAL +- `45 passed, 2 failed` +- Failed tests: + - `tests/test_cache_integration.py::TestWipApiWithCache::test_wip_matrix_uses_cache` + - `tests/test_cache_integration.py::TestWipApiWithCache::test_packages_uses_cache` + +Failure profile: +- cache-fallback path hit Oracle in local environment and returned ORA connectivity/thick-mode errors. +- categorized as environment-dependent (see `docs/environment_gaps_and_mitigation.md`). 
+ +## Health/Telemetry Coverage + +Validated by tests: +- `/health` includes `route_cache` telemetry and degraded warnings +- `/health/deep` includes route-cache telemetry block +- cache telemetry includes L1/L2 mode, hit/miss counters, degraded state diff --git a/docs/page_architecture_map.md b/docs/page_architecture_map.md new file mode 100644 index 0000000..022479d --- /dev/null +++ b/docs/page_architecture_map.md @@ -0,0 +1,44 @@ +# Page Architecture Map + +## Portal Navigation Model + +Portal (`/`) uses drawer-based navigation and keeps existing operational flow: + +- 報表類 + - `/wip-overview` + - `/resource` + - `/resource-history` +- 查詢類 + - `/tables` + - `/excel-query` + - `/job-query` +- 開發工具 + - `/admin/pages` + - `/admin/performance` + +## Independent Pages + +These pages are independent views (iframe tabs in portal) and can be loaded directly: +- `/wip-overview` +- `/resource` +- `/resource-history` +- `/tables` +- `/excel-query` +- `/job-query` + +## Drill-down Pages + +These pages are drill-down/detail pages, linked from parent views: +- `/wip-detail` (from WIP flows) +- `/hold-detail` (from hold-related flows) + +## Vite Entry Mapping + +- `portal` -> `frontend/src/portal/main.js` +- `resource-status` -> `frontend/src/resource-status/main.js` +- `resource-history` -> `frontend/src/resource-history/main.js` +- `job-query` -> `frontend/src/job-query/main.js` +- `excel-query` -> `frontend/src/excel-query/main.js` +- `tables` -> `frontend/src/tables/main.js` + +All pages keep inline fallback scripts in templates when module assets are unavailable. 
diff --git a/docs/root_cutover_inventory.md b/docs/root_cutover_inventory.md new file mode 100644 index 0000000..8b50439 --- /dev/null +++ b/docs/root_cutover_inventory.md @@ -0,0 +1,56 @@ +# Root Cutover Inventory + +## Scope +- Workspace root: `/Users/egg/Projects/DashBoard_vite` +- Legacy subtree `DashBoard/`: removed on 2026-02-08 +- Objective: ensure runtime/test/deploy flows depend only on root architecture. + +## 1. Runtime / Test / Deploy Path Audit + +### Legacy path references +- Historical mentions may exist in archived OpenSpec artifacts for traceability. +- Active runtime/test/deploy code MUST NOT reference removed legacy subtree paths. + +### Result +- Legacy code directory is removed. +- No active runtime code in `src/`, `scripts/`, or `tests/` requires legacy subtree paths. +- Remaining mentions are documentation-only migration history. + +## 2. Root-only Execution Hardening + +### Updated +- `scripts/start_server.sh` + - Frontend build readiness now checks all required root dist entries: + - `portal.js` + - `resource-status.js` + - `resource-history.js` + - `job-query.js` + - `excel-query.js` + - `tables.js` + +### Verified behavior target +- Startup/build logic remains anchored to root paths: + - `frontend/` + - `src/mes_dashboard/static/dist/` + - `src/` + +## 3. Root-only Smoke Checks (single-port) + +### Build smoke +- `npm --prefix frontend run build` + +### App import smoke +- `PYTHONPATH=src python -c "from mes_dashboard.app import create_app; app=create_app('testing'); print(app.url_map)"` +- Verified route initialization count (`routes 71`) in root-only execution context. 
+ +### HTTP smoke (Flask test client) +- Verify page renders and module asset tags resolve/fallback: + - `/` + - `/resource` + - `/resource-history` + - `/job-query` + - `/excel-query` + - `/tables` + +### Test smoke +- `python -m pytest -q tests/test_app_factory.py tests/test_template_integration.py tests/test_cache.py` diff --git a/docs/root_refactor_validation_notes.md b/docs/root_refactor_validation_notes.md new file mode 100644 index 0000000..217c3f9 --- /dev/null +++ b/docs/root_refactor_validation_notes.md @@ -0,0 +1,37 @@ +# Root Refactor Validation Notes + +Date: 2026-02-07 + +## Focused Validation (Root Project) + +- Frontend build: + - `npm --prefix frontend run build` ✅ +- Python focused tests: + - `python -m pytest -q tests/test_app_factory.py tests/test_cache.py tests/test_job_query_service.py` ✅ (46 passed) +- Root portal asset integration check: + - GET `/` from Flask test client includes `/static/dist/portal.js` and `/static/dist/portal.css` ✅ + +## Environment-Dependent Gaps + +The following are known non-functional gaps in local validation due to missing external runtime dependencies: + +1. Oracle-dependent integration tests +- Some routes/services start background workers that attempt Oracle queries at app init. +- In local environment without valid Oracle connectivity, logs contain `DPY-3001` and related query failures. + +2. Redis-dependent runtime checks +- Redis is not reachable in local environment (`localhost:6379` connection refused). +- Cache fallback paths continue to run, but Redis health-dependent behavior is not fully exercised. + +3. Dev-page permission tests +- Certain template tests expecting `/tables` or `/excel-query` content may fail when page status is `dev` for non-admin sessions. 
+ +## Recommended Next Validation Stage + +- Run full test suite in an environment with: + - reachable Oracle test endpoint + - reachable Redis endpoint + - page status fixtures aligned with expected test roles +- Add CI matrix split: + - unit/fallback tests (no Oracle/Redis required) + - integration tests (Oracle/Redis required) diff --git a/environment.yml b/environment.yml new file mode 100644 index 0000000..a74722b --- /dev/null +++ b/environment.yml @@ -0,0 +1,46 @@ +# Conda environment for MES Dashboard +# Usage: conda env create -f environment.yml +# conda activate mes-dashboard +# +# Note: Most packages use minimum version pins (>=) to allow automatic security updates. +# For reproducible builds, generate a lock file: pip freeze > requirements.lock + +name: mes-dashboard +channels: + - conda-forge + - defaults +dependencies: + # Python version - pinned for consistency across deployments + - python=3.11 + # Frontend build toolchain (Vite) + - nodejs>=22 + + # Use pip for Python packages (better compatibility with pypi packages) + - pip + - pip: + # Core Framework + - flask>=3.0.0 + + # Database + - oracledb>=2.0.0 + - sqlalchemy>=2.0.0 + + # Data Processing + - pandas>=2.0.0 + - openpyxl>=3.0.0 + + # Cache (Redis) + - redis>=5.0.0 + - hiredis>=2.0.0 # C parser for better performance + + # HTTP Client + - requests>=2.28.0 + + # Configuration + - python-dotenv>=1.0.0 + + # WSGI Server (Production) + - gunicorn>=21.2.0 + + # System Monitoring + - psutil>=5.9.0 diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..2752eb9 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +.DS_Store diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..b2feced --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,1105 @@ +{ + "name": "mes-dashboard-frontend", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": 
"mes-dashboard-frontend", + "version": "0.1.0", + "devDependencies": { + "vite": "^6.3.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", + "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", + "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", + "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", + "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", + "integrity": 
"sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", + "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", + "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", + "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", + "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", + "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", + "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", + "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", + "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", + "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + 
"node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", + "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", + "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", + "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", + "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", + "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", + "cpu": [ + "x64" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", + "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", + "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", + "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", + "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", + "integrity": 
"sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", + "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", + "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz", + "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz", + "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz", + "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz", + "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz", + "integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz", + "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz", + "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": 
"4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz", + "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz", + "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz", + "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz", + "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz", + "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + 
"linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz", + "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz", + "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz", + "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz", + "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz", + "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==", + "cpu": [ + "s390x" + ], 
+ "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz", + "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz", + "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz", + "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz", + "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz", + "integrity": 
"sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz", + "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz", + "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz", + "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", + "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", + "dev": true, + "hasInstallScript": true, + 
"license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.12", + "@esbuild/android-arm": "0.25.12", + "@esbuild/android-arm64": "0.25.12", + "@esbuild/android-x64": "0.25.12", + "@esbuild/darwin-arm64": "0.25.12", + "@esbuild/darwin-x64": "0.25.12", + "@esbuild/freebsd-arm64": "0.25.12", + "@esbuild/freebsd-x64": "0.25.12", + "@esbuild/linux-arm": "0.25.12", + "@esbuild/linux-arm64": "0.25.12", + "@esbuild/linux-ia32": "0.25.12", + "@esbuild/linux-loong64": "0.25.12", + "@esbuild/linux-mips64el": "0.25.12", + "@esbuild/linux-ppc64": "0.25.12", + "@esbuild/linux-riscv64": "0.25.12", + "@esbuild/linux-s390x": "0.25.12", + "@esbuild/linux-x64": "0.25.12", + "@esbuild/netbsd-arm64": "0.25.12", + "@esbuild/netbsd-x64": "0.25.12", + "@esbuild/openbsd-arm64": "0.25.12", + "@esbuild/openbsd-x64": "0.25.12", + "@esbuild/openharmony-arm64": "0.25.12", + "@esbuild/sunos-x64": "0.25.12", + "@esbuild/win32-arm64": "0.25.12", + "@esbuild/win32-ia32": "0.25.12", + "@esbuild/win32-x64": "0.25.12" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/nanoid": { + "version": 
"3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/rollup": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", + "integrity": 
"sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.57.1", + "@rollup/rollup-android-arm64": "4.57.1", + "@rollup/rollup-darwin-arm64": "4.57.1", + "@rollup/rollup-darwin-x64": "4.57.1", + "@rollup/rollup-freebsd-arm64": "4.57.1", + "@rollup/rollup-freebsd-x64": "4.57.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", + "@rollup/rollup-linux-arm-musleabihf": "4.57.1", + "@rollup/rollup-linux-arm64-gnu": "4.57.1", + "@rollup/rollup-linux-arm64-musl": "4.57.1", + "@rollup/rollup-linux-loong64-gnu": "4.57.1", + "@rollup/rollup-linux-loong64-musl": "4.57.1", + "@rollup/rollup-linux-ppc64-gnu": "4.57.1", + "@rollup/rollup-linux-ppc64-musl": "4.57.1", + "@rollup/rollup-linux-riscv64-gnu": "4.57.1", + "@rollup/rollup-linux-riscv64-musl": "4.57.1", + "@rollup/rollup-linux-s390x-gnu": "4.57.1", + "@rollup/rollup-linux-x64-gnu": "4.57.1", + "@rollup/rollup-linux-x64-musl": "4.57.1", + "@rollup/rollup-openbsd-x64": "4.57.1", + "@rollup/rollup-openharmony-arm64": "4.57.1", + "@rollup/rollup-win32-arm64-msvc": "4.57.1", + "@rollup/rollup-win32-ia32-msvc": "4.57.1", + "@rollup/rollup-win32-x64-gnu": "4.57.1", + "@rollup/rollup-win32-x64-msvc": "4.57.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": 
"sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/vite": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..c7a63a5 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,14 @@ +{ + "name": "mes-dashboard-frontend", + "private": true, + "version": "0.1.0", + "type": "module", + 
"scripts": { + "dev": "vite --host", + "build": "vite build", + "test": "node --test tests/*.test.js" + }, + "devDependencies": { + "vite": "^6.3.0" + } +} diff --git a/frontend/src/core/api.js b/frontend/src/core/api.js new file mode 100644 index 0000000..51a4728 --- /dev/null +++ b/frontend/src/core/api.js @@ -0,0 +1,82 @@ +const DEFAULT_TIMEOUT = 30000; + +function buildApiError(response, payload) { + const message = + payload?.error?.message || + (typeof payload?.error === 'string' ? payload.error : null) || + payload?.message || + `HTTP ${response.status}`; + + const error = new Error(message); + error.status = response.status; + error.payload = payload; + error.errorCode = payload?.error?.code || payload?.code || null; + error.retryAfterSeconds = Number( + payload?.meta?.retry_after_seconds || response.headers.get('Retry-After') || 0 + ) || null; + return error; +} + +async function fetchJson(url, options = {}) { + const timeout = options.timeout ?? DEFAULT_TIMEOUT; + const controller = new AbortController(); + const timer = setTimeout(() => controller.abort(), timeout); + + try { + const response = await fetch(url, { + ...options, + signal: controller.signal + }); + + const data = await response.json(); + if (!response.ok) { + throw buildApiError(response, data); + } + return data; + } finally { + clearTimeout(timer); + } +} + +export async function apiGet(url, options = {}) { + if (window.MesApi?.get) { + return window.MesApi.get(url, options); + } + return fetchJson(url, { ...options, method: 'GET' }); +} + +export async function apiPost(url, payload, options = {}) { + if (window.MesApi?.post) { + return window.MesApi.post(url, payload, options); + } + return fetchJson(url, { + ...options, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...(options.headers || {}) + }, + body: JSON.stringify(payload) + }); +} + +export async function apiUpload(url, formData, options = {}) { + return fetchJson(url, { + ...options, + method: 'POST', + 
body: formData + }); +} + +export function ensureMesApiAvailable() { + if (window.MesApi) { + return window.MesApi; + } + + const bridge = { + get: (url, options) => apiGet(url, options), + post: (url, payload, options) => apiPost(url, payload, options) + }; + window.MesApi = bridge; + return bridge; +} diff --git a/frontend/src/core/autocomplete.js b/frontend/src/core/autocomplete.js new file mode 100644 index 0000000..220f242 --- /dev/null +++ b/frontend/src/core/autocomplete.js @@ -0,0 +1,69 @@ +const DEFAULT_LIMIT = 20; + +const FIELD_MAP = Object.freeze({ + workorder: 'workorder', + lotid: 'lotid', + package: 'package', + type: 'pj_type' +}); + +export function debounce(fn, wait = 300) { + let timer = null; + return (...args) => { + if (timer) { + clearTimeout(timer); + } + timer = setTimeout(() => fn(...args), wait); + }; +} + +export function buildWipAutocompleteParams(searchType, query, filters = {}, limit = DEFAULT_LIMIT) { + const keyword = (query || '').trim(); + if (keyword.length < 2) { + return null; + } + + const params = { + field: FIELD_MAP[searchType] || searchType, + q: keyword, + limit + }; + + const filterKeys = ['workorder', 'lotid', 'package', 'type']; + filterKeys.forEach((key) => { + const value = (filters[key] || '').trim(); + if (key !== searchType && value) { + params[key] = value; + } + }); + + return params; +} + +export async function fetchWipAutocompleteItems({ + searchType, + query, + filters, + request, + limit = DEFAULT_LIMIT, +}) { + const params = buildWipAutocompleteParams(searchType, query, filters, limit); + if (!params) { + return []; + } + try { + const result = await request('/api/wip/meta/search', { + params, + silent: true, + retries: 0, + }); + if (result?.success) { + return result?.data?.items || []; + } + return []; + } catch { + return []; + } +} + +export { FIELD_MAP as WIP_AUTOCOMPLETE_FIELD_MAP }; diff --git a/frontend/src/core/compute.js b/frontend/src/core/compute.js new file mode 100644 index 0000000..a458477 
--- /dev/null +++ b/frontend/src/core/compute.js @@ -0,0 +1,59 @@ +function round1(value) { + const scaled = Number(value) * 10; + const sign = Math.sign(scaled) || 1; + const abs = Math.abs(scaled); + const floor = Math.floor(abs); + const diff = abs - floor; + const epsilon = 1e-9; + + let rounded; + if (diff > 0.5 + epsilon) { + rounded = floor + 1; + } else if (diff < 0.5 - epsilon) { + rounded = floor; + } else { + // Match Python round(..., 1): banker's rounding (half to even). + rounded = floor % 2 === 0 ? floor : floor + 1; + } + + return (sign * rounded) / 10; +} + +export function calcOuPct(prd, sby, udt, sdt, egt) { + const denominator = Number(prd) + Number(sby) + Number(udt) + Number(sdt) + Number(egt); + if (denominator <= 0) return 0; + return round1((Number(prd) / denominator) * 100); +} + +export function calcAvailabilityPct(prd, sby, udt, sdt, egt, nst) { + const numerator = Number(prd) + Number(sby) + Number(egt); + const denominator = numerator + Number(sdt) + Number(udt) + Number(nst); + if (denominator <= 0) return 0; + return round1((numerator / denominator) * 100); +} + +export function calcStatusPct(value, total) { + if (Number(total) <= 0) return 0; + return round1((Number(value) / Number(total)) * 100); +} + +export function buildResourceKpiFromHours(hours = {}) { + const prd = Number(hours.prd_hours || 0); + const sby = Number(hours.sby_hours || 0); + const udt = Number(hours.udt_hours || 0); + const sdt = Number(hours.sdt_hours || 0); + const egt = Number(hours.egt_hours || 0); + const nst = Number(hours.nst_hours || 0); + const total = prd + sby + udt + sdt + egt + nst; + + return { + ou_pct: calcOuPct(prd, sby, udt, sdt, egt), + availability_pct: calcAvailabilityPct(prd, sby, udt, sdt, egt, nst), + prd_pct: calcStatusPct(prd, total), + sby_pct: calcStatusPct(sby, total), + udt_pct: calcStatusPct(udt, total), + sdt_pct: calcStatusPct(sdt, total), + egt_pct: calcStatusPct(egt, total), + nst_pct: calcStatusPct(nst, total) + }; +} diff 
import rawContracts from '../../../shared/field_contracts.json';

// Field-contract registry bundled at build time; empty object as a safe fallback.
const contracts = rawContracts || {};

/**
 * Contract entries for one page section. Always returns an array,
 * even when the page or section is missing or malformed.
 */
export function getPageContract(pageKey, sectionKey) {
  const section = (contracts[pageKey] || {})[sectionKey];
  return Array.isArray(section) ? section : [];
}

/**
 * Look up a single field contract by its API key; null when absent.
 */
export function getFieldContractByApiKey(pageKey, sectionKey, apiKey) {
  const match = getPageContract(pageKey, sectionKey).find((entry) => entry.api_key === apiKey);
  return match || null;
}

/**
 * UI column headers for a section, falling back to the API key
 * when no ui_label is defined.
 */
export function getUiHeaders(pageKey, sectionKey) {
  return getPageContract(pageKey, sectionKey).map((entry) => entry.ui_label || entry.api_key);
}

/**
 * CSV/export headers for a page's 'export' section, preferring
 * export_header, then ui_label, then the API key.
 */
export function getExportHeaders(pageKey) {
  return getPageContract(pageKey, 'export').map(
    (entry) => entry.export_header || entry.ui_label || entry.api_key
  );
}

/**
 * Expose the whole registry (treated as read-only by convention).
 */
export function getContractRegistry() {
  return contracts;
}
/**
 * Toggle the expanded/collapsed flag for one tree node key and
 * return the new value.
 */
export function toggleTreeState(state, key) {
  state[key] = !state[key];
  return state[key];
}

/**
 * Set the expanded flag for many tree node keys at once.
 */
export function setTreeStateBulk(state, keys, expanded) {
  keys.forEach((key) => {
    state[key] = expanded;
  });
}

/**
 * Escape a value for safe interpolation into HTML markup.
 *
 * Bug fix: the replacement entities had been reduced to the literal
 * characters themselves (e.g. replacing '&' with '&') and the '<'
 * pattern had collapsed into an empty regex literal `//`, which both
 * broke parsing and left output unescaped. Restore the standard
 * five-entity escaping (&, <, >, ", ').
 */
export function escapeHtml(value) {
  return String(value ?? '')
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}

/**
 * Render a value as text, substituting `fallback` for null/undefined.
 */
export function safeText(value, fallback = '') {
  return value === null || value === undefined ? fallback : String(value);
}
FormData(); + formData.append('file', file); + + document.getElementById('uploadInfo').innerHTML = '

上傳中...
'; + + try { + // Note: File upload uses native fetch since MesApi doesn't support FormData + const response = await fetch('/api/excel-query/upload', { + method: 'POST', + body: formData + }); + const data = await response.json(); + + if (data.error) { + document.getElementById('uploadInfo').innerHTML = `
${escapeHtml(data.error)}
`; + return; + } + + excelColumns = data.columns; + document.getElementById('uploadInfo').innerHTML = ` +
+ 檔案上傳成功!共 ${data.total_rows} 行,${data.columns.length} 欄 +
+ `; + + renderPreviewTable(data.columns, data.preview); + + const select = document.getElementById('excelColumn'); + select.innerHTML = ''; + excelColumns.forEach(col => { + select.innerHTML += ``; + }); + + document.getElementById('step2').classList.remove('disabled'); + loadTables(); + + } catch (error) { + document.getElementById('uploadInfo').innerHTML = `
上傳失敗: ${escapeHtml(error.message)}
`; + } + } + + function renderPreviewTable(columns, data) { + if (!data || data.length === 0) return; + + let html = ''; + columns.forEach(col => { + html += ``; + }); + html += ''; + + data.forEach(row => { + html += ''; + columns.forEach(col => { + const val = row[col] !== null && row[col] !== undefined ? row[col] : ''; + const textVal = safeText(val); + const escaped = escapeHtml(textVal); + html += ``; + }); + html += ''; + }); + html += '
${escapeHtml(col)}
${escaped}
'; + + document.getElementById('previewTable').innerHTML = html; + } + + // Step 2: Load column values + async function loadColumnValues() { + const column = document.getElementById('excelColumn').value; + if (!column) { + searchValues = []; + document.getElementById('columnInfo').innerHTML = ''; + return; + } + + document.getElementById('columnInfo').innerHTML = '

讀取中...
'; + + try { + // Get column values + const data = await MesApi.post('/api/excel-query/column-values', { column_name: column }); + + if (data.error) { + document.getElementById('columnInfo').innerHTML = `
${escapeHtml(data.error)}
`; + return; + } + + searchValues = data.values; + + // Get column type detection + try { + const typeData = await MesApi.post('/api/excel-query/column-type', { column_name: column }); + if (!typeData.error) { + excelColumnTypes[column] = typeData; + } + } catch (e) { + console.warn('Could not detect column type:', e); + } + + // Build info display + const typeInfo = excelColumnTypes[column]; + const typeBadge = typeInfo ? `${typeInfo.type_label}` : ''; + const warningClass = data.count > 1000 ? ' warning' : ''; + + document.getElementById('columnInfo').innerHTML = ` +
+ 共 ${data.count} 個不重複值 ${typeBadge} + ${data.count > 1000 ? '(將分批查詢,每批 1000 筆)' : ''} +
+ `; + + document.getElementById('step3').classList.remove('disabled'); + + } catch (error) { + document.getElementById('columnInfo').innerHTML = `
讀取失敗: ${escapeHtml(error.message)}
`; + } + } + + // Load available tables + async function loadTables() { + try { + const data = await MesApi.get('/api/excel-query/tables', { silent: true }); + + const select = document.getElementById('targetTable'); + select.innerHTML = ''; + + data.tables.forEach(table => { + select.innerHTML += ``; + }); + } catch (error) { + console.error('Failed to load tables:', error); + } + } + + // Step 3: Load table columns (using new table-metadata endpoint) + async function loadTableColumns() { + const tableName = document.getElementById('targetTable').value; + if (!tableName) { + tableColumns = []; + tableColumnsMeta = []; + tableMetadata = null; + document.getElementById('tableInfo').innerHTML = ''; + document.getElementById('dateRangeSection').style.display = 'none'; + document.getElementById('performanceWarning').style.display = 'none'; + return; + } + + document.getElementById('tableInfo').innerHTML = '

讀取欄位...
'; + + try { + const data = await MesApi.post('/api/excel-query/table-metadata', { table_name: tableName }); + + if (data.error) { + document.getElementById('tableInfo').innerHTML = `
${escapeHtml(data.error)}
`; + return; + } + + tableColumnsMeta = data.columns || []; + tableColumns = tableColumnsMeta.map(c => c.name); + tableMetadata = data; + + // Show table info + let infoHtml = `共 ${tableColumns.length} 個欄位`; + if (data.row_count) { + infoHtml += ` | 約 ${data.row_count.toLocaleString()} 筆`; + } + if (data.time_field) { + infoHtml += ` | 時間欄位: ${escapeHtml(data.time_field)}`; + } + document.getElementById('tableInfo').innerHTML = `
${infoHtml}
`; + + // Populate search column dropdown with type badges + const searchSelect = document.getElementById('searchColumn'); + searchSelect.innerHTML = ''; + tableColumnsMeta.forEach(col => { + const typeBadge = getTypeBadgeHtml(col.data_type); + searchSelect.innerHTML += ``; + }); + + // Populate return columns with type badges + const container = document.getElementById('returnColumns'); + container.innerHTML = ''; + tableColumnsMeta.forEach(col => { + const typeBadge = getTypeBadgeHtml(col.data_type); + container.innerHTML += ` + + `; + }); + + // Setup date range section + setupDateRangeSection(data); + + // Show performance warning if applicable + if (data.performance_warning) { + document.getElementById('performanceWarning').textContent = data.performance_warning; + document.getElementById('performanceWarning').style.display = 'block'; + } else { + document.getElementById('performanceWarning').style.display = 'none'; + } + + document.getElementById('step4').classList.remove('disabled'); + document.getElementById('step5').classList.remove('disabled'); + + } catch (error) { + document.getElementById('tableInfo').innerHTML = `
讀取失敗: ${escapeHtml(error.message)}
`; + } + } + + // Helper: Get type badge HTML + function getTypeBadgeHtml(dataType) { + if (!dataType || dataType === 'UNKNOWN') return ''; + + const typeMap = { + 'VARCHAR2': { class: 'text', label: '文字' }, + 'CHAR': { class: 'text', label: '文字' }, + 'NVARCHAR2': { class: 'text', label: '文字' }, + 'CLOB': { class: 'text', label: '文字' }, + 'NUMBER': { class: 'number', label: '數值' }, + 'FLOAT': { class: 'number', label: '數值' }, + 'INTEGER': { class: 'number', label: '數值' }, + 'DATE': { class: 'date', label: '日期' }, + 'TIMESTAMP': { class: 'datetime', label: '日期時間' }, + }; + + // Find matching type + for (const [key, val] of Object.entries(typeMap)) { + if (dataType.toUpperCase().includes(key)) { + return `${val.label}`; + } + } + return `${escapeHtml(dataType)}`; + } + + // Setup date range section based on table metadata + function setupDateRangeSection(metadata) { + const section = document.getElementById('dateRangeSection'); + const dateColumnSelect = document.getElementById('dateColumn'); + + // Find date/timestamp columns + const dateColumns = tableColumnsMeta.filter(c => c.is_date); + + if (dateColumns.length === 0 && !metadata.time_field) { + section.style.display = 'none'; + return; + } + + section.style.display = 'block'; + dateColumnSelect.innerHTML = ''; + + // Add configured time_field first if available + if (metadata.time_field) { + dateColumnSelect.innerHTML += ``; + } + + // Add other date columns + dateColumns.forEach(col => { + if (col.name !== metadata.time_field) { + dateColumnSelect.innerHTML += ``; + } + }); + } + + // Set default date range (last 90 days) + function setDefaultDateRange() { + const today = new Date(); + const past = new Date(); + past.setDate(today.getDate() - 90); + + document.getElementById('dateFrom').value = past.toISOString().split('T')[0]; + document.getElementById('dateTo').value = today.toISOString().split('T')[0]; + } + + // Toggle advanced panel + function toggleAdvancedPanel() { + const panel = 
// Show or hide the performance warning when the query type changes.
// LIKE "contains" scans cannot use an index, so a very large table gets
// an explicit size warning first; otherwise the table's own configured
// performance warning (if any) is shown for contains queries.
function onQueryTypeChange() {
  const selectedType = document.getElementById('queryType').value;
  const warningDiv = document.getElementById('performanceWarning');
  const isContains = selectedType === 'like_contains';

  let message = null;
  if (isContains && tableMetadata && tableMetadata.row_count > 10000000) {
    message = '此資料表超過 1000 萬筆,包含查詢可能較慢,建議配合日期範圍縮小查詢範圍';
  } else if (isContains && tableMetadata && tableMetadata.performance_warning) {
    message = tableMetadata.performance_warning;
  }

  if (message !== null) {
    warningDiv.textContent = message;
    warningDiv.style.display = 'block';
  } else {
    warningDiv.style.display = 'none';
  }
}
${warning}
`; + } else { + warningDiv.innerHTML = ''; + } + } + + function selectAllColumns() { + document.querySelectorAll('#returnColumns input[type="checkbox"]').forEach(cb => cb.checked = true); + } + + function deselectAllColumns() { + document.querySelectorAll('#returnColumns input[type="checkbox"]').forEach(cb => cb.checked = false); + } + + function getSelectedReturnColumns() { + const checkboxes = document.querySelectorAll('#returnColumns input[type="checkbox"]:checked'); + return Array.from(checkboxes).map(cb => cb.value); + } + + function getQueryParams() { + const params = { + table_name: document.getElementById('targetTable').value, + search_column: document.getElementById('searchColumn').value, + return_columns: getSelectedReturnColumns(), + search_values: searchValues, + query_type: document.getElementById('queryType').value + }; + + // Add date range if specified + const dateColumn = document.getElementById('dateColumn').value; + const dateFrom = document.getElementById('dateFrom').value; + const dateTo = document.getElementById('dateTo').value; + + if (dateColumn) { + params.date_column = dateColumn; + if (dateFrom) params.date_from = dateFrom; + if (dateTo) params.date_to = dateTo; + } + + return params; + } + + function validateQuery() { + const params = getQueryParams(); + + if (!params.table_name) { + alert('請選擇資料表'); + return false; + } + if (!params.search_column) { + alert('請選擇查詢欄位'); + return false; + } + if (params.return_columns.length === 0) { + alert('請至少選擇一個回傳欄位'); + return false; + } + if (params.search_values.length === 0) { + alert('無查詢值,請先選擇 Excel 欄位'); + return false; + } + + // Validate LIKE keyword limit + if (params.query_type.startsWith('like_') && params.search_values.length > 100) { + alert('LIKE 查詢最多支援 100 個關鍵字,目前有 ' + params.search_values.length + ' 個'); + return false; + } + + // Validate date range + if (params.date_from && params.date_to) { + const from = new Date(params.date_from); + const to = new Date(params.date_to); + if 
(from > to) { + alert('起始日期不可晚於結束日期'); + document.getElementById('dateRangeError').textContent = '起始日期不可晚於結束日期'; + document.getElementById('dateRangeError').style.display = 'block'; + return false; + } + const daysDiff = (to - from) / (1000 * 60 * 60 * 24); + if (daysDiff > 365) { + alert('日期範圍不可超過 365 天'); + document.getElementById('dateRangeError').textContent = '日期範圍不可超過 365 天'; + document.getElementById('dateRangeError').style.display = 'block'; + return false; + } + document.getElementById('dateRangeError').style.display = 'none'; + } + + return true; + } + + // Step 5: Execute query + async function executeQuery() { + if (!validateQuery()) return; + + const params = getQueryParams(); + const isAdvanced = params.query_type !== 'in' || params.date_column; + const batchCount = params.query_type === 'in' ? Math.ceil(params.search_values.length / 1000) : 1; + + // Build loading message + let loadingMsg = `查詢中... (${params.search_values.length} 筆`; + if (params.query_type !== 'in') { + const typeLabels = { + 'like_contains': '包含查詢', + 'like_prefix': '前綴查詢', + 'like_suffix': '後綴查詢' + }; + loadingMsg += `,${typeLabels[params.query_type] || params.query_type}`; + } else if (batchCount > 1) { + loadingMsg += `,${batchCount} 批次`; + } + if (params.date_from || params.date_to) { + loadingMsg += `,日期篩選`; + } + loadingMsg += ')'; + + document.getElementById('executeInfo').innerHTML = ` +
+

+ ${loadingMsg} +
+ `; + document.getElementById('resultSection').classList.remove('active'); + + try { + // Use advanced endpoint if using advanced features + const endpoint = isAdvanced ? '/api/excel-query/execute-advanced' : '/api/excel-query/execute'; + const data = await MesApi.post(endpoint, params); + + if (data.error) { + document.getElementById('executeInfo').innerHTML = `
${escapeHtml(data.error)}
`; + return; + } + + queryResult = data; + + // Build result message + let resultMsg = `查詢完成!搜尋 ${data.search_count} 筆,找到 ${data.row_count} 筆結果`; + if (data.query_type && data.query_type !== 'in') { + resultMsg += ` (${data.query_type})`; + } + + document.getElementById('executeInfo').innerHTML = ` +
${resultMsg}
+ `; + + renderResult(data); + + } catch (error) { + document.getElementById('executeInfo').innerHTML = `
查詢失敗: ${escapeHtml(error.message)}
`; + } + } + + function renderResult(data) { + const section = document.getElementById('resultSection'); + const statsDiv = document.getElementById('resultStats'); + const tableDiv = document.getElementById('resultTable'); + + statsDiv.innerHTML = ` + 搜尋值: ${data.search_count} + 結果: ${data.row_count} 筆 + ${data.batch_count > 1 ? `批次: ${data.batch_count}` : ''} + `; + + if (data.data.length === 0) { + tableDiv.innerHTML = '
查無資料
'; + } else { + let html = ''; + data.columns.forEach(col => { + html += ``; + }); + html += ''; + + const previewData = data.data.slice(0, 1000); + previewData.forEach(row => { + html += ''; + data.columns.forEach(col => { + if (row[col] === null || row[col] === undefined) { + html += ''; + } else { + html += ``; + } + }); + html += ''; + }); + html += '
${escapeHtml(col)}
NULL${escapeHtml(safeText(row[col]))}
'; + + if (data.data.length > 1000) { + html += `
+ 顯示前 1000 筆,完整資料請匯出 CSV +
`; + } + + tableDiv.innerHTML = html; + } + + section.classList.add('active'); + section.scrollIntoView({ behavior: 'smooth' }); + } + + // Export CSV + async function exportCSV() { + if (!validateQuery()) return; + + const params = getQueryParams(); + const batchCount = Math.ceil(params.search_values.length / 1000); + + document.getElementById('executeInfo').innerHTML = ` +
+

+ 匯出中... (${params.search_values.length} 筆,${batchCount} 批次) +
+ `; + + try { + // Note: CSV export uses native fetch for blob response + const response = await fetch('/api/excel-query/export-csv', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(params) + }); + + if (!response.ok) { + const data = await response.json(); + document.getElementById('executeInfo').innerHTML = `
${escapeHtml(data.error || '匯出失敗')}
`; + return; + } + + const blob = await response.blob(); + const url = window.URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = 'query_result.csv'; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + window.URL.revokeObjectURL(url); + + document.getElementById('executeInfo').innerHTML = ` +
CSV 匯出完成!
+ `; + + } catch (error) { + document.getElementById('executeInfo').innerHTML = `
匯出失敗: ${escapeHtml(error.message)}
`; + } + } + + +Object.assign(window, { +uploadExcel, +renderPreviewTable, +loadColumnValues, +loadTables, +loadTableColumns, +getTypeBadgeHtml, +setupDateRangeSection, +setDefaultDateRange, +toggleAdvancedPanel, +onQueryTypeChange, +checkTypeMismatch, +selectAllColumns, +deselectAllColumns, +getSelectedReturnColumns, +getQueryParams, +validateQuery, +executeQuery, +renderResult, +exportCSV, +}); diff --git a/frontend/src/hold-detail/main.js b/frontend/src/hold-detail/main.js new file mode 100644 index 0000000..7adb45e --- /dev/null +++ b/frontend/src/hold-detail/main.js @@ -0,0 +1,336 @@ +import { ensureMesApiAvailable } from '../core/api.js'; +import { escapeHtml, safeText } from '../core/table-tree.js'; + +ensureMesApiAvailable(); + +(function initHoldDetailPage() { + // ============================================================ + // State + // ============================================================ + const state = { + reason: new URLSearchParams(window.location.search).get('reason') || '', + summary: null, + distribution: null, + lots: null, + page: 1, + perPage: 50, + filters: { + workcenter: null, + package: null, + ageRange: null + } + }; + + // ============================================================ + // Utility + // ============================================================ + function formatNumber(num) { + if (num === null || num === undefined || num === '-') return '-'; + return num.toLocaleString('zh-TW'); + } + + function jsSingleQuote(value) { + return safeText(value, '') + .replace(/\\/g, '\\\\') + .replace(/'/g, "\\'"); + } + + // ============================================================ + // API Functions + // ============================================================ + const API_TIMEOUT = 60000; + + async function fetchSummary() { + const result = await MesApi.get('/api/wip/hold-detail/summary', { + params: { reason: state.reason }, + timeout: API_TIMEOUT + }); + if (result.success) return result.data; + throw new 
// Fetch the hold-reason distribution (by age / workcenter / package).
// Resolves with the payload on success, rejects with the API error text otherwise.
async function fetchDistribution() {
  const result = await MesApi.get('/api/wip/hold-detail/distribution', {
    params: { reason: state.reason },
    timeout: API_TIMEOUT
  });
  if (!result.success) {
    throw new Error(result.error);
  }
  return result.data;
}

// Fetch one page of held lots, applying any active filters from state.
async function fetchLots() {
  const params = {
    reason: state.reason,
    page: state.page,
    per_page: state.perPage
  };

  // Only send filters that are actually set.
  const { workcenter, package: pkg, ageRange } = state.filters;
  if (workcenter) params.workcenter = workcenter;
  if (pkg) params.package = pkg;
  if (ageRange) params.age_range = ageRange;

  const result = await MesApi.get('/api/wip/hold-detail/lots', {
    params,
    timeout: API_TIMEOUT
  });
  if (!result.success) {
    throw new Error(result.error);
  }
  return result.data;
}
`${data.maxAge}天` : '-'; + document.getElementById('workcenterCount').textContent = formatNumber(data.workcenterCount); + } + + function renderDistribution(data) { + // Age distribution + const ageMap = {}; + data.byAge.forEach(item => { ageMap[item.range] = item; }); + + const age01 = ageMap['0-1'] || { lots: 0, qty: 0, percentage: 0 }; + const age13 = ageMap['1-3'] || { lots: 0, qty: 0, percentage: 0 }; + const age37 = ageMap['3-7'] || { lots: 0, qty: 0, percentage: 0 }; + const age7 = ageMap['7+'] || { lots: 0, qty: 0, percentage: 0 }; + + document.getElementById('age01Lots').textContent = formatNumber(age01.lots); + document.getElementById('age01Qty').textContent = formatNumber(age01.qty); + document.getElementById('age01Pct').textContent = `${age01.percentage}%`; + + document.getElementById('age13Lots').textContent = formatNumber(age13.lots); + document.getElementById('age13Qty').textContent = formatNumber(age13.qty); + document.getElementById('age13Pct').textContent = `${age13.percentage}%`; + + document.getElementById('age37Lots').textContent = formatNumber(age37.lots); + document.getElementById('age37Qty').textContent = formatNumber(age37.qty); + document.getElementById('age37Pct').textContent = `${age37.percentage}%`; + + document.getElementById('age7Lots').textContent = formatNumber(age7.lots); + document.getElementById('age7Qty').textContent = formatNumber(age7.qty); + document.getElementById('age7Pct').textContent = `${age7.percentage}%`; + + // Workcenter table + const wcBody = document.getElementById('workcenterBody'); + if (data.byWorkcenter.length === 0) { + wcBody.innerHTML = 'No data'; + } else { + wcBody.innerHTML = data.byWorkcenter.map(item => ` + + ${escapeHtml(safeText(item.name))} + ${escapeHtml(formatNumber(item.lots))} + ${escapeHtml(formatNumber(item.qty))} + ${escapeHtml(safeText(item.percentage, 0))}% + + `).join(''); + } + + // Package table + const pkgBody = document.getElementById('packageBody'); + if (data.byPackage.length === 0) { 
+ pkgBody.innerHTML = 'No data'; + } else { + pkgBody.innerHTML = data.byPackage.map(item => ` + + ${escapeHtml(safeText(item.name))} + ${escapeHtml(formatNumber(item.lots))} + ${escapeHtml(formatNumber(item.qty))} + ${escapeHtml(safeText(item.percentage, 0))}% + + `).join(''); + } + } + + function renderLots(data) { + const tbody = document.getElementById('lotBody'); + const lots = data.lots; + + if (lots.length === 0) { + tbody.innerHTML = 'No data'; + document.getElementById('tableInfo').textContent = 'No data'; + document.getElementById('pagination').style.display = 'none'; + return; + } + + tbody.innerHTML = lots.map(lot => ` + + ${escapeHtml(safeText(lot.lotId))} + ${escapeHtml(safeText(lot.workorder))} + ${escapeHtml(formatNumber(lot.qty))} + ${escapeHtml(safeText(lot.package))} + ${escapeHtml(safeText(lot.workcenter))} + ${escapeHtml(safeText(lot.spec))} + ${escapeHtml(safeText(lot.age))}天 + ${escapeHtml(safeText(lot.holdBy))} + ${escapeHtml(safeText(lot.dept))} + ${escapeHtml(safeText(lot.holdComment))} + + `).join(''); + + // Update pagination + const pg = data.pagination; + const start = (pg.page - 1) * pg.perPage + 1; + const end = Math.min(pg.page * pg.perPage, pg.total); + document.getElementById('tableInfo').textContent = `顯示 ${start} - ${end} / ${formatNumber(pg.total)}`; + + if (pg.totalPages > 1) { + document.getElementById('pagination').style.display = 'flex'; + document.getElementById('pageInfo').textContent = `Page ${pg.page} / ${pg.totalPages}`; + document.getElementById('btnPrev').disabled = pg.page <= 1; + document.getElementById('btnNext').disabled = pg.page >= pg.totalPages; + } else { + document.getElementById('pagination').style.display = 'none'; + } + } + + function updateFilterIndicator() { + const indicator = document.getElementById('filterIndicator'); + const text = document.getElementById('filterText'); + const parts = []; + + if (state.filters.workcenter) parts.push(`Workcenter=${state.filters.workcenter}`); + if 
(state.filters.package) parts.push(`Package=${state.filters.package}`); + if (state.filters.ageRange) parts.push(`Age=${state.filters.ageRange}天`); + + if (parts.length > 0) { + text.textContent = '篩選: ' + parts.join(', '); + indicator.style.display = 'flex'; + } else { + indicator.style.display = 'none'; + } + + // Update active states + document.querySelectorAll('.age-card').forEach(card => { + card.classList.toggle('active', card.dataset.range === state.filters.ageRange); + }); + document.querySelectorAll('#workcenterBody tr').forEach(row => { + row.classList.toggle('active', row.dataset.workcenter === state.filters.workcenter); + }); + document.querySelectorAll('#packageBody tr').forEach(row => { + row.classList.toggle('active', row.dataset.package === state.filters.package); + }); + } + + // ============================================================ + // Filter Functions + // ============================================================ + function toggleAgeFilter(range) { + state.filters.ageRange = state.filters.ageRange === range ? null : range; + state.page = 1; + updateFilterIndicator(); + loadLots(); + } + + function toggleWorkcenterFilter(wc) { + state.filters.workcenter = state.filters.workcenter === wc ? null : wc; + state.page = 1; + updateFilterIndicator(); + loadLots(); + } + + function togglePackageFilter(pkg) { + state.filters.package = state.filters.package === pkg ? 
null : pkg; + state.page = 1; + updateFilterIndicator(); + loadLots(); + } + + function clearFilters() { + state.filters = { workcenter: null, package: null, ageRange: null }; + state.page = 1; + updateFilterIndicator(); + loadLots(); + } + + // ============================================================ + // Pagination + // ============================================================ + function prevPage() { + if (state.page > 1) { + state.page--; + loadLots(); + } + } + + function nextPage() { + if (state.lots && state.page < state.lots.pagination.totalPages) { + state.page++; + loadLots(); + } + } + + // ============================================================ + // Data Loading + // ============================================================ + async function loadLots() { + document.getElementById('lotBody').innerHTML = 'Loading...'; + document.getElementById('refreshIndicator').classList.add('active'); + + try { + state.lots = await fetchLots(); + renderLots(state.lots); + } catch (error) { + console.error('Load lots failed:', error); + document.getElementById('lotBody').innerHTML = 'Error loading data'; + } finally { + document.getElementById('refreshIndicator').classList.remove('active'); + } + } + + async function loadAllData(showOverlay = true) { + if (showOverlay) { + document.getElementById('loadingOverlay').style.display = 'flex'; + } + document.getElementById('refreshIndicator').classList.add('active'); + + try { + const [summary, distribution, lots] = await Promise.all([ + fetchSummary(), + fetchDistribution(), + fetchLots() + ]); + + state.summary = summary; + state.distribution = distribution; + state.lots = lots; + + renderSummary(summary); + renderDistribution(distribution); + renderLots(lots); + updateFilterIndicator(); + + document.getElementById('lastUpdate').textContent = `Last Update: ${new Date().toLocaleString('zh-TW')}`; + } catch (error) { + console.error('Load data failed:', error); + } finally { + 
document.getElementById('loadingOverlay').style.display = 'none'; + document.getElementById('refreshIndicator').classList.remove('active'); + } + } + + function manualRefresh() { + loadAllData(false); + } + + // ============================================================ + // Initialize + // ============================================================ + window.onload = function() { + loadAllData(true); + }; + + Object.assign(window, { + toggleAgeFilter, + toggleWorkcenterFilter, + togglePackageFilter, + clearFilters, + prevPage, + nextPage, + manualRefresh, + loadAllData, + loadLots + }); +})(); diff --git a/frontend/src/job-query/main.js b/frontend/src/job-query/main.js new file mode 100644 index 0000000..c1338dc --- /dev/null +++ b/frontend/src/job-query/main.js @@ -0,0 +1,474 @@ +import { ensureMesApiAvailable } from '../core/api.js'; +import { getPageContract } from '../core/field-contracts.js'; +import { escapeHtml, groupBy, sortBy, safeText } from '../core/table-tree.js'; + +ensureMesApiAvailable(); +window.__MES_FRONTEND_CORE__ = { groupBy, sortBy, escapeHtml, safeText }; +window.__FIELD_CONTRACTS__ = window.__FIELD_CONTRACTS__ || {}; +window.__FIELD_CONTRACTS__['job_query:jobs_table'] = getPageContract('job_query', 'jobs_table'); +window.__FIELD_CONTRACTS__['job_query:txn_table'] = getPageContract('job_query', 'txn_table'); + +const jobTableFields = getPageContract('job_query', 'jobs_table'); +const txnTableFields = getPageContract('job_query', 'txn_table'); + +function renderJobCell(job, apiKey) { + if (apiKey === 'JOBSTATUS') { + const value = safeText(job[apiKey]); + return `${value}`; + } + if (apiKey === 'CREATEDATE' || apiKey === 'COMPLETEDATE') { + return formatDate(job[apiKey]); + } + return escapeHtml(safeText(job[apiKey])); +} + +function renderTxnCell(txn, apiKey) { + if (apiKey === 'FROMJOBSTATUS' || apiKey === 'JOBSTATUS') { + const value = safeText(txn[apiKey], '-'); + return `${escapeHtml(value)}`; + } + if (apiKey === 'TXNDATE') { + return 
formatDate(txn[apiKey]); + } + if (apiKey === 'USER_NAME') { + return escapeHtml(safeText(txn.USER_NAME || txn.EMP_NAME)); + } + return escapeHtml(safeText(txn[apiKey])); +} + + + // State + let allEquipments = []; + let selectedEquipments = new Set(); + let jobsData = []; + let expandedJobs = new Set(); + + // Initialize + document.addEventListener('DOMContentLoaded', () => { + loadEquipments(); + setLast90Days(); + + // Close dropdown when clicking outside + document.addEventListener('click', (e) => { + const dropdown = document.getElementById('equipmentDropdown'); + const selector = document.querySelector('.equipment-selector'); + if (!selector.contains(e.target)) { + dropdown.classList.remove('show'); + } + }); + }); + + // Load equipments from cache + async function loadEquipments() { + try { + const data = await MesApi.get('/api/job-query/resources'); + if (data.error) { + document.getElementById('equipmentList').innerHTML = `
${data.error}
`; + return; + } + + allEquipments = data.data; + renderEquipmentList(allEquipments); + } catch (error) { + document.getElementById('equipmentList').innerHTML = `
載入失敗: ${error.message}
`; + } + } + + // Render equipment list + function renderEquipmentList(equipments) { + const container = document.getElementById('equipmentList'); + + if (!equipments || equipments.length === 0) { + container.innerHTML = '
無設備資料
'; + return; + } + + let html = ''; + const grouped = groupBy(equipments, (eq) => safeText(eq.WORKCENTERNAME, '未分類')); + const workcenters = sortBy(Object.keys(grouped), (name) => name); + + workcenters.forEach((workcenterName) => { + html += `
${escapeHtml(workcenterName)}
`; + grouped[workcenterName].forEach((eq) => { + const isSelected = selectedEquipments.has(eq.RESOURCEID); + const resourceId = escapeHtml(safeText(eq.RESOURCEID)); + const resourceName = escapeHtml(safeText(eq.RESOURCENAME)); + const familyName = escapeHtml(safeText(eq.RESOURCEFAMILYNAME)); + + html += ` +
+ +
+
${resourceName}
+
${familyName}
+
+
+ `; + }); + }); + + container.innerHTML = html; + } + + // Toggle equipment dropdown + function toggleEquipmentDropdown() { + const dropdown = document.getElementById('equipmentDropdown'); + dropdown.classList.toggle('show'); + } + + // Filter equipments by search + function filterEquipments(query) { + const q = query.toLowerCase(); + const filtered = allEquipments.filter(eq => + (eq.RESOURCENAME && eq.RESOURCENAME.toLowerCase().includes(q)) || + (eq.WORKCENTERNAME && eq.WORKCENTERNAME.toLowerCase().includes(q)) || + (eq.RESOURCEFAMILYNAME && eq.RESOURCEFAMILYNAME.toLowerCase().includes(q)) + ); + renderEquipmentList(filtered); + } + + // Toggle equipment selection + function toggleEquipment(resourceId) { + if (selectedEquipments.has(resourceId)) { + selectedEquipments.delete(resourceId); + } else { + selectedEquipments.add(resourceId); + } + updateSelectedDisplay(); + renderEquipmentList(allEquipments.filter(eq => { + const search = document.querySelector('.equipment-search'); + if (!search || !search.value) return true; + const q = search.value.toLowerCase(); + return (eq.RESOURCENAME && eq.RESOURCENAME.toLowerCase().includes(q)) || + (eq.WORKCENTERNAME && eq.WORKCENTERNAME.toLowerCase().includes(q)); + })); + } + + // Update selected display + function updateSelectedDisplay() { + const display = document.getElementById('equipmentDisplay'); + const count = document.getElementById('selectedCount'); + + if (selectedEquipments.size === 0) { + display.textContent = '點擊選擇設備...'; + count.textContent = ''; + } else if (selectedEquipments.size <= 3) { + const names = allEquipments + .filter(eq => selectedEquipments.has(eq.RESOURCEID)) + .map(eq => eq.RESOURCENAME) + .join(', '); + display.textContent = names; + count.textContent = `已選擇 ${selectedEquipments.size} 台設備`; + } else { + display.textContent = `已選擇 ${selectedEquipments.size} 台設備`; + count.textContent = ''; + } + } + + // Set last 90 days + function setLast90Days() { + const today = new Date(); + const past = 
new Date(); + past.setDate(today.getDate() - 90); + + document.getElementById('dateFrom').value = past.toISOString().split('T')[0]; + document.getElementById('dateTo').value = today.toISOString().split('T')[0]; + } + + // Validate inputs + function validateInputs() { + if (selectedEquipments.size === 0) { + Toast.error('請選擇至少一台設備'); + return false; + } + + const dateFrom = document.getElementById('dateFrom').value; + const dateTo = document.getElementById('dateTo').value; + + if (!dateFrom || !dateTo) { + Toast.error('請指定日期範圍'); + return false; + } + + const from = new Date(dateFrom); + const to = new Date(dateTo); + + if (to < from) { + Toast.error('結束日期不可早於起始日期'); + return false; + } + + const daysDiff = (to - from) / (1000 * 60 * 60 * 24); + if (daysDiff > 365) { + Toast.error('日期範圍不可超過 365 天'); + return false; + } + + return true; + } + + // Query jobs + async function queryJobs() { + if (!validateInputs()) return; + + const resultSection = document.getElementById('resultSection'); + resultSection.innerHTML = ` +
+
+
查詢中... +
+ `; + + document.getElementById('queryBtn').disabled = true; + document.getElementById('exportBtn').disabled = true; + + try { + const data = await MesApi.post('/api/job-query/jobs', { + resource_ids: Array.from(selectedEquipments), + start_date: document.getElementById('dateFrom').value, + end_date: document.getElementById('dateTo').value + }); + + if (data.error) { + resultSection.innerHTML = `
${data.error}
`; + return; + } + + jobsData = data.data; + expandedJobs.clear(); + renderJobsTable(); + + document.getElementById('exportBtn').disabled = jobsData.length === 0; + + } catch (error) { + resultSection.innerHTML = `
查詢失敗: ${error.message}
`; + } finally { + document.getElementById('queryBtn').disabled = false; + } + } + + // Render jobs table + function renderJobsTable() { + const resultSection = document.getElementById('resultSection'); + const jobHeaders = jobTableFields.map((field) => `${escapeHtml(field.ui_label)}`).join(''); + + if (!jobsData || jobsData.length === 0) { + resultSection.innerHTML = ` +
+

無符合條件的工單

+
+ `; + return; + } + + let html = ` +
+
共 ${jobsData.length} 筆工單
+
+ + +
+
+
+ + + + + ${jobHeaders} + + + + `; + + jobsData.forEach((job, idx) => { + const isExpanded = expandedJobs.has(job.JOBID); + const jobCells = jobTableFields + .map((field) => ``) + .join(''); + html += ` + + + ${jobCells} + + + + + `; + }); + + html += ` + +
${renderJobCell(job, field.api_key)}
+ +
+
+ ${isExpanded ? '
' : ''} +
+
+
+ `; + + resultSection.innerHTML = html; + + // Load expanded histories + expandedJobs.forEach(jobId => { + const idx = jobsData.findIndex(j => j.JOBID === jobId); + if (idx >= 0) loadJobHistory(jobId, idx); + }); + } + + // Toggle job history + async function toggleJobHistory(jobId, idx) { + const txnRow = document.getElementById(`txn-row-${idx}`); + const jobRow = document.getElementById(`job-row-${idx}`); + const arrow = jobRow.querySelector('.arrow-icon'); + + if (expandedJobs.has(jobId)) { + expandedJobs.delete(jobId); + txnRow.classList.remove('show'); + jobRow.classList.remove('expanded'); + arrow.classList.remove('rotated'); + } else { + expandedJobs.add(jobId); + txnRow.classList.add('show'); + jobRow.classList.add('expanded'); + arrow.classList.add('rotated'); + loadJobHistory(jobId, idx); + } + } + + // Load job history + async function loadJobHistory(jobId, idx) { + const container = document.getElementById(`txn-content-${idx}`); + container.innerHTML = '
'; + + try { + const data = await MesApi.get(`/api/job-query/txn/${jobId}`); + + if (data.error) { + container.innerHTML = `
${data.error}
`; + return; + } + + if (!data.data || data.data.length === 0) { + container.innerHTML = '
無交易歷史記錄
'; + return; + } + + const txnHeaders = txnTableFields.map((field) => `${escapeHtml(field.ui_label)}`).join(''); + let html = ` + + + + ${txnHeaders} + + + + `; + + data.data.forEach(txn => { + const txnCells = txnTableFields + .map((field) => ``) + .join(''); + html += ` + + ${txnCells} + + `; + }); + + html += '
${renderTxnCell(txn, field.api_key)}
'; + container.innerHTML = html; + + } catch (error) { + container.innerHTML = `
載入失敗: ${error.message}
`; + } + } + + // Expand all + function expandAll() { + jobsData.forEach((job, idx) => { + if (!expandedJobs.has(job.JOBID)) { + expandedJobs.add(job.JOBID); + } + }); + renderJobsTable(); + } + + // Collapse all + function collapseAll() { + expandedJobs.clear(); + renderJobsTable(); + } + + // Export CSV + async function exportCsv() { + if (!validateInputs()) return; + + document.getElementById('exportBtn').disabled = true; + document.getElementById('exportBtn').textContent = '匯出中...'; + + try { + const response = await fetch('/api/job-query/export', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + resource_ids: Array.from(selectedEquipments), + start_date: document.getElementById('dateFrom').value, + end_date: document.getElementById('dateTo').value + }) + }); + + if (!response.ok) { + const data = await response.json(); + throw new Error(data.error || '匯出失敗'); + } + + // Download file + const blob = await response.blob(); + const url = window.URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = `job_history_${document.getElementById('dateFrom').value}_${document.getElementById('dateTo').value}.csv`; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + window.URL.revokeObjectURL(url); + + Toast.success('CSV 匯出完成'); + + } catch (error) { + Toast.error('匯出失敗: ' + error.message); + } finally { + document.getElementById('exportBtn').disabled = false; + document.getElementById('exportBtn').textContent = '匯出 CSV'; + } + } + + // Format date + function formatDate(dateStr) { + if (!dateStr) return ''; + return dateStr.replace('T', ' ').substring(0, 19); + } + + +Object.assign(window, { +loadEquipments, +renderEquipmentList, +toggleEquipmentDropdown, +filterEquipments, +toggleEquipment, +updateSelectedDisplay, +setLast90Days, +validateInputs, +queryJobs, +renderJobsTable, +toggleJobHistory, +loadJobHistory, +expandAll, +collapseAll, +exportCsv, 
+formatDate, +}); diff --git a/frontend/src/portal/main.js b/frontend/src/portal/main.js new file mode 100644 index 0000000..0852c7e --- /dev/null +++ b/frontend/src/portal/main.js @@ -0,0 +1,193 @@ +import './portal.css'; + +(function initPortal() { + const tabs = document.querySelectorAll('.tab'); + const frames = document.querySelectorAll('iframe'); + const toolFrame = document.getElementById('toolFrame'); + const healthDot = document.getElementById('healthDot'); + const healthLabel = document.getElementById('healthLabel'); + const healthPopup = document.getElementById('healthPopup'); + const healthStatus = document.getElementById('healthStatus'); + const dbStatus = document.getElementById('dbStatus'); + const redisStatus = document.getElementById('redisStatus'); + const cacheEnabled = document.getElementById('cacheEnabled'); + const cacheSysDate = document.getElementById('cacheSysDate'); + const cacheUpdatedAt = document.getElementById('cacheUpdatedAt'); + const resourceCacheEnabled = document.getElementById('resourceCacheEnabled'); + const resourceCacheCount = document.getElementById('resourceCacheCount'); + const resourceCacheUpdatedAt = document.getElementById('resourceCacheUpdatedAt'); + const routeCacheMode = document.getElementById('routeCacheMode'); + const routeCacheHitRate = document.getElementById('routeCacheHitRate'); + const routeCacheDegraded = document.getElementById('routeCacheDegraded'); + + function setFrameHeight() { + const header = document.querySelector('.header'); + const tabArea = document.querySelector('.tabs'); + if (!header || !tabArea) return; + const height = Math.max(600, window.innerHeight - header.offsetHeight - tabArea.offsetHeight - 60); + frames.forEach((frame) => { + frame.style.height = `${height}px`; + }); + } + + function activateTab(targetId) { + tabs.forEach((tab) => tab.classList.remove('active')); + frames.forEach((frame) => frame.classList.remove('active')); + + const tabBtn = 
document.querySelector(`[data-target="${targetId}"]`); + const targetFrame = document.getElementById(targetId); + + if (tabBtn) tabBtn.classList.add('active'); + if (targetFrame) { + targetFrame.classList.add('active'); + if (targetFrame.dataset.src && !targetFrame.src) { + targetFrame.src = targetFrame.dataset.src; + } + } + } + + function openTool(path) { + if (!toolFrame) return false; + tabs.forEach((tab) => tab.classList.remove('active')); + frames.forEach((frame) => frame.classList.remove('active')); + toolFrame.classList.add('active'); + if (toolFrame.src !== path) { + toolFrame.src = path; + } + return false; + } + + function toggleHealthPopup() { + if (!healthPopup) return; + healthPopup.classList.toggle('show'); + } + + function formatStatus(status) { + const icons = { ok: '✓', error: '✗', disabled: '○' }; + return icons[status] || status; + } + + function setStatusClass(element, status) { + if (!element) return; + element.classList.remove('ok', 'error', 'disabled'); + element.classList.add(status === 'ok' ? 'ok' : status === 'error' ? 
'error' : 'disabled'); + } + + function formatDateTime(dateStr) { + if (!dateStr) return '--'; + try { + const date = new Date(dateStr); + if (Number.isNaN(date.getTime())) return dateStr; + return date.toLocaleString('zh-TW', { + month: '2-digit', + day: '2-digit', + hour: '2-digit', + minute: '2-digit' + }); + } catch { + return dateStr; + } + } + + async function checkHealth() { + if (!healthDot || !healthLabel) return; + try { + const response = await fetch('/health', { cache: 'no-store' }); + const data = await response.json(); + + healthDot.classList.remove('loading', 'healthy', 'degraded', 'unhealthy'); + if (data.status === 'healthy') { + healthDot.classList.add('healthy'); + healthLabel.textContent = '連線正常'; + } else if (data.status === 'degraded') { + healthDot.classList.add('degraded'); + healthLabel.textContent = '部分降級'; + } else { + healthDot.classList.add('unhealthy'); + healthLabel.textContent = '連線異常'; + } + + const dbState = data.services?.database || 'error'; + if (dbStatus) dbStatus.innerHTML = `${formatStatus(dbState)} ${dbState === 'ok' ? '正常' : '異常'}`; + setStatusClass(dbStatus, dbState); + + const redisState = data.services?.redis || 'disabled'; + const redisText = redisState === 'ok' ? '正常' : redisState === 'disabled' ? '未啟用' : '異常'; + if (redisStatus) redisStatus.innerHTML = `${formatStatus(redisState)} ${redisText}`; + setStatusClass(redisStatus, redisState); + + const cache = data.cache || {}; + if (cacheEnabled) cacheEnabled.textContent = cache.enabled ? '已啟用' : '未啟用'; + if (cacheSysDate) cacheSysDate.textContent = cache.sys_date || '--'; + if (cacheUpdatedAt) cacheUpdatedAt.textContent = formatDateTime(cache.updated_at); + + const resCache = data.resource_cache || {}; + if (resCache.enabled) { + if (resourceCacheEnabled) { + resourceCacheEnabled.textContent = resCache.loaded ? '已載入' : '未載入'; + resourceCacheEnabled.style.color = resCache.loaded ? 
'#22c55e' : '#f59e0b'; + } + if (resourceCacheCount) resourceCacheCount.textContent = resCache.count ? `${resCache.count} 筆` : '--'; + if (resourceCacheUpdatedAt) resourceCacheUpdatedAt.textContent = formatDateTime(resCache.updated_at); + } else { + if (resourceCacheEnabled) { + resourceCacheEnabled.textContent = '未啟用'; + resourceCacheEnabled.style.color = '#9ca3af'; + } + if (resourceCacheCount) resourceCacheCount.textContent = '--'; + if (resourceCacheUpdatedAt) resourceCacheUpdatedAt.textContent = '--'; + } + + const routeCache = data.route_cache || {}; + if (routeCacheMode) { + routeCacheMode.textContent = routeCache.mode || '--'; + } + if (routeCacheHitRate) { + const l1 = routeCache.l1_hit_rate ?? '--'; + const l2 = routeCache.l2_hit_rate ?? '--'; + routeCacheHitRate.textContent = `${l1} / ${l2}`; + } + if (routeCacheDegraded) { + routeCacheDegraded.textContent = routeCache.degraded ? '是' : '否'; + routeCacheDegraded.style.color = routeCache.degraded ? '#f59e0b' : '#22c55e'; + } + } catch (error) { + console.error('Health check failed:', error); + healthDot.classList.remove('loading', 'healthy', 'degraded'); + healthDot.classList.add('unhealthy'); + healthLabel.textContent = '無法連線'; + if (dbStatus) { + dbStatus.innerHTML = '✗ 無法確認'; + setStatusClass(dbStatus, 'error'); + } + if (redisStatus) { + redisStatus.innerHTML = '✗ 無法確認'; + setStatusClass(redisStatus, 'error'); + } + } + } + + tabs.forEach((tab) => { + tab.addEventListener('click', () => activateTab(tab.dataset.target)); + }); + + if (tabs.length > 0) { + activateTab(tabs[0].dataset.target); + } + + window.openTool = openTool; + window.toggleHealthPopup = toggleHealthPopup; + if (healthStatus) { + healthStatus.addEventListener('click', toggleHealthPopup); + } + document.addEventListener('click', (e) => { + if (!e.target.closest('#healthStatus') && !e.target.closest('#healthPopup') && healthPopup) { + healthPopup.classList.remove('show'); + } + }); + + checkHealth(); + setInterval(checkHealth, 30000); + 
window.addEventListener('resize', setFrameHeight); + setFrameHeight(); +})(); diff --git a/frontend/src/portal/portal.css b/frontend/src/portal/portal.css new file mode 100644 index 0000000..97ac7c6 --- /dev/null +++ b/frontend/src/portal/portal.css @@ -0,0 +1,29 @@ +.drawer { + background: #fff; + border-radius: 10px; + border: 1px solid #e3e8f2; + box-shadow: 0 1px 4px rgba(0, 0, 0, 0.06); + overflow: hidden; +} + +.drawer > summary { + list-style: none; + cursor: pointer; + padding: 10px 14px; + font-size: 14px; + font-weight: 600; + color: #334155; + background: #f8fafc; + border-bottom: 1px solid #e2e8f0; +} + +.drawer > summary::-webkit-details-marker { + display: none; +} + +.drawer-content { + display: flex; + flex-wrap: wrap; + gap: 10px; + padding: 12px; +} diff --git a/frontend/src/resource-history/main.js b/frontend/src/resource-history/main.js new file mode 100644 index 0000000..30ea38f --- /dev/null +++ b/frontend/src/resource-history/main.js @@ -0,0 +1,844 @@ +import { ensureMesApiAvailable } from '../core/api.js'; +import { getPageContract } from '../core/field-contracts.js'; +import { buildResourceKpiFromHours } from '../core/compute.js'; +import { groupBy, sortBy, toggleTreeState, setTreeStateBulk, escapeHtml, safeText } from '../core/table-tree.js'; + +ensureMesApiAvailable(); +window.__MES_FRONTEND_CORE__ = { buildResourceKpiFromHours, groupBy, sortBy, toggleTreeState, setTreeStateBulk, escapeHtml, safeText }; +window.__FIELD_CONTRACTS__ = window.__FIELD_CONTRACTS__ || {}; +window.__FIELD_CONTRACTS__['resource_history:detail_table'] = getPageContract('resource_history', 'detail_table'); +window.__FIELD_CONTRACTS__['resource_history:kpi'] = getPageContract('resource_history', 'kpi'); + +const detailTableFields = getPageContract('resource_history', 'detail_table'); + + +(function() { + // ============================================================ + // State + // ============================================================ + let 
currentGranularity = 'day'; + let summaryData = null; + let detailData = null; + let hierarchyState = {}; // Track expanded/collapsed state + let charts = {}; + + // ============================================================ + // DOM Elements + // ============================================================ + const startDateInput = document.getElementById('startDate'); + const endDateInput = document.getElementById('endDate'); + const workcenterGroupsTrigger = document.getElementById('workcenterGroupsTrigger'); + const workcenterGroupsDropdown = document.getElementById('workcenterGroupsDropdown'); + const workcenterGroupsOptions = document.getElementById('workcenterGroupsOptions'); + const familiesTrigger = document.getElementById('familiesTrigger'); + const familiesDropdown = document.getElementById('familiesDropdown'); + const familiesOptions = document.getElementById('familiesOptions'); + const isProductionCheckbox = document.getElementById('isProduction'); + const isKeyCheckbox = document.getElementById('isKey'); + const isMonitorCheckbox = document.getElementById('isMonitor'); + const queryBtn = document.getElementById('queryBtn'); + const exportBtn = document.getElementById('exportBtn'); + const expandAllBtn = document.getElementById('expandAllBtn'); + const collapseAllBtn = document.getElementById('collapseAllBtn'); + const loadingOverlay = document.getElementById('loadingOverlay'); + + // Selected values for multi-select + let selectedWorkcenterGroups = []; + let selectedFamilies = []; + + // ============================================================ + // Initialization + // ============================================================ + function init() { + setDefaultDates(); + applyDetailTableHeaders(); + loadFilterOptions(); + setupEventListeners(); + initCharts(); + } + + function setDefaultDates() { + const today = new Date(); + const endDate = new Date(today); + endDate.setDate(endDate.getDate() - 1); // Yesterday + const startDate = new 
Date(endDate); + startDate.setDate(startDate.getDate() - 6); // 7 days ago + + startDateInput.value = formatDate(startDate); + endDateInput.value = formatDate(endDate); + } + + function formatDate(date) { + return date.toISOString().split('T')[0]; + } + + function setupEventListeners() { + // Granularity buttons + document.querySelectorAll('.granularity-btns button').forEach(btn => { + btn.addEventListener('click', () => { + document.querySelectorAll('.granularity-btns button').forEach(b => b.classList.remove('active')); + btn.classList.add('active'); + currentGranularity = btn.dataset.granularity; + }); + }); + + // Query button + queryBtn.addEventListener('click', executeQuery); + + // Export button + exportBtn.addEventListener('click', exportCsv); + + // Expand/Collapse buttons + expandAllBtn.addEventListener('click', () => toggleAllRows(true)); + collapseAllBtn.addEventListener('click', () => toggleAllRows(false)); + } + + function applyDetailTableHeaders() { + const headers = document.querySelectorAll('.detail-table thead th'); + if (!headers || headers.length < 10) return; + + const byKey = {}; + detailTableFields.forEach((field) => { + byKey[field.api_key] = field.ui_label; + }); + + headers[1].textContent = byKey.ou_pct || headers[1].textContent; + headers[2].textContent = byKey.availability_pct || headers[2].textContent; + headers[3].textContent = byKey.prd_hours ? byKey.prd_hours.replace('(h)', '') : headers[3].textContent; + headers[4].textContent = byKey.sby_hours ? byKey.sby_hours.replace('(h)', '') : headers[4].textContent; + headers[5].textContent = byKey.udt_hours ? byKey.udt_hours.replace('(h)', '') : headers[5].textContent; + headers[6].textContent = byKey.sdt_hours ? byKey.sdt_hours.replace('(h)', '') : headers[6].textContent; + headers[7].textContent = byKey.egt_hours ? byKey.egt_hours.replace('(h)', '') : headers[7].textContent; + headers[8].textContent = byKey.nst_hours ? 
byKey.nst_hours.replace('(h)', '') : headers[8].textContent; + } + + function initCharts() { + charts.trend = echarts.init(document.getElementById('trendChart')); + charts.stacked = echarts.init(document.getElementById('stackedChart')); + charts.comparison = echarts.init(document.getElementById('comparisonChart')); + charts.heatmap = echarts.init(document.getElementById('heatmapChart')); + + // Handle window resize + window.addEventListener('resize', () => { + Object.values(charts).forEach(chart => chart.resize()); + }); + } + + // ============================================================ + // API Calls (using MesApi client with timeout and retry) + // ============================================================ + const API_TIMEOUT = 60000; // 60 seconds timeout + + async function loadFilterOptions() { + try { + const result = await MesApi.get('/api/resource/history/options', { + timeout: API_TIMEOUT, + silent: true // Don't show toast for filter options + }); + if (result.success) { + populateMultiSelect(workcenterGroupsOptions, result.data.workcenter_groups, 'workcenter'); + populateMultiSelect(familiesOptions, result.data.families.map(f => ({name: f})), 'family'); + setupMultiSelectDropdowns(); + } + } catch (error) { + console.error('Failed to load filter options:', error); + } + } + + function populateMultiSelect(container, options, type) { + container.innerHTML = ''; + options.forEach(opt => { + const name = opt.name || opt; + const div = document.createElement('div'); + div.className = 'multi-select-option'; + div.innerHTML = ` + + ${name} + `; + div.querySelector('input').addEventListener('change', (e) => { + if (type === 'workcenter') { + updateSelectedWorkcenterGroups(); + } else { + updateSelectedFamilies(); + } + }); + container.appendChild(div); + }); + } + + function setupMultiSelectDropdowns() { + // Workcenter Groups dropdown toggle + workcenterGroupsTrigger.addEventListener('click', (e) => { + e.stopPropagation(); + 
workcenterGroupsDropdown.classList.toggle('show'); + familiesDropdown.classList.remove('show'); + }); + + // Families dropdown toggle + familiesTrigger.addEventListener('click', (e) => { + e.stopPropagation(); + familiesDropdown.classList.toggle('show'); + workcenterGroupsDropdown.classList.remove('show'); + }); + + // Close dropdowns when clicking outside + document.addEventListener('click', () => { + workcenterGroupsDropdown.classList.remove('show'); + familiesDropdown.classList.remove('show'); + }); + + // Prevent dropdown close when clicking inside + workcenterGroupsDropdown.addEventListener('click', (e) => e.stopPropagation()); + familiesDropdown.addEventListener('click', (e) => e.stopPropagation()); + } + + function updateSelectedWorkcenterGroups() { + const checkboxes = workcenterGroupsOptions.querySelectorAll('input[type="checkbox"]:checked'); + selectedWorkcenterGroups = Array.from(checkboxes).map(cb => cb.value); + updateMultiSelectText(workcenterGroupsTrigger, selectedWorkcenterGroups, '全部站點'); + } + + function updateSelectedFamilies() { + const checkboxes = familiesOptions.querySelectorAll('input[type="checkbox"]:checked'); + selectedFamilies = Array.from(checkboxes).map(cb => cb.value); + updateMultiSelectText(familiesTrigger, selectedFamilies, '全部型號'); + } + + function updateMultiSelectText(trigger, selected, defaultText) { + const textSpan = trigger.querySelector('.multi-select-text'); + if (selected.length === 0) { + textSpan.textContent = defaultText; + } else if (selected.length === 1) { + textSpan.textContent = selected[0]; + } else { + textSpan.textContent = `已選 ${selected.length} 項`; + } + } + + // Global functions for select all / clear all + window.selectAllWorkcenterGroups = function() { + workcenterGroupsOptions.querySelectorAll('input[type="checkbox"]').forEach(cb => cb.checked = true); + updateSelectedWorkcenterGroups(); + }; + + window.clearAllWorkcenterGroups = function() { + 
workcenterGroupsOptions.querySelectorAll('input[type="checkbox"]').forEach(cb => cb.checked = false); + updateSelectedWorkcenterGroups(); + }; + + window.selectAllFamilies = function() { + familiesOptions.querySelectorAll('input[type="checkbox"]').forEach(cb => cb.checked = true); + updateSelectedFamilies(); + }; + + window.clearAllFamilies = function() { + familiesOptions.querySelectorAll('input[type="checkbox"]').forEach(cb => cb.checked = false); + updateSelectedFamilies(); + }; + + function buildQueryString() { + const params = new URLSearchParams(); + params.append('start_date', startDateInput.value); + params.append('end_date', endDateInput.value); + params.append('granularity', currentGranularity); + + // Add multi-select params + selectedWorkcenterGroups.forEach(g => params.append('workcenter_groups', g)); + selectedFamilies.forEach(f => params.append('families', f)); + + if (isProductionCheckbox.checked) params.append('is_production', '1'); + if (isKeyCheckbox.checked) params.append('is_key', '1'); + if (isMonitorCheckbox.checked) params.append('is_monitor', '1'); + + return params.toString(); + } + + async function executeQuery() { + // Validate date range + const startDate = new Date(startDateInput.value); + const endDate = new Date(endDateInput.value); + const diffDays = (endDate - startDate) / (1000 * 60 * 60 * 24); + + if (diffDays > 730) { + Toast.warning('查詢範圍不可超過兩年'); + return; + } + + if (diffDays < 0) { + Toast.warning('結束日期必須大於起始日期'); + return; + } + + showLoading(); + queryBtn.disabled = true; + + try { + const queryString = buildQueryString(); + const summaryUrl = `/api/resource/history/summary?${queryString}`; + const detailUrl = `/api/resource/history/detail?${queryString}`; + + // Fetch summary and detail in parallel using MesApi + const [summaryResult, detailResult] = await Promise.all([ + MesApi.get(summaryUrl, { timeout: API_TIMEOUT }), + MesApi.get(detailUrl, { timeout: API_TIMEOUT }) + ]); + + if (summaryResult.success) { + const 
rawSummary = summaryResult.data || {}; + const computedKpi = mergeComputedKpi(rawSummary.kpi || {}); + const computedTrend = (rawSummary.trend || []).map((trendPoint) => mergeComputedKpi(trendPoint)); + summaryData = { + ...rawSummary, + kpi: computedKpi, + trend: computedTrend + }; + + updateKpiCards(summaryData.kpi); + updateTrendChart(summaryData.trend); + updateStackedChart(summaryData.trend); + updateComparisonChart(summaryData.workcenter_comparison); + updateHeatmapChart(summaryData.heatmap); + } else { + Toast.error(summaryResult.error || '查詢摘要失敗'); + } + + if (detailResult.success) { + detailData = detailResult.data; + hierarchyState = {}; + renderDetailTable(detailData); + + // Show warning if data was truncated + if (detailResult.truncated) { + Toast.warning(`明細資料超過 ${detailResult.max_records} 筆,僅顯示前 ${detailResult.max_records} 筆。請使用篩選條件縮小範圍。`); + } + } else { + Toast.error(detailResult.error || '查詢明細失敗'); + } + + } catch (error) { + console.error('Query failed:', error); + Toast.error('查詢失敗: ' + error.message); + } finally { + hideLoading(); + queryBtn.disabled = false; + } + } + + // ============================================================ + // KPI Cards + // ============================================================ + function mergeComputedKpi(kpi) { + return { + ...kpi, + ...buildResourceKpiFromHours(kpi) + }; + } + + function updateKpiCards(kpi) { + // OU% and AVAIL% + document.getElementById('kpiOuPct').textContent = kpi.ou_pct + '%'; + document.getElementById('kpiAvailabilityPct').textContent = kpi.availability_pct + '%'; + + // PRD + document.getElementById('kpiPrdHours').textContent = formatHours(kpi.prd_hours); + document.getElementById('kpiPrdPct').textContent = `生產 (${kpi.prd_pct || 0}%)`; + + // SBY + document.getElementById('kpiSbyHours').textContent = formatHours(kpi.sby_hours); + document.getElementById('kpiSbyPct').textContent = `待機 (${kpi.sby_pct || 0}%)`; + + // UDT + document.getElementById('kpiUdtHours').textContent = 
formatHours(kpi.udt_hours); + document.getElementById('kpiUdtPct').textContent = `非計畫停機 (${kpi.udt_pct || 0}%)`; + + // SDT + document.getElementById('kpiSdtHours').textContent = formatHours(kpi.sdt_hours); + document.getElementById('kpiSdtPct').textContent = `計畫停機 (${kpi.sdt_pct || 0}%)`; + + // EGT + document.getElementById('kpiEgtHours').textContent = formatHours(kpi.egt_hours); + document.getElementById('kpiEgtPct').textContent = `工程 (${kpi.egt_pct || 0}%)`; + + // NST + document.getElementById('kpiNstHours').textContent = formatHours(kpi.nst_hours); + document.getElementById('kpiNstPct').textContent = `未排程 (${kpi.nst_pct || 0}%)`; + + // Machine count + const machineCount = Number(kpi.machine_count || 0); + document.getElementById('kpiMachineCount').textContent = machineCount.toLocaleString(); + } + + function formatHours(hours) { + if (hours >= 1000) { + return (hours / 1000).toFixed(1) + 'K'; + } + return hours.toLocaleString(); + } + + // ============================================================ + // Charts + // ============================================================ + function updateTrendChart(trend) { + const dates = trend.map(t => t.date); + const ouPcts = trend.map(t => t.ou_pct); + const availabilityPcts = trend.map(t => t.availability_pct); + + charts.trend.setOption({ + tooltip: { + trigger: 'axis', + formatter: function(params) { + const d = trend[params[0].dataIndex]; + return `${d.date}
+ OU%: ${d.ou_pct}%
+ AVAIL%: ${d.availability_pct}%
+ PRD: ${d.prd_hours}h
+ SBY: ${d.sby_hours}h
+ UDT: ${d.udt_hours}h`; + } + }, + legend: { + data: ['OU%', 'AVAIL%'], + bottom: 0, + textStyle: { fontSize: 11 } + }, + xAxis: { + type: 'category', + data: dates, + axisLabel: { fontSize: 11 } + }, + yAxis: { + type: 'value', + name: '%', + max: 100, + axisLabel: { formatter: '{value}%' } + }, + series: [ + { + name: 'OU%', + data: ouPcts, + type: 'line', + smooth: true, + areaStyle: { opacity: 0.2 }, + itemStyle: { color: '#3B82F6' }, + lineStyle: { width: 2 } + }, + { + name: 'AVAIL%', + data: availabilityPcts, + type: 'line', + smooth: true, + areaStyle: { opacity: 0.2 }, + itemStyle: { color: '#10B981' }, + lineStyle: { width: 2 } + } + ], + grid: { left: 50, right: 20, top: 30, bottom: 50 } + }); + } + + function updateStackedChart(trend) { + const dates = trend.map(t => t.date); + + charts.stacked.setOption({ + tooltip: { + trigger: 'axis', + axisPointer: { type: 'shadow' }, + formatter: function(params) { + const idx = params[0].dataIndex; + const d = trend[idx]; + const total = d.prd_hours + d.sby_hours + d.udt_hours + d.sdt_hours + d.egt_hours + d.nst_hours; + const pct = (v) => total > 0 ? (v / total * 100).toFixed(1) : 0; + return `${d.date}
+ PRD: ${d.prd_hours}h (${pct(d.prd_hours)}%)
+ SBY: ${d.sby_hours}h (${pct(d.sby_hours)}%)
+ UDT: ${d.udt_hours}h (${pct(d.udt_hours)}%)
+ SDT: ${d.sdt_hours}h (${pct(d.sdt_hours)}%)
+ EGT: ${d.egt_hours}h (${pct(d.egt_hours)}%)
+ NST: ${d.nst_hours}h (${pct(d.nst_hours)}%)
+ Total: ${total.toFixed(1)}h`; + } + }, + legend: { + data: ['PRD', 'SBY', 'UDT', 'SDT', 'EGT', 'NST'], + bottom: 0, + textStyle: { fontSize: 10 } + }, + xAxis: { + type: 'category', + data: dates, + axisLabel: { fontSize: 10 } + }, + yAxis: { + type: 'value', + name: '時數', + axisLabel: { formatter: '{value}h' } + }, + series: [ + { name: 'PRD', type: 'bar', stack: 'total', data: trend.map(t => t.prd_hours), itemStyle: { color: '#22c55e' } }, + { name: 'SBY', type: 'bar', stack: 'total', data: trend.map(t => t.sby_hours), itemStyle: { color: '#3b82f6' } }, + { name: 'UDT', type: 'bar', stack: 'total', data: trend.map(t => t.udt_hours), itemStyle: { color: '#ef4444' } }, + { name: 'SDT', type: 'bar', stack: 'total', data: trend.map(t => t.sdt_hours), itemStyle: { color: '#f59e0b' } }, + { name: 'EGT', type: 'bar', stack: 'total', data: trend.map(t => t.egt_hours), itemStyle: { color: '#8b5cf6' } }, + { name: 'NST', type: 'bar', stack: 'total', data: trend.map(t => t.nst_hours), itemStyle: { color: '#64748b' } } + ], + grid: { left: 50, right: 20, top: 30, bottom: 60 } + }); + } + + function updateComparisonChart(comparison) { + // Take top 15 workcenters and reverse for bottom-to-top display (highest at top) + const data = comparison.slice(0, 15).reverse(); + const workcenters = data.map(d => d.workcenter); + const ouPcts = data.map(d => d.ou_pct); + + charts.comparison.setOption({ + tooltip: { + trigger: 'axis', + axisPointer: { type: 'shadow' }, + formatter: function(params) { + const d = data[params[0].dataIndex]; + return `${d.workcenter}
OU%: ${d.ou_pct}%
機台數: ${d.machine_count}`; + } + }, + xAxis: { + type: 'value', + name: 'OU%', + max: 100, + axisLabel: { formatter: '{value}%' } + }, + yAxis: { + type: 'category', + data: workcenters, + axisLabel: { fontSize: 10 } + }, + series: [{ + type: 'bar', + data: ouPcts, + itemStyle: { + color: function(params) { + const val = params.value; + if (val >= 80) return '#22c55e'; + if (val >= 50) return '#f59e0b'; + return '#ef4444'; + } + } + }], + grid: { left: 100, right: 30, top: 20, bottom: 30 } + }); + } + + function updateHeatmapChart(heatmap) { + if (!heatmap || heatmap.length === 0) { + charts.heatmap.clear(); + return; + } + + // Build workcenter list with sequence for sorting + const wcSeqMap = {}; + heatmap.forEach(h => { + wcSeqMap[h.workcenter] = h.workcenter_seq ?? 999; + }); + + // Get unique workcenters sorted by sequence ascending (smaller sequence first, e.g. 點測 before TMTT) + const workcenters = [...new Set(heatmap.map(h => h.workcenter))] + .sort((a, b) => wcSeqMap[a] - wcSeqMap[b]); + const dates = [...new Set(heatmap.map(h => h.date))].sort(); + + // Build data matrix + const data = heatmap.map(h => [ + dates.indexOf(h.date), + workcenters.indexOf(h.workcenter), + h.ou_pct + ]); + + charts.heatmap.setOption({ + tooltip: { + position: 'top', + formatter: function(params) { + return `${workcenters[params.value[1]]}
${dates[params.value[0]]}
OU%: ${params.value[2]}%`; + } + }, + xAxis: { + type: 'category', + data: dates, + splitArea: { show: true }, + axisLabel: { fontSize: 9, rotate: 45 } + }, + yAxis: { + type: 'category', + data: workcenters, + splitArea: { show: true }, + axisLabel: { fontSize: 9 } + }, + visualMap: { + min: 0, + max: 100, + calculable: true, + orient: 'horizontal', + left: 'center', + bottom: 0, + inRange: { + color: ['#ef4444', '#f59e0b', '#22c55e'] + } + }, + series: [{ + type: 'heatmap', + data: data, + label: { show: false }, + emphasis: { + itemStyle: { shadowBlur: 10, shadowColor: 'rgba(0, 0, 0, 0.5)' } + } + }], + grid: { left: 100, right: 20, top: 10, bottom: 60 } + }); + } + + // ============================================================ + // Hierarchical Table + // ============================================================ + function renderDetailTable(data) { + const tbody = document.getElementById('detailTableBody'); + + if (!data || data.length === 0) { + tbody.innerHTML = ` + + +
+
🔍
+
無符合條件的資料
+
+ + + `; + return; + } + + // Build hierarchy + const hierarchy = buildHierarchy(data); + + // Render rows + tbody.innerHTML = ''; + hierarchy.forEach(wc => { + // Workcenter level + const wcRow = createRow(wc, 0, `wc_${wc.workcenter}`); + tbody.appendChild(wcRow); + + // Family level + if (hierarchyState[`wc_${wc.workcenter}`]) { + wc.families.forEach(fam => { + const famRow = createRow(fam, 1, `fam_${wc.workcenter}_${fam.family}`); + famRow.dataset.parent = `wc_${wc.workcenter}`; + tbody.appendChild(famRow); + + // Resource level + if (hierarchyState[`fam_${wc.workcenter}_${fam.family}`]) { + fam.resources.forEach(res => { + const resRow = createRow(res, 2); + resRow.dataset.parent = `fam_${wc.workcenter}_${fam.family}`; + tbody.appendChild(resRow); + }); + } + }); + } + }); + } + + function buildHierarchy(data) { + const wcMap = {}; + + data.forEach(item => { + const wc = item.workcenter; + const fam = item.family; + const wcSeq = item.workcenter_seq ?? 999; + + if (!wcMap[wc]) { + wcMap[wc] = { + workcenter: wc, + name: wc, + sequence: wcSeq, + families: [], + familyMap: {}, + ou_pct: 0, availability_pct: 0, prd_hours: 0, prd_pct: 0, + sby_hours: 0, sby_pct: 0, udt_hours: 0, udt_pct: 0, + sdt_hours: 0, sdt_pct: 0, egt_hours: 0, egt_pct: 0, + nst_hours: 0, nst_pct: 0, machine_count: 0 + }; + } + + if (!wcMap[wc].familyMap[fam]) { + wcMap[wc].familyMap[fam] = { + family: fam, + name: fam, + resources: [], + ou_pct: 0, availability_pct: 0, prd_hours: 0, prd_pct: 0, + sby_hours: 0, sby_pct: 0, udt_hours: 0, udt_pct: 0, + sdt_hours: 0, sdt_pct: 0, egt_hours: 0, egt_pct: 0, + nst_hours: 0, nst_pct: 0, machine_count: 0 + }; + wcMap[wc].families.push(wcMap[wc].familyMap[fam]); + } + + // Add resource + wcMap[wc].familyMap[fam].resources.push({ + name: item.resource, + ...item + }); + + // Aggregate to family + const famObj = wcMap[wc].familyMap[fam]; + famObj.prd_hours += item.prd_hours; + famObj.sby_hours += item.sby_hours; + famObj.udt_hours += item.udt_hours; + 
famObj.sdt_hours += item.sdt_hours; + famObj.egt_hours += item.egt_hours; + famObj.nst_hours += item.nst_hours; + famObj.machine_count += 1; + + // Aggregate to workcenter + wcMap[wc].prd_hours += item.prd_hours; + wcMap[wc].sby_hours += item.sby_hours; + wcMap[wc].udt_hours += item.udt_hours; + wcMap[wc].sdt_hours += item.sdt_hours; + wcMap[wc].egt_hours += item.egt_hours; + wcMap[wc].nst_hours += item.nst_hours; + wcMap[wc].machine_count += 1; + }); + + // Calculate OU% and percentages + Object.values(wcMap).forEach(wc => { + calcPercentages(wc); + wc.families.forEach(fam => { + calcPercentages(fam); + }); + }); + + // Sort by workcenter sequence ascending (smaller sequence first, e.g. 點測 before TMTT) + return Object.values(wcMap).sort((a, b) => a.sequence - b.sequence); + } + + function calcPercentages(obj) { + Object.assign(obj, buildResourceKpiFromHours(obj)); + } + + function createRow(item, level, rowId) { + const tr = document.createElement('tr'); + tr.className = `row-level-${level}`; + if (rowId) tr.dataset.rowId = rowId; + + const indentClass = level > 0 ? `indent-${level}` : ''; + const hasChildren = level < 2 && (item.families?.length > 0 || item.resources?.length > 0); + const isExpanded = rowId ? hierarchyState[rowId] : false; + + const expandBtn = hasChildren + ? 
`` + : ''; + + tr.innerHTML = ` + ${expandBtn}${item.name} + ${item.ou_pct}% + ${item.availability_pct}% + ${formatHoursPct(item.prd_hours, item.prd_pct)} + ${formatHoursPct(item.sby_hours, item.sby_pct)} + ${formatHoursPct(item.udt_hours, item.udt_pct)} + ${formatHoursPct(item.sdt_hours, item.sdt_pct)} + ${formatHoursPct(item.egt_hours, item.egt_pct)} + ${formatHoursPct(item.nst_hours, item.nst_pct)} + ${item.machine_count} + `; + + return tr; + } + + function formatHoursPct(hours, pct) { + return `${Math.round(hours * 10) / 10}h (${pct}%)`; + } + + // Make toggleRow global + window.toggleRow = function(rowId) { + hierarchyState[rowId] = !hierarchyState[rowId]; + renderDetailTable(detailData); + }; + + function toggleAllRows(expand) { + if (!detailData) return; + + const hierarchy = buildHierarchy(detailData); + hierarchy.forEach(wc => { + hierarchyState[`wc_${wc.workcenter}`] = expand; + wc.families.forEach(fam => { + hierarchyState[`fam_${wc.workcenter}_${fam.family}`] = expand; + }); + }); + renderDetailTable(detailData); + } + + // ============================================================ + // Export + // ============================================================ + function exportCsv() { + if (!startDateInput.value || !endDateInput.value) { + Toast.warning('請先設定查詢條件'); + return; + } + + const queryString = buildQueryString(); + const url = `/api/resource/history/export?${queryString}`; + + // Create download link + const a = document.createElement('a'); + a.href = url; + a.download = `resource_history_${startDateInput.value}_to_${endDateInput.value}.csv`; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + + Toast.success('CSV 匯出中...'); + } + + // ============================================================ + // Loading + // ============================================================ + function showLoading() { + loadingOverlay.classList.remove('hidden'); + } + + function hideLoading() { + loadingOverlay.classList.add('hidden'); 
+ } + + Object.assign(window, { + init, + setDefaultDates, + formatDate, + setupEventListeners, + initCharts, + loadFilterOptions, + populateMultiSelect, + setupMultiSelectDropdowns, + updateSelectedWorkcenterGroups, + updateSelectedFamilies, + updateMultiSelectText, + buildQueryString, + executeQuery, + updateKpiCards, + formatHours, + updateTrendChart, + updateStackedChart, + updateComparisonChart, + updateHeatmapChart, + renderDetailTable, + buildHierarchy, + calcPercentages, + createRow, + formatHoursPct, + toggleAllRows, + exportCsv, + showLoading, + hideLoading, + }); + + // ============================================================ + // Start + // ============================================================ + init(); +})(); diff --git a/frontend/src/resource-status/main.js b/frontend/src/resource-status/main.js new file mode 100644 index 0000000..b2a9020 --- /dev/null +++ b/frontend/src/resource-status/main.js @@ -0,0 +1,853 @@ +import { ensureMesApiAvailable } from '../core/api.js'; +import { getPageContract } from '../core/field-contracts.js'; +import { buildResourceKpiFromHours } from '../core/compute.js'; +import { groupBy, sortBy, toggleTreeState, setTreeStateBulk, escapeHtml, safeText } from '../core/table-tree.js'; + +ensureMesApiAvailable(); +window.__MES_FRONTEND_CORE__ = { buildResourceKpiFromHours, groupBy, sortBy, toggleTreeState, setTreeStateBulk, escapeHtml, safeText }; +window.__FIELD_CONTRACTS__ = window.__FIELD_CONTRACTS__ || {}; +window.__FIELD_CONTRACTS__['resource_status:matrix_summary'] = getPageContract('resource_status', 'matrix_summary'); + + + let allEquipment = []; + let workcenterGroups = []; + let matrixFilter = null; // { workcenter_group, status } + let matrixHierarchyState = {}; // Track expanded/collapsed state for matrix rows + + // ============================================================ + // Hierarchical Matrix Functions + // ============================================================ + + function 
buildMatrixHierarchy(equipment) { + // Build hierarchy: workcenter_group -> resourcefamily -> equipment + const groupMap = {}; + + equipment.forEach(eq => { + const group = eq.WORKCENTER_GROUP || 'UNKNOWN'; + const family = eq.RESOURCEFAMILYNAME || 'UNKNOWN'; + const status = eq.EQUIPMENTASSETSSTATUS || 'OTHER'; + const groupSeq = eq.WORKCENTER_GROUP_SEQ ?? 999; + + // Initialize group + if (!groupMap[group]) { + groupMap[group] = { + name: group, + sequence: groupSeq, + families: {}, + counts: { total: 0, PRD: 0, SBY: 0, UDT: 0, SDT: 0, EGT: 0, NST: 0, OTHER: 0 } + }; + } + + // Initialize family + if (!groupMap[group].families[family]) { + groupMap[group].families[family] = { + name: family, + equipment: [], + counts: { total: 0, PRD: 0, SBY: 0, UDT: 0, SDT: 0, EGT: 0, NST: 0, OTHER: 0 } + }; + } + + // Add equipment to family + groupMap[group].families[family].equipment.push(eq); + + // Map status to count key + let statusKey = 'OTHER'; + if (['PRD'].includes(status)) statusKey = 'PRD'; + else if (['SBY'].includes(status)) statusKey = 'SBY'; + else if (['UDT', 'PM', 'BKD'].includes(status)) statusKey = 'UDT'; + else if (['SDT'].includes(status)) statusKey = 'SDT'; + else if (['EGT', 'ENG'].includes(status)) statusKey = 'EGT'; + else if (['NST', 'OFF'].includes(status)) statusKey = 'NST'; + + // Update counts + groupMap[group].counts.total++; + groupMap[group].counts[statusKey]++; + groupMap[group].families[family].counts.total++; + groupMap[group].families[family].counts[statusKey]++; + }); + + // Convert to array structure + // Sort groups by sequence ascending (smaller sequence first, e.g. 
點測 before TMTT) + // Sort families by total count descending + const hierarchy = Object.values(groupMap).map(g => ({ + ...g, + families: Object.values(g.families).sort((a, b) => b.counts.total - a.counts.total) + })).sort((a, b) => a.sequence - b.sequence); + + return hierarchy; + } + + function toggleMatrixRow(rowId) { + matrixHierarchyState[rowId] = !matrixHierarchyState[rowId]; + renderMatrixHierarchy(); + } + + function toggleAllMatrixRows(expand) { + const hierarchy = buildMatrixHierarchy(allEquipment); + hierarchy.forEach(group => { + matrixHierarchyState[`grp_${group.name}`] = expand; + group.families.forEach(fam => { + matrixHierarchyState[`fam_${group.name}_${fam.name}`] = expand; + }); + }); + renderMatrixHierarchy(); + } + + function renderMatrixHierarchy() { + const container = document.getElementById('matrixContainer'); + const hierarchy = buildMatrixHierarchy(allEquipment); + + if (hierarchy.length === 0) { + container.innerHTML = '
無資料
'; + return; + } + + let html = ` + + + + + + + + + + + + + + + + + `; + + hierarchy.forEach(group => { + const grpId = `grp_${group.name}`; + const isGroupExpanded = matrixHierarchyState[grpId]; + const hasChildren = group.families.length > 0; + + // Calculate OU% + const avail = group.counts.PRD + group.counts.SBY + group.counts.UDT + group.counts.SDT + group.counts.EGT; + const ou = avail > 0 ? ((group.counts.PRD / avail) * 100).toFixed(1) : 0; + const ouClass = ou >= 80 ? 'high' : (ou >= 50 ? 'medium' : 'low'); + + // Group row (Level 0) + const expandBtn = hasChildren + ? `` + : ''; + + // Helper to check if this cell is selected (supports all levels) + const isSelected = (wg, st, fam = null, res = null) => { + if (!matrixFilter) return false; + if (matrixFilter.workcenter_group !== wg) return false; + if (matrixFilter.status !== st) return false; + if (fam !== null && matrixFilter.family !== fam) return false; + if (res !== null && matrixFilter.resource !== res) return false; + // Match level: if matrixFilter has family but we're checking group level, no match + if (matrixFilter.family && fam === null) return false; + if (matrixFilter.resource && res === null) return false; + return true; + }; + const grpName = group.name; + + html += ` + + + + + + + + + + + + + `; + + // Family rows (Level 1) + if (isGroupExpanded) { + group.families.forEach(fam => { + const famId = `fam_${group.name}_${fam.name}`; + const isFamExpanded = matrixHierarchyState[famId]; + const hasEquipment = fam.equipment.length > 0; + + const famAvail = fam.counts.PRD + fam.counts.SBY + fam.counts.UDT + fam.counts.SDT + fam.counts.EGT; + const famOu = famAvail > 0 ? ((fam.counts.PRD / famAvail) * 100).toFixed(1) : 0; + const famOuClass = famOu >= 80 ? 'high' : (famOu >= 50 ? 'medium' : 'low'); + + const famExpandBtn = hasEquipment + ? 
`` + : ''; + + const famName = fam.name; + const escFamName = famName.replace(/'/g, "\\'"); + + html += ` + + + + + + + + + + + + + `; + + // Equipment rows (Level 2) + if (isFamExpanded) { + fam.equipment.forEach(eq => { + const status = eq.EQUIPMENTASSETSSTATUS || '--'; + const statusCat = (eq.STATUS_CATEGORY || 'OTHER').toLowerCase(); + const resId = eq.RESOURCEID || ''; + const resName = eq.RESOURCENAME || eq.RESOURCEID || '--'; + const escResId = resId.replace(/'/g, "\\'"); + + // Determine status category key for this equipment + let eqStatusKey = 'OTHER'; + if (['PRD'].includes(status)) eqStatusKey = 'PRD'; + else if (['SBY'].includes(status)) eqStatusKey = 'SBY'; + else if (['UDT', 'PM', 'BKD'].includes(status)) eqStatusKey = 'UDT'; + else if (['SDT'].includes(status)) eqStatusKey = 'SDT'; + else if (['EGT', 'ENG'].includes(status)) eqStatusKey = 'EGT'; + else if (['NST', 'OFF'].includes(status)) eqStatusKey = 'NST'; + + const isEqSelected = isSelected(grpName, eqStatusKey, famName, resId); + + html += ` + + + + + + + + + + + + + `; + }); + } + }); + } + }); + + html += '
工站群組 / 型號 / 機台總數PRDSBYUDTSDTEGTNSTOTHEROU%
${expandBtn}${group.name}${group.counts.total}${group.counts.PRD}${group.counts.SBY}${group.counts.UDT}${group.counts.SDT}${group.counts.EGT}${group.counts.NST}${group.counts.OTHER}${ou}%
${famExpandBtn}${fam.name}${fam.counts.total}${fam.counts.PRD}${fam.counts.SBY}${fam.counts.UDT}${fam.counts.SDT}${fam.counts.EGT}${fam.counts.NST}${fam.counts.OTHER}${famOu}%
${resName}1${status === 'PRD' ? '●' : '-'}${status === 'SBY' ? '●' : '-'}${['UDT', 'PM', 'BKD'].includes(status) ? '●' : '-'}${status === 'SDT' ? '●' : '-'}${['EGT', 'ENG'].includes(status) ? '●' : '-'}${['NST', 'OFF'].includes(status) ? '●' : '-'}${!['PRD', 'SBY', 'UDT', 'PM', 'BKD', 'SDT', 'EGT', 'ENG', 'NST', 'OFF'].includes(status) ? '●' : '-'}${status}
'; + container.innerHTML = html; + } + + function toggleFilter(checkbox, id) { + const label = document.getElementById(id); + if (checkbox.checked) { + label.classList.add('active'); + } else { + label.classList.remove('active'); + } + loadData(); + } + + function getFilters() { + const params = new URLSearchParams(); + + const group = document.getElementById('filterGroup').value; + if (group) params.append('workcenter_groups', group); + + if (document.querySelector('#chkProduction input').checked) { + params.append('is_production', 'true'); + } + if (document.querySelector('#chkKey input').checked) { + params.append('is_key', 'true'); + } + if (document.querySelector('#chkMonitor input').checked) { + params.append('is_monitor', 'true'); + } + + return params.toString(); + } + + async function loadOptions() { + try { + const result = await MesApi.get('/api/resource/status/options', { silent: true }); + + if (result.success) { + const select = document.getElementById('filterGroup'); + workcenterGroups = result.data.workcenter_groups || []; + + workcenterGroups.forEach(group => { + const opt = document.createElement('option'); + opt.value = group; + opt.textContent = group; + select.appendChild(opt); + }); + } + } catch (e) { + console.error('載入選項失敗:', e); + } + } + + async function loadSummary() { + try { + const queryString = getFilters(); + const endpoint = queryString + ? 
`/api/resource/status/summary?${queryString}` + : '/api/resource/status/summary'; + const result = await MesApi.get(endpoint, { silent: true }); + + if (result.success) { + const d = result.data; + const total = d.total_count || 0; + const status = d.by_status || {}; + + // Get individual status counts + const prd = status.PRD || 0; + const sby = status.SBY || 0; + const udt = status.UDT || 0; + const sdt = status.SDT || 0; + const egt = status.EGT || 0; + const nst = status.NST || 0; + + // Calculate percentage denominator (includes NST) + const totalStatus = prd + sby + udt + sdt + egt + nst; + + // Update OU% and AVAIL% + const hasOuPct = d.ou_pct !== null && d.ou_pct !== undefined; + const hasAvailPct = d.availability_pct !== null && d.availability_pct !== undefined; + document.getElementById('ouPct').textContent = hasOuPct ? `${d.ou_pct}%` : '--'; + document.getElementById('availabilityPct').textContent = hasAvailPct ? `${d.availability_pct}%` : '--'; + + // Update status cards with count and percentage + document.getElementById('prdCount').textContent = prd; + document.getElementById('prdPct').textContent = totalStatus ? `生產 (${((prd/totalStatus)*100).toFixed(1)}%)` : '生產'; + + document.getElementById('sbyCount').textContent = sby; + document.getElementById('sbyPct').textContent = totalStatus ? `待機 (${((sby/totalStatus)*100).toFixed(1)}%)` : '待機'; + + document.getElementById('udtCount').textContent = udt; + document.getElementById('udtPct').textContent = totalStatus ? `非計畫停機 (${((udt/totalStatus)*100).toFixed(1)}%)` : '非計畫停機'; + + document.getElementById('sdtCount').textContent = sdt; + document.getElementById('sdtPct').textContent = totalStatus ? `計畫停機 (${((sdt/totalStatus)*100).toFixed(1)}%)` : '計畫停機'; + + document.getElementById('egtCount').textContent = egt; + document.getElementById('egtPct').textContent = totalStatus ? 
`工程 (${((egt/totalStatus)*100).toFixed(1)}%)` : '工程'; + + document.getElementById('nstCount').textContent = nst; + document.getElementById('nstPct').textContent = totalStatus ? `未排程 (${((nst/totalStatus)*100).toFixed(1)}%)` : '未排程'; + + // Update JOB count (equipment with active maintenance/repair job) + const jobCount = d.with_active_job || 0; + document.getElementById('jobCount').textContent = jobCount; + + // Update total count + document.getElementById('totalCount').textContent = total; + } + } catch (e) { + console.error('載入摘要失敗:', e); + } + } + + function loadMatrix() { + // Matrix is now rendered from allEquipment data using hierarchy + // This function is called after loadEquipment populates allEquipment + renderMatrixHierarchy(); + } + + async function loadEquipment() { + const container = document.getElementById('equipmentContainer'); + + // Clear matrix filter when reloading data + matrixFilter = null; + document.getElementById('matrixFilterIndicator').classList.remove('active'); + + try { + const queryString = getFilters(); + const endpoint = queryString + ? `/api/resource/status?${queryString}` + : '/api/resource/status'; + const result = await MesApi.get(endpoint, { silent: true }); + + if (result.success && result.data.length > 0) { + allEquipment = result.data; + document.getElementById('equipmentCount').textContent = result.count; + renderEquipmentList(allEquipment); + } else { + allEquipment = []; + document.getElementById('equipmentCount').textContent = 0; + container.innerHTML = '
無符合條件的設備
'; + } + } catch (e) { + console.error('載入設備失敗:', e); + container.innerHTML = '
載入失敗
'; + } + } + + // ============================================================ + // Floating Tooltip Functions + // ============================================================ + let currentTooltipData = null; + + function showTooltip(event, type, data) { + event.stopPropagation(); + const tooltip = document.getElementById('floatingTooltip'); + const titleEl = document.getElementById('tooltipTitle'); + const contentEl = document.getElementById('tooltipContent'); + + // Set content based on type + if (type === 'lot') { + titleEl.textContent = '在製批次明細'; + contentEl.innerHTML = renderLotContent(data); + } else if (type === 'job') { + titleEl.textContent = 'JOB 單詳細資訊'; + contentEl.innerHTML = renderJobContent(data); + } + + // Position the tooltip + tooltip.classList.add('show'); + + // Get dimensions + const rect = tooltip.getBoundingClientRect(); + const viewportWidth = window.innerWidth; + const viewportHeight = window.innerHeight; + + // Calculate initial position near the click + let x = event.clientX + 10; + let y = event.clientY + 10; + + // Adjust if overflowing right + if (x + rect.width > viewportWidth - 20) { + x = event.clientX - rect.width - 10; + } + + // Adjust if overflowing bottom + if (y + rect.height > viewportHeight - 20) { + y = viewportHeight - rect.height - 20; + } + + // Ensure not off-screen left or top + x = Math.max(10, x); + y = Math.max(10, y); + + tooltip.style.left = x + 'px'; + tooltip.style.top = y + 'px'; + + currentTooltipData = { type, data }; + } + + function hideTooltip() { + const tooltip = document.getElementById('floatingTooltip'); + tooltip.classList.remove('show'); + currentTooltipData = null; + } + + // Close tooltip when clicking outside + document.addEventListener('click', (e) => { + const tooltip = document.getElementById('floatingTooltip'); + if (tooltip && !tooltip.contains(e.target) && !e.target.classList.contains('info-trigger')) { + hideTooltip(); + } + }); + + // Helper functions to show specific tooltip types + 
function showLotTooltip(event, resourceId) { + const eq = allEquipment.find(e => e.RESOURCEID === resourceId); + if (eq && eq.LOT_DETAILS) { + showTooltip(event, 'lot', eq.LOT_DETAILS); + } + } + + function showJobTooltip(event, resourceId) { + const eq = allEquipment.find(e => e.RESOURCEID === resourceId); + if (eq && eq.JOBORDER) { + showTooltip(event, 'job', eq); + } + } + + function renderLotContent(lotDetails) { + if (!lotDetails || lotDetails.length === 0) return '
無批次資料
'; + + let html = '
'; + lotDetails.forEach(lot => { + const trackinTime = lot.LOTTRACKINTIME ? new Date(lot.LOTTRACKINTIME).toLocaleString('zh-TW') : '--'; + const qty = lot.LOTTRACKINQTY_PCS != null ? lot.LOTTRACKINQTY_PCS.toLocaleString() : '--'; + html += ` +
+
${lot.RUNCARDLOTID || '--'}
+
+
數量:${qty} pcs
+
TrackIn:${trackinTime}
+
操作員:${lot.LOTTRACKINEMPLOYEE || '--'}
+
+
+ `; + }); + html += '
'; + return html; + } + + function renderJobContent(eq) { + const formatDate = (dateStr) => { + if (!dateStr) return '--'; + try { + return new Date(dateStr).toLocaleString('zh-TW'); + } catch { + return dateStr; + } + }; + + const field = (label, value, isHighlight = false) => { + const valueClass = isHighlight ? 'highlight' : ''; + return ` +
+ ${label} + ${value || '--'} +
+ `; + }; + + return ` +
+ ${field('JOBORDER', eq.JOBORDER, true)} + ${field('JOBSTATUS', eq.JOBSTATUS, true)} + ${field('JOBMODEL', eq.JOBMODEL)} + ${field('JOBSTAGE', eq.JOBSTAGE)} + ${field('JOBID', eq.JOBID)} + ${field('建立時間', formatDate(eq.CREATEDATE))} + ${field('建立人員', eq.CREATEUSERNAME || eq.CREATEUSER)} + ${field('技術員', eq.TECHNICIANUSERNAME || eq.TECHNICIANUSER)} + ${field('症狀碼', eq.SYMPTOMCODE)} + ${field('原因碼', eq.CAUSECODE)} + ${field('維修碼', eq.REPAIRCODE)} +
+ `; + } + + function renderEquipmentList(equipment) { + const container = document.getElementById('equipmentContainer'); + + if (equipment.length === 0) { + container.innerHTML = '
無符合條件的設備
'; + return; + } + + let html = '
'; + + equipment.forEach((eq) => { + const statusCat = (eq.STATUS_CATEGORY || 'OTHER').toLowerCase(); + const statusDisplay = getStatusDisplay(eq.EQUIPMENTASSETSSTATUS, eq.STATUS_CATEGORY); + const resourceId = eq.RESOURCEID || ''; + const escapedResourceId = resourceId.replace(/'/g, "\\'"); + + // Build LOT info with click trigger + let lotHtml = ''; + if (eq.LOT_COUNT > 0) { + lotHtml = `📦 ${eq.LOT_COUNT} 批`; + } + + // Build JOB info with click trigger + let jobHtml = ''; + if (eq.JOBORDER) { + jobHtml = `📋 ${eq.JOBORDER}`; + } + + html += ` +
+
+
${eq.RESOURCENAME || eq.RESOURCEID || '--'}
+ ${statusDisplay} +
+
+ 📍 ${eq.WORKCENTERNAME || '--'} + 🏭 ${eq.WORKCENTER_GROUP || '--'} + 🔧 ${eq.RESOURCEFAMILYNAME || '--'} + 🏢 ${eq.LOCATIONNAME || '--'} + ${lotHtml} + ${jobHtml} +
+
+ `; + }); + + html += '
'; + container.innerHTML = html; + } + + function filterByMatrixCell(workcenterGroup, status, family = null, resource = null) { + // Toggle off if clicking same cell (exact match including family and resource) + if (matrixFilter && + matrixFilter.workcenter_group === workcenterGroup && + matrixFilter.status === status && + matrixFilter.family === family && + matrixFilter.resource === resource) { + clearMatrixFilter(); + return; + } + + matrixFilter = { + workcenter_group: workcenterGroup, + status: status, + family: family, + resource: resource + }; + + // Update selected cell highlighting for group and family level cells + document.querySelectorAll('.matrix-table td.clickable').forEach(cell => { + cell.classList.remove('selected'); + const cellWg = cell.dataset.wg; + const cellStatus = cell.dataset.status; + const cellFam = cell.dataset.fam; + + // Match based on level + if (cellWg === workcenterGroup && cellStatus === status) { + if (family === null && resource === null && !cellFam) { + // Group level match + cell.classList.add('selected'); + } else if (family !== null && cellFam === family && resource === null) { + // Family level match + cell.classList.add('selected'); + } + } + }); + + // Update selected row highlighting for equipment level + document.querySelectorAll('.matrix-table tr.clickable-row').forEach(row => { + row.classList.remove('selected'); + if (resource !== null && row.dataset.res === resource) { + row.classList.add('selected'); + } + }); + + // Show filter indicator with hierarchical label + const statusLabels = { + 'PRD': '生產中', + 'SBY': '待機', + 'UDT': '非計畫停機', + 'SDT': '計畫停機', + 'EGT': '工程', + 'NST': '未排程', + 'OTHER': '其他' + }; + + let filterLabel = workcenterGroup; + if (family) filterLabel += ` / ${family}`; + if (resource) { + // Find resource name from allEquipment + const eqInfo = allEquipment.find(e => e.RESOURCEID === resource); + const resName = eqInfo ? 
(eqInfo.RESOURCENAME || resource) : resource; + filterLabel += ` / ${resName}`; + } + filterLabel += ` - ${statusLabels[status] || status}`; + + document.getElementById('matrixFilterText').textContent = filterLabel; + document.getElementById('matrixFilterIndicator').classList.add('active'); + + // Filter and render equipment list + // Use same grouping logic as buildMatrixHierarchy + const filtered = allEquipment.filter(eq => { + // Match workcenter group + const eqGroup = eq.WORKCENTER_GROUP || 'UNKNOWN'; + if (eqGroup !== workcenterGroup) return false; + + // Match family if specified + if (family !== null) { + const eqFamily = eq.RESOURCEFAMILYNAME || 'UNKNOWN'; + if (eqFamily !== family) return false; + } + + // Match resource if specified + if (resource !== null) { + if (eq.RESOURCEID !== resource) return false; + } + + // Match status based on EQUIPMENTASSETSSTATUS (same logic as matrix calculation) + const eqStatus = eq.EQUIPMENTASSETSSTATUS || ''; + + // Map equipment status to matrix status category (same as buildMatrixHierarchy) + let eqStatusKey = 'OTHER'; + if (['PRD'].includes(eqStatus)) eqStatusKey = 'PRD'; + else if (['SBY'].includes(eqStatus)) eqStatusKey = 'SBY'; + else if (['UDT', 'PM', 'BKD'].includes(eqStatus)) eqStatusKey = 'UDT'; + else if (['SDT'].includes(eqStatus)) eqStatusKey = 'SDT'; + else if (['EGT', 'ENG'].includes(eqStatus)) eqStatusKey = 'EGT'; + else if (['NST', 'OFF'].includes(eqStatus)) eqStatusKey = 'NST'; + + return eqStatusKey === status; + }); + + document.getElementById('equipmentCount').textContent = filtered.length; + renderEquipmentList(filtered); + } + + function clearMatrixFilter() { + matrixFilter = null; + + // Remove selected highlighting from cells + document.querySelectorAll('.matrix-table td.clickable').forEach(cell => { + cell.classList.remove('selected'); + }); + + // Remove selected highlighting from rows + document.querySelectorAll('.matrix-table tr.clickable-row').forEach(row => { + 
row.classList.remove('selected'); + }); + + // Hide filter indicator + document.getElementById('matrixFilterIndicator').classList.remove('active'); + + // Show all equipment + document.getElementById('equipmentCount').textContent = allEquipment.length; + renderEquipmentList(allEquipment); + } + + function getStatusDisplay(status, category) { + const statusMap = { + 'PRD': '生產中', + 'SBY': '待機', + 'UDT': '非計畫停機', + 'SDT': '計畫停機', + 'EGT': '工程', + 'NST': '未排程' + }; + + if (status && statusMap[status]) { + return statusMap[status]; + } + + const catMap = { + 'PRODUCTIVE': '生產中', + 'STANDBY': '待機', + 'DOWN': '停機', + 'ENGINEERING': '工程', + 'NOT_SCHEDULED': '未排程', + 'INACTIVE': '停用' + }; + + return catMap[category] || status || '--'; + } + + async function checkCacheStatus() { + try { + const data = await MesApi.get('/health', { + silent: true, + retries: 0, + timeout: 15000 + }); + + const dot = document.getElementById('cacheDot'); + const status = document.getElementById('cacheStatus'); + const resCache = data.resource_cache || {}; + const eqCache = data.equipment_status_cache || {}; + + // 使用 resource_cache 的數量(過濾後的設備數) + if (resCache.enabled && resCache.loaded) { + dot.className = 'cache-dot'; + status.textContent = `快取正常 (${resCache.count} 筆)`; + } else if (resCache.enabled) { + dot.className = 'cache-dot loading'; + status.textContent = '快取載入中...'; + } else { + dot.className = 'cache-dot error'; + status.textContent = '快取未啟用'; + } + + // 使用 equipment_status_cache 的更新時間(即時狀態更新時間) + if (eqCache.updated_at) { + document.getElementById('lastUpdate').textContent = + `更新: ${new Date(eqCache.updated_at).toLocaleString('zh-TW')}`; + } + } catch (e) { + document.getElementById('cacheDot').className = 'cache-dot error'; + document.getElementById('cacheStatus').textContent = '無法連線'; + } + } + + async function loadData() { + const btn = document.getElementById('btnRefresh'); + btn.disabled = true; + + try { + // loadSummary can run in parallel + // loadEquipment must complete 
before loadMatrix (matrix uses allEquipment data) + await Promise.all([ + loadSummary(), + loadEquipment() + ]); + // Now render the matrix from the loaded equipment data + loadMatrix(); + } finally { + btn.disabled = false; + } + } + + // ============================================================ + // Auto-refresh + // ============================================================ + const REFRESH_INTERVAL = 5 * 60 * 1000; // 5 minutes + let refreshTimer = null; + + function startAutoRefresh() { + if (refreshTimer) { + clearInterval(refreshTimer); + } + console.log('[Resource Status] Auto-refresh started, interval:', REFRESH_INTERVAL / 1000, 'seconds'); + refreshTimer = setInterval(() => { + if (!document.hidden) { + console.log('[Resource Status] Auto-refresh triggered at', new Date().toLocaleTimeString()); + checkCacheStatus(); + loadData(); + } else { + console.log('[Resource Status] Auto-refresh skipped (tab hidden)'); + } + }, REFRESH_INTERVAL); + } + + // Handle page visibility - refresh when tab becomes visible + document.addEventListener('visibilitychange', () => { + if (!document.hidden) { + console.log('[Resource Status] Tab became visible, refreshing...'); + checkCacheStatus(); + loadData(); + startAutoRefresh(); + } + }); + + // Initialize + document.addEventListener('DOMContentLoaded', async () => { + await loadOptions(); + await checkCacheStatus(); + await loadData(); + + // Start auto-refresh + startAutoRefresh(); + }); + +Object.assign(window, { +buildMatrixHierarchy, +toggleMatrixRow, +toggleAllMatrixRows, +renderMatrixHierarchy, +toggleFilter, +getFilters, +loadOptions, +loadSummary, +loadMatrix, +loadEquipment, +showTooltip, +hideTooltip, +showLotTooltip, +showJobTooltip, +renderLotContent, +renderJobContent, +renderEquipmentList, +filterByMatrixCell, +clearMatrixFilter, +getStatusDisplay, +checkCacheStatus, +loadData, +startAutoRefresh, +}); diff --git a/frontend/src/tables/main.js b/frontend/src/tables/main.js new file mode 100644 index 
0000000..4ae7f98 --- /dev/null +++ b/frontend/src/tables/main.js @@ -0,0 +1,236 @@ +import { ensureMesApiAvailable } from '../core/api.js'; +import { getPageContract } from '../core/field-contracts.js'; +import { buildResourceKpiFromHours } from '../core/compute.js'; +import { groupBy, sortBy, toggleTreeState, setTreeStateBulk, escapeHtml, safeText } from '../core/table-tree.js'; + +ensureMesApiAvailable(); +window.__MES_FRONTEND_CORE__ = { buildResourceKpiFromHours, groupBy, sortBy, toggleTreeState, setTreeStateBulk, escapeHtml, safeText }; +window.__FIELD_CONTRACTS__ = window.__FIELD_CONTRACTS__ || {}; +window.__FIELD_CONTRACTS__['tables:result_table'] = getPageContract('tables', 'result_table'); + + + let currentTable = null; + let currentDisplayName = null; + let currentTimeField = null; + let currentColumns = []; + let currentFilters = {}; + + function toFilterInputId(column) { + return `filter_${encodeURIComponent(safeText(column))}`; + } + + function toJsSingleQuoted(value) { + return safeText(value).replace(/\\/g, '\\\\').replace(/'/g, "\\'"); + } + + async function loadTableData(tableName, displayName, timeField) { + // Mark current selected table + document.querySelectorAll('.table-card').forEach(card => { + card.classList.remove('active'); + }); + event.currentTarget.classList.add('active'); + + currentTable = tableName; + currentDisplayName = displayName; + currentTimeField = timeField || null; + currentFilters = {}; + + const viewer = document.getElementById('dataViewer'); + const title = document.getElementById('viewerTitle'); + const content = document.getElementById('tableContent'); + const statsContainer = document.getElementById('statsContainer'); + + viewer.classList.add('active'); + title.textContent = `正在載入: ${displayName}`; + content.innerHTML = '
正在載入欄位資訊...
'; + statsContainer.innerHTML = ''; + + viewer.scrollIntoView({ behavior: 'smooth', block: 'start' }); + + try { + const data = await MesApi.post('/api/get_table_columns', { table_name: tableName }); + + if (data.error) { + content.innerHTML = `
${escapeHtml(data.error)}
`; + return; + } + + currentColumns = data.columns; + title.textContent = `${displayName} (${currentColumns.length} 欄位)`; + + renderFilterControls(); + } catch (error) { + content.innerHTML = `
請求失敗: ${escapeHtml(error.message)}
`; + } + } + + function renderFilterControls() { + const statsContainer = document.getElementById('statsContainer'); + const content = document.getElementById('tableContent'); + + statsContainer.innerHTML = ` +
+
+
表名
+
${escapeHtml(currentTable)}
+
+
+
欄位數
+
${currentColumns.length}
+
+ 在下方輸入框填入篩選條件 (模糊匹配) + + +
+
+ `; + + let html = ''; + html += ''; + currentColumns.forEach(col => { + html += ``; + }); + html += ''; + + html += ''; + currentColumns.forEach(col => { + const filterId = toFilterInputId(col); + const jsCol = toJsSingleQuoted(col); + html += ``; + }); + html += ''; + + html += ''; + html += ''; + html += '
${escapeHtml(col)}
請輸入篩選條件後點擊「查詢」,或直接點擊「查詢」載入最後 1000 筆資料
'; + + content.innerHTML = html; + } + + function updateFilter(column, value) { + if (value && value.trim()) { + currentFilters[column] = value.trim(); + } else { + delete currentFilters[column]; + } + renderActiveFilters(); + } + + function renderActiveFilters() { + const container = document.getElementById('activeFilters'); + if (!container) return; + + const filterKeys = Object.keys(currentFilters); + if (filterKeys.length === 0) { + container.innerHTML = ''; + return; + } + + let html = ''; + filterKeys.forEach(col => { + html += `${escapeHtml(col)}: ${escapeHtml(currentFilters[col])} ×`; + }); + container.innerHTML = html; + } + + function removeFilter(column) { + delete currentFilters[column]; + const input = document.getElementById(toFilterInputId(column)); + if (input) input.value = ''; + renderActiveFilters(); + } + + function clearFilters() { + currentFilters = {}; + currentColumns.forEach(col => { + const input = document.getElementById(toFilterInputId(col)); + if (input) input.value = ''; + }); + renderActiveFilters(); + } + + function handleFilterKeypress(event) { + if (event.key === 'Enter') { + executeQuery(); + } + } + + async function executeQuery() { + const title = document.getElementById('viewerTitle'); + const tbody = document.getElementById('dataBody'); + + currentFilters = {}; + currentColumns.forEach(col => { + const input = document.getElementById(toFilterInputId(col)); + if (input && input.value.trim()) { + currentFilters[col] = input.value.trim(); + } + }); + renderActiveFilters(); + + title.textContent = `正在查詢: ${currentDisplayName}`; + tbody.innerHTML = `正在查詢資料...`; + + try { + const data = await MesApi.post('/api/query_table', { + table_name: currentTable, + limit: 1000, + time_field: currentTimeField, + filters: Object.keys(currentFilters).length > 0 ? 
currentFilters : null + }); + + if (data.error) { + tbody.innerHTML = `${escapeHtml(data.error)}`; + return; + } + + const filterCount = Object.keys(currentFilters).length; + const filterText = filterCount > 0 ? ` [${filterCount} 個篩選]` : ''; + title.textContent = `${currentDisplayName} (${data.row_count} 筆)${filterText}`; + + if (data.data.length === 0) { + tbody.innerHTML = `查無資料`; + return; + } + + let html = ''; + data.data.forEach(row => { + html += ''; + currentColumns.forEach(col => { + const value = row[col]; + if (value === null || value === undefined) { + html += 'NULL'; + } else { + html += `${escapeHtml(safeText(value))}`; + } + }); + html += ''; + }); + tbody.innerHTML = html; + } catch (error) { + tbody.innerHTML = `請求失敗: ${escapeHtml(error.message)}`; + } + } + + function closeViewer() { + document.getElementById('dataViewer').classList.remove('active'); + document.querySelectorAll('.table-card').forEach(card => { + card.classList.remove('active'); + }); + currentTable = null; + currentColumns = []; + currentFilters = {}; + } + + +Object.assign(window, { +loadTableData, +renderFilterControls, +updateFilter, +renderActiveFilters, +removeFilter, +clearFilters, +handleFilterKeypress, +executeQuery, +closeViewer, +}); diff --git a/frontend/src/wip-detail/main.js b/frontend/src/wip-detail/main.js new file mode 100644 index 0000000..5d578b1 --- /dev/null +++ b/frontend/src/wip-detail/main.js @@ -0,0 +1,844 @@ +import { ensureMesApiAvailable } from '../core/api.js'; +import { + debounce, + fetchWipAutocompleteItems, +} from '../core/autocomplete.js'; + +ensureMesApiAvailable(); + +(function initWipDetailPage() { + // ============================================================ + // State Management + // ============================================================ + const state = { + workcenter: '', + data: null, + packages: [], + page: 1, + pageSize: 100, + filters: { + package: '', + type: '', + workorder: '', + lotid: '' + }, + isLoading: false, + 
refreshTimer: null, + REFRESH_INTERVAL: 10 * 60 * 1000, // 10 minutes + }; + + // WIP Status filter (separate from other filters) + let activeStatusFilter = null; // null | 'run' | 'queue' | 'quality-hold' | 'non-quality-hold' + + // AbortController for cancelling in-flight requests + let tableAbortController = null; // For loadTableOnly() + let loadAllAbortController = null; // For loadAllData() + + // ============================================================ + // Utility Functions + // ============================================================ + function formatNumber(num) { + if (num === null || num === undefined || num === '-') return '-'; + return num.toLocaleString('zh-TW'); + } + + function updateElementWithTransition(elementId, newValue) { + const el = document.getElementById(elementId); + const oldValue = el.textContent; + const formattedNew = formatNumber(newValue); + + if (oldValue !== formattedNew) { + el.textContent = formattedNew; + el.classList.add('updated'); + setTimeout(() => el.classList.remove('updated'), 500); + } + } + + function getUrlParam(name) { + const params = new URLSearchParams(window.location.search); + return params.get(name) || ''; + } + + // ============================================================ + // API Functions (using MesApi) + // ============================================================ + const API_TIMEOUT = 60000; // 60 seconds timeout + + async function fetchPackages() { + const result = await MesApi.get('/api/wip/meta/packages', { silent: true, timeout: API_TIMEOUT }); + if (result.success) { + return result.data; + } + throw new Error(result.error || 'Failed to fetch packages'); + } + + async function fetchDetail(signal = null) { + const params = { + page: state.page, + page_size: state.pageSize + }; + + if (state.filters.package) { + params.package = state.filters.package; + } + if (state.filters.type) { + params.type = state.filters.type; + } + if (activeStatusFilter) { + // Handle hold type filters + if 
(activeStatusFilter === 'quality-hold') { + params.status = 'HOLD'; + params.hold_type = 'quality'; + } else if (activeStatusFilter === 'non-quality-hold') { + params.status = 'HOLD'; + params.hold_type = 'non-quality'; + } else { + // Convert to API status format (RUN/QUEUE) + params.status = activeStatusFilter.toUpperCase(); + } + } + if (state.filters.workorder) { + params.workorder = state.filters.workorder; + } + if (state.filters.lotid) { + params.lotid = state.filters.lotid; + } + + const result = await MesApi.get(`/api/wip/detail/${encodeURIComponent(state.workcenter)}`, { + params, + timeout: API_TIMEOUT, + signal + }); + if (result.success) { + return result.data; + } + throw new Error(result.error || 'Failed to fetch detail'); + } + + async function fetchWorkcenters() { + const result = await MesApi.get('/api/wip/meta/workcenters', { silent: true, timeout: API_TIMEOUT }); + if (result.success) { + return result.data; + } + throw new Error(result.error || 'Failed to fetch workcenters'); + } + + async function searchAutocompleteItems(type, query) { + return fetchWipAutocompleteItems({ + searchType: type, + query, + filters: { + workorder: document.getElementById('filterWorkorder').value, + lotid: document.getElementById('filterLotid').value, + package: document.getElementById('filterPackage').value, + type: document.getElementById('filterType').value, + }, + request: (url, options) => MesApi.get(url, options), + }); + } + + // ============================================================ + // Render Functions + // ============================================================ + function renderSummary(summary) { + if (!summary) return; + + updateElementWithTransition('totalLots', summary.totalLots); + updateElementWithTransition('runLots', summary.runLots); + updateElementWithTransition('queueLots', summary.queueLots); + updateElementWithTransition('qualityHoldLots', summary.qualityHoldLots); + updateElementWithTransition('nonQualityHoldLots', 
summary.nonQualityHoldLots); + } + + function renderTable(data) { + const container = document.getElementById('tableContainer'); + + if (!data || !data.lots || data.lots.length === 0) { + container.innerHTML = '
No data available
'; + document.getElementById('tableInfo').textContent = 'No data'; + document.getElementById('pagination').style.display = 'none'; + return; + } + + const specs = data.specs || []; + + let html = ''; + // Fixed columns + html += ''; + html += ''; + html += ''; + html += ''; + + // Spec columns + specs.forEach(spec => { + html += ``; + }); + + html += ''; + + data.lots.forEach(lot => { + html += ''; + + // Fixed columns - LOT ID is clickable + const lotIdDisplay = lot.lotId + ? `${lot.lotId}` + : '-'; + html += ``; + html += ``; + + // WIP Status with color and hold reason + const statusClass = `wip-status-${(lot.wipStatus || 'queue').toLowerCase()}`; + let statusText = lot.wipStatus || 'QUEUE'; + if (lot.wipStatus === 'HOLD' && lot.holdReason) { + statusText = `HOLD (${lot.holdReason})`; + } + html += ``; + + html += ``; + + // Spec columns - show QTY in matching spec column + specs.forEach(spec => { + if (lot.spec === spec) { + html += ``; + } else { + html += ''; + } + }); + + html += ''; + }); + + html += '
LOT IDEquipmentWIP StatusPackage${spec}
${lotIdDisplay}${lot.equipment || '-'}${statusText}${lot.package || '-'}${formatNumber(lot.qty)}
'; + container.innerHTML = html; + + // Update info + const pagination = data.pagination; + const start = (pagination.page - 1) * pagination.page_size + 1; + const end = Math.min(pagination.page * pagination.page_size, pagination.total_count); + document.getElementById('tableInfo').textContent = + `Showing ${start} - ${end} of ${formatNumber(pagination.total_count)}`; + + // Update pagination + if (pagination.total_pages > 1) { + document.getElementById('pagination').style.display = 'flex'; + document.getElementById('pageInfo').textContent = + `Page ${pagination.page} / ${pagination.total_pages}`; + document.getElementById('btnPrev').disabled = pagination.page <= 1; + document.getElementById('btnNext').disabled = pagination.page >= pagination.total_pages; + } else { + document.getElementById('pagination').style.display = 'none'; + } + + // Update last update time + if (data.sys_date) { + document.getElementById('lastUpdate').textContent = `Last Update: ${data.sys_date}`; + } + } + + function populatePackageFilter(packages) { + const select = document.getElementById('filterPackage'); + const currentValue = select.value; + + select.innerHTML = ''; + packages.forEach(pkg => { + const option = document.createElement('option'); + option.value = pkg.name; + option.textContent = `${pkg.name} (${pkg.lot_count})`; + select.appendChild(option); + }); + + select.value = currentValue; + } + + // ============================================================ + // Data Loading + // ============================================================ + async function loadAllData(showOverlay = true) { + // Cancel any in-flight request to prevent connection pile-up + if (loadAllAbortController) { + loadAllAbortController.abort(); + console.log('[WIP Detail] Previous request cancelled'); + } + loadAllAbortController = new AbortController(); + const signal = loadAllAbortController.signal; + + state.isLoading = true; + + if (showOverlay) { + 
document.getElementById('loadingOverlay').style.display = 'flex'; + } + + // Show refresh indicator + document.getElementById('refreshIndicator').classList.add('active'); + document.getElementById('refreshError').classList.remove('active'); + document.getElementById('refreshSuccess').classList.remove('active'); + + try { + // Load packages for filter (non-blocking - don't fail if this times out) + if (state.packages.length === 0) { + try { + state.packages = await fetchPackages(); + populatePackageFilter(state.packages); + } catch (pkgError) { + console.warn('Failed to load packages filter:', pkgError); + } + } + + // Load detail data (main data - this is critical) + state.data = await fetchDetail(signal); + + renderSummary(state.data.summary); + renderTable(state.data); + + // Show success indicator + document.getElementById('refreshSuccess').classList.add('active'); + setTimeout(() => { + document.getElementById('refreshSuccess').classList.remove('active'); + }, 1500); + + } catch (error) { + // Ignore abort errors (expected when user triggers new request) + if (error.name === 'AbortError') { + console.log('[WIP Detail] Request cancelled (new request started)'); + return; + } + console.error('Data load failed:', error); + document.getElementById('refreshError').classList.add('active'); + } finally { + state.isLoading = false; + document.getElementById('loadingOverlay').style.display = 'none'; + document.getElementById('refreshIndicator').classList.remove('active'); + } + } + + // ============================================================ + // Autocomplete Functions + // ============================================================ + function showDropdown(dropdownId, items, onSelect) { + const dropdown = document.getElementById(dropdownId); + if (!items || items.length === 0) { + dropdown.innerHTML = '
No results
'; + dropdown.classList.add('show'); + return; + } + dropdown.innerHTML = items.map(item => + `
${item}
` + ).join(''); + dropdown.classList.add('show'); + + dropdown.querySelectorAll('.autocomplete-item').forEach(el => { + el.addEventListener('click', () => { + onSelect(el.dataset.value); + dropdown.classList.remove('show'); + }); + }); + } + + function hideDropdown(dropdownId) { + document.getElementById(dropdownId).classList.remove('show'); + } + + function showLoading(dropdownId) { + const dropdown = document.getElementById(dropdownId); + dropdown.innerHTML = '
Searching...
'; + dropdown.classList.add('show'); + } + + function setupAutocomplete(inputId, dropdownId, searchType) { + const input = document.getElementById(inputId); + + const doSearch = debounce(async (query) => { + if (query.length < 2) { + hideDropdown(dropdownId); + return; + } + showLoading(dropdownId); + try { + const items = await searchAutocompleteItems(searchType, query); + showDropdown(dropdownId, items, (value) => { + input.value = value; + }); + } catch (e) { + hideDropdown(dropdownId); + } + }, 300); + + input.addEventListener('input', (e) => { + doSearch(e.target.value); + }); + + input.addEventListener('focus', (e) => { + if (e.target.value.length >= 2) { + doSearch(e.target.value); + } + }); + + // Hide dropdown when clicking outside + document.addEventListener('click', (e) => { + if (!e.target.closest(`#${inputId}`) && !e.target.closest(`#${dropdownId}`)) { + hideDropdown(dropdownId); + } + }); + } + + // ============================================================ + // Status Filter Toggle (Clickable Cards) + // ============================================================ + function toggleStatusFilter(status) { + if (activeStatusFilter === status) { + // Clicking the same card again removes the filter + activeStatusFilter = null; + } else { + // Apply new filter + activeStatusFilter = status; + } + + // Update card styles + updateCardStyles(); + + // Update table title + updateTableTitle(); + + // Reset to page 1 and reload table only (no isLoading guard) + state.page = 1; + loadTableOnly(); + } + + async function loadTableOnly() { + // Cancel any in-flight request to prevent pile-up + if (tableAbortController) { + tableAbortController.abort(); + } + tableAbortController = new AbortController(); + + // Show loading in table container + const container = document.getElementById('tableContainer'); + container.innerHTML = '
Loading...
'; + + // Show refresh indicator + document.getElementById('refreshIndicator').classList.add('active'); + + try { + state.data = await fetchDetail(tableAbortController.signal); + renderSummary(state.data.summary); + renderTable(state.data); + + // Show success indicator + document.getElementById('refreshSuccess').classList.add('active'); + setTimeout(() => { + document.getElementById('refreshSuccess').classList.remove('active'); + }, 1500); + } catch (error) { + // Ignore abort errors (expected when user clicks quickly) + if (error.name === 'AbortError') { + console.log('[WIP Detail] Table request cancelled (new filter selected)'); + return; + } + console.error('Table load failed:', error); + container.innerHTML = '
Error loading data
'; + document.getElementById('refreshError').classList.add('active'); + } finally { + document.getElementById('refreshIndicator').classList.remove('active'); + } + } + + function updateCardStyles() { + const row = document.getElementById('summaryRow'); + const statusCards = document.querySelectorAll('.summary-card.status-run, .summary-card.status-queue, .summary-card.status-quality-hold, .summary-card.status-non-quality-hold'); + + // Remove active from all status cards + statusCards.forEach(card => { + card.classList.remove('active'); + }); + + if (activeStatusFilter) { + // Add filtering class to row (dims non-active cards) + row.classList.add('filtering'); + + // Add active to the selected card + const activeCard = document.querySelector(`.summary-card.status-${activeStatusFilter}`); + if (activeCard) { + activeCard.classList.add('active'); + } + } else { + // Remove filtering class + row.classList.remove('filtering'); + } + } + + function updateTableTitle() { + const titleEl = document.querySelector('.table-title'); + const baseTitle = 'Lot Details'; + + if (activeStatusFilter) { + let statusLabel; + if (activeStatusFilter === 'quality-hold') { + statusLabel = '品質異常 Hold'; + } else if (activeStatusFilter === 'non-quality-hold') { + statusLabel = '非品質異常 Hold'; + } else { + statusLabel = activeStatusFilter.toUpperCase(); + } + titleEl.textContent = `${baseTitle} - ${statusLabel} Only`; + } else { + titleEl.textContent = baseTitle; + } + } + + // ============================================================ + // Filter & Pagination + // ============================================================ + function applyFilters() { + state.filters.workorder = document.getElementById('filterWorkorder').value.trim(); + state.filters.lotid = document.getElementById('filterLotid').value.trim(); + state.filters.package = document.getElementById('filterPackage').value.trim(); + state.filters.type = document.getElementById('filterType').value.trim(); + state.page = 1; + 
loadAllData(false); + } + + function clearFilters() { + document.getElementById('filterWorkorder').value = ''; + document.getElementById('filterLotid').value = ''; + document.getElementById('filterPackage').value = ''; + document.getElementById('filterType').value = ''; + state.filters = { package: '', type: '', workorder: '', lotid: '' }; + + // Also clear status filter + activeStatusFilter = null; + updateCardStyles(); + updateTableTitle(); + + state.page = 1; + loadAllData(false); + } + + function prevPage() { + if (state.page > 1) { + state.page--; + loadAllData(false); + } + } + + function nextPage() { + if (state.data && state.page < state.data.pagination.total_pages) { + state.page++; + loadAllData(false); + } + } + + // ============================================================ + // Auto-refresh + // ============================================================ + function startAutoRefresh() { + if (state.refreshTimer) { + clearInterval(state.refreshTimer); + } + state.refreshTimer = setInterval(() => { + if (!document.hidden) { + loadAllData(false); + } + }, state.REFRESH_INTERVAL); + } + + function manualRefresh() { + startAutoRefresh(); + loadAllData(false); + } + + // ============================================================ + // Lot Detail Functions + // ============================================================ + let selectedLotId = null; + + async function fetchLotDetail(lotId) { + const result = await MesApi.get(`/api/wip/lot/${encodeURIComponent(lotId)}`, { + timeout: API_TIMEOUT + }); + if (result.success) { + return result.data; + } + throw new Error(result.error || 'Failed to fetch lot detail'); + } + + async function showLotDetail(lotId) { + // Update selected state + selectedLotId = lotId; + + // Highlight the selected row + document.querySelectorAll('.lot-id-link').forEach(el => { + el.classList.toggle('active', el.textContent === lotId); + }); + + // Show panel + const panel = document.getElementById('lotDetailPanel'); + 
panel.classList.add('show'); + + // Update title + document.getElementById('lotDetailLotId').textContent = lotId; + + // Show loading + document.getElementById('lotDetailContent').innerHTML = ` +
+ Loading... +
+ `; + + // Scroll to panel + panel.scrollIntoView({ behavior: 'smooth', block: 'start' }); + + try { + const data = await fetchLotDetail(lotId); + renderLotDetail(data); + } catch (error) { + console.error('Failed to load lot detail:', error); + document.getElementById('lotDetailContent').innerHTML = ` +
+ 載入失敗:${error.message || '未知錯誤'} +
+ `; + } + } + + function renderLotDetail(data) { + const labels = data.fieldLabels || {}; + + // Helper to format value + const formatValue = (value) => { + if (value === null || value === undefined || value === '') { + return '-'; + } + if (typeof value === 'number') { + return formatNumber(value); + } + return value; + }; + + // Helper to create field HTML + const field = (key, customLabel = null) => { + const label = customLabel || labels[key] || key; + const value = data[key]; + let valueClass = ''; + + // Special styling for WIP Status + if (key === 'wipStatus') { + valueClass = `status-${(value || '').toLowerCase()}`; + } + + return ` +
+ ${label} + ${formatValue(value)} +
+ `; + }; + + const html = ` +
+ +
+
基本資訊
+ ${field('lotId')} + ${field('workorder')} + ${field('wipStatus')} + ${field('status')} + ${field('qty')} + ${field('qty2')} + ${field('ageByDays')} + ${field('priority')} +
+ + +
+
產品資訊
+ ${field('product')} + ${field('productLine')} + ${field('packageLef')} + ${field('pjType')} + ${field('pjFunction')} + ${field('bop')} + ${field('dateCode')} + ${field('produceRegion')} +
+ + +
+
製程資訊
+ ${field('workcenterGroup')} + ${field('workcenter')} + ${field('spec')} + ${field('specSequence')} + ${field('workflow')} + ${field('equipment')} + ${field('equipmentCount')} + ${field('location')} +
+ + +
+
物料資訊
+ ${field('waferLotId')} + ${field('waferPn')} + ${field('waferLotPrefix')} + ${field('leadframeName')} + ${field('leadframeOption')} + ${field('compoundName')} + ${field('dieConsumption')} + ${field('uts')} +
+ + + ${data.wipStatus === 'HOLD' || data.holdCount > 0 ? ` +
+
Hold 資訊
+ ${field('holdReason')} + ${field('holdCount')} + ${field('holdEmp')} + ${field('holdDept')} + ${field('holdComment')} + ${field('releaseTime')} + ${field('releaseEmp')} + ${field('releaseComment')} +
+ ` : ''} + + + ${data.ncrId ? ` +
+
NCR 資訊
+ ${field('ncrId')} + ${field('ncrDate')} +
+ ` : ''} + + +
+
備註資訊
+ ${field('comment')} + ${field('commentDate')} + ${field('commentEmp')} + ${field('futureHoldComment')} +
+ + +
+
其他資訊
+ ${field('owner')} + ${field('startDate')} + ${field('tmttRemaining')} + ${field('dataUpdateDate')} +
+
+ `; + + document.getElementById('lotDetailContent').innerHTML = html; + } + + function closeLotDetail() { + const panel = document.getElementById('lotDetailPanel'); + panel.classList.remove('show'); + + // Remove highlight from selected row + document.querySelectorAll('.lot-id-link').forEach(el => { + el.classList.remove('active'); + }); + + selectedLotId = null; + } + + // ============================================================ + // Initialize + // ============================================================ + async function init() { + // Setup autocomplete for WORKORDER, LOT ID, PACKAGE, and TYPE + setupAutocomplete('filterWorkorder', 'workorderDropdown', 'workorder'); + setupAutocomplete('filterLotid', 'lotidDropdown', 'lotid'); + setupAutocomplete('filterPackage', 'packageDropdown', 'package'); + setupAutocomplete('filterType', 'typeDropdown', 'type'); + + // Allow Enter key to trigger filter + document.getElementById('filterWorkorder').addEventListener('keypress', (e) => { + if (e.key === 'Enter') applyFilters(); + }); + document.getElementById('filterLotid').addEventListener('keypress', (e) => { + if (e.key === 'Enter') applyFilters(); + }); + document.getElementById('filterPackage').addEventListener('keypress', (e) => { + if (e.key === 'Enter') applyFilters(); + }); + document.getElementById('filterType').addEventListener('keypress', (e) => { + if (e.key === 'Enter') applyFilters(); + }); + + // Get workcenter from URL or use first available + state.workcenter = getUrlParam('workcenter'); + + // Get filters from URL params (passed from wip_overview) + const urlWorkorder = getUrlParam('workorder'); + const urlLotid = getUrlParam('lotid'); + const urlPackage = getUrlParam('package'); + const urlType = getUrlParam('type'); + if (urlWorkorder) { + state.filters.workorder = urlWorkorder; + document.getElementById('filterWorkorder').value = urlWorkorder; + } + if (urlLotid) { + state.filters.lotid = urlLotid; + document.getElementById('filterLotid').value = 
urlLotid; + } + if (urlPackage) { + state.filters.package = urlPackage; + document.getElementById('filterPackage').value = urlPackage; + } + if (urlType) { + state.filters.type = urlType; + document.getElementById('filterType').value = urlType; + } + + if (!state.workcenter) { + // Fetch workcenters and use first one + try { + const workcenters = await fetchWorkcenters(); + if (workcenters && workcenters.length > 0) { + state.workcenter = workcenters[0].name; + // Update URL without reload + window.history.replaceState({}, '', `/wip-detail?workcenter=${encodeURIComponent(state.workcenter)}`); + } + } catch (error) { + console.error('Failed to fetch workcenters:', error); + } + } + + if (state.workcenter) { + document.getElementById('pageTitle').textContent = `WIP Detail - ${state.workcenter}`; + loadAllData(true); + startAutoRefresh(); + + // Handle page visibility (must be after workcenter is set) + document.addEventListener('visibilitychange', () => { + if (!document.hidden && state.workcenter) { + loadAllData(false); + startAutoRefresh(); + } + }); + } else { + document.getElementById('tableContainer').innerHTML = + '
No workcenter available
'; + document.getElementById('loadingOverlay').style.display = 'none'; + } + } + + window.onload = init; + + Object.assign(window, { + applyFilters, + clearFilters, + toggleStatusFilter, + prevPage, + nextPage, + manualRefresh, + showLotDetail, + closeLotDetail, + init + }); +})(); diff --git a/frontend/src/wip-overview/main.js b/frontend/src/wip-overview/main.js new file mode 100644 index 0000000..20904a7 --- /dev/null +++ b/frontend/src/wip-overview/main.js @@ -0,0 +1,829 @@ +import { ensureMesApiAvailable } from '../core/api.js'; +import { + debounce, + fetchWipAutocompleteItems, +} from '../core/autocomplete.js'; + +ensureMesApiAvailable(); + +(function initWipOverviewPage() { + // ============================================================ + // State Management + // ============================================================ + const state = { + summary: null, + matrix: null, + hold: null, + isLoading: false, + lastError: false, + refreshTimer: null, + REFRESH_INTERVAL: 10 * 60 * 1000, // 10 minutes + filters: { + workorder: '', + lotid: '', + package: '', + type: '' + } + }; + + // Status filter state (null = no filter, 'run'/'queue'/'hold' = filtered) + let activeStatusFilter = null; + + // AbortController for cancelling in-flight requests + let matrixAbortController = null; // For loadMatrixOnly() + let loadAllAbortController = null; // For loadAllData() + + // ============================================================ + // Utility Functions + // ============================================================ + function formatNumber(num) { + if (num === null || num === undefined || num === '-') return '-'; + return num.toLocaleString('zh-TW'); + } + + function updateElementWithTransition(elementId, newValue) { + const el = document.getElementById(elementId); + const oldValue = el.textContent; + let formattedNew; + if (typeof newValue === 'number') { + formattedNew = formatNumber(newValue); + } else if (newValue === null || newValue === undefined) { + 
formattedNew = '-'; + } else { + formattedNew = newValue; + } + + if (oldValue !== formattedNew) { + el.textContent = formattedNew; + el.classList.add('updated'); + setTimeout(() => el.classList.remove('updated'), 500); + } + } + + function buildQueryParams() { + const params = {}; + if (state.filters.workorder) { + params.workorder = state.filters.workorder; + } + if (state.filters.lotid) { + params.lotid = state.filters.lotid; + } + if (state.filters.package) { + params.package = state.filters.package; + } + if (state.filters.type) { + params.type = state.filters.type; + } + return params; + } + + // ============================================================ + // API Functions (using MesApi) + // ============================================================ + const API_TIMEOUT = 60000; // 60 seconds timeout + + async function fetchSummary(signal = null) { + const params = buildQueryParams(); + const result = await MesApi.get('/api/wip/overview/summary', { + params, + timeout: API_TIMEOUT, + signal + }); + if (result.success) { + return result.data; + } + throw new Error(result.error || 'Failed to fetch summary'); + } + + async function fetchMatrix(signal = null) { + const params = buildQueryParams(); + // Add status filter if active + if (activeStatusFilter) { + if (activeStatusFilter === 'quality-hold') { + params.status = 'HOLD'; + params.hold_type = 'quality'; + } else if (activeStatusFilter === 'non-quality-hold') { + params.status = 'HOLD'; + params.hold_type = 'non-quality'; + } else { + params.status = activeStatusFilter.toUpperCase(); + } + } + const result = await MesApi.get('/api/wip/overview/matrix', { + params, + timeout: API_TIMEOUT, + signal + }); + if (result.success) { + return result.data; + } + throw new Error(result.error || 'Failed to fetch matrix'); + } + + async function fetchHold(signal = null) { + const params = buildQueryParams(); + const result = await MesApi.get('/api/wip/overview/hold', { + params, + timeout: API_TIMEOUT, + signal + 
}); + if (result.success) { + return result.data; + } + throw new Error(result.error || 'Failed to fetch hold'); + } + + // ============================================================ + // Autocomplete Functions + // ============================================================ + async function searchAutocomplete(type, query) { + const loadingEl = document.getElementById(`${type}Loading`); + loadingEl.classList.add('active'); + try { + return await fetchWipAutocompleteItems({ + searchType: type, + query, + filters: { + workorder: document.getElementById('filterWorkorder').value, + lotid: document.getElementById('filterLotid').value, + package: document.getElementById('filterPackage').value, + type: document.getElementById('filterType').value, + }, + request: (url, options) => MesApi.get(url, options), + }); + } catch (error) { + console.error(`Search ${type} failed:`, error); + } finally { + loadingEl.classList.remove('active'); + } + return []; + } + + function showDropdown(type, items) { + const dropdown = document.getElementById(`${type}Dropdown`); + + if (items.length === 0) { + dropdown.innerHTML = '
無符合結果
'; + } else { + dropdown.innerHTML = items.map(item => + `
${item}
` + ).join(''); + } + dropdown.classList.add('active'); + } + + function hideDropdown(type) { + const dropdown = document.getElementById(`${type}Dropdown`); + dropdown.classList.remove('active'); + } + + function selectAutocomplete(type, value) { + const input = document.getElementById(`filter${type.charAt(0).toUpperCase() + type.slice(1)}`); + input.value = value; + hideDropdown(type); + } + + // Setup autocomplete for inputs + function setupAutocomplete(type) { + const input = document.getElementById(`filter${type.charAt(0).toUpperCase() + type.slice(1)}`); + + const debouncedSearch = debounce(async (query) => { + if (query.length >= 2) { + const items = await searchAutocomplete(type, query); + showDropdown(type, items); + } else { + hideDropdown(type); + } + }, 300); + + input.addEventListener('input', (e) => { + debouncedSearch(e.target.value); + }); + + input.addEventListener('focus', async () => { + const query = input.value; + if (query.length >= 2) { + const items = await searchAutocomplete(type, query); + showDropdown(type, items); + } + }); + + input.addEventListener('blur', () => { + // Delay hide to allow click on dropdown + setTimeout(() => hideDropdown(type), 200); + }); + + input.addEventListener('keydown', (e) => { + if (e.key === 'Enter') { + hideDropdown(type); + applyFilters(); + } + }); + } + + // ============================================================ + // Filter Functions + // ============================================================ + function applyFilters() { + state.filters.workorder = document.getElementById('filterWorkorder').value.trim(); + state.filters.lotid = document.getElementById('filterLotid').value.trim(); + state.filters.package = document.getElementById('filterPackage').value.trim(); + state.filters.type = document.getElementById('filterType').value.trim(); + + updateActiveFiltersDisplay(); + loadAllData(false); + } + + function clearFilters() { + document.getElementById('filterWorkorder').value = ''; + 
document.getElementById('filterLotid').value = ''; + document.getElementById('filterPackage').value = ''; + document.getElementById('filterType').value = ''; + state.filters.workorder = ''; + state.filters.lotid = ''; + state.filters.package = ''; + state.filters.type = ''; + + updateActiveFiltersDisplay(); + loadAllData(false); + } + + function removeFilter(type) { + document.getElementById(`filter${type.charAt(0).toUpperCase() + type.slice(1)}`).value = ''; + state.filters[type] = ''; + updateActiveFiltersDisplay(); + loadAllData(false); + } + + function updateActiveFiltersDisplay() { + const container = document.getElementById('activeFilters'); + let html = ''; + + if (state.filters.workorder) { + html += `WO: ${state.filters.workorder} ×`; + } + if (state.filters.lotid) { + html += `Lot: ${state.filters.lotid} ×`; + } + if (state.filters.package) { + html += `Pkg: ${state.filters.package} ×`; + } + if (state.filters.type) { + html += `Type: ${state.filters.type} ×`; + } + + container.innerHTML = html; + } + + // ============================================================ + // Render Functions + // ============================================================ + function renderSummary(data) { + if (!data) return; + + updateElementWithTransition('totalLots', data.totalLots); + updateElementWithTransition('totalQty', data.totalQtyPcs); + + const ws = data.byWipStatus || {}; + const runLots = ws.run?.lots; + const runQty = ws.run?.qtyPcs; + const queueLots = ws.queue?.lots; + const queueQty = ws.queue?.qtyPcs; + const qualityHoldLots = ws.qualityHold?.lots; + const qualityHoldQty = ws.qualityHold?.qtyPcs; + const nonQualityHoldLots = ws.nonQualityHold?.lots; + const nonQualityHoldQty = ws.nonQualityHold?.qtyPcs; + + updateElementWithTransition( + 'runLots', + runLots === null || runLots === undefined ? '-' : `${formatNumber(runLots)} lots` + ); + updateElementWithTransition( + 'runQty', + runQty === null || runQty === undefined ? 
'-' : formatNumber(runQty) + ); + updateElementWithTransition( + 'queueLots', + queueLots === null || queueLots === undefined ? '-' : `${formatNumber(queueLots)} lots` + ); + updateElementWithTransition( + 'queueQty', + queueQty === null || queueQty === undefined ? '-' : formatNumber(queueQty) + ); + updateElementWithTransition( + 'qualityHoldLots', + qualityHoldLots === null || qualityHoldLots === undefined ? '-' : `${formatNumber(qualityHoldLots)} lots` + ); + updateElementWithTransition( + 'qualityHoldQty', + qualityHoldQty === null || qualityHoldQty === undefined ? '-' : formatNumber(qualityHoldQty) + ); + updateElementWithTransition( + 'nonQualityHoldLots', + nonQualityHoldLots === null || nonQualityHoldLots === undefined ? '-' : `${formatNumber(nonQualityHoldLots)} lots` + ); + updateElementWithTransition( + 'nonQualityHoldQty', + nonQualityHoldQty === null || nonQualityHoldQty === undefined ? '-' : formatNumber(nonQualityHoldQty) + ); + + if (data.dataUpdateDate) { + document.getElementById('lastUpdate').textContent = `Last Update: ${data.dataUpdateDate}`; + } + } + + // ============================================================ + // Status Filter Functions + // ============================================================ + function toggleStatusFilter(status) { + if (activeStatusFilter === status) { + // Deactivate filter + activeStatusFilter = null; + } else { + // Activate new filter + activeStatusFilter = status; + } + + updateCardStyles(); + updateMatrixTitle(); + loadMatrixOnly(); + } + + function updateCardStyles() { + const row = document.querySelector('.wip-status-row'); + document.querySelectorAll('.wip-status-card').forEach(card => { + card.classList.remove('active'); + }); + + if (activeStatusFilter) { + row.classList.add('filtering'); + const activeCard = document.querySelector(`.wip-status-card.${activeStatusFilter}`); + if (activeCard) { + activeCard.classList.add('active'); + } + } else { + row.classList.remove('filtering'); + } + } + + 
function updateMatrixTitle() { + const titleEl = document.querySelector('.card-title'); + if (!titleEl) return; + + const baseTitle = 'Workcenter x Package Matrix (QTY)'; + if (activeStatusFilter) { + let statusLabel; + if (activeStatusFilter === 'quality-hold') { + statusLabel = '品質異常 Hold'; + } else if (activeStatusFilter === 'non-quality-hold') { + statusLabel = '非品質異常 Hold'; + } else { + statusLabel = activeStatusFilter.toUpperCase(); + } + titleEl.textContent = `${baseTitle} - ${statusLabel} Only`; + } else { + titleEl.textContent = baseTitle; + } + } + + async function loadMatrixOnly() { + // Cancel any in-flight matrix request to prevent pile-up + if (matrixAbortController) { + matrixAbortController.abort(); + } + matrixAbortController = new AbortController(); + + const container = document.getElementById('matrixContainer'); + container.innerHTML = '
Loading...
'; + + try { + const matrix = await fetchMatrix(matrixAbortController.signal); + state.matrix = matrix; + renderMatrix(matrix); + } catch (error) { + // Ignore abort errors (expected when user clicks quickly) + if (error.name === 'AbortError') { + console.log('[WIP Overview] Matrix request cancelled (new filter selected)'); + return; + } + console.error('[WIP Overview] Matrix load failed:', error); + container.innerHTML = '
Error loading data
'; + } + } + + function renderMatrix(data) { + const container = document.getElementById('matrixContainer'); + + if (!data || !data.workcenters || data.workcenters.length === 0) { + container.innerHTML = '
No data available
'; + return; + } + + // Limit packages to top 15 for display + const displayPackages = data.packages.slice(0, 15); + + let html = ''; + html += ''; + displayPackages.forEach(pkg => { + html += ``; + }); + html += ''; + html += ''; + + // Data rows + data.workcenters.forEach(wc => { + html += ''; + html += ``; + + displayPackages.forEach(pkg => { + const qty = data.matrix[wc]?.[pkg] || 0; + html += ``; + }); + + html += ``; + html += ''; + }); + + // Total row + html += ''; + html += ''; + displayPackages.forEach(pkg => { + html += ``; + }); + html += ``; + html += ''; + + html += '
Workcenter${pkg}Total
${wc}${qty ? formatNumber(qty) : '-'}${formatNumber(data.workcenter_totals[wc] || 0)}
Total${formatNumber(data.package_totals[pkg] || 0)}${formatNumber(data.grand_total || 0)}
'; + container.innerHTML = html; + } + + // ============================================================ + // Pareto Chart Functions + // ============================================================ + let paretoCharts = { + quality: null, + nonQuality: null + }; + + // Task 2.1: Split hold data by type + function splitHoldByType(data) { + if (!data || !data.items) { + return { quality: [], nonQuality: [] }; + } + const quality = data.items.filter(item => item.holdType === 'quality'); + const nonQuality = data.items.filter(item => item.holdType !== 'quality'); + return { quality, nonQuality }; + } + + // Task 2.2: Prepare Pareto data (sort by QTY desc, calculate cumulative %) + function prepareParetoData(items) { + if (!items || items.length === 0) { + return { reasons: [], qtys: [], lots: [], cumulative: [], totalQty: 0 }; + } + + // Sort by QTY descending + const sorted = [...items].sort((a, b) => (b.qty || 0) - (a.qty || 0)); + + const reasons = sorted.map(item => item.reason || '未知'); + const qtys = sorted.map(item => item.qty || 0); + const lots = sorted.map(item => item.lots || 0); + const totalQty = qtys.reduce((sum, q) => sum + q, 0); + + // Calculate cumulative percentage + const cumulative = []; + let runningSum = 0; + qtys.forEach(qty => { + runningSum += qty; + cumulative.push(totalQty > 0 ? 
Math.round((runningSum / totalQty) * 100) : 0); + }); + + return { reasons, qtys, lots, cumulative, totalQty, items: sorted }; + } + + // Task 3.1: Initialize Pareto charts + function initParetoCharts() { + const qualityEl = document.getElementById('qualityParetoChart'); + const nonQualityEl = document.getElementById('nonQualityParetoChart'); + + if (qualityEl && !paretoCharts.quality) { + paretoCharts.quality = echarts.init(qualityEl); + } + if (nonQualityEl && !paretoCharts.nonQuality) { + paretoCharts.nonQuality = echarts.init(nonQualityEl); + } + } + + // Task 3.2: Render Pareto chart with ECharts + function renderParetoChart(chart, paretoData, colorTheme) { + if (!chart) return; + + const barColor = colorTheme === 'quality' ? '#ef4444' : '#f97316'; + const lineColor = colorTheme === 'quality' ? '#991B1B' : '#9A3412'; + + const option = { + tooltip: { + trigger: 'axis', + axisPointer: { type: 'cross' }, + formatter: function(params) { + const reason = params[0].name; + const qty = params[0].value; + const cumPct = params[1] ? params[1].value : 0; + return `${reason}
QTY: ${formatNumber(qty)}
累計: ${cumPct}%`; + } + }, + grid: { + left: '3%', + right: '4%', + bottom: '15%', + top: '10%', + containLabel: true + }, + xAxis: { + type: 'category', + data: paretoData.reasons, + axisLabel: { + rotate: 30, + interval: 0, + fontSize: 10, + formatter: function(value) { + return value.length > 12 ? value.slice(0, 12) + '...' : value; + } + }, + axisTick: { alignWithLabel: true } + }, + yAxis: [ + { + type: 'value', + name: 'QTY', + position: 'left', + axisLabel: { + formatter: function(val) { + return val >= 1000 ? (val / 1000).toFixed(0) + 'k' : val; + } + } + }, + { + type: 'value', + name: '累計%', + position: 'right', + min: 0, + max: 100, + axisLabel: { formatter: '{value}%' } + } + ], + series: [ + { + name: 'QTY', + type: 'bar', + data: paretoData.qtys, + itemStyle: { color: barColor }, + emphasis: { + itemStyle: { color: barColor, opacity: 0.8 } + } + }, + { + name: '累計%', + type: 'line', + yAxisIndex: 1, + data: paretoData.cumulative, + symbol: 'circle', + symbolSize: 6, + lineStyle: { color: lineColor, width: 2 }, + itemStyle: { color: lineColor } + } + ] + }; + + chart.setOption(option); + + // Task 3.3: Add click event for drill-down + chart.off('click'); // Remove existing handlers + chart.on('click', function(params) { + if (params.componentType === 'series' && params.seriesType === 'bar') { + const reason = paretoData.reasons[params.dataIndex]; + if (reason && reason !== '未知') { + window.location.href = `/hold-detail?reason=${encodeURIComponent(reason)}`; + } + } + }); + } + + // Task 4.1 & 4.2: Render Pareto table with drill-down links + function renderParetoTable(containerId, paretoData) { + const container = document.getElementById(containerId); + if (!container) return; + + if (!paretoData.items || paretoData.items.length === 0) { + container.innerHTML = ''; + return; + } + + let html = ''; + html += ''; + html += ''; + html += ''; + html += ''; + html += ''; + + paretoData.items.forEach((item, idx) => { + const reason = item.reason || '未知'; + 
const reasonLink = item.reason + ? `${reason}` + : reason; + html += ''; + html += ``; + html += ``; + html += ``; + html += ``; + html += ''; + }); + + html += '
Hold ReasonLotsQTY累計%
${reasonLink}${formatNumber(item.lots)}${formatNumber(item.qty)}${paretoData.cumulative[idx]}%
'; + container.innerHTML = html; + } + + // Task 3.4: Handle no data state + function showParetoNoData(type, show) { + const chartEl = document.getElementById(`${type}ParetoChart`); + const noDataEl = document.getElementById(`${type}ParetoNoData`); + if (chartEl) chartEl.style.display = show ? 'none' : 'block'; + if (noDataEl) noDataEl.style.display = show ? 'flex' : 'none'; + } + + // Main render function for Hold data + function renderHold(data) { + initParetoCharts(); + + const { quality, nonQuality } = splitHoldByType(data); + const qualityData = prepareParetoData(quality); + const nonQualityData = prepareParetoData(nonQuality); + + // Update counts in header + document.getElementById('qualityHoldCount').textContent = `${quality.length} 項`; + document.getElementById('nonQualityHoldCount').textContent = `${nonQuality.length} 項`; + + // Quality Pareto + if (quality.length > 0) { + showParetoNoData('quality', false); + renderParetoChart(paretoCharts.quality, qualityData, 'quality'); + renderParetoTable('qualityParetoTable', qualityData); + } else { + showParetoNoData('quality', true); + if (paretoCharts.quality) paretoCharts.quality.clear(); + document.getElementById('qualityParetoTable').innerHTML = ''; + } + + // Non-Quality Pareto + if (nonQuality.length > 0) { + showParetoNoData('nonQuality', false); + renderParetoChart(paretoCharts.nonQuality, nonQualityData, 'non-quality'); + renderParetoTable('nonQualityParetoTable', nonQualityData); + } else { + showParetoNoData('nonQuality', true); + if (paretoCharts.nonQuality) paretoCharts.nonQuality.clear(); + document.getElementById('nonQualityParetoTable').innerHTML = ''; + } + } + + // Task 5.3: Window resize handler for charts + window.addEventListener('resize', function() { + if (paretoCharts.quality) paretoCharts.quality.resize(); + if (paretoCharts.nonQuality) paretoCharts.nonQuality.resize(); + }); + + // ============================================================ + // Navigation + // 
============================================================ + function navigateToDetail(workcenter) { + const params = new URLSearchParams(); + params.append('workcenter', workcenter); + if (state.filters.workorder) params.append('workorder', state.filters.workorder); + if (state.filters.lotid) params.append('lotid', state.filters.lotid); + if (state.filters.package) params.append('package', state.filters.package); + if (state.filters.type) params.append('type', state.filters.type); + window.location.href = `/wip-detail?${params.toString()}`; + } + + // ============================================================ + // Data Loading + // ============================================================ + async function loadAllData(showOverlay = true) { + // Cancel any in-flight request to prevent connection pile-up + if (loadAllAbortController) { + loadAllAbortController.abort(); + console.log('[WIP Overview] Previous request cancelled'); + } + loadAllAbortController = new AbortController(); + const signal = loadAllAbortController.signal; + + state.isLoading = true; + console.log('[WIP Overview] Loading data...', showOverlay ? 
'(with overlay)' : '(background)'); + + if (showOverlay) { + document.getElementById('loadingOverlay').style.display = 'flex'; + } + + // Show refresh indicator + document.getElementById('refreshIndicator').classList.add('active'); + document.getElementById('refreshError').classList.remove('active'); + document.getElementById('refreshSuccess').classList.remove('active'); + + try { + const startTime = performance.now(); + const [summary, matrix, hold] = await Promise.all([ + fetchSummary(signal), + fetchMatrix(signal), + fetchHold(signal) + ]); + const elapsed = Math.round(performance.now() - startTime); + + state.summary = summary; + state.matrix = matrix; + state.hold = hold; + state.lastError = false; + + renderSummary(summary); + renderMatrix(matrix); + renderHold(hold); + + console.log(`[WIP Overview] Data loaded successfully in ${elapsed}ms`); + + // Show success indicator + document.getElementById('refreshSuccess').classList.add('active'); + setTimeout(() => { + document.getElementById('refreshSuccess').classList.remove('active'); + }, 1500); + + } catch (error) { + // Ignore abort errors (expected when user triggers new request) + if (error.name === 'AbortError') { + console.log('[WIP Overview] Request cancelled (new request started)'); + return; + } + console.error('[WIP Overview] Data load failed:', error); + state.lastError = true; + document.getElementById('refreshError').classList.add('active'); + } finally { + state.isLoading = false; + document.getElementById('loadingOverlay').style.display = 'none'; + document.getElementById('refreshIndicator').classList.remove('active'); + } + } + + // ============================================================ + // Auto-refresh + // ============================================================ + function startAutoRefresh() { + if (state.refreshTimer) { + clearInterval(state.refreshTimer); + } + console.log('[WIP Overview] Auto-refresh started, interval:', state.REFRESH_INTERVAL / 1000, 'seconds'); + 
state.refreshTimer = setInterval(() => { + if (!document.hidden) { + console.log('[WIP Overview] Auto-refresh triggered at', new Date().toLocaleTimeString()); + loadAllData(false); // Don't show overlay for auto-refresh + } else { + console.log('[WIP Overview] Auto-refresh skipped (tab hidden)'); + } + }, state.REFRESH_INTERVAL); + } + + function manualRefresh() { + // Reset timer on manual refresh + startAutoRefresh(); + loadAllData(false); + } + + // Handle page visibility + document.addEventListener('visibilitychange', () => { + if (!document.hidden) { + // Page became visible - refresh immediately + loadAllData(false); + startAutoRefresh(); + } + }); + + // ============================================================ + // Initialize + // ============================================================ + window.onload = function() { + setupAutocomplete('workorder'); + setupAutocomplete('lotid'); + setupAutocomplete('package'); + setupAutocomplete('type'); + loadAllData(true); + startAutoRefresh(); + }; + + Object.assign(window, { + applyFilters, + clearFilters, + toggleStatusFilter, + selectAutocomplete, + removeFilter, + navigateToDetail, + manualRefresh, + loadAllData, + startAutoRefresh + }); +})(); diff --git a/frontend/tests/autocomplete.test.js b/frontend/tests/autocomplete.test.js new file mode 100644 index 0000000..a2117c5 --- /dev/null +++ b/frontend/tests/autocomplete.test.js @@ -0,0 +1,57 @@ +import test from 'node:test'; +import assert from 'node:assert/strict'; + +import { + buildWipAutocompleteParams, + fetchWipAutocompleteItems, +} from '../src/core/autocomplete.js'; + +test('buildWipAutocompleteParams keeps cross-filters except active field', () => { + const params = buildWipAutocompleteParams('lotid', 'L123', { + workorder: 'WO1', + lotid: 'L999', + package: 'PKG-A', + type: 'QFN' + }); + + assert.equal(params.field, 'lotid'); + assert.equal(params.q, 'L123'); + assert.equal(params.workorder, 'WO1'); + assert.equal(params.package, 'PKG-A'); + 
assert.equal(params.type, 'QFN'); + assert.equal(Object.prototype.hasOwnProperty.call(params, 'lotid'), false); +}); + +test('buildWipAutocompleteParams returns null for short query', () => { + const params = buildWipAutocompleteParams('workorder', 'a', {}); + assert.equal(params, null); +}); + +test('fetchWipAutocompleteItems maps successful API response', async () => { + const items = await fetchWipAutocompleteItems({ + searchType: 'workorder', + query: 'WO', + filters: {}, + request: async () => ({ + success: true, + data: { + items: ['WO1', 'WO2'] + } + }) + }); + + assert.deepEqual(items, ['WO1', 'WO2']); +}); + +test('fetchWipAutocompleteItems swallows API errors and returns empty list', async () => { + const items = await fetchWipAutocompleteItems({ + searchType: 'workorder', + query: 'WO', + filters: {}, + request: async () => { + throw new Error('network down'); + } + }); + + assert.deepEqual(items, []); +}); diff --git a/frontend/vite.config.js b/frontend/vite.config.js new file mode 100644 index 0000000..44c29c8 --- /dev/null +++ b/frontend/vite.config.js @@ -0,0 +1,29 @@ +import { defineConfig } from 'vite'; +import { resolve } from 'node:path'; + +export default defineConfig({ + publicDir: false, + build: { + outDir: '../src/mes_dashboard/static/dist', + emptyOutDir: false, + sourcemap: false, + rollupOptions: { + input: { + portal: resolve(__dirname, 'src/portal/main.js'), + 'wip-overview': resolve(__dirname, 'src/wip-overview/main.js'), + 'wip-detail': resolve(__dirname, 'src/wip-detail/main.js'), + 'hold-detail': resolve(__dirname, 'src/hold-detail/main.js'), + 'resource-status': resolve(__dirname, 'src/resource-status/main.js'), + 'resource-history': resolve(__dirname, 'src/resource-history/main.js'), + 'job-query': resolve(__dirname, 'src/job-query/main.js'), + 'excel-query': resolve(__dirname, 'src/excel-query/main.js'), + tables: resolve(__dirname, 'src/tables/main.js') + }, + output: { + entryFileNames: '[name].js', + chunkFileNames: 
'chunks/[name]-[hash].js', + assetFileNames: '[name][extname]' + } + } + } +}); diff --git a/frontend_design/Hold_detail.pen b/frontend_design/Hold_detail.pen new file mode 100644 index 0000000..541f2ff --- /dev/null +++ b/frontend_design/Hold_detail.pen @@ -0,0 +1,2182 @@ +{ + "version": "2.6", + "children": [ + { + "type": "frame", + "id": "bi8Au", + "x": 0, + "y": 0, + "name": "Frame", + "clip": true, + "width": 800, + "height": 600, + "fill": "#FFFFFF", + "layout": "none" + }, + { + "type": "frame", + "id": "7V3YX", + "x": 0, + "y": 0, + "name": "Hold Detail Page", + "width": 1400, + "fill": "#F5F7FA", + "layout": "vertical", + "gap": 16, + "padding": 20, + "children": [ + { + "type": "frame", + "id": "I5lpc", + "name": "header", + "width": "fill_container", + "fill": { + "type": "gradient", + "gradientType": "linear", + "enabled": true, + "rotation": 135, + "size": { + "height": 1 + }, + "colors": [ + { + "color": "#667eea", + "position": 0 + }, + { + "color": "#764ba2", + "position": 1 + } + ] + }, + "cornerRadius": 10, + "padding": [ + 18, + 22 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "frame", + "id": "EdWXi", + "name": "headerLeft", + "gap": 12, + "alignItems": "center", + "children": [ + { + "type": "frame", + "id": "MVVGK", + "name": "backBtn", + "width": 36, + "height": 36, + "fill": "rgba(255,255,255,0.2)", + "cornerRadius": 8, + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "icon_font", + "id": "ZJEO4", + "name": "backIcon", + "width": 20, + "height": 20, + "iconFontName": "arrow-left", + "iconFontFamily": "lucide", + "fill": "#FFFFFF" + } + ] + }, + { + "type": "frame", + "id": "8YBhs", + "name": "titleGroup", + "layout": "vertical", + "gap": 4, + "children": [ + { + "type": "text", + "id": "W3jyy", + "name": "pageTitle", + "fill": "#FFFFFF", + "content": "Hold Detail: 缺陷", + "fontFamily": "Inter", + "fontSize": 22, + "fontWeight": "600" + }, + { + "type": 
"frame", + "id": "V3p7U", + "name": "holdBadge", + "fill": "#FEE2E2", + "cornerRadius": 4, + "gap": 6, + "padding": [ + 4, + 8 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "6Mvg0", + "name": "badgeText", + "fill": "#991B1B", + "content": "品質異常 Hold", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + } + ] + } + ] + }, + { + "type": "frame", + "id": "JpXu7", + "name": "headerRight", + "gap": 12, + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "Bg1BI", + "name": "lastUpdate", + "fill": "rgba(255,255,255,0.8)", + "content": "Last Update: 2026-01-28 10:30:00", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + } + ] + }, + { + "type": "frame", + "id": "dH5jc", + "name": "summaryRow", + "width": "fill_container", + "gap": 16, + "children": [ + { + "type": "frame", + "id": "Jcuku", + "name": "card1", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "gap": 8, + "padding": 20, + "children": [ + { + "type": "text", + "id": "NO3qg", + "name": "card1Label", + "fill": "#666666", + "content": "Total Lots", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "S6x9b", + "name": "card1Value", + "fill": "#222222", + "content": "127", + "fontFamily": "Inter", + "fontSize": 32, + "fontWeight": "700" + } + ] + }, + { + "type": "frame", + "id": "k5Fqe", + "name": "card2", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "gap": 8, + "padding": 20, + "children": [ + { + "type": "text", + "id": "OzCf3", + "name": "card2Label", + "fill": "#666666", + "content": "Total QTY (pcs)", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": 
"wrtwd", + "name": "card2Value", + "fill": "#222222", + "content": "458,920", + "fontFamily": "Inter", + "fontSize": 32, + "fontWeight": "700" + } + ] + }, + { + "type": "frame", + "id": "tGRHt", + "name": "card3", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "gap": 8, + "padding": 20, + "children": [ + { + "type": "text", + "id": "WbSE9", + "name": "card3Label", + "fill": "#666666", + "content": "平均當站滯留", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "wUiN4", + "name": "card3Value", + "fill": "#F59E0B", + "content": "3.2 天", + "fontFamily": "Inter", + "fontSize": 32, + "fontWeight": "700" + } + ] + }, + { + "type": "frame", + "id": "BlhLH", + "name": "card4", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "gap": 8, + "padding": 20, + "children": [ + { + "type": "text", + "id": "CLGML", + "name": "card4Label", + "fill": "#666666", + "content": "最久當站滯留", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "aGrnM", + "name": "card4Value", + "fill": "#EF4444", + "content": "15 天", + "fontFamily": "Inter", + "fontSize": 32, + "fontWeight": "700" + } + ] + }, + { + "type": "frame", + "id": "7uKhR", + "name": "card5", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "gap": 8, + "padding": 20, + "children": [ + { + "type": "text", + "id": "P1gHF", + "name": "card5Label", + "fill": "#666666", + "content": "影響站群", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "EoKkT", + "name": "card5Value", + "fill": "#222222", + "content": "8", + 
"fontFamily": "Inter", + "fontSize": 32, + "fontWeight": "700" + } + ] + } + ] + }, + { + "type": "frame", + "id": "qYy5R", + "name": "contentGrid", + "width": "fill_container", + "gap": 16, + "children": [ + { + "type": "frame", + "id": "n6o9k", + "name": "leftCard", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "TQjuU", + "name": "leftHeader", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#E2E6EF" + }, + "padding": [ + 16, + 20 + ], + "children": [ + { + "type": "text", + "id": "9ZCN9", + "name": "leftTitle", + "fill": "#222222", + "content": "依站群分佈 (By Workcenter) - 點擊可篩選", + "fontFamily": "Inter", + "fontSize": 16, + "fontWeight": "600" + }, + { + "type": "frame", + "id": "sRUro", + "name": "clickHint1", + "fill": "#667eea", + "cornerRadius": 4, + "padding": [ + 4, + 8 + ], + "children": [ + { + "type": "text", + "id": "Er9HP", + "name": "clickText1", + "fill": "#FFFFFF", + "content": "可點擊篩選", + "fontFamily": "Inter", + "fontSize": 10, + "fontWeight": "600" + } + ] + } + ] + }, + { + "type": "frame", + "id": "6dcCv", + "name": "leftBody", + "width": "fill_container", + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "KJxxR", + "name": "tableHeader", + "width": "fill_container", + "fill": "#F9FAFB", + "gap": 16, + "padding": [ + 12, + 20 + ], + "children": [ + { + "type": "text", + "id": "pl1PL", + "name": "th1", + "fill": "#666666", + "content": "Workcenter", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "0Nler", + "name": "th2", + "fill": "#666666", + "content": "Lots", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "v8SQy", + "name": "th3", + "fill": "#666666", + "content": 
"QTY", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "hgtG9", + "name": "th4", + "fill": "#666666", + "content": "佔比", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "FeNSs", + "name": "row1", + "width": "fill_container", + "fill": "#EEF2FF", + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#667eea" + }, + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "pzbjE", + "name": "r1c1", + "fill": "#222222", + "content": "DIE_BOND", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "ufZSd", + "name": "r1c2", + "fill": "#222222", + "content": "45", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "sNXbx", + "name": "r1c3", + "fill": "#222222", + "content": "156,230", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "R0Zvo", + "name": "r1c4", + "fill": "#667eea", + "content": "34.1%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "QcUcl", + "name": "row2", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#F0F0F0" + }, + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "pnDQb", + "name": "r2c1", + "fill": "#222222", + "content": "WIRE_BOND", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "8BYJg", + "name": "r2c2", + "fill": "#222222", + "content": "38", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": 
"text", + "id": "0ItAD", + "name": "r2c3", + "fill": "#222222", + "content": "128,450", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "EjrRN", + "name": "r2c4", + "fill": "#667eea", + "content": "28.0%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "0E6IL", + "name": "row3", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#F0F0F0" + }, + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "BuPpm", + "name": "r3c1", + "fill": "#222222", + "content": "MOLDING", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "EePKK", + "name": "r3c2", + "fill": "#222222", + "content": "28", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "2F2i9", + "name": "r3c3", + "fill": "#222222", + "content": "98,120", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "mBXbQ", + "name": "r3c4", + "fill": "#667eea", + "content": "21.4%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "XoxQ0", + "name": "row4", + "width": "fill_container", + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "jiGXt", + "name": "r4c1", + "fill": "#888888", + "content": "Others (5)", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "RJDFw", + "name": "r4c2", + "fill": "#888888", + "content": "16", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": 
"TO1my", + "name": "r4c3", + "fill": "#888888", + "content": "76,120", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "vTEIy", + "name": "r4c4", + "fill": "#888888", + "content": "16.5%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + } + ] + } + ] + } + ] + }, + { + "type": "frame", + "id": "jJRVr", + "name": "rightCard", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "D431a", + "name": "rightHeader", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#E2E6EF" + }, + "padding": [ + 16, + 20 + ], + "children": [ + { + "type": "text", + "id": "wbKWf", + "name": "rightTitle", + "fill": "#222222", + "content": "依 Package 分佈 - 點擊可篩選", + "fontFamily": "Inter", + "fontSize": 16, + "fontWeight": "600" + }, + { + "type": "frame", + "id": "Pq3Qm", + "name": "clickHint2", + "fill": "#667eea", + "cornerRadius": 4, + "padding": [ + 4, + 8 + ], + "children": [ + { + "type": "text", + "id": "vMiTb", + "name": "clickText2", + "fill": "#FFFFFF", + "content": "可點擊篩選", + "fontFamily": "Inter", + "fontSize": 10, + "fontWeight": "600" + } + ] + } + ] + }, + { + "type": "frame", + "id": "crSW4", + "name": "rightBody", + "width": "fill_container", + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "CAnxl", + "name": "pTableHeader", + "width": "fill_container", + "fill": "#F9FAFB", + "gap": 16, + "padding": [ + 12, + 20 + ], + "children": [ + { + "type": "text", + "id": "hbJ0a", + "name": "pth1", + "fill": "#666666", + "content": "Package", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "AjQJS", + "name": "pth2", + "fill": "#666666", + "content": "Lots", + 
"textAlign": "right", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "JYohj", + "name": "pth3", + "fill": "#666666", + "content": "QTY", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "MV6jF", + "name": "pth4", + "fill": "#666666", + "content": "佔比", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "j9Ycq", + "name": "prow1", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#F0F0F0" + }, + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "5PO2Y", + "name": "pr1c1", + "fill": "#222222", + "content": "QFN", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "1CgoT", + "name": "pr1c2", + "fill": "#222222", + "content": "52", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "uQB2r", + "name": "pr1c3", + "fill": "#222222", + "content": "189,450", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "wY306", + "name": "pr1c4", + "fill": "#667eea", + "content": "41.3%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "qI6Ae", + "name": "prow2", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#F0F0F0" + }, + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "E02Zh", + "name": "pr2c1", + "fill": "#222222", + "content": "DFN", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": 
"hFdSa", + "name": "pr2c2", + "fill": "#222222", + "content": "35", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "ZegQX", + "name": "pr2c3", + "fill": "#222222", + "content": "145,230", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "pQQf8", + "name": "pr2c4", + "fill": "#667eea", + "content": "31.6%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "3zbTw", + "name": "prow3", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#F0F0F0" + }, + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "eago9", + "name": "pr3c1", + "fill": "#222222", + "content": "SOT", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "h4G0R", + "name": "pr3c2", + "fill": "#222222", + "content": "22", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "mwqGr", + "name": "pr3c3", + "fill": "#222222", + "content": "78,120", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "3sOgp", + "name": "pr3c4", + "fill": "#667eea", + "content": "17.0%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "lDXi1", + "name": "prow4", + "width": "fill_container", + "gap": 16, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "m8vMX", + "name": "pr4c1", + "fill": "#888888", + "content": "Others (4)", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "HNQIl", 
+ "name": "pr4c2", + "fill": "#888888", + "content": "18", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "uLIWM", + "name": "pr4c3", + "fill": "#888888", + "content": "46,120", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "R4W2c", + "name": "pr4c4", + "fill": "#888888", + "content": "10.1%", + "textAlign": "right", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "frame", + "id": "IRtLq", + "name": "ageSection", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "QczQu", + "name": "ageHeader", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#E2E6EF" + }, + "padding": [ + 16, + 20 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "rjWxw", + "name": "ageTitle", + "fill": "#222222", + "content": "當站滯留天數分佈 (Age at Current Station)", + "fontFamily": "Inter", + "fontSize": 16, + "fontWeight": "600" + }, + { + "type": "text", + "id": "o8zso", + "name": "ageNote", + "fill": "#888888", + "content": "依 MOVEIN 時間計算 | 點擊可篩選", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "1TyeC", + "name": "ageBody", + "width": "fill_container", + "gap": 16, + "padding": 20, + "justifyContent": "space_between", + "children": [ + { + "type": "frame", + "id": "llWVM", + "name": "bucket1", + "width": "fill_container", + "fill": "#F0FDF4", + "cornerRadius": 8, + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#22C55E" + }, + "layout": "vertical", + "gap": 8, + "padding": 16, + "alignItems": 
"center", + "children": [ + { + "type": "text", + "id": "NQoCS", + "name": "b1Label", + "fill": "#166534", + "content": "0-1 天", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "600" + }, + { + "type": "text", + "id": "ovS4b", + "name": "b1Value", + "fill": "#166534", + "content": "42 lots", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + }, + { + "type": "text", + "id": "DWt6p", + "name": "b1Pct", + "fill": "#22C55E", + "content": "33.1%", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "vgoxY", + "name": "b1Qty", + "fill": "#166534", + "content": "152,300", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "ndAGZ", + "name": "bucket2", + "width": "fill_container", + "fill": "#FFFBEB", + "cornerRadius": 8, + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#F59E0B" + }, + "layout": "vertical", + "gap": 8, + "padding": 16, + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "icdfd", + "name": "b2Label", + "fill": "#92400E", + "content": "1-3 天", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "600" + }, + { + "type": "text", + "id": "Rvns8", + "name": "b2Value", + "fill": "#92400E", + "content": "38 lots", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + }, + { + "type": "text", + "id": "TuQKb", + "name": "b2Pct", + "fill": "#F59E0B", + "content": "29.9%", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "kEr4r", + "name": "b2Qty", + "fill": "#92400E", + "content": "138,450", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "gy2x0", + "name": "bucket3", + "width": "fill_container", + "fill": "#FFF7ED", + "cornerRadius": 8, + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#F97316" + }, + "layout": "vertical", + "gap": 8, + 
"padding": 16, + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "B98JF", + "name": "b3Label", + "fill": "#9A3412", + "content": "3-7 天", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "600" + }, + { + "type": "text", + "id": "TdRbd", + "name": "b3Value", + "fill": "#9A3412", + "content": "28 lots", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + }, + { + "type": "text", + "id": "KF1LK", + "name": "b3Pct", + "fill": "#F97316", + "content": "22.0%", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "IcmHX", + "name": "b3Qty", + "fill": "#9A3412", + "content": "98,120", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "iDYN9", + "name": "bucket4", + "width": "fill_container", + "fill": "#FEF2F2", + "cornerRadius": 8, + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#EF4444" + }, + "layout": "vertical", + "gap": 8, + "padding": 16, + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "Peb5J", + "name": "b4Label", + "fill": "#991B1B", + "content": "7+ 天", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "600" + }, + { + "type": "text", + "id": "ydAd9", + "name": "b4Value", + "fill": "#991B1B", + "content": "19 lots", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + }, + { + "type": "text", + "id": "DVUNT", + "name": "b4Pct", + "fill": "#EF4444", + "content": "15.0%", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "r1aMo", + "name": "b4Qty", + "fill": "#991B1B", + "content": "70,050", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + } + ] + } + ] + }, + { + "type": "frame", + "id": "EkLV0", + "name": "lotSection", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + 
}, + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "6Tg6Z", + "name": "lotHeader", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#E2E6EF" + }, + "padding": [ + 16, + 20 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "frame", + "id": "hlYwY", + "name": "lotTitleGroup", + "gap": 12, + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "UvnOY", + "name": "lotTitle", + "fill": "#222222", + "content": "Lot Details", + "fontFamily": "Inter", + "fontSize": 16, + "fontWeight": "600" + }, + { + "type": "frame", + "id": "54DK5", + "name": "lotCount", + "fill": "#667eea", + "cornerRadius": 12, + "padding": [ + 4, + 10 + ], + "children": [ + { + "type": "text", + "id": "65lwE", + "name": "lotCountText", + "fill": "#FFFFFF", + "content": "127 lots", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "OCvgO", + "name": "filterIndicator", + "fill": "#F0FDF4", + "cornerRadius": 12, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#22C55E" + }, + "padding": [ + 4, + 10 + ], + "children": [ + { + "type": "text", + "id": "dYBGb", + "name": "filterText", + "fill": "#166534", + "content": "篩選: DIE_BOND", + "fontFamily": "Inter", + "fontSize": 11, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "hVBnP", + "name": "clearBtn", + "fill": "#FEF2F2", + "cornerRadius": 12, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#EF4444" + }, + "padding": [ + 4, + 10 + ], + "children": [ + { + "type": "text", + "id": "Upvp6", + "name": "clearText", + "fill": "#991B1B", + "content": "✕ 清除篩選", + "fontFamily": "Inter", + "fontSize": 11, + "fontWeight": "600" + } + ] + } + ] + }, + { + "type": "text", + "id": "UAIbQ", + "name": "lotInfo", + "fill": "#888888", + "content": "依滯留天數排序 (最久優先)", + "fontFamily": "Inter", + "fontSize": 12, + 
"fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "k2PhE", + "name": "lotBody", + "width": "fill_container", + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "FYGFO", + "name": "lotTableHeader", + "width": "fill_container", + "fill": "#F9FAFB", + "gap": 8, + "padding": [ + 12, + 20 + ], + "children": [ + { + "type": "text", + "id": "LLmJp", + "name": "lth1", + "fill": "#666666", + "content": "LOTID", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "at5RZ", + "name": "lth2", + "fill": "#666666", + "content": "WORKORDER", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "ujPfX", + "name": "lth3", + "fill": "#666666", + "content": "QTY", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "UoBHt", + "name": "lth4", + "fill": "#666666", + "content": "Package", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "zV5Ma", + "name": "lth5", + "fill": "#666666", + "content": "Workcenter", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "ArmE7", + "name": "lth6", + "fill": "#666666", + "content": "Spec", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "jgkw4", + "name": "lth7", + "fill": "#666666", + "content": "Age", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "IZD0x", + "name": "lth8", + "fill": "#666666", + "content": "Hold By", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + }, + { + "type": "text", + "id": "wk2g6", + "name": "lth9", + "fill": "#666666", + "content": "Dept", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "U7daK", + "name": "lotRow1", + "width": "fill_container", + "stroke": { + 
"align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#F0F0F0" + }, + "gap": 8, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "t3ceS", + "name": "lr1c1", + "fill": "#222222", + "content": "LOT2401150001", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "LV0bf", + "name": "lr1c2", + "fill": "#222222", + "content": "WO20240115001", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "KPyX9", + "name": "lr1c3", + "fill": "#222222", + "content": "5,200", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "4IAJD", + "name": "lr1c4", + "fill": "#222222", + "content": "QFN", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "ejmJF", + "name": "lr1c5", + "fill": "#222222", + "content": "DIE_BOND", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "YpYn3", + "name": "lr1c6", + "fill": "#222222", + "content": "Die Attach", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "frame", + "id": "0MDI8", + "name": "lr1c7", + "fill": "#FEE2E2", + "cornerRadius": 4, + "padding": [ + 2, + 8 + ], + "children": [ + { + "type": "text", + "id": "wGDd5", + "name": "lr1c7t", + "fill": "#991B1B", + "content": "15d", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + }, + { + "type": "text", + "id": "bddVn", + "name": "lr1c8", + "fill": "#222222", + "content": "王小明", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "DrnAd", + "name": "lr1c9", + "fill": "#222222", + "content": "QC", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "XKBSi", + "name": "lotRow2", + "width": "fill_container", + 
"stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#F0F0F0" + }, + "gap": 8, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "lHSqf", + "name": "lr2c1", + "fill": "#222222", + "content": "LOT2401180023", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "xXL5X", + "name": "lr2c2", + "fill": "#222222", + "content": "WO20240118005", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "PCg5f", + "name": "lr2c3", + "fill": "#222222", + "content": "3,800", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "UsM4m", + "name": "lr2c4", + "fill": "#222222", + "content": "DFN", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "soi3b", + "name": "lr2c5", + "fill": "#222222", + "content": "WIRE_BOND", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "rHrFq", + "name": "lr2c6", + "fill": "#222222", + "content": "Wire Bond", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "frame", + "id": "kKLNP", + "name": "lr2c7", + "fill": "#FFF7ED", + "cornerRadius": 4, + "padding": [ + 2, + 8 + ], + "children": [ + { + "type": "text", + "id": "jVduw", + "name": "lr2c7t", + "fill": "#9A3412", + "content": "8d", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + }, + { + "type": "text", + "id": "Nsja6", + "name": "lr2c8", + "fill": "#222222", + "content": "李大華", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "0VS56", + "name": "lr2c9", + "fill": "#222222", + "content": "PE", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "mUQgn", + "name": "lotRow3", + "width": 
"fill_container", + "gap": 8, + "padding": [ + 12, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "yxhdB", + "name": "lr3c1", + "fill": "#222222", + "content": "LOT2401200045", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "zB9xG", + "name": "lr3c2", + "fill": "#222222", + "content": "WO20240120008", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "0GVPK", + "name": "lr3c3", + "fill": "#222222", + "content": "4,500", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "y5oaF", + "name": "lr3c4", + "fill": "#222222", + "content": "SOT", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "lkcvd", + "name": "lr3c5", + "fill": "#222222", + "content": "MOLDING", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "RqRPi", + "name": "lr3c6", + "fill": "#222222", + "content": "Molding", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "frame", + "id": "kMWnK", + "name": "lr3c7", + "fill": "#FFFBEB", + "cornerRadius": 4, + "padding": [ + 2, + 8 + ], + "children": [ + { + "type": "text", + "id": "S80Dg", + "name": "lr3c7t", + "fill": "#92400E", + "content": "2d", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "600" + } + ] + }, + { + "type": "text", + "id": "vC6Ph", + "name": "lr3c8", + "fill": "#222222", + "content": "張三豐", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "xKBf2", + "name": "lr3c9", + "fill": "#222222", + "content": "QC", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + } + ] + }, + { + "type": "frame", + "id": "fgdNX", + "name": "pagination", + "width": "fill_container", + "stroke": { + "align": "inside", + "thickness": { + 
"top": 1 + }, + "fill": "#E2E6EF" + }, + "padding": [ + 12, + 20 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "UkFtQ", + "name": "pageInfo", + "fill": "#888888", + "content": "Showing 1-50 of 127 lots", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "frame", + "id": "b2LAR", + "name": "pageButtons", + "gap": 8, + "children": [ + { + "type": "frame", + "id": "QERaB", + "name": "prevBtn", + "cornerRadius": 6, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "padding": [ + 8, + 16 + ], + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "Msjmx", + "name": "prevText", + "fill": "#666666", + "content": "← Previous", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + }, + { + "type": "frame", + "id": "PFfh6", + "name": "pageNum", + "fill": "#667eea", + "cornerRadius": 6, + "padding": [ + 8, + 16 + ], + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "glqu5", + "name": "pageNumText", + "fill": "#FFFFFF", + "content": "1", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "aWWrR", + "name": "nextBtn", + "cornerRadius": 6, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "padding": [ + 8, + 16 + ], + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "4pKqZ", + "name": "nextText", + "fill": "#666666", + "content": "Next →", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/frontend_design/WIP_main.pen b/frontend_design/WIP_main.pen new file mode 100644 index 0000000..d00b336 --- /dev/null +++ b/frontend_design/WIP_main.pen @@ -0,0 +1,614 @@ +{ + "version": "2.6", + 
"children": [ + { + "type": "frame", + "id": "GIoPU", + "x": 950, + "y": 0, + "name": "WIP Overview - Integrated", + "width": 1200, + "fill": "#F5F7FA", + "layout": "vertical", + "gap": 16, + "padding": 20, + "children": [ + { + "type": "frame", + "id": "N2qxA", + "name": "header", + "width": "fill_container", + "height": 64, + "fill": { + "type": "gradient", + "gradientType": "linear", + "enabled": true, + "rotation": 135, + "size": { + "height": 1 + }, + "colors": [ + { + "color": "#667eea", + "position": 0 + }, + { + "color": "#764ba2", + "position": 1 + } + ] + }, + "cornerRadius": 10, + "padding": [ + 0, + 22 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "7h8YC", + "name": "headerTitle", + "fill": "#FFFFFF", + "content": "WIP Overview Dashboard", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "600" + }, + { + "type": "frame", + "id": "JyskI", + "name": "headerRight", + "gap": 16, + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "8rtgc", + "name": "lastUpdate", + "fill": "rgba(255,255,255,0.8)", + "content": "Last Update: 2026-01-27 14:30", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "normal" + }, + { + "type": "frame", + "id": "ZH0PW", + "name": "refreshBtn", + "fill": "rgba(255,255,255,0.2)", + "cornerRadius": 8, + "padding": [ + 9, + 20 + ], + "children": [ + { + "type": "text", + "id": "wlrMh", + "name": "refreshText", + "fill": "#FFFFFF", + "content": "重新整理", + "fontFamily": "Inter", + "fontSize": 13, + "fontWeight": "600" + } + ] + } + ] + } + ] + }, + { + "type": "frame", + "id": "aYXjP", + "name": "Summary Section", + "width": "fill_container", + "layout": "vertical", + "gap": 12, + "children": [ + { + "type": "frame", + "id": "pFof4", + "name": "kpiRow", + "width": "fill_container", + "gap": 14, + "children": [ + { + "type": "frame", + "id": "0kWPh", + "name": "kpi1", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 
10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "gap": 6, + "padding": [ + 16, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "DTtUq", + "name": "kpi1Label", + "fill": "#666666", + "content": "Total Lots", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "vdmq8", + "name": "kpi1Value", + "fill": "#667eea", + "content": "1,234", + "fontFamily": "Inter", + "fontSize": 28, + "fontWeight": "700" + } + ] + }, + { + "type": "frame", + "id": "wEupl", + "name": "kpi2", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "gap": 6, + "padding": [ + 16, + 20 + ], + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "59OHd", + "name": "kpi2Label", + "fill": "#666666", + "content": "Total QTY", + "fontFamily": "Inter", + "fontSize": 12, + "fontWeight": "normal" + }, + { + "type": "text", + "id": "YkPVl", + "name": "kpi2Value", + "fill": "#667eea", + "content": "56,789", + "fontFamily": "Inter", + "fontSize": 28, + "fontWeight": "700" + } + ] + } + ] + }, + { + "type": "frame", + "id": "g65nT", + "name": "wipStatusRow", + "width": "fill_container", + "gap": 14, + "children": [ + { + "type": "frame", + "id": "sbKdU", + "name": "runCard", + "width": "fill_container", + "fill": "#F0FDF4", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#22C55E" + }, + "layout": "vertical", + "gap": 8, + "padding": [ + 16, + 20 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "frame", + "id": "EQzBo", + "name": "runLeft", + "width": "fill_container", + "gap": 10, + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "rectangle", + "cornerRadius": 5, + "id": "m7Prk", + "name": "runDot", + "fill": 
"#22C55E", + "width": 10, + "height": 10 + }, + { + "type": "text", + "id": "1DMEu", + "name": "runLabel", + "fill": "#166534", + "content": "RUN", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "ZVtRH", + "name": "runRight", + "width": "fill_container", + "gap": 24, + "justifyContent": "center", + "children": [ + { + "type": "text", + "id": "OLwma", + "name": "runLots", + "fill": "#0D0D0D", + "content": "500 lots", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + }, + { + "type": "text", + "id": "OI5f5", + "name": "runQty", + "fill": "#166534", + "content": "30,000 pcs", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + } + ] + } + ] + }, + { + "type": "frame", + "id": "uibRH", + "name": "queueCard", + "width": "fill_container", + "fill": "#FFFBEB", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#F59E0B" + }, + "layout": "vertical", + "gap": 8, + "padding": [ + 16, + 20 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "frame", + "id": "xeGDP", + "name": "queueLeft", + "width": "fill_container", + "gap": 10, + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "rectangle", + "cornerRadius": 5, + "id": "KuAgl", + "name": "queueDot", + "fill": "#F59E0B", + "width": 10, + "height": 10 + }, + { + "type": "text", + "id": "TsD9B", + "name": "queueLabel", + "fill": "#92400E", + "content": "QUEUE", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "41Db3", + "name": "queueRight", + "width": "fill_container", + "gap": 24, + "justifyContent": "center", + "children": [ + { + "type": "text", + "id": "dtaqd", + "name": "queueLots", + "fill": "#0D0D0D", + "content": "634 lots", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + }, + { + "type": "text", + "id": "BVusD", + "name": "queueQty", 
+ "fill": "#92400E", + "content": "21,789 pcs", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + } + ] + } + ] + }, + { + "type": "frame", + "id": "Y5gLu", + "name": "holdCard", + "width": "fill_container", + "fill": "#FEF2F2", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 2, + "fill": "#EF4444" + }, + "layout": "vertical", + "gap": 8, + "padding": [ + 16, + 20 + ], + "justifyContent": "space_between", + "alignItems": "center", + "children": [ + { + "type": "frame", + "id": "juHZC", + "name": "holdLeft", + "width": "fill_container", + "gap": 10, + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "rectangle", + "cornerRadius": 5, + "id": "FW9Vv", + "name": "holdDot", + "fill": "#EF4444", + "width": 10, + "height": 10 + }, + { + "type": "text", + "id": "gEojA", + "name": "holdLabel", + "fill": "#991B1B", + "content": "HOLD", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "3imiS", + "name": "holdRight", + "width": "fill_container", + "gap": 24, + "justifyContent": "center", + "children": [ + { + "type": "text", + "id": "AlTi3", + "name": "holdLots", + "fill": "#0D0D0D", + "content": "100 lots", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + }, + { + "type": "text", + "id": "oKc0i", + "name": "holdQty", + "fill": "#991B1B", + "content": "5,000 pcs", + "fontFamily": "Inter", + "fontSize": 24, + "fontWeight": "700" + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "frame", + "id": "uRXyA", + "name": "Content Grid", + "width": "fill_container", + "gap": 16, + "children": [ + { + "type": "frame", + "id": "7HMip", + "name": "matrixCard", + "width": "fill_container", + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "pxsYm", + "name": "matrixHeader", + "width": 
"fill_container", + "fill": "#FAFBFC", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#E2E6EF" + }, + "padding": [ + 14, + 20 + ], + "children": [ + { + "type": "text", + "id": "JhSDl", + "name": "matrixTitle", + "fill": "#222222", + "content": "Workcenter x Package Matrix (QTY)", + "fontFamily": "Inter", + "fontSize": 15, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "4hQZP", + "name": "matrixBody", + "width": "fill_container", + "height": 200, + "layout": "vertical", + "padding": 16, + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "lH6Yr", + "name": "matrixPlaceholder", + "fill": "#999999", + "content": "[ Matrix Table ]", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + } + ] + } + ] + }, + { + "type": "frame", + "id": "FOIFS", + "name": "holdSummaryCard", + "width": 320, + "fill": "#FFFFFF", + "cornerRadius": 10, + "stroke": { + "align": "inside", + "thickness": 1, + "fill": "#E2E6EF" + }, + "layout": "vertical", + "children": [ + { + "type": "frame", + "id": "uikVi", + "name": "holdSummaryHeader", + "width": "fill_container", + "fill": "#FAFBFC", + "stroke": { + "align": "inside", + "thickness": { + "bottom": 1 + }, + "fill": "#E2E6EF" + }, + "padding": [ + 14, + 20 + ], + "children": [ + { + "type": "text", + "id": "VBWBv", + "name": "holdSummaryTitle", + "fill": "#222222", + "content": "Hold Summary", + "fontFamily": "Inter", + "fontSize": 15, + "fontWeight": "600" + } + ] + }, + { + "type": "frame", + "id": "cFEPm", + "name": "holdSummaryBody", + "width": "fill_container", + "height": 200, + "layout": "vertical", + "padding": 16, + "justifyContent": "center", + "alignItems": "center", + "children": [ + { + "type": "text", + "id": "s7sa1", + "name": "holdSummaryPlaceholder", + "fill": "#999999", + "content": "[ Hold Table ]", + "fontFamily": "Inter", + "fontSize": 14, + "fontWeight": "normal" + } + ] + } + ] + } + ] + } + ] + } + 
] +} \ No newline at end of file diff --git a/gunicorn.conf.py b/gunicorn.conf.py new file mode 100644 index 0000000..bcd7e9f --- /dev/null +++ b/gunicorn.conf.py @@ -0,0 +1,38 @@ +import os + +bind = os.getenv("GUNICORN_BIND", "0.0.0.0:8080") +workers = int(os.getenv("GUNICORN_WORKERS", "2")) # two workers so a single worker crash does not drop service +threads = int(os.getenv("GUNICORN_THREADS", "4")) +worker_class = "gthread" + +# Timeout settings - critical for dashboard stability +timeout = 65 # Worker kill timeout (seconds); must exceed the app call_timeout (55s) +graceful_timeout = 30 # Graceful shutdown window (seconds; enough for thread cleanup) +keepalive = 5 # Seconds to hold idle keep-alive connections open + +# Worker lifecycle management - prevent state accumulation +max_requests = 1000 # Recycle each worker after this many requests +max_requests_jitter = 100 # Random jitter so workers do not all recycle at once + + +# ============================================================ +# Worker Lifecycle Hooks +# ============================================================ + +def worker_exit(server, worker): + """Clean up background threads and database connections when worker exits.""" + # Stop background sync threads first, before the DB engine is disposed + try: + from mes_dashboard.services.realtime_equipment_cache import ( + stop_equipment_status_sync_worker + ) + stop_equipment_status_sync_worker() + except Exception as e: + server.log.warning(f"Error stopping equipment sync worker: {e}") + + # Then dispose pooled database connections + try: + from mes_dashboard.core.database import dispose_engine + dispose_engine() + except Exception as e: + server.log.warning(f"Error disposing database engine: {e}") diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/.openspec.yaml b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/.openspec.yaml new file mode 100644 index 0000000..ba4d3f5 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-02-07 diff
--git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/design.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/design.md new file mode 100644 index 0000000..706ae83 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/design.md @@ -0,0 +1,79 @@ +## Context + +`DashBoard_vite` 已完成第一批根目錄重構,但仍有部分頁面維持大量 inline script、部分計算在後端實作且缺乏前後一致性驗證、欄位命名規則未全面治理。`DashBoard/` 目前仍作為結構與行為參考來源。此變更目標是完成最終遷移:以 `DashBoard_vite` 根目錄作為唯一開發/部署主體,並建立可持續的前端模組化、欄位契約、快取可觀測性與遷移門檻。 + +## Goals / Non-Goals + +**Goals:** +- 完成 root cutover,執行與維護流程完全以 `DashBoard_vite` 為主。 +- 將主要頁面前端腳本模組化至 Vite 管理,降低單檔模板複雜度。 +- 將可前端化的展示/聚合計算前移,並建立與既有輸出一致性驗證。 +- 建立 UI/API/Export 欄位契約與自動檢核機制。 +- 強化分層快取的健康指標與退化觀測。 +- 制定遷移驗收門檻、灰度與回退方案。 + +**Non-Goals:** +- 不重寫所有頁面的視覺設計。 +- 不更換資料來源(Oracle schema 與核心資料表不變)。 +- 不改成前後端雙對外服務架構(維持單一 port)。 + +## Decisions + +1. Canonical root ownership +- Decision: `DashBoard_vite` 為唯一可執行主工程;`DashBoard/` 僅保留為對照基準直到遷移結案。 +- Why: 避免規格、程式碼、部署分散在不同根目錄。 +- Alternative: 長期雙根並行;放棄,因維運成本與錯誤率高。 + +2. Page-by-page Vite modularization +- Decision: 以頁面為單位建立 Vite entry,先抽共用 core(API、toast、table/tree、field contract),再遷移頁面。 +- Why: 風險可控,便於逐頁回歸驗證。 +- Alternative: 一次性 SPA rewrite;放棄,風險高且不符合保持既有邏輯要求。 + +3. Compute-shift contract with parity checks +- Decision: 後端保留原始資料查詢與必要彙整,前端承接展示層聚合/格式化;每個前移計算需有 parity fixture。 +- Why: 提升前端互動效率,同時避免行為偏移。 +- Alternative: 全留後端;放棄,無法達成前移目標。 + +4. Field contract registry +- Decision: 建立欄位契約註冊檔(UI label / API key / export header / semantic type),頁面與匯出共用。 +- Why: 消除欄位語義不一致與下載對不上畫面的問題。 +- Alternative: 分頁分散維護;放棄,長期不可控。 + +5. 
Cache observability first-class +- Decision: 延續 L1 memory + L2 Redis,新增命中率、資料新鮮度、降級狀態指標並在 health/deep-health 可見。 +- Why: 快取是效能與穩定核心,需可觀測才能穩定運維。 +- Alternative: 僅保留功能快取不加觀測;放棄,故障定位成本高。 + +## Risks / Trade-offs + +- [Risk] 模組化拆分期間,舊 inline 與新 module 並存造成行為差異 → Mitigation: 對每頁保留 feature flag 或 fallback,逐頁切換。 +- [Risk] 前移計算造成數值差異(四捨五入、分母定義) → Mitigation: 建立固定測試資料與 snapshot 比對,未通過不得切換。 +- [Risk] 欄位契約改名影響下游報表流程 → Mitigation: 提供 alias 過渡期與變更公告。 +- [Risk] Redis/Oracle 不可用時測試訊號雜訊高 → Mitigation: 分離 unit/fallback 與 integration pipelines。 + +## Migration Plan + +1. Baseline freeze +- 凍結基線 API payload、頁面主要互動、匯出欄位,產生對照清單。 + +2. Cutover preparation +- 補齊根目錄執行文件、CI 與腳本,確保不再依賴 `DashBoard/`。 + +3. Modularization waves +- Wave A: Portal、resource history、job query。 +- Wave B: resource status、excel query、tables。 +- 每波完成後執行頁面回歸與欄位一致性檢核。 + +4. Compute-shift waves +- 先移動展示層聚合與圖表資料整理,再評估進一步前移。 +- 每項前移需 parity 測試與效能比較。 + +5. Final cutover and cleanup +- 滿足驗收門檻後將 `DashBoard/` 標記為 archived reference 或移除。 +- 完成回退文件與操作手冊更新。 + +## Open Questions + +- `DashBoard/` 在結案後保留多久(短期備援或立即封存)? +- 哪一頁的前移計算業務優先級最高(resource_history vs job_query)? +- 是否要求在 cutover 前補齊端對端自動化下載欄位比對? 
diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/proposal.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/proposal.md new file mode 100644 index 0000000..a681b55 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/proposal.md @@ -0,0 +1,32 @@ +## Why + +目前已完成第一批根目錄重構,但仍存在「部分頁面與邏輯尚未完整遷移」的階段性狀態。需要建立完整遷移提案,將 `DashBoard_vite` 根目錄收斂為唯一開發與運行主體,並完成前端模組化與欄位契約治理,避免長期雙結構維運風險。 + +## What Changes + +- 完成從參考結構到根目錄主工程的全面切換,消除對 `DashBoard/` 作為執行依賴。 +- 以 Vite 完整模組化 Portal 與主要業務頁面前端腳本,逐步移除大型 inline scripts。 +- 在不改變既有業務流程前提下,將可前端化的展示/聚合計算由後端移至前端。 +- 建立 UI/API/Export 欄位契約機制,對報表與查詢頁進行一致性治理。 +- 擴充快取與運維可觀測性,明確 Redis 與記憶體快取的行為、指標與退化策略。 +- 建立完整遷移驗收與回退規則,作為 cutover 與後續清理依據。 + +## Capabilities + +### New Capabilities +- `root-cutover-finalization`: 定義並完成根目錄主工程最終切換與遺留結構去依賴。 +- `full-vite-page-modularization`: 完成主要頁面腳本的 Vite 模組化與資產輸出治理。 +- `frontend-compute-shift`: 將展示層可前端化計算從後端搬移到前端,保持行為一致。 +- `field-contract-governance`: 建立並執行欄位契約(UI label / API key / export header)一致性規範。 +- `cache-observability-hardening`: 強化分層快取策略與健康指標,明確失效與退化行為。 +- `migration-gates-and-rollout`: 定義完整遷移的驗收門檻、灰度與回退流程。 + +### Modified Capabilities +- None. 
+ +## Impact + +- Affected code: root `src/`, `frontend/`, `scripts/`, `tests/`, `docs/`。 +- Runtime/deploy: Conda + Node(Vite) build pipeline、Flask/Gunicorn 單一對外 port 模式。 +- APIs/pages: Portal、resource status、resource history、job query、excel query、tables 等頁面腳本與欄位輸出。 +- Ops: Redis 快取、記憶體快取、health/deep health 指標與告警解讀。 diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/cache-observability-hardening/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/cache-observability-hardening/spec.md new file mode 100644 index 0000000..ae01a58 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/cache-observability-hardening/spec.md @@ -0,0 +1,15 @@ +## ADDED Requirements + +### Requirement: Layered Cache SHALL Expose Operational State +The route cache implementation SHALL expose layered cache operational state, including mode, freshness, and degradation status. + +#### Scenario: Redis unavailable degradation state +- **WHEN** Redis is unavailable +- **THEN** health endpoints MUST indicate degraded cache mode while keeping L1 memory cache active + +### Requirement: Cache Telemetry MUST be Queryable for Operations +The system MUST provide cache telemetry suitable for operations diagnostics. 
+ +#### Scenario: Telemetry inspection +- **WHEN** operators request deep health status +- **THEN** cache-related metrics/state SHALL be present and interpretable for troubleshooting diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/field-contract-governance/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/field-contract-governance/spec.md new file mode 100644 index 0000000..a7a695d --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/field-contract-governance/spec.md @@ -0,0 +1,19 @@ +## ADDED Requirements + +### Requirement: Field Contract Registry SHALL Define UI/API/Export Mapping +The system SHALL maintain a field contract registry mapping UI labels, API keys, export headers, and semantic types. + +#### Scenario: Contract lookup for page rendering +- **WHEN** a page renders table headers and values +- **THEN** it MUST resolve display labels and keys through the shared field contract definitions + +#### Scenario: Contract lookup for export +- **WHEN** export headers are generated +- **THEN** header names MUST follow the same semantic mapping used by the page contract + +### Requirement: Consistency Checks MUST Detect Contract Drift +The system MUST provide automated checks that detect mismatches between UI, API response keys, and export field definitions. 
+ +#### Scenario: Drift detection failure +- **WHEN** a page or export changes a field name without updating the contract +- **THEN** consistency checks MUST report a failing result before release diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/frontend-compute-shift/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/frontend-compute-shift/spec.md new file mode 100644 index 0000000..de3af10 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/frontend-compute-shift/spec.md @@ -0,0 +1,15 @@ +## ADDED Requirements + +### Requirement: Display-Layer Computation SHALL be Shifted to Frontend Safely +The system SHALL move eligible display-layer computations from backend to frontend while preserving existing business behavior. + +#### Scenario: Equivalent metric output +- **WHEN** frontend-computed metrics are produced for a supported page +- **THEN** output values MUST match baseline backend results within defined rounding rules + +### Requirement: Compute Shift MUST be Verifiable by Parity Fixtures +Each migrated computation MUST have parity fixtures comparing baseline and migrated outputs. 
+ +#### Scenario: Parity test gating +- **WHEN** a compute-shifted module is changed +- **THEN** parity checks MUST run and fail the migration gate if output differs beyond tolerance diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/full-vite-page-modularization/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/full-vite-page-modularization/spec.md new file mode 100644 index 0000000..b4af93f --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/full-vite-page-modularization/spec.md @@ -0,0 +1,19 @@ +## ADDED Requirements + +### Requirement: Major Pages SHALL be Managed by Vite Modules +The system SHALL provide Vite-managed module entries for major portal pages, replacing inline scripts in a phased manner. + +#### Scenario: Portal module loading +- **WHEN** the portal page is rendered +- **THEN** it MUST load its behavior from a Vite-built module asset when available + +#### Scenario: Page module fallback +- **WHEN** a required Vite asset is unavailable +- **THEN** the system MUST keep page behavior functional through explicit fallback logic + +### Requirement: Build Pipeline SHALL Produce Backend-Served Assets +Vite build output MUST be emitted into backend static paths and served by Flask/Gunicorn on the same origin. 
+ +#### Scenario: Build artifact placement +- **WHEN** frontend build is executed +- **THEN** generated JS/CSS files SHALL be written to the configured backend static dist directory diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/migration-gates-and-rollout/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/migration-gates-and-rollout/spec.md new file mode 100644 index 0000000..43e948a --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/migration-gates-and-rollout/spec.md @@ -0,0 +1,15 @@ +## ADDED Requirements + +### Requirement: Migration Gates SHALL Define Cutover Readiness +The system SHALL define explicit migration gates for functional parity, build integrity, and operational health before final cutover. + +#### Scenario: Gate evaluation before cutover +- **WHEN** release is prepared for final cutover +- **THEN** all required migration gates MUST pass or cutover SHALL be blocked + +### Requirement: Rollout and Rollback Procedures MUST be Actionable +The system SHALL document actionable rollout and rollback procedures for root migration. 
+ +#### Scenario: Rollback execution +- **WHEN** post-cutover validation fails critical checks +- **THEN** operators MUST be able to execute documented rollback steps to restore previous stable behavior diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/root-cutover-finalization/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/root-cutover-finalization/spec.md new file mode 100644 index 0000000..d1494ea --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/specs/root-cutover-finalization/spec.md @@ -0,0 +1,19 @@ +## ADDED Requirements + +### Requirement: Root Project SHALL be the Single Execution Target +The system SHALL run all application startup, test, and deployment workflows from `DashBoard_vite` root without requiring nested `DashBoard/` paths. + +#### Scenario: Root startup script execution +- **WHEN** an operator runs start/deploy scripts from `DashBoard_vite` root +- **THEN** all referenced source/config/script paths MUST resolve inside root project structure + +#### Scenario: Root test execution +- **WHEN** CI or local developer runs test commands from root +- **THEN** tests SHALL execute against root source tree and root config files + +### Requirement: Reference Directory MUST Remain Non-Authoritative +`DashBoard/` SHALL be treated as reference-only and MUST NOT be required for production runtime. + +#### Scenario: Runtime independence +- **WHEN** root application is started in an environment without `DashBoard/` +- **THEN** the application MUST remain functional for the defined migration scope diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/tasks.md b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/tasks.md new file mode 100644 index 0000000..a2b163a --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-complete-migration/tasks.md @@ -0,0 +1,42 @@ +## 1. 
Root Cutover Finalization + +- [x] 1.1 Inventory all remaining runtime/test/deploy references to nested `DashBoard/` paths. +- [x] 1.2 Remove or replace nested-path dependencies so root scripts and app startup are self-contained. +- [x] 1.3 Define and execute root-only smoke startup checks. + +## 2. Vite Full Page Modularization + +- [x] 2.1 Create/standardize Vite entries for Portal, Resource Status, Resource History, Job Query, Excel Query, and Tables. +- [x] 2.2 Extract shared frontend core modules (API wrappers, table/tree helpers, field contract helpers). +- [x] 2.3 Replace targeted inline scripts with module bootstraps while preserving fallback behavior. +- [x] 2.4 Update template asset resolution to support per-page Vite bundles. + +## 3. Frontend Compute Shift + +- [x] 3.1 Identify display-layer computations eligible for frontend migration and document parity rules. +- [x] 3.2 Migrate selected calculations page by page with deterministic helper functions. +- [x] 3.3 Add parity fixtures/tests comparing baseline backend vs migrated frontend outputs. + +## 4. Field Contract Governance + +- [x] 4.1 Introduce shared field contract registry for UI/API/Export mapping. +- [x] 4.2 Apply the registry to Job Query and Resource History completely (including headers and semantic types). +- [x] 4.3 Extend consistency checks to additional pages and exports. + +## 5. Cache Observability Hardening + +- [x] 5.1 Expand cache telemetry fields in health/deep-health outputs. +- [x] 5.2 Add explicit degraded-mode visibility when Redis is unavailable. +- [x] 5.3 Validate cache behavior and telemetry under L1-only and L1+L2 modes. + +## 6. Migration Gates and Rollout + +- [x] 6.1 Define gate checklist for cutover readiness (tests, parity, build, health). +- [x] 6.2 Document rollout steps and operator runbook for the final cutover. +- [x] 6.3 Document rollback procedure and rehearse rollback validation. + +## 7. 
Validation and Documentation + +- [x] 7.1 Run focused unit/integration checks in root project and record evidence. +- [x] 7.2 Record known environment-dependent gaps (Oracle/Redis) and mitigation plan. +- [x] 7.3 Update README/docs to declare final root-first workflow and migration status. diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/.openspec.yaml b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/.openspec.yaml new file mode 100644 index 0000000..ba4d3f5 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-02-07 diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/design.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/design.md new file mode 100644 index 0000000..f0921b5 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/design.md @@ -0,0 +1,64 @@ +## Context + +現有程式碼主體在 `DashBoard/` 子目錄,OpenSpec/opsx 在 `DashBoard_vite` 根目錄,造成需求追蹤、實作與驗證分離。重構目標是以 `DashBoard/` 作為參考來源,將可執行專案落到根目錄,並在維持單一對外服務埠前提下導入 Vite 前端建置與模組化。 + +## Goals / Non-Goals + +**Goals:** +- 在 `DashBoard_vite` 根目錄建立可運行工程,與 OpenSpec artifacts 同層。 +- 維持 Flask/Gunicorn 單一對外 port,前端資產由 Flask static 提供。 +- 導覽改為抽屜分組,保持既有頁面與 drill-down 操作語意。 +- 導入分層快取(L1 memory + L2 Redis)取代 NoOp 預設。 +- 建立畫面欄位、API key、下載欄位的一致性規範。 + +**Non-Goals:** +- 不在第一階段重寫所有頁面 UI。 +- 不更動核心商業資料來源(Oracle schema 與主要 SQL 邏輯)。 +- 不在第一階段導入多服務或多 port 架構。 + +## Decisions + +1. Root-first migration(根目錄主工程) +- Decision: 以 `DashBoard/` 為參考,將執行入口、`src/`、`scripts/`、前端建置等移到 `DashBoard_vite` 根目錄。 +- Rationale: 使 OpenSpec 與可執行程式在同一工作根,避免流程分裂。 +- Alternative considered: 繼續在 `DashBoard/` 開發,放棄;因與使用者要求衝突。 + +2. Single-port Vite integration +- Decision: 使用 Vite build 輸出到 Flask static,僅在開發時可選擇 Vite dev server,不作對外正式服務。 +- Rationale: 保持現行部署模型與防火牆策略,降低切換風險。 +- Alternative considered: 分離前後端雙服務;放棄以符合單一 port 約束。 + +3. 
Layered route cache +- Decision: 路由層快取採用 L1 memory TTL + L2 Redis JSON;Redis 不可用時仍有 L1。 +- Rationale: 改善響應速度與穩定性,避免 NoOp 導致的快取失效。 +- Alternative considered: Redis-only;放棄以避免 Redis 異常時退化過大。 + +4. Navigation IA by drawers +- Decision: 將 portal 導覽分為「報表類、查詢類、開發工具類」抽屜,頁面內容維持原路由/iframe lazy load。 +- Rationale: 降低使用者認知負擔,同時避免一次性替換頁面內邏輯。 +- Alternative considered: 直接改成 SPA router;放棄以降低第一階段風險。 + +5. Field contract normalization +- Decision: 建立欄位契約字典(UI label / API key / export header),並先修正已知不一致。 +- Rationale: 避免匯出與畫面解讀差異造成誤用。 +- Alternative considered: 每頁分散維護;放棄因長期不可維護。 + +## Risks / Trade-offs + +- [Risk] 根目錄遷移時檔案基線混亂(舊目錄與新目錄並存) → Mitigation: 明確標註 `DashBoard/` 為 reference,新增 root 驗證與遷移清單。 +- [Risk] Redis/Oracle 在本機測試環境不可用導致測試波動 → Mitigation: 分離「單元測試通過」與「環境依賴測試」兩條驗證報告。 +- [Risk] Portal 抽屜調整影響既有 E2E selector → Mitigation: 保留原 tab class/data-target,先兼容再逐步更新測試。 +- [Risk] 欄位命名調整影響下游檔案流程 → Mitigation: 提供別名過渡期與欄位映射文件。 + +## Migration Plan + +1. 建立根目錄主工程骨架(參照 `DashBoard/`),保留 `DashBoard/` 作為對照來源。 +2. 導入 Vite build 流程並接入 `deploy/start` 腳本。 +3. 套用 portal 抽屜導覽與快取 backend 重構。 +4. 執行欄位一致性第一批修正(job query / resource history)。 +5. 補齊根目錄測試與操作文件,確認單一 port 運作。 + +## Open Questions + +- 根目錄最終是否保留 `DashBoard/` 作為長期參考,或在完成驗收後移除? +- 第二階段前端運算前移的優先頁面順序(`resource_history` vs `job_query`)是否有業務優先級? 
diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/proposal.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/proposal.md new file mode 100644 index 0000000..ffc1479 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/proposal.md @@ -0,0 +1,29 @@ +## Why + +目前可執行程式碼位於 `DashBoard/` 子目錄,與 `DashBoard_vite` 根目錄的 OpenSpec/opsx 工作流分離,導致規格、實作與驗證不在同一專案根。需要以 `DashBoard` 為參考,將重構主體統一到 `DashBoard_vite` 根目錄,並同時導入 Vite 以改善前端可維護性與體驗。 + +## What Changes + +- 在 `DashBoard_vite` 根目錄建立可執行的重構專案骨架,參照既有 `DashBoard` 功能與路由。 +- 維持 Flask/Gunicorn 單一對外 port,導入 Vite 作為前端建置工具(build artifact 由 Flask 提供)。 +- 導覽由平鋪 tab 重構為功能抽屜(報表類、查詢類、開發工具類),保持既有業務操作路徑。 +- 快取策略改為可運作的分層快取(L1 記憶體 + L2 Redis),不再使用 NoOp 做為預設。 +- 建立前端顯示欄位與下載欄位的一致性規範,先修正已知不一致案例。 + +## Capabilities + +### New Capabilities +- `root-project-restructure`: 以 `DashBoard` 為參考,將可運行的重構工程落在 `DashBoard_vite` 根目錄。 +- `vite-single-port-integration`: Vite 建置結果整合進 Flask static,維持單一 server/port 對外。 +- `portal-drawer-navigation`: Portal 導覽改為抽屜分類且維持原頁面邏輯。 +- `layered-route-cache`: 路由層快取改為 L1 memory + L2 Redis 的可用實作。 +- `field-name-consistency`: 統一畫面欄位、API key 與匯出欄位命名/語義。 + +### Modified Capabilities +- None. 
+ +## Impact + +- Affected codebase root: `DashBoard_vite`(新主工程落點) +- Reference baseline: `DashBoard/`(保留作比對與遷移來源) +- Affected systems: Flask app factory, templates, frontend build pipeline, deployment/start scripts, cache layer, export SQL/headers diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/field-name-consistency/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/field-name-consistency/spec.md new file mode 100644 index 0000000..5d649db --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/field-name-consistency/spec.md @@ -0,0 +1,12 @@ +## ADDED Requirements + +### Requirement: UI and Export Fields SHALL Have a Consistent Contract +The system SHALL define and apply a consistent contract among UI column labels, API keys, and export headers for report/query pages. + +#### Scenario: Job query export naming consistency +- **WHEN** job query exports include cause/repair/symptom values +- **THEN** exported field names SHALL reflect semantic value type consistently (e.g., code name vs status name) + +#### Scenario: Resource history field alignment +- **WHEN** resource history detail table shows KPI columns +- **THEN** columns required by export semantics (including Availability%) SHALL be present or explicitly mapped diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/layered-route-cache/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/layered-route-cache/spec.md new file mode 100644 index 0000000..87dd388 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/layered-route-cache/spec.md @@ -0,0 +1,19 @@ +## ADDED Requirements + +### Requirement: Route Cache SHALL Use Layered Storage +The route cache SHALL use L1 in-memory TTL cache and L2 Redis JSON cache when Redis is available. 
+ +#### Scenario: L1 cache hit +- **WHEN** a cached key exists in L1 and is unexpired +- **THEN** the API response SHALL be returned from memory without querying Redis + +#### Scenario: L2 fallback +- **WHEN** a cached key is missing in L1 but exists in Redis +- **THEN** the value SHALL be returned and warmed into L1 + +### Requirement: Cache SHALL Degrade Gracefully Without Redis +The route cache SHALL remain functional with L1 cache when Redis is unavailable. + +#### Scenario: Redis unavailable at startup +- **WHEN** Redis health check fails during app initialization +- **THEN** route cache operations SHALL continue using L1 cache without application failure diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/portal-drawer-navigation/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/portal-drawer-navigation/spec.md new file mode 100644 index 0000000..64cc314 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/portal-drawer-navigation/spec.md @@ -0,0 +1,15 @@ +## ADDED Requirements + +### Requirement: Portal Navigation SHALL Group Entries by Functional Drawers +The portal SHALL group navigation entries into functional drawers: reports, queries, and development tools. + +#### Scenario: Drawer grouping visibility +- **WHEN** users open the portal +- **THEN** report pages and query pages SHALL appear in separate drawer groups + +### Requirement: Existing Page Behavior SHALL Remain Compatible +The portal navigation refactor SHALL preserve existing target routes and lazy-load behavior for content frames. 
+ +#### Scenario: Route continuity +- **WHEN** a user selects an existing page entry from the new drawer +- **THEN** the corresponding original route SHALL be loaded without changing page business logic behavior diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/root-project-restructure/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/root-project-restructure/spec.md new file mode 100644 index 0000000..dec15f1 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/root-project-restructure/spec.md @@ -0,0 +1,12 @@ +## ADDED Requirements + +### Requirement: Root Directory SHALL be the Primary Executable Project +The system SHALL treat `DashBoard_vite` root directory as the primary executable project, while `DashBoard/` remains reference-only during migration. + +#### Scenario: Running app from root +- **WHEN** a developer runs project scripts from `DashBoard_vite` root +- **THEN** the application startup flow SHALL resolve code and config from root project files + +#### Scenario: Reference directory preserved +- **WHEN** migration is in progress +- **THEN** `DashBoard/` SHALL remain available for structure comparison and behavior verification diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/vite-single-port-integration/spec.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/vite-single-port-integration/spec.md new file mode 100644 index 0000000..8f0208b --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/specs/vite-single-port-integration/spec.md @@ -0,0 +1,15 @@ +## ADDED Requirements + +### Requirement: Frontend Build SHALL Use Vite With Flask Static Output +The system SHALL use Vite to build frontend assets and output artifacts into Flask static directories served by the backend. 
+ +#### Scenario: Build asset generation +- **WHEN** frontend build is executed +- **THEN** Vite SHALL generate portal-related JS/CSS artifacts into the backend static output path + +### Requirement: Deployment SHALL Preserve Single External Port +The system SHALL preserve single-port external serving through Flask/Gunicorn. + +#### Scenario: Production serving mode +- **WHEN** the system runs in deployment mode +- **THEN** frontend assets SHALL be served through Flask on the same external port as API/page routes diff --git a/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/tasks.md b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/tasks.md new file mode 100644 index 0000000..9c6b53d --- /dev/null +++ b/openspec/changes/archive/2026-02-07-dashboard-vite-root-refactor/tasks.md @@ -0,0 +1,26 @@ +## 1. Root Migration Baseline + +- [x] 1.1 Build root project baseline in `DashBoard_vite` by referencing `DashBoard/` structure while preserving `DashBoard/` as comparison source. +- [x] 1.2 Ensure root-level Python entry/config/scripts can run without depending on nested `DashBoard/` paths. +- [x] 1.3 Update root README and environment setup notes to make root-first workflow explicit. + +## 2. Vite + Single-Port Integration + +- [x] 2.1 Add root frontend Vite project and configure build output to backend static assets. +- [x] 2.2 Integrate frontend build into deploy/start scripts with fallback behavior when npm build is unavailable. +- [x] 2.3 Verify root app serves Vite-built assets through Flask on the same external port. + +## 3. Portal Navigation Refactor + +- [x] 3.1 Refactor root portal navigation to drawer groups (reports/queries/dev-tools) while keeping existing route targets. +- [x] 3.2 Keep lazy-load frame behavior and health popup behavior compatible after navigation refactor. + +## 4. Cache and Field Contract Updates + +- [x] 4.1 Replace default NoOp route cache in root app with layered cache backend (L1 memory + optional Redis). 
+- [x] 4.2 Align known field-name inconsistencies between UI and export (job query and resource history first batch). + +## 5. Validation and Documentation + +- [x] 5.1 Run focused root tests for app factory/cache/query modules and record results. +- [x] 5.2 Document residual environment-dependent test gaps (Oracle/Redis dependent cases) and next actions. diff --git a/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/.openspec.yaml b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/.openspec.yaml new file mode 100644 index 0000000..ba4d3f5 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-02-07 diff --git a/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/design.md b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/design.md new file mode 100644 index 0000000..09432b8 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/design.md @@ -0,0 +1,50 @@ +## Context + +目前主要報表頁多已採 `frontend_asset(...) 
+ Vite module + inline fallback` 模式,但 `hold_detail` 仍停留在純 inline script。這造成: +- 例外頁面無法受益於共用模組治理與 build pipeline。 +- 動態表格字串拼接保留 XSS 風險。 +- 長期維護出現「主流程已模組化、單頁特例未遷移」的不一致。 + +## Goals / Non-Goals + +**Goals:** +- 讓 `hold_detail` 與其他報表頁採同一套 Vite 載入模式。 +- 保留既有功能語意(篩選、分頁、刷新、導航)與 MesApi 呼叫契約。 +- 將高風險動態輸出改為 escape-safe 渲染。 +- 加上模板整合測試覆蓋 module/fallback 分支。 + +**Non-Goals:** +- 不改後端資料模型與查詢邏輯。 +- 不重設 UI 視覺樣式與互動流程。 +- 不移除 fallback(本次仍保留回退能力)。 + +## Decisions + +### Decision 1: 以「抽取 inline script 到 Vite entry」完成遷移 +- 選擇:新增 `frontend/src/hold-detail/main.js`,以既有邏輯為基礎遷移,模板改為 module 優先、fallback 次之。 +- 理由:最小風險完成頁面納管,避免一次性重寫行為。 + +### Decision 2: 保持全域 handler 相容 +- 選擇:module 內維持 `window` 介面供既有 `onclick` 使用。 +- 理由:降低模板 DOM 大改成本,優先保證 parity。 + +### Decision 3: 在 module 與 fallback 皆補 escape 防護 +- 選擇:對 workcenter/package/lot 資料動態輸出加入 escape/quoted-string 保護。 +- 理由:避免 fallback 成為安全漏洞旁路。 + +## Risks / Trade-offs + +- [Risk] 複製遷移過程遺漏函式導致 runtime error → Mitigation: build + template test 覆蓋 module 路徑。 +- [Risk] fallback 與 module 雙軌造成維護成本 → Mitigation: 保持語意對齊並在後續階段評估移除 fallback。 +- [Risk] escape 導致個別顯示格式變化 → Mitigation: 僅防注入,不改原欄位值與排序/篩選語意。 + +## Migration Plan + +1. 增加 `hold-detail` Vite entry 與 module 檔案。 +2. 調整 `hold_detail.html` scripts block 為 module/fallback 雙軌。 +3. 補強 module + fallback 的動態輸出 escape。 +4. 
build 與 pytest 驗證,更新 tasks。 + +## Open Questions + +- 是否在下一階段移除 `hold_detail` fallback inline script,以降低雙路徑維運成本。 diff --git a/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/proposal.md b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/proposal.md new file mode 100644 index 0000000..6711d53 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/proposal.md @@ -0,0 +1,26 @@ +## Why + +`hold_detail` 目前仍是大型 inline script,尚未納入 Vite 模組治理,且動態 HTML 字串拼接存在潛在注入風險。為了完成報表頁一致的現代化架構與安全基線,需要將該頁補齊至與其餘主要頁面相同的模組化與防護水位。 + +## What Changes + +- 新增 `hold-detail` Vite entry 並由模板透過 `frontend_asset(...)` 優先載入 module。 +- 保留現有 inline script 作為 asset 缺失時 fallback,維持既有操作語意不變。 +- 將 `hold_detail` 的動態表格/篩選渲染改為 escape-safe 輸出,避免不受信字串直接注入 DOM。 +- 補充模板整合測試,驗證 `hold_detail` 的 module/fallback 路徑。 + +## Capabilities + +### New Capabilities +- None. + +### Modified Capabilities +- `full-vite-page-modularization`: 擴展 major page 模組化覆蓋到 hold-detail 報表頁。 +- `field-contract-governance`: 將動態渲染安全契約擴展到 hold-detail 報表內容。 +- `report-effects-parity`: 明確要求 hold-detail 的篩選、分頁、分佈互動在遷移後維持等效。 + +## Impact + +- Affected code: `frontend/src/`, `frontend/vite.config.js`, `src/mes_dashboard/templates/hold_detail.html`, `tests/test_template_integration.py`。 +- APIs/routes: `/hold-detail`, `/api/wip/hold-detail/*`(僅前端調用與渲染方式調整,不更動後端契約)。 +- Runtime behavior: 單一 port 與既有 MesApi/retry 行為不變。 diff --git a/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/field-contract-governance/spec.md b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/field-contract-governance/spec.md new file mode 100644 index 0000000..b9b5428 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/field-contract-governance/spec.md @@ -0,0 +1,8 @@ +## ADDED Requirements + +### Requirement: Hold Detail Dynamic Rendering MUST Sanitize Untrusted Values +Dynamic table and distribution rendering in hold-detail SHALL sanitize untrusted 
text before injecting it into HTML attributes or content. + +#### Scenario: Hold reason distribution contains HTML-like payload +- **WHEN** workcenter/package/lot fields include HTML-like text from upstream data +- **THEN** the hold-detail page MUST render escaped text and MUST NOT execute embedded markup or scripts diff --git a/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/full-vite-page-modularization/spec.md b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/full-vite-page-modularization/spec.md new file mode 100644 index 0000000..2d7186c --- /dev/null +++ b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/full-vite-page-modularization/spec.md @@ -0,0 +1,12 @@ +## ADDED Requirements + +### Requirement: Hold Detail Page SHALL Be Served by a Vite Module +The system SHALL provide a dedicated Vite entry bundle for the hold-detail report page. + +#### Scenario: Hold-detail module asset exists +- **WHEN** `/hold-detail` is rendered and `hold-detail.js` exists in static dist +- **THEN** the page MUST load behavior from the Vite module entry + +#### Scenario: Hold-detail module asset missing +- **WHEN** `/hold-detail` is rendered and the module asset is unavailable +- **THEN** the page MUST remain operational through explicit inline fallback logic diff --git a/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/report-effects-parity/spec.md b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/report-effects-parity/spec.md new file mode 100644 index 0000000..4f53d31 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/specs/report-effects-parity/spec.md @@ -0,0 +1,8 @@ +## ADDED Requirements + +### Requirement: Hold Detail Interaction Semantics SHALL Remain Equivalent After Modularization +Migrating hold-detail to a Vite module SHALL preserve existing filter, pagination, and refresh behavior. 
+ +#### Scenario: User applies filters and paginates on hold-detail +- **WHEN** users toggle age/workcenter/package filters and navigate pages +- **THEN** returned lots, distribution highlights, and pagination state MUST remain behaviorally equivalent to baseline inline behavior diff --git a/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/tasks.md b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/tasks.md new file mode 100644 index 0000000..a7f2f97 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-hold-detail-vite-hardening/tasks.md @@ -0,0 +1,17 @@ +## 1. Hold Detail Vite Modularization + +- [x] 1.1 Add `hold-detail` entry to Vite build configuration. +- [x] 1.2 Create `frontend/src/hold-detail/main.js` by migrating existing page script while preserving behavior. +- [x] 1.3 Update `hold_detail.html` to prefer `frontend_asset('hold-detail.js')` with inline fallback retention. + +## 2. Security and Parity Hardening + +- [x] 2.1 Sanitize dynamic HTML/attribute interpolation in hold-detail module rendering paths. +- [x] 2.2 Apply equivalent sanitization in inline fallback logic to avoid security bypass. +- [x] 2.3 Preserve legacy global handler compatibility for existing inline event hooks. + +## 3. Validation + +- [x] 3.1 Build frontend and verify `hold-detail.js` output in static dist. +- [x] 3.2 Extend template integration tests for hold-detail module/fallback rendering. +- [x] 3.3 Run focused pytest suite for template/frontend regressions. 
diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/.openspec.yaml b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/.openspec.yaml new file mode 100644 index 0000000..ba4d3f5 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-02-07 diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/design.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/design.md new file mode 100644 index 0000000..c3fbfa4 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/design.md @@ -0,0 +1,101 @@ +## Context + +目前根目錄 `DashBoard_vite` 已完成單一埠 Vite 整合與主要頁面模組化,但運行層仍有三類風險: +1. 韌性參數未完全生效(例如 DB pool 參數在設定層存在、engine 層未完全採用)。 +2. 故障語意未完全標準化(pool 耗盡/熔斷開啟/降級回應仍有泛化 500)。 +3. 效能優化尚未形成一致策略(快取資料結構與全量 merge 路徑可再降低 CPU 與記憶體負載)。 + +本設計在不改變業務邏輯與頁面流程前提下,推進 P0/P1/P2: +- P0:穩定性與退避 +- P1:查詢效率與資料結構 +- P2:運維一致性與自癒 + +約束條件: +- `resource`(設備主檔)與 `wip`(即時狀態)維持全表快取,因資料規模可接受且可換取查詢一致性與延遲穩定。 +- Vite 架構持續以「前端可複用元件 + 前端運算前移」為核心方向。 + +## Goals / Non-Goals + +**Goals:** +- 讓 DB pool / timeout / circuit breaker 形成可配置且可驗證的穩定性基線。 +- 在 pool 耗盡與服務降級時,提供可辨識錯誤碼、HTTP 狀態與前端退避策略。 +- 保留全表快取前提下,優化快取資料形狀與索引路徑,降低每次請求全量合併成本。 +- 對齊 conda + systemd + watchdog 運行模型,讓 worker 自癒與重啟流程可操作、可觀測。 +- 持續擴大前端運算前移範圍,並以 parity 驗證保證結果一致。 + +**Non-Goals:** +- 不改變既有頁面資訊架構、分頁/鑽取邏輯與核心業務規則。 +- 不將 `resource/wip` 改為分片快取或拆分多來源讀取。 +- 不引入多埠部署或拆分為前後端不同網域。 +- 不在本次變更中重寫所有歷史 SQL 或全面替換資料來源。 + +## Decisions + +### Decision 1: 以「配置即行為」收斂 DB 連線與保護策略(P0) +- 決策:`database.py` 的 engine 建立必須直接採用 settings/.env 的 pool 與 timeout 參數,並在 `/health/deep` 輸出實際生效值。 +- 原因:目前存在設定值與實際 engine 參數可能分離,導致調參無效。 +- 替代方案: +- 保留硬編碼參數,僅調整 `.env.example`(拒絕,無法保證生效)。 +- 完全改為每環境不同程式碼分支(拒絕,維運成本高)。 + +### Decision 2: 標準化「退避可判讀」錯誤語意(P0) +- 決策:新增/明確化 pool exhausted、circuit open、service degraded 的錯誤碼與 HTTP 映射,並在前端 `MesApi` 依狀態碼與錯誤碼進行退避。 +- 
原因:泛化 500 導致前端無法做差異化重試與提示。 +- 替代方案: +- 維持所有 5xx 同一重試邏輯(拒絕,會加劇擁塞)。 +- 僅靠文字訊息判斷(拒絕,不穩定且難國際化)。 + +### Decision 3: 在「全表快取不變」前提下做索引化與增量化(P1) +- 決策:保留 `resource/wip` 全表快取資料來源,但額外建立 process/redis 層索引(如 RESOURCEID → record index)與預聚合中間結果,減少每請求全量 merge。 +- 原因:資料量雖不大,但高併發下重複全量轉換與合併會累積 CPU 成本。 +- 替代方案: +- 改為分片快取(拒絕,破壞已確認的資料一致性策略)。 +- 完全回 Oracle 即時計算(拒絕,增加 DB 壓力與延遲波動)。 + +### Decision 4: 前端運算前移採「可驗證前移」策略(P1) +- 決策:優先前移展示層聚合/比率/圖表資料整理,並為每個前移計算建立 parity fixture 與容差規則。 +- 原因:符合 Vite 架構目的,減輕後端負擔,同時避免靜默偏差。 +- 替代方案: +- 一次性大量前移(拒絕,驗證風險高)。 +- 完全不前移(拒絕,無法達成改造目標)。 + +### Decision 5: 運維流程統一以 conda + systemd + watchdog(P2) +- 決策:部署與監控路徑統一到 conda 環境;systemd 服務模板、啟停腳本、watchdog PID/flag 路徑統一;加入自癒與告警門檻。 +- 原因:避免 `venv`/`conda` 混用造成重啟失效或定位困難。 +- 替代方案: +- 保持雙系統共存(拒絕,長期不一致風險高)。 + +## Risks / Trade-offs + +- [Risk] 調整錯誤碼與狀態碼可能影響既有前端假設 → Mitigation:先以向後相容 envelope 保留既有 `success/error` 結構,再新增標準化 code/meta 欄位。 +- [Risk] 啟用 circuit breaker 後短時間內可能增加 503 可見度 → Mitigation:設定合理門檻與 recovery timeout,並提供管理頁可觀測狀態與手動恢復流程。 +- [Risk] 新索引/預聚合增加記憶體占用 → Mitigation:設 TTL、大小監控與健康檢查輸出,必要時可透過配置關閉特定索引層。 +- [Risk] 前端運算前移可能出現精度差異 → Mitigation:定義 rounding/tolerance 並在 CI gate 執行 parity 測試。 +- [Risk] systemd 與腳本改動可能影響部署流程 → Mitigation:提供 rollout/rollback 演練步驟與 smoke check。 + +## Migration Plan + +1. P0 先行(穩定性) +- 讓 DB pool/call timeout/circuit breaker 參數化且生效。 +- 新增 pool exhausted 與 degraded 錯誤語意;前端 `MesApi` 加入對應退避策略。 +- 補充 health/deep 與 admin status 的可觀測欄位。 + +2. P1 續行(效率) +- 保留 `resource/wip` 全表快取資料源。 +- 加入索引化/預聚合路徑與增量更新鉤子,降低全量 merge 次數。 +- 擴充前端 compute-shift,補 parity fixtures。 + +3. P2 收斂(運維) +- 統一 conda + systemd + watchdog 服務定義與文件。 +- 設定 worker 自癒與告警門檻(重啟頻率、pool 飽和、降級持續時間)。 +- 完成壓測與重啟演練 gate 後放行。 + +4. Rollback +- 任一 gate 失敗即回退到前一穩定版本(腳本 + artifacts + 服務模板)。 +- 保留向後相容錯誤回應欄位以降低回退期間前端風險。 + +## Open Questions + +- pool exhausted 的最終 HTTP 語意是否固定為 `503`(含 `Retry-After`)或在部分查詢端點使用 `429`? +- 告警通道是否先落地在 log + health gate,或直接接既有監控平台(若有)? +- 前端計算容差的全域預設值是否統一(如 1e-6 / 小數 1 位),或按指標分類? 
diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/proposal.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/proposal.md new file mode 100644 index 0000000..97798f7 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/proposal.md @@ -0,0 +1,39 @@ +## Why + +目前根目錄遷移與 Vite 架構已完成可用性與功能對齊,但「穩定性、退避、自癒、查詢效率」仍未被完整定義為可驗收的規格。現在需要在不改變既有業務邏輯的前提下,將運行韌性與前端運算前移策略正式化,避免 cutover 後在高負載或故障情境下出現不一致行為。 + +## What Changes + +- 以三階段推進非破壞式優化: +- P0(先救穩定):讓 DB pool 參數真正生效、在生產基線啟用 circuit breaker、補齊 pool exhausted 的專用錯誤語意與前後端退避行為。 +- P1(再拚效率):重整快取資料結構與查詢路徑(索引化/增量化),降低每次請求的全量 merge 成本。 +- P2(運維收斂):統一 conda + systemd 執行模型,補齊 worker 自癒與告警門檻,讓 watchdog/restart 流程可操作且可觀測。 +- 明確保留既有架構原則: +- `resource`(設備基礎資料)與 `wip`(線上即時狀況)維持全表快取策略,不改成分片或拆表快取。 +- Vite 架構持續以「元件複用(圖表/查詢/抽屜)」與「運算前移至瀏覽器」為主軸,前端承接可前移的聚合與呈現計算。 + +## Capabilities + +### New Capabilities +- `runtime-resilience-recovery`: 定義 DB pool 耗盡、worker 異常、服務降級時的標準退避、恢復與熱重啟流程。 +- `conda-systemd-runtime-alignment`: 定義 conda 環境、systemd 服務、watchdog 與啟停腳本的一致部署契約與驗收門檻。 + +### Modified Capabilities +- `frontend-compute-shift`: 擴充前端運算前移邊界與 parity 驗證,確保前端計算結果與後端契約一致。 +- `full-vite-page-modularization`: 強化跨頁可複用元件與共用核心模組(圖表、查詢、抽屜、欄位契約)的要求。 +- `layered-route-cache`: 明確要求保留 `resource/wip` 全表快取,並在此基礎上優化索引與資料形狀。 +- `cache-observability-hardening`: 擴充快取/連線池/熔斷器的可觀測欄位、降級訊號與告警閾值。 +- `migration-gates-and-rollout`: 新增穩定性壓測、pool 壓力、worker 重啟演練等遷移門檻。 + +## Impact + +- Affected code: +- Backend: `src/mes_dashboard/core/database.py`, `src/mes_dashboard/core/circuit_breaker.py`, `src/mes_dashboard/core/cache.py`, `src/mes_dashboard/routes/*.py`, `src/mes_dashboard/services/resource_cache.py`, `src/mes_dashboard/services/realtime_equipment_cache.py`。 +- Frontend: `frontend/src/core/*` 與各頁 entry 模組,持續抽取可複用圖表/查詢邏輯。 +- Ops: `scripts/start_server.sh`, `scripts/worker_watchdog.py`, `deploy/mes-dashboard-watchdog.service`, `.env.example`, `README.md`。 +- API/behavior: +- 
新增或標準化故障語意(含 pool exhausted / circuit open / degraded)與對應退避策略。 +- Dependencies/systems: +- 維持單一埠服務模型;持續使用 conda + gunicorn + redis + systemd/watchdog。 +- Validation: +- 增加 resilience/performance 測試與 rollout gate,驗證降級、恢復、快取一致性與前後端計算一致性。 diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/cache-observability-hardening/spec.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/cache-observability-hardening/spec.md new file mode 100644 index 0000000..1fbe8df --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/cache-observability-hardening/spec.md @@ -0,0 +1,22 @@ +## ADDED Requirements + +### Requirement: Health Endpoints SHALL Expose Pool Saturation and Degradation Reason Codes +Operational health endpoints MUST report connection pool saturation indicators and explicit degradation reason codes. + +#### Scenario: Pool saturation observed +- **WHEN** checked-out connections and overflow approach configured limits +- **THEN** deep health output MUST expose saturation metrics and degraded reason classification + +### Requirement: Degraded Responses MUST Be Correlatable Across API and Health Telemetry +Error responses for degraded states SHALL include stable codes that can be mapped to health telemetry and operational dashboards. + +#### Scenario: Degraded API response correlation +- **WHEN** an API request fails due to circuit-open or pool-exhausted conditions +- **THEN** operators MUST be able to match the response code to current health telemetry state + +### Requirement: Operational Alert Thresholds SHALL Be Explicitly Defined +The system MUST define alert thresholds for sustained degraded state, repeated worker recovery, and abnormal retry pressure. 
+ +#### Scenario: Sustained degradation threshold exceeded +- **WHEN** degraded status persists beyond configured duration +- **THEN** the monitoring contract MUST classify the service as alert-worthy with actionable context diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/conda-systemd-runtime-alignment/spec.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/conda-systemd-runtime-alignment/spec.md new file mode 100644 index 0000000..956a0df --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/conda-systemd-runtime-alignment/spec.md @@ -0,0 +1,22 @@ +## ADDED Requirements + +### Requirement: Production Service Runtime SHALL Use Conda-Aligned Execution Paths +Service units and operational scripts MUST run with a consistent conda-managed Python runtime. + +#### Scenario: Service unit starts application +- **WHEN** systemd starts the dashboard service and watchdog +- **THEN** both processes MUST execute using the configured conda environment binaries and paths + +### Requirement: Watchdog and Runtime Paths MUST Be Operationally Consistent +PID files, restart flag paths, state files, and worker control interfaces SHALL be consistent across scripts, environment variables, and systemd units. + +#### Scenario: Watchdog handles restart flag +- **WHEN** a restart flag is written by admin control endpoints +- **THEN** watchdog MUST read the same configured path set and signal the correct Gunicorn master process + +### Requirement: Deployment Documentation MUST Match Runtime Contract +Runbooks and deployment documentation MUST describe the same conda/systemd/watchdog contract used by the deployed system. 
+ +#### Scenario: Operator follows deployment runbook +- **WHEN** an operator performs deploy, health check, and rollback from documentation +- **THEN** documented commands and paths MUST work without requiring venv-specific assumptions diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/frontend-compute-shift/spec.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/frontend-compute-shift/spec.md new file mode 100644 index 0000000..a7454dc --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/frontend-compute-shift/spec.md @@ -0,0 +1,22 @@ +## ADDED Requirements + +### Requirement: Compute-Shifted Logic SHALL Be Exposed as Reusable Frontend Core Modules +Frontend-computed metrics and transformations MUST be implemented as reusable, testable modules instead of page-local inline logic. + +#### Scenario: Multiple pages consume shared compute logic +- **WHEN** two or more pages require the same metric transformation or aggregation +- **THEN** they MUST import a shared frontend core module and produce consistent outputs + +### Requirement: Frontend Compute Parity MUST Include Tolerance Contracts Per Metric +Parity verification SHALL define explicit tolerance and rounding contracts per migrated metric. + +#### Scenario: Parity check for migrated metric +- **WHEN** migrated frontend computation is validated against baseline output +- **THEN** parity tests MUST evaluate the metric against its declared tolerance and fail when outside bounds + +### Requirement: Compute Shift MUST Preserve Existing User-Facing Logic +Frontend compute migration MUST preserve existing filter semantics, drill-down behavior, and displayed totals. 
+ +#### Scenario: Existing dashboard interactions after compute shift +- **WHEN** users apply filters and navigate drill-down flows on migrated pages +- **THEN** interaction results MUST remain behaviorally equivalent to the pre-shift baseline diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/full-vite-page-modularization/spec.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/full-vite-page-modularization/spec.md new file mode 100644 index 0000000..268fa5b --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/full-vite-page-modularization/spec.md @@ -0,0 +1,22 @@ +## ADDED Requirements + +### Requirement: Vite Page Modules SHALL Reuse Shared Chart and Query Building Blocks +Page entry modules MUST consume shared chart/query/drawer utilities for common behaviors. + +#### Scenario: Common chart behavior across pages +- **WHEN** multiple report pages render equivalent chart interactions +- **THEN** the behavior MUST be provided by shared Vite modules rather than duplicated page-local implementations + +### Requirement: Modularization MUST Preserve Established Navigation and Drill-Down Semantics +Refactoring into Vite modules SHALL not alter existing page transitions, independent tabs, and drill-down entry points. + +#### Scenario: User follows existing drill-down path +- **WHEN** the user navigates from summary page to detail views +- **THEN** the resulting flow and parameter semantics MUST match the established baseline behavior + +### Requirement: Module Boundaries SHALL Support Frontend Compute Expansion +Vite module structure MUST keep compute logic decoupled from DOM wiring so additional backend-to-frontend computation shifts can be added safely. 
+ +#### Scenario: Adding a new frontend-computed metric +- **WHEN** a new metric is migrated from backend to frontend +- **THEN** the metric logic MUST be integrated through shared compute modules without rewriting page routing structure diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/layered-route-cache/spec.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/layered-route-cache/spec.md new file mode 100644 index 0000000..e235894 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/layered-route-cache/spec.md @@ -0,0 +1,22 @@ +## ADDED Requirements + +### Requirement: Resource and WIP Full-Table Cache SHALL Remain the Authoritative Cached Dataset +The system MUST keep `resource` and `wip` full-table cache datasets as the canonical cached source for downstream route queries. + +#### Scenario: Route query reads cached baseline +- **WHEN** an endpoint requires resource or wip data +- **THEN** it MUST read from the corresponding full-table cache baseline before applying derived filters or aggregations + +### Requirement: Cache Access Paths SHALL Support Index-Based Lookup and Derived Views +The caching layer SHALL support index and derived-view access paths to reduce per-request full-table merge and transformation overhead. + +#### Scenario: Lookup by key under concurrent load +- **WHEN** requests query by high-cardinality keys such as RESOURCEID +- **THEN** the system MUST serve lookups via indexed cache access instead of repeated full-array scans + +### Requirement: Full-Table Cache Refresh MUST Support Incremental Derivation Updates +Derived cache indices and aggregates MUST be refreshed consistently when the underlying full-table cache version changes. 
+ +#### Scenario: Cache version update +- **WHEN** full-table cache is refreshed to a new version +- **THEN** dependent indices and derived views MUST be rebuilt or updated before being exposed for reads diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/migration-gates-and-rollout/spec.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/migration-gates-and-rollout/spec.md new file mode 100644 index 0000000..94322fb --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/migration-gates-and-rollout/spec.md @@ -0,0 +1,22 @@ +## ADDED Requirements + +### Requirement: Migration Gates SHALL Include Runtime Resilience Validation +Cutover readiness gates MUST include resilience checks for pool exhaustion handling, circuit-breaker fail-fast behavior, and recovery flow. + +#### Scenario: Resilience gate evaluation +- **WHEN** migration gates are executed before release +- **THEN** resilience tests MUST pass for degraded-response semantics and recovery path validation + +### Requirement: Migration Gates SHALL Include Frontend Compute Parity Validation +Cutover readiness MUST include parity validation for metrics shifted from backend to frontend computation. + +#### Scenario: Compute parity gate +- **WHEN** a release includes additional frontend-computed metrics +- **THEN** gate execution MUST verify parity fixtures and fail if tolerance contracts are violated + +### Requirement: Rollout Procedure MUST Include Conda-Systemd-Watchdog Rehearsal +Rollout and rollback runbooks SHALL include an operational rehearsal for service start, watchdog-triggered reload, and post-restart health checks under the conda/systemd runtime contract. 
+ +#### Scenario: Pre-cutover rehearsal +- **WHEN** operators execute pre-cutover rehearsal +- **THEN** they MUST successfully complete conda-based start, worker reload, and health verification steps documented in the runbook diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/runtime-resilience-recovery/spec.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/runtime-resilience-recovery/spec.md new file mode 100644 index 0000000..77fee10 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/specs/runtime-resilience-recovery/spec.md @@ -0,0 +1,29 @@ +## ADDED Requirements + +### Requirement: Database Pool Runtime Configuration SHALL Be Enforced +The system SHALL apply database pool and timeout parameters from runtime configuration to the active SQLAlchemy engine used by request handling. + +#### Scenario: Runtime pool configuration takes effect +- **WHEN** operators set pool and timeout values via environment configuration and start the service +- **THEN** the active engine MUST use those values for pool size, overflow, wait timeout, and query call timeout + +### Requirement: Pool Exhaustion MUST Return Retry-Aware Degraded Responses +The system MUST return explicit degraded responses for connection pool exhaustion and include machine-readable metadata for retry/backoff behavior. + +#### Scenario: Pool exhausted under load +- **WHEN** concurrent requests exceed available database connections and pool wait timeout is reached +- **THEN** the API MUST return a dedicated error code and retry guidance instead of a generic 500 failure + +### Requirement: Runtime Degradation MUST Integrate Circuit Breaker State +Database-facing API behavior SHALL distinguish circuit-breaker-open degradation from transient query failures. 
+ +#### Scenario: Circuit breaker is open +- **WHEN** the circuit breaker transitions to OPEN state +- **THEN** database-backed endpoints MUST fail fast with a stable degradation response contract + +### Requirement: Worker Recovery SHALL Support Hot Reload and Watchdog-Assisted Recovery +The runtime MUST support graceful worker hot reload and watchdog-triggered recovery without requiring a port change or full system reboot. + +#### Scenario: Worker restart requested +- **WHEN** an authorized operator requests worker restart during degraded operation +- **THEN** the service MUST trigger graceful reload and preserve single-port availability diff --git a/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/tasks.md b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/tasks.md new file mode 100644 index 0000000..9351f38 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-stability-and-frontend-compute-shift/tasks.md @@ -0,0 +1,36 @@ +## 1. P0 Runtime Resilience Baseline + +- [x] 1.1 Make `database.py` read and enforce runtime pool/timeouts from settings/env instead of hardcoded constants. +- [x] 1.2 Add explicit degraded error mapping for pool exhaustion and circuit-open states (stable error codes + retry metadata). +- [x] 1.3 Update API response handling so degraded errors are returned consistently across WIP/Resource/Dashboard endpoints. +- [x] 1.4 Extend frontend `MesApi` retry/backoff policy to respect degraded error codes and avoid aggressive retries under pool exhaustion. + +## 2. P0 Observability and Recovery Controls + +- [x] 2.1 Extend `/health` and `/health/deep` payloads with pool configuration, saturation indicators, and degradation reason classification. +- [x] 2.2 Expose runtime-resilience diagnostics in admin status API for operations triage. +- [x] 2.3 Ensure hot-reload/restart controls preserve single-port availability and return actionable status for watchdog-driven recovery. + +## 3. 
P1 Cache and Query Efficiency (Keep Full-Table Cache) + +- [x] 3.1 Preserve `resource/wip` full-table cache as authoritative baseline while introducing indexed lookup helpers for high-frequency access paths. +- [x] 3.2 Reduce repeated full-array merge cost in resource status composition by using prebuilt lookup/index structures. +- [x] 3.3 Add cache version-coupled rebuild/update flow for derived indices and expose telemetry for index freshness. + +## 4. P1 Frontend Compute Shift Expansion + +- [x] 4.1 Refactor compute-heavy display transformations into reusable frontend core modules. +- [x] 4.2 Add parity fixtures/tests for newly shifted computations with explicit tolerance contracts. +- [x] 4.3 Ensure migrated pages preserve existing tab/drill-down behavior while consuming shared Vite modules. + +## 5. P2 Conda/Systemd/Watchdog Runtime Alignment + +- [x] 5.1 Align systemd service templates and runtime paths with conda-based execution model. +- [x] 5.2 Align startup/deploy scripts, watchdog config, and documentation to a single runtime contract. +- [x] 5.3 Define and document alert thresholds for sustained degraded state, restart churn, and retry pressure. + +## 6. Validation and Migration Gates + +- [x] 6.1 Add/extend tests for pool exhaustion semantics, circuit-breaker fail-fast behavior, and degraded response contracts. +- [x] 6.2 Add/extend tests for indexed cache access and frontend compute parity. +- [x] 6.3 Update migration gate/runbook docs to include resilience checks, conda-systemd rehearsal, and rollback verification. 
diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/.openspec.yaml b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/.openspec.yaml new file mode 100644 index 0000000..ba4d3f5 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-02-07 diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/design.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/design.md new file mode 100644 index 0000000..1fa2821 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/design.md @@ -0,0 +1,65 @@ +## Context + +`DashBoard_vite` 已完成主體搬遷,但報表頁仍處於混合狀態: +- `resource-status`、`resource-history` 等頁面已有 Vite 版本,卻存在實際行為缺陷。 +- `wip_overview`、`wip_detail` 仍以 inline script 為主,尚未納入 Vite entry 與共用模組治理。 +- 部分頁面仍有直接字串拼接輸出與原生 `fetch` 路徑,無法完整承接既有 `MesApi` 的降級重試契約。 + +此變更是「遷移後硬化」階段:不改變既有業務操作語意,但將效果對齊、模組化覆蓋與前端複用一起完成。 + +## Goals / Non-Goals + +**Goals:** +- 讓 WIP 報表頁進入 Vite entry 管理,並保留目前 tab/drill-down 與 `onclick` 操作語意。 +- 修復已遷移模組中會影響報表可用性的缺陷(初始化、KPI、矩陣選取、API 呼叫路徑)。 +- 強化共用路徑(escape、欄位契約、MesApi/backoff)以支撐後續前端運算擴展。 +- 用測試明確覆蓋「asset exists -> module」、「asset missing -> fallback」的模板行為。 + +**Non-Goals:** +- 不改動後端路由設計與單一 port 服務模型。 +- 不重寫 UI 視覺風格或更動既有商業邏輯判斷規則。 +- 不在本次引入新的大型前端框架(維持 Vanilla + Vite entry 模式)。 + +## Decisions + +### Decision 1: 採用「模板雙軌載入」完成 WIP 遷移 +- 選擇:在 `wip_overview.html`、`wip_detail.html` 加入 `frontend_asset()` module 載入,保留既有 inline script 作 fallback。 +- 理由:可在不破壞現場可用性的前提下,讓 Vite bundle 成為預設執行路徑,符合先前頁面遷移模式。 +- 替代方案:直接刪除 inline script。 + - 未採用原因:回退能力不足,且無法快速比對 parity。 + +### Decision 2: 模組保持全域 handler 相容層 +- 選擇:Vite entry 內對舊有 `onclick` 所需函式維持 `window` 綁定,避免模板同步大改。 +- 理由:降低一次性改動範圍,先確保行為完全對齊,再逐步收斂事件綁定方式。 +- 替代方案:全面改為 addEventListener 並移除 inline `onclick`。 + - 未採用原因:本次目標是 parity hardening,不是互動模型重寫。 + +### Decision 3: 前端 API 路徑統一走 MesApi +- 選擇:JSON API 
優先走 `MesApi.get/post`(或 core api bridge),僅 blob/download 等必要場景保留原生 fetch。 +- 理由:沿用既有降級錯誤碼與 retry/backoff 策略,避免 pool exhausted 時前端重試失控。 +- 替代方案:維持頁面各自 `fetch`。 + - 未採用原因:會破壞 resilience contract,一致性不足。 + +### Decision 4: 字串輸出與欄位命名同步納入治理 +- 選擇:針對動態 HTML 內容補 escape,並對照 field contract 驗證表格欄位與下載標頭語意一致。 +- 理由:遷移期間常見 XSS/欄位漂移問題,必須和模組化同時收斂。 + +## Risks / Trade-offs + +- [Risk] 大型 inline script 搬入 module 時可能出現作用域差異 → Mitigation: 先保留 fallback,並針對 `window` handler 做顯式綁定。 +- [Risk] 模組與 fallback 並存造成測試分支增加 → Mitigation: 以 template integration 測試固定兩條路徑行為。 +- [Risk] escape 補強可能改變少數欄位原始顯示格式 → Mitigation: 僅針對 HTML 注入風險欄位處理,保留 NULL/日期等既有顯示語意。 +- [Risk] 前端改走 MesApi 使錯誤提示型態改變 → Mitigation: 保持原錯誤訊息文案,僅替換底層請求路徑。 + +## Migration Plan + +1. 先完成 OpenSpec task 分解與可執行順序。 +2. 新增 WIP Vite entries,更新 vite config,模板加上 module/fallback 雙軌。 +3. 修復 `resource-history`、`resource-status` 關鍵缺陷並補安全性修正。 +4. Build + pytest 驗證,更新 task 勾選。 +5. 交付變更摘要與剩餘風險,供後續 archive。 + +## Open Questions + +- 是否需要在下一階段移除 WIP fallback inline script(目前先保留作為回退機制)。 +- 是否要擴充前端單元測試(Vitest)覆蓋更多 DOM 互動,而不只依賴後端模板整合測試。 diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/proposal.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/proposal.md new file mode 100644 index 0000000..be268c4 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/proposal.md @@ -0,0 +1,28 @@ +## Why + +目前仍有部分報表頁維持大型 inline script,且已遷移的 Vite 模組存在實際行為缺口(例如 KPI 0% 呈現、矩陣篩選選取、模組作用域匯出失敗)。這造成「舊版 Jinja 報表效果」與「新架構模組化」之間存在落差,無法完全發揮 Vite 在複用、可維護性與前端運算轉移的優勢。 + +## What Changes + +- 將 WIP Overview / WIP Detail 的報表互動完整納入 Vite entry,保留既有頁面操作語意與 drill-down 路徑。 +- 修復已遷移頁面的核心行為缺陷(Resource History 模組初始化、Resource Status KPI 與矩陣交互)。 +- 統一報表前端 API 呼叫路徑,優先透過 `MesApi` 以承接既有 retry/backoff 與降級錯誤契約。 +- 補強報表頁字串輸出安全與欄位契約一致性,確保畫面欄位、查詢結果與下載欄位名稱一致。 +- 新增/調整模板整合驗證,確保 Vite 模組載入與 fallback 行為在報表頁完整覆蓋。 + +## Capabilities + +### New Capabilities +- `report-effects-parity`: 定義舊版 Jinja 報表在新 Vite 
架構下的效果對齊要求(圖表、篩選、表格、KPI、互動與下載語意)。 + +### Modified Capabilities +- `full-vite-page-modularization`: 擴展到 WIP 報表頁完整模組化與 fallback 覆蓋。 +- `frontend-compute-shift`: 擴大前端運算承載並修復前端計算與呈現邏輯缺陷。 +- `field-contract-governance`: 強化欄位名稱與匯出標頭一致性及頁面渲染安全。 +- `runtime-resilience-recovery`: 明確要求前端呼叫在降級/壓力情境下遵循退避契約。 + +## Impact + +- Affected code: `frontend/src/`, `frontend/vite.config.js`, `src/mes_dashboard/templates/`, `tests/test_template_integration.py`。 +- Affected runtime behavior: 報表頁 JS 載入模式、矩陣/篩選互動、KPI 顯示與下載欄位對齊。 +- Affected operations: 單一對外 port 架構不變,仍由 Flask/Gunicorn 提供頁面與 Vite build 輸出資產。 diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/field-contract-governance/spec.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/field-contract-governance/spec.md new file mode 100644 index 0000000..4b295cb --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/field-contract-governance/spec.md @@ -0,0 +1,15 @@ +## ADDED Requirements + +### Requirement: Dynamic Report Rendering MUST Sanitize Untrusted Values +Dynamic table/list rendering in report and query pages SHALL sanitize untrusted text before injecting HTML. + +#### Scenario: HTML-like payload in query result +- **WHEN** an API result field contains HTML-like text payload +- **THEN** the rendered page MUST display escaped text and MUST NOT execute embedded script content + +### Requirement: UI Table and Download Headers SHALL Follow the Same Field Contract +Page table headers and exported file headers SHALL map to the same field contract definition for the same dataset. 
+ +#### Scenario: Header consistency check +- **WHEN** users view a report table and then export the corresponding data +- **THEN** header labels MUST remain semantically aligned and avoid conflicting naming for identical fields diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/frontend-compute-shift/spec.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/frontend-compute-shift/spec.md new file mode 100644 index 0000000..fdcfd5b --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/frontend-compute-shift/spec.md @@ -0,0 +1,15 @@ +## ADDED Requirements + +### Requirement: Frontend Compute Paths MUST Handle Zero and Boundary Values Correctly +Frontend-computed report metrics SHALL preserve valid zero values and boundary conditions in user-visible KPI and summary components. + +#### Scenario: Zero-value KPI rendering +- **WHEN** OU% or availability metrics are computed as `0` +- **THEN** the page MUST render `0%` (or configured numeric format) instead of placeholder values + +### Requirement: Hierarchical Filter Compute Logic SHALL Be Deterministic Across Levels +Frontend matrix/filter computations SHALL produce deterministic selection and filtering outcomes for group, family, and resource levels. 
+ +#### Scenario: Matrix selection at multiple hierarchy levels +- **WHEN** users toggle matrix cells across group, family, and resource rows +- **THEN** selected-state rendering and filtered equipment result sets MUST remain level-correct and reversible diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/full-vite-page-modularization/spec.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/full-vite-page-modularization/spec.md new file mode 100644 index 0000000..09e6a72 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/full-vite-page-modularization/spec.md @@ -0,0 +1,19 @@ +## ADDED Requirements + +### Requirement: WIP Report Pages SHALL Be Served by Vite Modules +The system SHALL provide Vite entry bundles for WIP overview and WIP detail pages, with template-level asset resolution. + +#### Scenario: WIP module asset available +- **WHEN** the built asset exists in backend static dist +- **THEN** the page MUST load behavior from the corresponding Vite module entry + +#### Scenario: WIP module asset unavailable +- **WHEN** the built asset is not present +- **THEN** the page MUST retain equivalent behavior through explicit inline fallback logic + +### Requirement: Vite Modules MUST Preserve Legacy Handler Compatibility +Vite report modules SHALL expose required global handlers for existing inline entry points until event wiring is fully migrated. 
+ +#### Scenario: Inline-triggered handler compatibility +- **WHEN** a template control invokes existing global handler names +- **THEN** the migrated module MUST provide compatible callable handlers without runtime scope errors diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/report-effects-parity/spec.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/report-effects-parity/spec.md new file mode 100644 index 0000000..bb815f9 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/report-effects-parity/spec.md @@ -0,0 +1,19 @@ +## ADDED Requirements + +### Requirement: Report Effect Parity SHALL Be Preserved During Vite Migration +The system SHALL preserve existing Jinja-era report interactions when report pages are served by Vite modules. + +#### Scenario: WIP overview interactions remain equivalent +- **WHEN** users operate WIP overview filters, KPI cards, chart refresh, and drill-down entry +- **THEN** the resulting state transitions and navigation parameters MUST remain behaviorally equivalent to the baseline page logic + +#### Scenario: WIP detail interactions remain equivalent +- **WHEN** users operate WIP detail filters, pagination, lot detail popup, and back-to-overview transitions +- **THEN** the resulting data scope and interaction behavior MUST match baseline semantics + +### Requirement: Report Visual Semantics MUST Remain Consistent +Report pages SHALL keep established status color semantics, KPI display rules, and table/chart synchronization behavior after migration. 
+ +#### Scenario: KPI and matrix state consistency +- **WHEN** metric values are zero or filters target specific matrix levels +- **THEN** KPI values and selected-state highlights MUST render correctly without collapsing valid zero values or losing selection state diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/runtime-resilience-recovery/spec.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/runtime-resilience-recovery/spec.md new file mode 100644 index 0000000..3b4199a --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/specs/runtime-resilience-recovery/spec.md @@ -0,0 +1,8 @@ +## ADDED Requirements + +### Requirement: Report Frontend API Access SHALL Honor Degraded Retry Contracts +Report pages SHALL use retry-aware API access paths for JSON endpoints so degraded backend responses propagate retry metadata to UI behavior. + +#### Scenario: Pool exhaustion or circuit-open response +- **WHEN** report API endpoints return degraded error codes with retry hints +- **THEN** frontend calls MUST flow through MesApi-compatible behavior and avoid aggressive uncontrolled retry loops diff --git a/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/tasks.md b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/tasks.md new file mode 100644 index 0000000..33e69c5 --- /dev/null +++ b/openspec/changes/archive/2026-02-07-vite-jinja-report-parity-hardening/tasks.md @@ -0,0 +1,28 @@ +## 1. OpenSpec Scope and Parity Baseline + +- [x] 1.1 Confirm report parity target pages and interaction scope (WIP overview/detail, resource status/history, query pages). +- [x] 1.2 Capture concrete parity defects in current Vite modules (runtime errors, KPI/matrix mismatch, API path inconsistency). + +## 2. WIP Pages Vite Modularization + +- [x] 2.1 Add Vite entries for `wip-overview` and `wip-detail`. 
+- [x] 2.2 Update templates to load `frontend_asset(...)` module bundles with inline fallback retention. +- [x] 2.3 Preserve legacy global handler compatibility for existing inline-triggered actions. + +## 3. Report Behavior and Compute Fixes + +- [x] 3.1 Fix `resource-history` module initialization/export scope error. +- [x] 3.2 Fix `resource-status` matrix selection logic and KPI zero-value rendering parity. +- [x] 3.3 Align report JSON API calls to MesApi-compatible paths for degraded retry behavior. + +## 4. Field Contract and Rendering Hardening + +- [x] 4.1 Patch dynamic table/query rendering to escape untrusted values. +- [x] 4.2 Verify UI table headers and export header naming consistency for touched report flows. +- [x] 4.3 Fix missing report style tokens affecting visual consistency. + +## 5. Validation and Regression Guard + +- [x] 5.1 Build frontend bundles and ensure new entries are emitted into backend static dist. +- [x] 5.2 Extend/update template integration tests for WIP module/fallback behavior. +- [x] 5.3 Run focused pytest suite for template/frontend/report regressions and record outcomes. 
diff --git a/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/.openspec.yaml b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/.openspec.yaml new file mode 100644 index 0000000..ba4d3f5 --- /dev/null +++ b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-02-07 diff --git a/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/design.md b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/design.md new file mode 100644 index 0000000..e0cec4f --- /dev/null +++ b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/design.md @@ -0,0 +1,67 @@ +## Context + +`DashBoard_vite` 已完成單一 port 的 Flask + Vite 架構整併,並具備降級回應、circuit breaker、watchdog 熱重啟與多層快取。 +目前主要缺口不是功能不存在,而是「運維可操作性」與「前端治理粒度」: + +1. health/admin 雖有狀態,但缺少門檻與建議動作,值班時仍需人工判讀。 +2. watchdog 僅保留最後一次重啟紀錄,無法直接判斷短時間 churn。 +3. WIP overview/detail 仍有 autocomplete/filter 搜尋邏輯重複,後續擴展成本高。 +4. README 需要明確反映最新架構契約與改善策略,避免文件落後於實作。 + +## Goals / Non-Goals + +**Goals:** +- 提供可操作的韌性診斷輸出(thresholds、churn、recovery recommendation)。 +- 保持既有單 port 與手動重啟控制模型,不引入高風險自動重啟風暴。 +- 抽離 WIP 頁面共用 autocomplete/filter 查詢邏輯到 Vite core,降低重複。 +- 新增對應測試與文件更新,讓 gate 與 README 可驗證。 + +**Non-Goals:** +- 不做整站 SPA rewrite。 +- 不改動既有 drill-down 路徑與使用者操作語意。 +- 不預設啟用「條件達成即自動重啟 worker」的強制策略。 + +## Decisions + +1. 韌性診斷採「可觀測 + 建議」而非預設自動重啟 +- Decision: 在 `/health`、`/health/deep`、`/admin/api/system-status`、`/admin/api/worker/status` 增加 thresholds/churn/recommendation。 +- Rationale: 目前已具備 degraded response + backoff + admin restart;先提升判讀與操作性,避免未設防的自動重啟造成抖動。 +- Alternative considered: 直接在 pool exhausted 時自動重啟 worker;未採用,因 root cause 多為慢查詢/瞬時壅塞,重啟治標不治本且有風暴風險。 + +2. watchdog state 擴充最近重啟歷史 +- Decision: 在 state 檔保留 bounded restart history 並計算 churn summary。 +- Rationale: 提供運維端可觀測的重啟密度訊號,支援告警與 runbook 決策。 +- Alternative considered: 僅依日誌分析;未採用,因 API 需要機器可讀狀態。 + +3. 
WIP autocomplete/filter 抽共用核心模組 +- Decision: 新增 `frontend/src/core/autocomplete.js`,由 `wip-overview` / `wip-detail` 共用。 +- Rationale: 保留既有 API 與頁面互動語意,同時降低重複與 bug 修補成本。 +- Alternative considered: 全量頁面元件化框架重寫;未採用,因超出本次風險與範圍。 + +4. README 架構契約同步 +- Decision: 更新 README(並提供 `README.mdj` 鏡像)記錄新的韌性診斷與前端共用模組策略。 +- Rationale: 交付後文件應可直接支援運維與交接。 + +## Risks / Trade-offs + +- [Risk] 韌性輸出欄位增加可能影響依賴固定 schema 的外部腳本 + - Mitigation: 採向後相容擴充,不移除既有欄位。 + +- [Risk] 共用 autocomplete 模組抽離後可能引入搜尋參數差異 + - Mitigation: 保持原有欄位映射與 cross-filter 規則,並補單元測試覆蓋。 + +- [Risk] restart history 持久化不當可能造成 state 膨脹 + - Mitigation: 使用 bounded history(固定上限)與窗口彙總。 + +## Migration Plan + +1. 實作 resilience diagnostics(thresholds/churn/recommendation)與 watchdog state 擴充。 +2. 更新 health/admin API 輸出並補測試。 +3. 抽離前端 autocomplete 共用模組,更新 WIP 頁面引用並執行 Vite build。 +4. 更新 README/README.mdj 與 runbook 對應段落。 +5. 執行 focused pytest + frontend build 驗證,確認單 port 契約不變。 + +## Open Questions + +- 是否在下一階段將 recommendation 與告警 webhook(Slack/Teams)直接整合? +- 是否要把 restart churn 門檻與 UI 告警顏色標準化到 admin/performance 頁? diff --git a/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/proposal.md b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/proposal.md new file mode 100644 index 0000000..ea6f711 --- /dev/null +++ b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/proposal.md @@ -0,0 +1,40 @@ +## Why + +Vite migration已完成主要功能遷移,但目前仍有兩個可見風險:一是運維端缺少「可操作」的韌性判斷(僅有狀態,缺少建議動作與重啟 churn 訊號);二是前端主要報表頁仍存在可抽離的重複互動邏輯,會增加後續維護成本。現在補齊這兩塊,可在不改變既有使用流程下提高穩定性與可演進性。 + +## What Changes + +- 擴充 runtime resilience 診斷契約:在 health/admin payload 提供門檻設定、重啟 churn 與可行動建議。 +- 強化 watchdog state:保留最近重啟歷史,支持 churn 計算與觀測。 +- 將 WIP overview/detail 重複的 autocomplete/filter 查詢邏輯抽成共用 Vite core 模組。 +- 增加前端核心模組與韌性診斷的測試覆蓋。 +- 更新專案說明文件(README)反映最新架構、治理策略與操作準則。 + +## Capabilities + +### New Capabilities +- None. 
+ +### Modified Capabilities +- `runtime-resilience-recovery`: 新增重啟 churn 與復原建議契約,讓降級狀態具備可操作的 runbook 訊號。 +- `full-vite-page-modularization`: 新增 WIP 報表共用 autocomplete/filter building blocks 要求,降低頁面重複實作。 +- `migration-gates-and-rollout`: 新增文件與前端治理 gate,確保架構說明與實際部署契約一致。 + +## Impact + +- Affected code: + - `src/mes_dashboard/routes/health_routes.py` + - `src/mes_dashboard/routes/admin_routes.py` + - `scripts/worker_watchdog.py` + - `frontend/src/core/` + - `frontend/src/wip-overview/main.js` + - `frontend/src/wip-detail/main.js` + - `tests/` + - `README.md`(以及使用者要求的 README.mdj) +- APIs: + - `/health` + - `/health/deep` + - `/admin/api/system-status` + - `/admin/api/worker/status` +- Operational behavior: + - 保持單一 port 與既有手動重啟流程;新增觀測與建議,不預設啟用自動重啟風暴風險。 diff --git a/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/full-vite-page-modularization/spec.md b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/full-vite-page-modularization/spec.md new file mode 100644 index 0000000..9b39c0f --- /dev/null +++ b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/full-vite-page-modularization/spec.md @@ -0,0 +1,12 @@ +## ADDED Requirements + +### Requirement: WIP Modules SHALL Reuse Shared Autocomplete and Filter Query Utilities +WIP overview and WIP detail Vite entry modules SHALL use shared frontend core utilities for autocomplete request construction and cross-filter behavior. 
+ +#### Scenario: Cross-filter autocomplete parity across WIP pages +- **WHEN** users type in workorder/lot/package/type filters on either WIP overview or WIP detail pages +- **THEN** both pages MUST generate equivalent autocomplete request parameters and return behaviorally consistent dropdown results + +#### Scenario: Shared utility change propagates across both pages +- **WHEN** autocomplete mapping rules are updated in the shared core module +- **THEN** both WIP overview and WIP detail modules MUST consume the updated behavior without duplicated page-local logic edits diff --git a/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/migration-gates-and-rollout/spec.md b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/migration-gates-and-rollout/spec.md new file mode 100644 index 0000000..1c16fcd --- /dev/null +++ b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/migration-gates-and-rollout/spec.md @@ -0,0 +1,12 @@ +## ADDED Requirements + +### Requirement: Migration Gates SHALL Enforce Architecture Documentation Consistency +Cutover governance MUST include verification that runtime architecture contracts documented for operators match implemented deployment and resilience behavior. 
+ +#### Scenario: Documentation gate before release +- **WHEN** release gates are executed for a migration or hardening change +- **THEN** project README artifacts MUST be updated to reflect current single-port runtime contract, resilience diagnostics, and frontend modularization strategy + +#### Scenario: Gate fails on stale architecture contract +- **WHEN** implementation introduces resilience or module-governance changes but README architecture section remains outdated +- **THEN** release governance MUST treat the gate as failed until documentation is aligned diff --git a/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/runtime-resilience-recovery/spec.md b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/runtime-resilience-recovery/spec.md new file mode 100644 index 0000000..8edbbab --- /dev/null +++ b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/specs/runtime-resilience-recovery/spec.md @@ -0,0 +1,12 @@ +## ADDED Requirements + +### Requirement: Runtime Resilience Diagnostics MUST Expose Actionable Signals +The system MUST expose machine-readable resilience thresholds, restart-churn indicators, and operator action recommendations so degraded states can be triaged consistently. 
+ +#### Scenario: Health payload includes resilience diagnostics +- **WHEN** clients call `/health` or `/health/deep` +- **THEN** responses MUST include resilience thresholds and a recommendation field describing whether to observe, throttle, or trigger controlled worker recovery + +#### Scenario: Admin status includes restart churn summary +- **WHEN** operators call `/admin/api/system-status` or `/admin/api/worker/status` +- **THEN** responses MUST include bounded restart history summary within a configured time window and indicate whether churn threshold is exceeded diff --git a/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/tasks.md b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/tasks.md new file mode 100644 index 0000000..fbd3e49 --- /dev/null +++ b/openspec/changes/archive/2026-02-08-post-migration-resilience-governance/tasks.md @@ -0,0 +1,23 @@ +## 1. Runtime Resilience Diagnostics Hardening + +- [x] 1.1 Add shared resilience threshold/recommendation helpers for health/admin payloads. +- [x] 1.2 Extend watchdog restart state to include bounded restart history and churn summary. +- [x] 1.3 Expose thresholds/churn/recommendation fields in `/health`, `/health/deep`, `/admin/api/system-status`, and `/admin/api/worker/status`. + +## 2. Frontend WIP Module Reuse + +- [x] 2.1 Add shared Vite core autocomplete/filter utility module. +- [x] 2.2 Refactor WIP overview/detail modules to consume shared autocomplete utilities while preserving behavior. +- [x] 2.3 Verify Vite build output remains valid for single-port backend delivery. + +## 3. Validation Coverage + +- [x] 3.1 Add backend tests for resilience diagnostics and restart churn telemetry contracts. +- [x] 3.2 Add frontend tests for shared autocomplete request parameter behavior. +- [x] 3.3 Run focused backend/frontend validation commands and record pass results. + +## 4. 
Documentation Alignment + +- [x] 4.1 Update `README.md` architecture/operations sections to reflect latest resilience and frontend-governance model. +- [x] 4.2 Add/update `README.mdj` to mirror the latest architecture contract at the designated documentation path. +- [x] 4.3 Update migration runbook notes to include documentation-alignment gate. diff --git a/openspec/config.yaml b/openspec/config.yaml new file mode 100644 index 0000000..392946c --- /dev/null +++ b/openspec/config.yaml @@ -0,0 +1,20 @@ +schema: spec-driven + +# Project context (optional) +# This is shown to AI when creating artifacts. +# Add your tech stack, conventions, style guides, domain knowledge, etc. +# Example: +# context: | +# Tech stack: TypeScript, React, Node.js +# We use conventional commits +# Domain: e-commerce platform + +# Per-artifact rules (optional) +# Add custom rules for specific artifacts. +# Example: +# rules: +# proposal: +# - Keep proposals under 500 words +# - Always include a "Non-goals" section +# tasks: +# - Break tasks into chunks of max 2 hours diff --git a/openspec/specs/cache-observability-hardening/spec.md b/openspec/specs/cache-observability-hardening/spec.md new file mode 100644 index 0000000..f027cc2 --- /dev/null +++ b/openspec/specs/cache-observability-hardening/spec.md @@ -0,0 +1,38 @@ +## Purpose +Define stable requirements for cache-observability-hardening. +## Requirements +### Requirement: Layered Cache SHALL Expose Operational State +The route cache implementation SHALL expose layered cache operational state, including mode, freshness, and degradation status. + +#### Scenario: Redis unavailable degradation state +- **WHEN** Redis is unavailable +- **THEN** health endpoints MUST indicate degraded cache mode while keeping L1 memory cache active + +### Requirement: Cache Telemetry MUST be Queryable for Operations +The system MUST provide cache telemetry suitable for operations diagnostics. 
+ +#### Scenario: Telemetry inspection +- **WHEN** operators request deep health status +- **THEN** cache-related metrics/state SHALL be present and interpretable for troubleshooting + +### Requirement: Health Endpoints SHALL Expose Pool Saturation and Degradation Reason Codes +Operational health endpoints MUST report connection pool saturation indicators and explicit degradation reason codes. + +#### Scenario: Pool saturation observed +- **WHEN** checked-out connections and overflow approach configured limits +- **THEN** deep health output MUST expose saturation metrics and degraded reason classification + +### Requirement: Degraded Responses MUST Be Correlatable Across API and Health Telemetry +Error responses for degraded states SHALL include stable codes that can be mapped to health telemetry and operational dashboards. + +#### Scenario: Degraded API response correlation +- **WHEN** an API request fails due to circuit-open or pool-exhausted conditions +- **THEN** operators MUST be able to match the response code to current health telemetry state + +### Requirement: Operational Alert Thresholds SHALL Be Explicitly Defined +The system MUST define alert thresholds for sustained degraded state, repeated worker recovery, and abnormal retry pressure. + +#### Scenario: Sustained degradation threshold exceeded +- **WHEN** degraded status persists beyond configured duration +- **THEN** the monitoring contract MUST classify the service as alert-worthy with actionable context + diff --git a/openspec/specs/conda-systemd-runtime-alignment/spec.md b/openspec/specs/conda-systemd-runtime-alignment/spec.md new file mode 100644 index 0000000..614e07f --- /dev/null +++ b/openspec/specs/conda-systemd-runtime-alignment/spec.md @@ -0,0 +1,26 @@ +# conda-systemd-runtime-alignment Specification + +## Purpose +TBD - created by archiving change stability-and-frontend-compute-shift. Update Purpose after archive. 
+## Requirements +### Requirement: Production Service Runtime SHALL Use Conda-Aligned Execution Paths +Service units and operational scripts MUST run with a consistent conda-managed Python runtime. + +#### Scenario: Service unit starts application +- **WHEN** systemd starts the dashboard service and watchdog +- **THEN** both processes MUST execute using the configured conda environment binaries and paths + +### Requirement: Watchdog and Runtime Paths MUST Be Operationally Consistent +PID files, restart flag paths, state files, and worker control interfaces SHALL be consistent across scripts, environment variables, and systemd units. + +#### Scenario: Watchdog handles restart flag +- **WHEN** a restart flag is written by admin control endpoints +- **THEN** watchdog MUST read the same configured path set and signal the correct Gunicorn master process + +### Requirement: Deployment Documentation MUST Match Runtime Contract +Runbooks and deployment documentation MUST describe the same conda/systemd/watchdog contract used by the deployed system. + +#### Scenario: Operator follows deployment runbook +- **WHEN** an operator performs deploy, health check, and rollback from documentation +- **THEN** documented commands and paths MUST work without requiring venv-specific assumptions + diff --git a/openspec/specs/field-contract-governance/spec.md b/openspec/specs/field-contract-governance/spec.md new file mode 100644 index 0000000..89cb6d6 --- /dev/null +++ b/openspec/specs/field-contract-governance/spec.md @@ -0,0 +1,42 @@ +## Purpose +Define stable requirements for field-contract-governance. +## Requirements +### Requirement: Field Contract Registry SHALL Define UI/API/Export Mapping +The system SHALL maintain a field contract registry mapping UI labels, API keys, export headers, and semantic types. 
+ +#### Scenario: Contract lookup for page rendering +- **WHEN** a page renders table headers and values +- **THEN** it MUST resolve display labels and keys through the shared field contract definitions + +#### Scenario: Contract lookup for export +- **WHEN** export headers are generated +- **THEN** header names MUST follow the same semantic mapping used by the page contract + +### Requirement: Consistency Checks MUST Detect Contract Drift +The system MUST provide automated checks that detect mismatches between UI, API response keys, and export field definitions. + +#### Scenario: Drift detection failure +- **WHEN** a page or export changes a field name without updating the contract +- **THEN** consistency checks MUST report a failing result before release + +### Requirement: Dynamic Report Rendering MUST Sanitize Untrusted Values +Dynamic table/list rendering in report and query pages SHALL sanitize untrusted text before injecting HTML. + +#### Scenario: HTML-like payload in query result +- **WHEN** an API result field contains HTML-like text payload +- **THEN** the rendered page MUST display escaped text and MUST NOT execute embedded script content + +### Requirement: UI Table and Download Headers SHALL Follow the Same Field Contract +Page table headers and exported file headers SHALL map to the same field contract definition for the same dataset. + +#### Scenario: Header consistency check +- **WHEN** users view a report table and then export the corresponding data +- **THEN** header labels MUST remain semantically aligned and avoid conflicting naming for identical fields + +### Requirement: Hold Detail Dynamic Rendering MUST Sanitize Untrusted Values +Dynamic table and distribution rendering in hold-detail SHALL sanitize untrusted text before injecting into HTML attributes or content. 
+ +#### Scenario: Hold reason distribution contains HTML-like payload +- **WHEN** workcenter/package/lot fields include HTML-like text from upstream data +- **THEN** the hold-detail page MUST render escaped text and MUST NOT execute embedded markup or scripts + diff --git a/openspec/specs/field-name-consistency/spec.md b/openspec/specs/field-name-consistency/spec.md new file mode 100644 index 0000000..7392305 --- /dev/null +++ b/openspec/specs/field-name-consistency/spec.md @@ -0,0 +1,16 @@ +## Purpose +Define stable requirements for field-name-consistency. + +## Requirements + + +### Requirement: UI and Export Fields SHALL Have a Consistent Contract +The system SHALL define and apply a consistent contract among UI column labels, API keys, and export headers for report/query pages. + +#### Scenario: Job query export naming consistency +- **WHEN** job query exports include cause/repair/symptom values +- **THEN** exported field names SHALL reflect semantic value type consistently (e.g., code name vs status name) + +#### Scenario: Resource history field alignment +- **WHEN** resource history detail table shows KPI columns +- **THEN** columns required by export semantics (including Availability%) SHALL be present or explicitly mapped diff --git a/openspec/specs/frontend-compute-shift/spec.md b/openspec/specs/frontend-compute-shift/spec.md new file mode 100644 index 0000000..2a6075a --- /dev/null +++ b/openspec/specs/frontend-compute-shift/spec.md @@ -0,0 +1,52 @@ +## Purpose +Define stable requirements for frontend-compute-shift. +## Requirements +### Requirement: Display-Layer Computation SHALL be Shifted to Frontend Safely +The system SHALL move eligible display-layer computations from backend to frontend while preserving existing business behavior. 
+ +#### Scenario: Equivalent metric output +- **WHEN** frontend-computed metrics are produced for a supported page +- **THEN** output values MUST match baseline backend results within defined rounding rules + +### Requirement: Compute Shift MUST be Verifiable by Parity Fixtures +Each migrated computation MUST have parity fixtures comparing baseline and migrated outputs. + +#### Scenario: Parity test gating +- **WHEN** a compute-shifted module is changed +- **THEN** parity checks MUST run and fail the migration gate if output differs beyond tolerance + +### Requirement: Compute-Shifted Logic SHALL Be Exposed as Reusable Frontend Core Modules +Frontend-computed metrics and transformations MUST be implemented as reusable, testable modules instead of page-local inline logic. + +#### Scenario: Multiple pages consume shared compute logic +- **WHEN** two or more pages require the same metric transformation or aggregation +- **THEN** they MUST import a shared frontend core module and produce consistent outputs + +### Requirement: Frontend Compute Parity MUST Include Tolerance Contracts Per Metric +Parity verification SHALL define explicit tolerance and rounding contracts per migrated metric. + +#### Scenario: Parity check for migrated metric +- **WHEN** migrated frontend computation is validated against baseline output +- **THEN** parity tests MUST evaluate the metric against its declared tolerance and fail when outside bounds + +### Requirement: Compute Shift MUST Preserve Existing User-Facing Logic +Frontend compute migration MUST preserve existing filter semantics, drill-down behavior, and displayed totals. 
+ +#### Scenario: Existing dashboard interactions after compute shift +- **WHEN** users apply filters and navigate drill-down flows on migrated pages +- **THEN** interaction results MUST remain behaviorally equivalent to the pre-shift baseline + +### Requirement: Frontend Compute Paths MUST Handle Zero and Boundary Values Correctly +Frontend-computed report metrics SHALL preserve valid zero values and boundary conditions in user-visible KPI and summary components. + +#### Scenario: Zero-value KPI rendering +- **WHEN** OU% or availability metrics are computed as `0` +- **THEN** the page MUST render `0%` (or configured numeric format) instead of placeholder values + +### Requirement: Hierarchical Filter Compute Logic SHALL Be Deterministic Across Levels +Frontend matrix/filter computations SHALL produce deterministic selection and filtering outcomes for group, family, and resource levels. + +#### Scenario: Matrix selection at multiple hierarchy levels +- **WHEN** users toggle matrix cells across group, family, and resource rows +- **THEN** selected-state rendering and filtered equipment result sets MUST remain level-correct and reversible + diff --git a/openspec/specs/full-vite-page-modularization/spec.md b/openspec/specs/full-vite-page-modularization/spec.md new file mode 100644 index 0000000..75ecc7a --- /dev/null +++ b/openspec/specs/full-vite-page-modularization/spec.md @@ -0,0 +1,81 @@ +## Purpose +Define stable requirements for full-vite-page-modularization. +## Requirements +### Requirement: Major Pages SHALL be Managed by Vite Modules +The system SHALL provide Vite-managed module entries for major portal pages, replacing inline scripts in a phased manner. 
+ +#### Scenario: Portal module loading +- **WHEN** the portal page is rendered +- **THEN** it MUST load its behavior from a Vite-built module asset when available + +#### Scenario: Page module fallback +- **WHEN** a required Vite asset is unavailable +- **THEN** the system MUST keep page behavior functional through explicit fallback logic + +### Requirement: Build Pipeline SHALL Produce Backend-Served Assets +Vite build output MUST be emitted into backend static paths and served by Flask/Gunicorn on the same origin. + +#### Scenario: Build artifact placement +- **WHEN** frontend build is executed +- **THEN** generated JS/CSS files SHALL be written to the configured backend static dist directory + +### Requirement: Vite Page Modules SHALL Reuse Shared Chart and Query Building Blocks +Page entry modules MUST consume shared chart/query/drawer utilities for common behaviors. + +#### Scenario: Common chart behavior across pages +- **WHEN** multiple report pages render equivalent chart interactions +- **THEN** the behavior MUST be provided by shared Vite modules rather than duplicated page-local implementations + +### Requirement: Modularization MUST Preserve Established Navigation and Drill-Down Semantics +Refactoring into Vite modules SHALL not alter existing page transitions, independent tabs, and drill-down entry points. + +#### Scenario: User follows existing drill-down path +- **WHEN** the user navigates from summary page to detail views +- **THEN** the resulting flow and parameter semantics MUST match the established baseline behavior + +### Requirement: Module Boundaries SHALL Support Frontend Compute Expansion +Vite module structure MUST keep compute logic decoupled from DOM wiring so additional backend-to-frontend computation shifts can be added safely. 
+ +#### Scenario: Adding a new frontend-computed metric +- **WHEN** a new metric is migrated from backend to frontend +- **THEN** the metric logic MUST be integrated through shared compute modules without rewriting page routing structure + +### Requirement: WIP Report Pages SHALL Be Served by Vite Modules +The system SHALL provide Vite entry bundles for WIP overview and WIP detail pages, with template-level asset resolution. + +#### Scenario: WIP module asset available +- **WHEN** the built asset exists in backend static dist +- **THEN** the page MUST load behavior from the corresponding Vite module entry + +#### Scenario: WIP module asset unavailable +- **WHEN** the built asset is not present +- **THEN** the page MUST retain equivalent behavior through explicit inline fallback logic + +### Requirement: Vite Modules MUST Preserve Legacy Handler Compatibility +Vite report modules SHALL expose required global handlers for existing inline entry points until event wiring is fully migrated. + +#### Scenario: Inline-triggered handler compatibility +- **WHEN** a template control invokes existing global handler names +- **THEN** the migrated module MUST provide compatible callable handlers without runtime scope errors + +### Requirement: Hold Detail Page SHALL Be Served by a Vite Module +The system SHALL provide a dedicated Vite entry bundle for the hold-detail report page. 
+ +#### Scenario: Hold-detail module asset exists +- **WHEN** `/hold-detail` is rendered and `hold-detail.js` exists in static dist +- **THEN** the page MUST load behavior from the Vite module entry + +#### Scenario: Hold-detail module asset missing +- **WHEN** `/hold-detail` is rendered and the module asset is unavailable +- **THEN** the page MUST remain operational through explicit inline fallback logic + +### Requirement: WIP Modules SHALL Reuse Shared Autocomplete and Filter Query Utilities +WIP overview and WIP detail Vite entry modules SHALL use shared frontend core utilities for autocomplete request construction and cross-filter behavior. + +#### Scenario: Cross-filter autocomplete parity across WIP pages +- **WHEN** users type in workorder/lot/package/type filters on either WIP overview or WIP detail pages +- **THEN** both pages MUST generate equivalent autocomplete request parameters and return behaviorally consistent dropdown results + +#### Scenario: Shared utility change propagates across both pages +- **WHEN** autocomplete mapping rules are updated in the shared core module +- **THEN** both WIP overview and WIP detail modules MUST consume the updated behavior without duplicated page-local logic edits diff --git a/openspec/specs/layered-route-cache/spec.md b/openspec/specs/layered-route-cache/spec.md new file mode 100644 index 0000000..77349c8 --- /dev/null +++ b/openspec/specs/layered-route-cache/spec.md @@ -0,0 +1,42 @@ +## Purpose +Define stable requirements for layered-route-cache. +## Requirements +### Requirement: Route Cache SHALL Use Layered Storage +The route cache SHALL use L1 in-memory TTL cache and L2 Redis JSON cache when Redis is available. 
+ +#### Scenario: L1 cache hit +- **WHEN** a cached key exists in L1 and is unexpired +- **THEN** the API response SHALL be returned from memory without querying Redis + +#### Scenario: L2 fallback +- **WHEN** a cached key is missing in L1 but exists in Redis +- **THEN** the value SHALL be returned and warmed into L1 + +### Requirement: Cache SHALL Degrade Gracefully Without Redis +The route cache SHALL remain functional with L1 cache when Redis is unavailable. + +#### Scenario: Redis unavailable at startup +- **WHEN** Redis health check fails during app initialization +- **THEN** route cache operations SHALL continue using L1 cache without application failure + +### Requirement: Resource and WIP Full-Table Cache SHALL Remain the Authoritative Cached Dataset +The system MUST keep `resource` and `wip` full-table cache datasets as the canonical cached source for downstream route queries. + +#### Scenario: Route query reads cached baseline +- **WHEN** an endpoint requires resource or wip data +- **THEN** it MUST read from the corresponding full-table cache baseline before applying derived filters or aggregations + +### Requirement: Cache Access Paths SHALL Support Index-Based Lookup and Derived Views +The caching layer SHALL support index and derived-view access paths to reduce per-request full-table merge and transformation overhead. + +#### Scenario: Lookup by key under concurrent load +- **WHEN** requests query by high-cardinality keys such as RESOURCEID +- **THEN** the system MUST serve lookups via indexed cache access instead of repeated full-array scans + +### Requirement: Full-Table Cache Refresh MUST Support Incremental Derivation Updates +Derived cache indices and aggregates MUST be refreshed consistently when the underlying full-table cache version changes. 
+ +#### Scenario: Cache version update +- **WHEN** full-table cache is refreshed to a new version +- **THEN** dependent indices and derived views MUST be rebuilt or updated before being exposed for reads + diff --git a/openspec/specs/migration-gates-and-rollout/spec.md b/openspec/specs/migration-gates-and-rollout/spec.md new file mode 100644 index 0000000..ccf5396 --- /dev/null +++ b/openspec/specs/migration-gates-and-rollout/spec.md @@ -0,0 +1,48 @@ +## Purpose +Define stable requirements for migration-gates-and-rollout. +## Requirements +### Requirement: Migration Gates SHALL Define Cutover Readiness +The system SHALL define explicit migration gates for functional parity, build integrity, and operational health before final cutover. + +#### Scenario: Gate evaluation before cutover +- **WHEN** release is prepared for final cutover +- **THEN** all required migration gates MUST pass or cutover SHALL be blocked + +### Requirement: Rollout and Rollback Procedures MUST be Actionable +The system SHALL document actionable rollout and rollback procedures for root migration. + +#### Scenario: Rollback execution +- **WHEN** post-cutover validation fails critical checks +- **THEN** operators MUST be able to execute documented rollback steps to restore previous stable behavior + +### Requirement: Migration Gates SHALL Include Runtime Resilience Validation +Cutover readiness gates MUST include resilience checks for pool exhaustion handling, circuit-breaker fail-fast behavior, and recovery flow. + +#### Scenario: Resilience gate evaluation +- **WHEN** migration gates are executed before release +- **THEN** resilience tests MUST pass for degraded-response semantics and recovery path validation + +### Requirement: Migration Gates SHALL Include Frontend Compute Parity Validation +Cutover readiness MUST include parity validation for metrics shifted from backend to frontend computation. 
+ +#### Scenario: Compute parity gate +- **WHEN** a release includes additional frontend-computed metrics +- **THEN** gate execution MUST verify parity fixtures and fail if tolerance contracts are violated + +### Requirement: Rollout Procedure MUST Include Conda-Systemd-Watchdog Rehearsal +Rollout and rollback runbooks SHALL include an operational rehearsal for service start, watchdog-triggered reload, and post-restart health checks under the conda/systemd runtime contract. + +#### Scenario: Pre-cutover rehearsal +- **WHEN** operators execute pre-cutover rehearsal +- **THEN** they MUST successfully complete conda-based start, worker reload, and health verification steps documented in the runbook + +### Requirement: Migration Gates SHALL Enforce Architecture Documentation Consistency +Cutover governance MUST include verification that runtime architecture contracts documented for operators match implemented deployment and resilience behavior. + +#### Scenario: Documentation gate before release +- **WHEN** release gates are executed for a migration or hardening change +- **THEN** project README artifacts MUST be updated to reflect current single-port runtime contract, resilience diagnostics, and frontend modularization strategy + +#### Scenario: Gate fails on stale architecture contract +- **WHEN** implementation introduces resilience or module-governance changes but README architecture section remains outdated +- **THEN** release governance MUST treat the gate as failed until documentation is aligned diff --git a/openspec/specs/portal-drawer-navigation/spec.md b/openspec/specs/portal-drawer-navigation/spec.md new file mode 100644 index 0000000..d392571 --- /dev/null +++ b/openspec/specs/portal-drawer-navigation/spec.md @@ -0,0 +1,19 @@ +## Purpose +Define stable requirements for portal-drawer-navigation. 
+ +## Requirements + + +### Requirement: Portal Navigation SHALL Group Entries by Functional Drawers +The portal SHALL group navigation entries into functional drawers: reports, queries, and development tools. + +#### Scenario: Drawer grouping visibility +- **WHEN** users open the portal +- **THEN** report pages and query pages SHALL appear in separate drawer groups + +### Requirement: Existing Page Behavior SHALL Remain Compatible +The portal navigation refactor SHALL preserve existing target routes and lazy-load behavior for content frames. + +#### Scenario: Route continuity +- **WHEN** a user selects an existing page entry from the new drawer +- **THEN** the corresponding original route SHALL be loaded without changing page business logic behavior diff --git a/openspec/specs/report-effects-parity/spec.md b/openspec/specs/report-effects-parity/spec.md new file mode 100644 index 0000000..006d3d8 --- /dev/null +++ b/openspec/specs/report-effects-parity/spec.md @@ -0,0 +1,30 @@ +# report-effects-parity Specification + +## Purpose +TBD - created by archiving change vite-jinja-report-parity-hardening. Update Purpose after archive. +## Requirements +### Requirement: Report Effect Parity SHALL Be Preserved During Vite Migration +The system SHALL preserve existing Jinja-era report interactions when report pages are served by Vite modules. 
+ +#### Scenario: WIP overview interactions remain equivalent +- **WHEN** users operate WIP overview filters, KPI cards, chart refresh, and drill-down entry +- **THEN** the resulting state transitions and navigation parameters MUST remain behaviorally equivalent to the baseline page logic + +#### Scenario: WIP detail interactions remain equivalent +- **WHEN** users operate WIP detail filters, pagination, lot detail popup, and back-to-overview transitions +- **THEN** the resulting data scope and interaction behavior MUST match baseline semantics + +### Requirement: Report Visual Semantics MUST Remain Consistent +Report pages SHALL keep established status color semantics, KPI display rules, and table/chart synchronization behavior after migration. + +#### Scenario: KPI and matrix state consistency +- **WHEN** metric values are zero or filters target specific matrix levels +- **THEN** KPI values and selected-state highlights MUST render correctly without collapsing valid zero values or losing selection state + +### Requirement: Hold Detail Interaction Semantics SHALL Remain Equivalent After Modularization +Migrating hold-detail to a Vite module SHALL preserve existing filter, pagination, and refresh behavior. + +#### Scenario: User applies filters and paginates on hold-detail +- **WHEN** users toggle age/workcenter/package filters and navigate pages +- **THEN** returned lots, distribution highlights, and pagination state MUST remain behaviorally equivalent to baseline inline behavior + diff --git a/openspec/specs/root-cutover-finalization/spec.md b/openspec/specs/root-cutover-finalization/spec.md new file mode 100644 index 0000000..fe62cf4 --- /dev/null +++ b/openspec/specs/root-cutover-finalization/spec.md @@ -0,0 +1,23 @@ +## Purpose +Define stable requirements for root-cutover-finalization. 
+ +## Requirements + + +### Requirement: Root Project SHALL be the Single Execution Target +The system SHALL run all application startup, test, and deployment workflows from `DashBoard_vite` root without requiring nested `DashBoard/` paths. + +#### Scenario: Root startup script execution +- **WHEN** an operator runs start/deploy scripts from `DashBoard_vite` root +- **THEN** all referenced source/config/script paths MUST resolve inside root project structure + +#### Scenario: Root test execution +- **WHEN** CI or local developer runs test commands from root +- **THEN** tests SHALL execute against root source tree and root config files + +### Requirement: Reference Directory MUST Remain Non-Authoritative +`DashBoard/` SHALL be treated as reference-only and MUST NOT be required for production runtime. + +#### Scenario: Runtime independence +- **WHEN** root application is started in an environment without `DashBoard/` +- **THEN** the application MUST remain functional for the defined migration scope diff --git a/openspec/specs/root-project-restructure/spec.md b/openspec/specs/root-project-restructure/spec.md new file mode 100644 index 0000000..2a1cd76 --- /dev/null +++ b/openspec/specs/root-project-restructure/spec.md @@ -0,0 +1,16 @@ +## Purpose +Define stable requirements for root-project-restructure. + +## Requirements + + +### Requirement: Root Directory SHALL be the Primary Executable Project +The system SHALL treat `DashBoard_vite` root directory as the primary executable project, while `DashBoard/` remains reference-only during migration. 
+ +#### Scenario: Running app from root +- **WHEN** a developer runs project scripts from `DashBoard_vite` root +- **THEN** the application startup flow SHALL resolve code and config from root project files + +#### Scenario: Reference directory preserved +- **WHEN** migration is in progress +- **THEN** `DashBoard/` SHALL remain available for structure comparison and behavior verification diff --git a/openspec/specs/runtime-resilience-recovery/spec.md b/openspec/specs/runtime-resilience-recovery/spec.md new file mode 100644 index 0000000..67b2566 --- /dev/null +++ b/openspec/specs/runtime-resilience-recovery/spec.md @@ -0,0 +1,50 @@ +# runtime-resilience-recovery Specification + +## Purpose +TBD - created by archiving change stability-and-frontend-compute-shift. Update Purpose after archive. +## Requirements +### Requirement: Database Pool Runtime Configuration SHALL Be Enforced +The system SHALL apply database pool and timeout parameters from runtime configuration to the active SQLAlchemy engine used by request handling. + +#### Scenario: Runtime pool configuration takes effect +- **WHEN** operators set pool and timeout values via environment configuration and start the service +- **THEN** the active engine MUST use those values for pool size, overflow, wait timeout, and query call timeout + +### Requirement: Pool Exhaustion MUST Return Retry-Aware Degraded Responses +The system MUST return explicit degraded responses for connection pool exhaustion and include machine-readable metadata for retry/backoff behavior. + +#### Scenario: Pool exhausted under load +- **WHEN** concurrent requests exceed available database connections and pool wait timeout is reached +- **THEN** the API MUST return a dedicated error code and retry guidance instead of a generic 500 failure + +### Requirement: Runtime Degradation MUST Integrate Circuit Breaker State +Database-facing API behavior SHALL distinguish circuit-breaker-open degradation from transient query failures. 
+ +#### Scenario: Circuit breaker is open +- **WHEN** the circuit breaker transitions to OPEN state +- **THEN** database-backed endpoints MUST fail fast with a stable degradation response contract + +### Requirement: Worker Recovery SHALL Support Hot Reload and Watchdog-Assisted Recovery +The runtime MUST support graceful worker hot reload and watchdog-triggered recovery without requiring a port change or full system reboot. + +#### Scenario: Worker restart requested +- **WHEN** an authorized operator requests worker restart during degraded operation +- **THEN** the service MUST trigger graceful reload and preserve single-port availability + +### Requirement: Report Frontend API Access SHALL Honor Degraded Retry Contracts +Report pages SHALL use retry-aware API access paths for JSON endpoints so degraded backend responses propagate retry metadata to UI behavior. + +#### Scenario: Pool exhaustion or circuit-open response +- **WHEN** report API endpoints return degraded error codes with retry hints +- **THEN** frontend calls MUST flow through MesApi-compatible behavior and avoid aggressive uncontrolled retry loops + +### Requirement: Runtime Resilience Diagnostics MUST Expose Actionable Signals +The system MUST expose machine-readable resilience thresholds, restart-churn indicators, and operator action recommendations so degraded states can be triaged consistently. 
+ +#### Scenario: Health payload includes resilience diagnostics +- **WHEN** clients call `/health` or `/health/deep` +- **THEN** responses MUST include resilience thresholds and a recommendation field describing whether to observe, throttle, or trigger controlled worker recovery + +#### Scenario: Admin status includes restart churn summary +- **WHEN** operators call `/admin/api/system-status` or `/admin/api/worker/status` +- **THEN** responses MUST include bounded restart history summary within a configured time window and indicate whether churn threshold is exceeded diff --git a/openspec/specs/vite-single-port-integration/spec.md b/openspec/specs/vite-single-port-integration/spec.md new file mode 100644 index 0000000..f5d4b49 --- /dev/null +++ b/openspec/specs/vite-single-port-integration/spec.md @@ -0,0 +1,19 @@ +## Purpose +Define stable requirements for vite-single-port-integration. + +## Requirements + + +### Requirement: Frontend Build SHALL Use Vite With Flask Static Output +The system SHALL use Vite to build frontend assets and output artifacts into Flask static directories served by the backend. + +#### Scenario: Build asset generation +- **WHEN** frontend build is executed +- **THEN** Vite SHALL generate portal-related JS/CSS artifacts into the backend static output path + +### Requirement: Deployment SHALL Preserve Single External Port +The system SHALL preserve single-port external serving through Flask/Gunicorn. 
+ +#### Scenario: Production serving mode +- **WHEN** the system runs in deployment mode +- **THEN** frontend assets SHALL be served through Flask on the same external port as API/page routes diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..952ecdc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,72 @@ +[build-system] +requires = ["setuptools>=68", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "mes-dashboard" +version = "0.1.0" +description = "MES Dashboard Portal" +readme = "README.md" +requires-python = ">=3.9" +license = { text = "MIT" } +authors = [ + { name = "MES Dashboard Team" } +] +keywords = ["flask", "mes", "dashboard"] +classifiers = [ + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "License :: OSI Approved :: MIT License", +] + +# Note: Using minimum version pins (>=) to allow automatic security updates. 
+# For reproducible builds, use: pip freeze > requirements.lock +dependencies = [ + # Core Framework + "flask>=3.0.0", + + # Database + "oracledb>=2.0.0", + "sqlalchemy>=2.0.0", + + # Data Processing + "pandas>=2.0.0", + "openpyxl>=3.0.0", + + # Cache (Redis) + "redis>=5.0.0", + "hiredis>=2.0.0", + + # HTTP Client + "requests>=2.28.0", + + # Configuration + "python-dotenv>=1.0.0", + + # WSGI Server + "gunicorn>=21.2.0", + "waitress>=2.1.2; platform_system == 'Windows'", + + # System Monitoring + "psutil>=5.9.0", +] + +[project.optional-dependencies] +test = [ + "pytest>=7.0.0", + "pytest-playwright>=0.4.0", + "playwright>=1.40.0", +] + +[tool.setuptools] +package-dir = {"" = "src"} +include-package-data = true + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.setuptools.package-data] +mes_dashboard = ["templates/**/*", "sql/**/*.sql"] diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..8f06f60 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,12 @@ +[pytest] +testpaths = tests +python_files = test_*.py +python_classes = Test* +python_functions = test_* +addopts = -v --tb=short + +markers = + integration: mark test as integration test (requires database) + e2e: mark test as end-to-end test (requires running server and playwright) + stress: mark test as stress test (may take longer, tests system stability) + load: mark test as load test (concurrent requests, tests throughput) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..a3a137f --- /dev/null +++ b/requirements.txt @@ -0,0 +1,36 @@ +# MES Dashboard Dependencies +# =========================== +# +# Version Strategy: +# Using minimum version pins (>=) to allow automatic security updates. 
+# For reproducible builds, generate a lock file: pip freeze > requirements.lock +# +# Python Version: >=3.9 (recommended: 3.11) + +# Core Framework +flask>=3.0.0 + +# Database +oracledb>=2.0.0 +sqlalchemy>=2.0.0 + +# Data Processing +pandas>=2.0.0 # Note: numpy is installed as a dependency +openpyxl>=3.0.0 # Excel file support + +# Cache (Redis) +redis>=5.0.0 +hiredis>=2.0.0 # C parser for better Redis performance + +# HTTP Client +requests>=2.28.0 + +# Configuration +python-dotenv>=1.0.0 + +# WSGI Server +gunicorn>=21.2.0 # Linux/macOS production server +waitress>=2.1.2; platform_system=="Windows" # Windows alternative + +# System Monitoring +psutil>=5.9.0 # Process and system utilities diff --git a/scripts/deploy.sh b/scripts/deploy.sh new file mode 100644 index 0000000..fb27b0c --- /dev/null +++ b/scripts/deploy.sh @@ -0,0 +1,289 @@ +#!/usr/bin/env bash +# +# MES Dashboard Deployment Script +# Usage: ./deploy.sh [--skip-db-check] +# +set -euo pipefail + +# ============================================================ +# Configuration +# ============================================================ +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +CONDA_ENV="mes-dashboard" +PYTHON_VERSION="3.11" +REDIS_CONF="/etc/redis/redis.conf" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# ============================================================ +# Helper Functions +# ============================================================ +log_info() { + echo -e "${BLUE}[INFO]${NC} $1" +} + +log_success() { + echo -e "${GREEN}[OK]${NC} $1" +} + +log_warn() { + echo -e "${YELLOW}[WARN]${NC} $1" +} + +log_error() { + echo -e "${RED}[ERROR]${NC} $1" +} + +log_important() { + echo -e "${YELLOW}[IMPORTANT]${NC} $1" +} + +# ============================================================ +# Deployment Functions +# ============================================================ + +check_prerequisites() { + log_info "Checking prerequisites..." + + # Check conda + if ! command -v conda &> /dev/null; then + log_error "Conda not found. Please install Miniconda/Anaconda first." + log_info "Download from: https://docs.conda.io/en/latest/miniconda.html" + exit 1 + fi + log_success "Conda found" + + # Source conda + source "$(conda info --base)/etc/profile.d/conda.sh" +} + +check_redis() { + log_info "Checking Redis installation..." + + # Check if redis-server is installed + if ! command -v redis-server &> /dev/null; then + log_error "Redis server not found." + log_info "Install with: sudo apt install redis-server" + exit 1 + fi + log_success "Redis server found" + + # Check if redis-cli is installed + if ! command -v redis-cli &> /dev/null; then + log_error "Redis CLI not found." 
+ exit 1 + fi + log_success "Redis CLI found" + + # Check if Redis service is enabled + if systemctl is-enabled redis-server &>/dev/null; then + log_success "Redis service is enabled" + else + log_warn "Redis service is not enabled for auto-start" + log_info "Enable with: sudo systemctl enable redis-server" + fi + + # Check if Redis is running + if systemctl is-active redis-server &>/dev/null; then + log_success "Redis service is running" + else + log_warn "Redis service is not running" + log_info "Start with: sudo systemctl start redis-server" + fi + + # Test Redis connectivity + if redis-cli ping &>/dev/null; then + log_success "Redis connectivity OK (PONG received)" + else + log_warn "Cannot connect to Redis (service may need to be started)" + fi +} + +setup_conda_env() { + log_info "Setting up conda environment..." + + # Check if environment exists + if conda env list | grep -q "^${CONDA_ENV} "; then + log_success "Environment '${CONDA_ENV}' already exists" + else + log_info "Creating conda environment '${CONDA_ENV}' with Python ${PYTHON_VERSION}..." + conda create -n "$CONDA_ENV" python="$PYTHON_VERSION" -y + log_success "Environment '${CONDA_ENV}' created" + fi + + # Activate environment + conda activate "$CONDA_ENV" + log_success "Environment '${CONDA_ENV}' activated" +} + +install_dependencies() { + log_info "Installing dependencies..." + + if [ -f "${ROOT}/requirements.txt" ]; then + pip install -r "${ROOT}/requirements.txt" --quiet + log_success "Dependencies installed" + else + log_error "requirements.txt not found" + exit 1 + fi +} + +install_frontend() { + if [ ! -f "${ROOT}/frontend/package.json" ]; then + log_info "No frontend package.json found, skipping Vite setup" + return 0 + fi + + if ! command -v npm &> /dev/null; then + log_warn "npm not found. Skip frontend build (Flask fallback mode only)." + return 0 + fi + + log_info "Installing frontend dependencies..." 
+ npm --prefix "${ROOT}/frontend" install --no-audit --no-fund + + log_info "Building frontend assets (Vite)..." + npm --prefix "${ROOT}/frontend" run build + log_success "Frontend assets built" +} + +setup_env_file() { + log_info "Setting up configuration..." + + if [ -f "${ROOT}/.env" ]; then + log_success ".env file already exists" + return 0 + fi + + if [ ! -f "${ROOT}/.env.example" ]; then + log_error ".env.example not found" + exit 1 + fi + + log_warn ".env file not found" + log_info "Copying .env.example to .env" + cp "${ROOT}/.env.example" "${ROOT}/.env" + + echo "" + log_important "Please edit .env with your database credentials:" + echo " nano ${ROOT}/.env" + echo "" + echo "Required settings:" + echo " - DB_USER: Your database username" + echo " - DB_PASSWORD: Your database password" + echo " - SECRET_KEY: A secure random key for production" + echo "" + + read -p "Press Enter after editing .env to continue..." + echo "" +} + +verify_database() { + local skip_db="${1:-false}" + + if [ "$skip_db" = "true" ]; then + log_warn "Skipping database verification" + return 0 + fi + + log_info "Verifying database connection..." + + # Load .env + if [ -f "${ROOT}/.env" ]; then + set -a + source "${ROOT}/.env" + set +a + fi + + export PYTHONPATH="${ROOT}/src:${PYTHONPATH:-}" + + if python -c " +from sqlalchemy import text +from mes_dashboard.core.database import get_engine +engine = get_engine() +with engine.connect() as conn: + conn.execute(text('SELECT 1 FROM DUAL')) +" 2>/dev/null; then + log_success "Database connection successful" + else + log_warn "Database connection failed" + log_info "You can still proceed, but the application may not work correctly" + log_info "Please check your DB_* settings in .env" + fi +} + +show_next_steps() { + echo "" + echo "==========================================" + echo " Deployment Complete!" 
+ echo "==========================================" + echo "" + echo "Start the server:" + echo " ./scripts/start_server.sh start" + echo "" + echo "View logs:" + echo " ./scripts/start_server.sh logs follow" + echo "" + echo "Check status:" + echo " ./scripts/start_server.sh status" + echo "" + echo "Access URL:" + local port=$(grep -E "^GUNICORN_BIND=" "${ROOT}/.env" 2>/dev/null | cut -d: -f2 || echo "8080") + echo " http://localhost:${port:-8080}" + echo "" + echo "Optional: install conda+systemd services" + echo " sudo mkdir -p /etc/mes-dashboard" + echo " sudo cp .env /etc/mes-dashboard/mes-dashboard.env" + echo " sudo cp deploy/mes-dashboard.service /etc/systemd/system/" + echo " sudo cp deploy/mes-dashboard-watchdog.service /etc/systemd/system/" + echo " sudo systemctl daemon-reload" + echo " sudo systemctl enable --now mes-dashboard mes-dashboard-watchdog" + echo "" + echo "==========================================" +} + +# ============================================================ +# Main +# ============================================================ +main() { + local skip_db=false + + # Parse arguments + for arg in "$@"; do + case "$arg" in + --skip-db-check) + skip_db=true + ;; + --help|-h) + echo "Usage: $0 [--skip-db-check]" + echo "" + echo "Options:" + echo " --skip-db-check Skip database connection verification" + echo " --help, -h Show this help message" + exit 0 + ;; + esac + done + + echo "" + echo "==========================================" + echo " MES Dashboard Deployment" + echo "==========================================" + echo "" + + check_prerequisites + check_redis + setup_conda_env + install_dependencies + install_frontend + setup_env_file + verify_database "$skip_db" + show_next_steps +} + +main "$@" diff --git a/scripts/run_stress_tests.py b/scripts/run_stress_tests.py new file mode 100644 index 0000000..2c9217c --- /dev/null +++ b/scripts/run_stress_tests.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" 
"""
Stress Test Runner for MES Dashboard

Runs comprehensive stress tests including:
- Backend API load tests
- Frontend browser stress tests

Usage:
    python scripts/run_stress_tests.py [options]

Options:
    --backend-only   Run only backend API tests
    --frontend-only  Run only frontend Playwright tests
    --quick          Quick test with minimal load (good for CI)
    --heavy          Heavy load test (10x normal)
    --url URL        Target URL (default: http://127.0.0.1:5000)
    --report FILE    Save report to file
"""

import argparse
import subprocess
import sys
import os
import time
from datetime import datetime

# Repository root (parent of the scripts/ directory).  Computed once and used
# as cwd for every pytest invocation so test paths resolve identically no
# matter which directory the operator launches this script from.
_REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


def _run_pytest_suite(name: str, test_path: str, env: dict) -> dict:
    """Run one pytest suite as a subprocess and return a result summary.

    Uses ``sys.executable`` rather than a bare ``'python'`` so the suite runs
    under the same interpreter (and conda environment) as this script — a bare
    'python' would pick up whatever happens to be first on PATH.

    Returns a dict with keys: name, passed, duration, returncode.
    """
    start_time = time.time()
    result = subprocess.run(
        [sys.executable, '-m', 'pytest', test_path, '-v', '-s', '--tb=short'],
        env=env,
        capture_output=False,
        cwd=_REPO_ROOT,
    )
    duration = time.time() - start_time

    return {
        'name': name,
        'passed': result.returncode == 0,
        'duration': duration,
        'returncode': result.returncode,
    }


def run_backend_tests(url: str, config: dict) -> dict:
    """Run backend API stress tests.

    The load parameters from *config* (concurrent_users, requests_per_user,
    timeout) are passed to the test suite via STRESS_* environment variables.
    """
    env = os.environ.copy()
    env['STRESS_TEST_URL'] = url
    env['STRESS_CONCURRENT_USERS'] = str(config.get('concurrent_users', 10))
    env['STRESS_REQUESTS_PER_USER'] = str(config.get('requests_per_user', 20))
    env['STRESS_TIMEOUT'] = str(config.get('timeout', 30))

    print("\n" + "=" * 60)
    print("Running Backend API Load Tests")
    print("=" * 60)
    print(f" URL: {url}")
    print(f" Concurrent Users: {config.get('concurrent_users', 10)}")
    print(f" Requests/User: {config.get('requests_per_user', 20)}")
    print()

    return _run_pytest_suite(
        'Backend API Load Tests', 'tests/stress/test_api_load.py', env
    )


def run_frontend_tests(url: str, config: dict) -> dict:
    """Run frontend Playwright stress tests.

    Only the target URL is forwarded; browser-side load shape is owned by the
    Playwright suite itself.  *config* is accepted for interface symmetry with
    run_backend_tests.
    """
    env = os.environ.copy()
    env['STRESS_TEST_URL'] = url

    print("\n" + "=" * 60)
    print("Running Frontend Playwright Stress Tests")
    print("=" * 60)
    print(f" URL: {url}")
    print()

    return _run_pytest_suite(
        'Frontend Playwright Stress Tests', 'tests/stress/test_frontend_stress.py', env
    )


def generate_report(results: list, url: str, config: dict) -> str:
    """Generate a text report of stress test results.

    *results* is a list of summary dicts as produced by the run_* functions.
    Returns the full report as a single newline-joined string.
    """
    report_lines = [
        "=" * 60,
        "MES Dashboard Stress Test Report",
        "=" * 60,
        f"Date: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        f"Target URL: {url}",
        f"Configuration: {config}",
        "",
        "-" * 60,
        "Test Results:",
        "-" * 60,
    ]

    total_duration = 0
    passed_count = 0

    for result in results:
        status = "PASSED" if result['passed'] else "FAILED"
        report_lines.append(f" {result['name']}: {status}")
        report_lines.append(f" Duration: {result['duration']:.2f}s")
        total_duration += result['duration']
        if result['passed']:
            passed_count += 1

    report_lines.extend([
        "",
        "-" * 60,
        "Summary:",
        "-" * 60,
        f" Total Tests: {len(results)}",
        f" Passed: {passed_count}",
        f" Failed: {len(results) - passed_count}",
        f" Total Duration: {total_duration:.2f}s",
        "=" * 60,
    ])

    return "\n".join(report_lines)


def main():
    """Parse CLI options, run the selected suites, print/save the report.

    Exits 0 only when every executed suite passed.
    """
    parser = argparse.ArgumentParser(description='Run MES Dashboard stress tests')
    parser.add_argument('--backend-only', action='store_true', help='Run only backend tests')
    parser.add_argument('--frontend-only', action='store_true', help='Run only frontend tests')
    parser.add_argument('--quick', action='store_true', help='Quick test with minimal load')
    parser.add_argument('--heavy', action='store_true', help='Heavy load test')
    parser.add_argument('--url', default='http://127.0.0.1:5000', help='Target URL')
    parser.add_argument('--report', help='Save report to file')

    args = parser.parse_args()

    # Configure load levels (--quick wins over --heavy when both are given,
    # matching the original flag precedence).
    if args.quick:
        config = {
            'concurrent_users': 3,
            'requests_per_user': 5,
            'timeout': 30
        }
    elif args.heavy:
        config = {
            'concurrent_users': 50,
            'requests_per_user': 50,
            'timeout': 60
        }
    else:
        config = {
            'concurrent_users': 10,
            'requests_per_user': 20,
            'timeout': 30
        }

    print("\n" + "=" * 60)
    print("MES Dashboard Stress Test Suite")
    print("=" * 60)
    print(f"Target: {args.url}")
    print(f"Mode: {'Quick' if args.quick else 'Heavy' if args.heavy else 'Normal'}")
    print()

    results = []

    # Run tests based on flags
    if not args.frontend_only:
        results.append(run_backend_tests(args.url, config))

    if not args.backend_only:
        results.append(run_frontend_tests(args.url, config))

    # Generate report
    report = generate_report(results, args.url, config)
    print("\n" + report)

    # Save report if requested
    if args.report:
        with open(args.report, 'w', encoding='utf-8') as f:
            f.write(report)
        print(f"\nReport saved to: {args.report}")

    # Exit with appropriate code
    all_passed = all(r['passed'] for r in results)
    sys.exit(0 if all_passed else 1)


if __name__ == '__main__':
    main()
&& pwd)" +CONDA_ENV="mes-dashboard" +APP_NAME="mes-dashboard" +PID_FILE_DEFAULT="${ROOT}/tmp/gunicorn.pid" +PID_FILE="${WATCHDOG_PID_FILE:-${PID_FILE_DEFAULT}}" +LOG_DIR="${ROOT}/logs" +ACCESS_LOG="${LOG_DIR}/access.log" +ERROR_LOG="${LOG_DIR}/error.log" +STARTUP_LOG="${LOG_DIR}/startup.log" +DEFAULT_PORT="${GUNICORN_BIND:-0.0.0.0:8080}" +PORT=$(echo "$DEFAULT_PORT" | cut -d: -f2) + +# Redis configuration +REDIS_ENABLED="${REDIS_ENABLED:-true}" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# ============================================================ +# Helper Functions +# ============================================================ +log_info() { + echo -e "${BLUE}[INFO]${NC} $1" +} + +log_success() { + echo -e "${GREEN}[OK]${NC} $1" +} + +log_warn() { + echo -e "${YELLOW}[WARN]${NC} $1" +} + +log_error() { + echo -e "${RED}[ERROR]${NC} $1" +} + +timestamp() { + date '+%Y-%m-%d %H:%M:%S' +} + +resolve_runtime_paths() { + WATCHDOG_RUNTIME_DIR="${WATCHDOG_RUNTIME_DIR:-${ROOT}/tmp}" + WATCHDOG_RESTART_FLAG="${WATCHDOG_RESTART_FLAG:-${WATCHDOG_RUNTIME_DIR}/mes_dashboard_restart.flag}" + WATCHDOG_PID_FILE="${WATCHDOG_PID_FILE:-${PID_FILE_DEFAULT}}" + WATCHDOG_STATE_FILE="${WATCHDOG_STATE_FILE:-${WATCHDOG_RUNTIME_DIR}/mes_dashboard_restart_state.json}" + PID_FILE="${WATCHDOG_PID_FILE}" + export WATCHDOG_RUNTIME_DIR WATCHDOG_RESTART_FLAG WATCHDOG_PID_FILE WATCHDOG_STATE_FILE +} + +# Load .env file if exists +load_env() { + if [ -f "${ROOT}/.env" ]; then + log_info "Loading environment from .env" + set -a # Mark all variables for export + source "${ROOT}/.env" + set +a + fi +} + +# ============================================================ +# Environment Check Functions +# ============================================================ +check_conda() { + if ! command -v conda &> /dev/null; then + log_error "Conda not found. Please install Miniconda/Anaconda." 
+ return 1 + fi + + # Source conda + source "$(conda info --base)/etc/profile.d/conda.sh" + + # Check if environment exists + if ! conda env list | grep -q "^${CONDA_ENV} "; then + log_error "Conda environment '${CONDA_ENV}' not found." + log_info "Create it with: conda create -n ${CONDA_ENV} python=3.11" + return 1 + fi + + log_success "Conda environment '${CONDA_ENV}' found" + return 0 +} + +check_dependencies() { + conda activate "$CONDA_ENV" + + local missing=() + + # Check critical packages + python -c "import flask" 2>/dev/null || missing+=("flask") + python -c "import gunicorn" 2>/dev/null || missing+=("gunicorn") + python -c "import pandas" 2>/dev/null || missing+=("pandas") + python -c "import oracledb" 2>/dev/null || missing+=("oracledb") + + if [ ${#missing[@]} -gt 0 ]; then + log_error "Missing dependencies: ${missing[*]}" + log_info "Install with: pip install ${missing[*]}" + return 1 + fi + + log_success "All dependencies installed" + return 0 +} + +check_env_file() { + if [ ! 
-f "${ROOT}/.env" ]; then + if [ -f "${ROOT}/.env.example" ]; then + log_warn ".env file not found, but .env.example exists" + log_info "Copy and configure: cp .env.example .env" + else + log_warn ".env file not found (optional but recommended)" + fi + return 0 + fi + + log_success ".env file found" + return 0 +} + +check_port() { + if lsof -i ":${PORT}" -sTCP:LISTEN &>/dev/null; then + local pid=$(lsof -t -i ":${PORT}" -sTCP:LISTEN 2>/dev/null | head -1) + log_error "Port ${PORT} is already in use (PID: ${pid})" + log_info "Stop the existing process or change GUNICORN_BIND" + return 1 + fi + + log_success "Port ${PORT} is available" + return 0 +} + +check_database() { + conda activate "$CONDA_ENV" + export PYTHONPATH="${ROOT}/src:${PYTHONPATH:-}" + + if python -c " +from sqlalchemy import text +from mes_dashboard.core.database import get_engine +engine = get_engine() +with engine.connect() as conn: + conn.execute(text('SELECT 1 FROM DUAL')) +" 2>/dev/null; then + log_success "Database connection OK" + return 0 + else + log_warn "Database connection failed (service may still start)" + return 0 # Non-fatal, allow startup + fi +} + +build_frontend_assets() { + if [ "${FRONTEND_BUILD_ON_START:-true}" != "true" ]; then + log_info "Skip frontend build (FRONTEND_BUILD_ON_START=${FRONTEND_BUILD_ON_START})" + return 0 + fi + + if [ ! -f "${ROOT}/frontend/package.json" ]; then + return 0 + fi + + if ! command -v npm &> /dev/null; then + log_warn "npm not found, skip frontend build" + return 0 + fi + + local required_entries=( + "portal.js" + "resource-status.js" + "resource-history.js" + "job-query.js" + "excel-query.js" + "tables.js" + ) + local needs_build=false + local newest_entry="" + + for entry in "${required_entries[@]}"; do + local entry_path="${ROOT}/src/mes_dashboard/static/dist/${entry}" + if [ ! 
-f "${entry_path}" ]; then + needs_build=true + break + fi + if [ -z "${newest_entry}" ] || [ "${entry_path}" -nt "${newest_entry}" ]; then + newest_entry="${entry_path}" + fi + done + + if [ "$needs_build" = false ] && find "${ROOT}/frontend/src" -type f -newer "${newest_entry}" | grep -q .; then + needs_build=true + fi + if [ "$needs_build" = false ] && ([ "${ROOT}/frontend/package.json" -nt "${newest_entry}" ] || [ "${ROOT}/frontend/vite.config.js" -nt "${newest_entry}" ]); then + needs_build=true + fi + + if [ "$needs_build" = false ]; then + log_success "Frontend assets are up to date" + return 0 + fi + + log_info "Building frontend assets with Vite..." + if npm --prefix "${ROOT}/frontend" run build >/dev/null 2>&1; then + log_success "Frontend assets built" + else + log_warn "Frontend build failed; continuing with fallback inline scripts" + fi +} + +# ============================================================ +# Redis Management Functions +# ============================================================ +check_redis() { + if [ "$REDIS_ENABLED" != "true" ]; then + log_info "Redis is disabled (REDIS_ENABLED=${REDIS_ENABLED})" + return 0 + fi + + if ! command -v redis-cli &> /dev/null; then + log_warn "Redis CLI not found (Redis features will be disabled)" + return 0 + fi + + if redis-cli ping &>/dev/null; then + log_success "Redis connection OK" + return 0 + else + log_warn "Redis not responding (will attempt to start)" + return 1 + fi +} + +start_redis() { + if [ "$REDIS_ENABLED" != "true" ]; then + return 0 + fi + + if ! command -v redis-cli &> /dev/null; then + return 0 + fi + + # Check if Redis is already running + if redis-cli ping &>/dev/null; then + log_success "Redis is already running" + return 0 + fi + + # Try to start Redis via systemctl + if command -v systemctl &> /dev/null; then + log_info "Starting Redis service..." 
+ if sudo systemctl start redis-server 2>/dev/null; then + sleep 1 + if redis-cli ping &>/dev/null; then + log_success "Redis service started" + return 0 + fi + fi + fi + + log_warn "Could not start Redis (fallback mode will be used)" + return 0 +} + +stop_redis() { + if [ "$REDIS_ENABLED" != "true" ]; then + return 0 + fi + + if ! command -v redis-cli &> /dev/null; then + return 0 + fi + + # Check if Redis is running + if ! redis-cli ping &>/dev/null; then + log_info "Redis is not running" + return 0 + fi + + # Stop Redis via systemctl + if command -v systemctl &> /dev/null; then + log_info "Stopping Redis service..." + if sudo systemctl stop redis-server 2>/dev/null; then + log_success "Redis service stopped" + return 0 + fi + fi + + log_warn "Could not stop Redis service" + return 0 +} + +redis_status() { + if [ "$REDIS_ENABLED" != "true" ]; then + echo -e " Redis: ${YELLOW}DISABLED${NC}" + return 0 + fi + + if ! command -v redis-cli &> /dev/null; then + echo -e " Redis: ${YELLOW}NOT INSTALLED${NC}" + return 0 + fi + + if redis-cli ping &>/dev/null; then + local info=$(redis-cli info memory 2>/dev/null | grep "used_memory_human" | cut -d: -f2 | tr -d '\r') + echo -e " Redis: ${GREEN}RUNNING${NC} (Memory: ${info:-unknown})" + else + echo -e " Redis: ${RED}STOPPED${NC}" + fi +} + +run_all_checks() { + log_info "Running environment checks..." 
+ echo "" + + check_conda || return 1 + check_dependencies || return 1 + check_env_file + load_env + resolve_runtime_paths + check_port || return 1 + check_database + check_redis + + echo "" + log_success "All checks passed" + return 0 +} + +# ============================================================ +# Service Management Functions +# ============================================================ +ensure_dirs() { + mkdir -p "${LOG_DIR}" + mkdir -p "${LOG_DIR}/archive" + mkdir -p "$(dirname "${PID_FILE}")" + mkdir -p "${WATCHDOG_RUNTIME_DIR}" +} + +rotate_logs() { + # Archive existing logs with timestamp before starting new session + local ts=$(date '+%Y%m%d_%H%M%S') + + if [ -f "$ACCESS_LOG" ] && [ -s "$ACCESS_LOG" ]; then + mv "$ACCESS_LOG" "${LOG_DIR}/archive/access_${ts}.log" + log_info "Archived access.log -> archive/access_${ts}.log" + fi + + if [ -f "$ERROR_LOG" ] && [ -s "$ERROR_LOG" ]; then + mv "$ERROR_LOG" "${LOG_DIR}/archive/error_${ts}.log" + log_info "Archived error.log -> archive/error_${ts}.log" + fi + + # Clean up old archives (keep last 10) + cd "${LOG_DIR}/archive" 2>/dev/null && \ + ls -t access_*.log 2>/dev/null | tail -n +11 | xargs -r rm -f && \ + ls -t error_*.log 2>/dev/null | tail -n +11 | xargs -r rm -f + cd "$ROOT" + + # Create fresh log files + touch "$ACCESS_LOG" "$ERROR_LOG" +} + +get_pid() { + if [ -f "$PID_FILE" ]; then + local pid=$(cat "$PID_FILE" 2>/dev/null) + if [ -n "$pid" ] && kill -0 "$pid" 2>/dev/null; then + echo "$pid" + return 0 + fi + fi + + # Fallback: find by port + local pid=$(lsof -t -i ":${PORT}" -sTCP:LISTEN 2>/dev/null | head -1) + if [ -n "$pid" ]; then + echo "$pid" + return 0 + fi + + return 1 +} + +is_running() { + get_pid &>/dev/null +} + +do_start() { + local foreground=false + + if [ "${1:-}" = "-f" ] || [ "${1:-}" = "--foreground" ]; then + foreground=true + fi + + load_env + resolve_runtime_paths + + if is_running; then + local pid=$(get_pid) + log_warn "Server is already running (PID: ${pid})" + return 
1 + fi + + # Run checks + run_all_checks || return 1 + + echo "" + + # Start Redis if enabled + start_redis + + log_info "Starting ${APP_NAME} server..." + + ensure_dirs + rotate_logs # Archive old logs before starting new session + conda activate "$CONDA_ENV" + load_env # Load environment variables from .env file + resolve_runtime_paths + # Re-evaluate port after loading .env (GUNICORN_BIND may have changed) + PORT=$(echo "${GUNICORN_BIND:-0.0.0.0:8080}" | cut -d: -f2) + export PYTHONPATH="${ROOT}/src:${PYTHONPATH:-}" + cd "$ROOT" + build_frontend_assets + + # Log startup + echo "[$(timestamp)] Starting server" >> "$STARTUP_LOG" + + if [ "$foreground" = true ]; then + log_info "Running in foreground mode (Ctrl+C to stop)" + exec gunicorn \ + --config gunicorn.conf.py \ + --pid "$PID_FILE" \ + --access-logfile "$ACCESS_LOG" \ + --error-logfile "$ERROR_LOG" \ + --capture-output \ + "mes_dashboard:create_app()" + else + gunicorn \ + --config gunicorn.conf.py \ + --pid "$PID_FILE" \ + --access-logfile "$ACCESS_LOG" \ + --error-logfile "$ERROR_LOG" \ + --capture-output \ + --daemon \ + "mes_dashboard:create_app()" + + sleep 1 + + if is_running; then + local pid=$(get_pid) + log_success "Server started successfully (PID: ${pid})" + log_info "Access URL: http://localhost:${PORT}" + log_info "Logs: ${LOG_DIR}/" + echo "[$(timestamp)] Server started (PID: ${pid})" >> "$STARTUP_LOG" + else + log_error "Failed to start server" + log_info "Check error log: ${ERROR_LOG}" + echo "[$(timestamp)] Server start failed" >> "$STARTUP_LOG" + return 1 + fi + fi +} + +do_stop() { + load_env + resolve_runtime_paths + + if ! is_running; then + log_warn "Server is not running" + return 0 + fi + + local pid=$(get_pid) + log_info "Stopping server (PID: ${pid})..." 
+ + # Find all gunicorn processes (master + workers) + local all_pids=$(pgrep -f "gunicorn.*mes_dashboard" 2>/dev/null | tr '\n' ' ') + + # Graceful shutdown with SIGTERM + kill -TERM "$pid" 2>/dev/null + + # Wait for graceful shutdown (max 10 seconds) + local count=0 + while kill -0 "$pid" 2>/dev/null && [ $count -lt 10 ]; do + sleep 1 + count=$((count + 1)) + echo -n "." + done + echo "" + + # Force kill if still running (including orphaned workers) + if kill -0 "$pid" 2>/dev/null || [ -n "$(pgrep -f 'gunicorn.*mes_dashboard' 2>/dev/null)" ]; then + log_warn "Graceful shutdown timeout, forcing..." + # Kill all gunicorn processes related to mes_dashboard + pkill -9 -f "gunicorn.*mes_dashboard" 2>/dev/null + sleep 1 + fi + + # Cleanup PID file + rm -f "$PID_FILE" + + # Verify all processes are stopped + if [ -z "$(pgrep -f 'gunicorn.*mes_dashboard' 2>/dev/null)" ]; then + log_success "Server stopped" + echo "[$(timestamp)] Server stopped (PID: ${pid})" >> "$STARTUP_LOG" + else + log_error "Failed to stop server" + return 1 + fi +} + +do_restart() { + log_info "Restarting ${APP_NAME} server..." 
+ do_stop + sleep 1 + do_start "$@" +} + +do_status() { + # Load environment to get REDIS_ENABLED + load_env + resolve_runtime_paths + + echo "" + echo "==========================================" + echo " ${APP_NAME} Server Status" + echo "==========================================" + echo "" + + if is_running; then + local pid=$(get_pid) + echo -e " Server: ${GREEN}RUNNING${NC}" + echo " PID: ${pid}" + echo " Port: ${PORT}" + echo " URL: http://localhost:${PORT}" + echo " PIDFile: ${PID_FILE}" + echo " Watchdog Runtime: ${WATCHDOG_RUNTIME_DIR}" + else + echo -e " Server: ${RED}STOPPED${NC}" + fi + + # Show Redis status + redis_status + + if is_running; then + echo "" + + # Show process info + local pid=$(get_pid) + if command -v ps &>/dev/null; then + echo " Process Info:" + ps -p "$pid" -o pid,ppid,%cpu,%mem,etime,cmd --no-headers 2>/dev/null | \ + awk '{printf " PID: %s | CPU: %s%% | MEM: %s%% | Uptime: %s\n", $1, $3, $4, $5}' + fi + + # Show recent log entries + if [ -f "$ERROR_LOG" ]; then + echo "" + echo " Recent Errors (last 3):" + tail -3 "$ERROR_LOG" 2>/dev/null | sed 's/^/ /' + fi + else + echo "" + echo " Start with: $0 start" + fi + + echo "" + echo "==========================================" +} + +do_logs() { + local log_type="${1:-all}" + local lines="${2:-50}" + + case "$log_type" in + access) + if [ -f "$ACCESS_LOG" ]; then + log_info "Access log (last ${lines} lines):" + tail -n "$lines" "$ACCESS_LOG" + else + log_warn "Access log not found" + fi + ;; + error) + if [ -f "$ERROR_LOG" ]; then + log_info "Error log (last ${lines} lines):" + tail -n "$lines" "$ERROR_LOG" + else + log_warn "Error log not found" + fi + ;; + follow) + log_info "Following logs (Ctrl+C to stop)..." 
+ tail -f "$ACCESS_LOG" "$ERROR_LOG" 2>/dev/null + ;; + *) + log_info "=== Error Log (last 20 lines) ===" + tail -20 "$ERROR_LOG" 2>/dev/null || echo "(empty)" + echo "" + log_info "=== Access Log (last 20 lines) ===" + tail -20 "$ACCESS_LOG" 2>/dev/null || echo "(empty)" + ;; + esac +} + +do_check() { + run_all_checks +} + +show_help() { + echo "" + echo "Usage: $0 [options]" + echo "" + echo "Commands:" + echo " start [-f] Start the server (-f for foreground mode)" + echo " stop Stop the server gracefully" + echo " restart Restart the server" + echo " status Show server and Redis status" + echo " logs [type] View logs (access|error|follow|all)" + echo " check Run environment checks only" + echo " help Show this help message" + echo "" + echo "Examples:" + echo " $0 start # Start in background (with Redis)" + echo " $0 start -f # Start in foreground" + echo " $0 logs follow # Follow logs in real-time" + echo " $0 logs error 100 # Show last 100 error log lines" + echo "" + echo "Environment Variables:" + echo " GUNICORN_BIND Bind address (default: 0.0.0.0:8080)" + echo " GUNICORN_WORKERS Number of workers (default: 1)" + echo " GUNICORN_THREADS Threads per worker (default: 4)" + echo " REDIS_ENABLED Enable Redis cache (default: true)" + echo " REDIS_URL Redis connection URL" + echo "" +} + +# ============================================================ +# Main +# ============================================================ +main() { + local command="${1:-}" + shift || true + + case "$command" in + start) + do_start "$@" + ;; + stop) + do_stop + ;; + restart) + do_restart "$@" + ;; + status) + do_status + ;; + logs) + do_logs "$@" + ;; + check) + do_check + ;; + help|--help|-h) + show_help + ;; + "") + # Default: start in foreground for backward compatibility + do_start + ;; + *) + log_error "Unknown command: ${command}" + show_help + exit 1 + ;; + esac +} + +main "$@" diff --git a/scripts/worker_watchdog.py b/scripts/worker_watchdog.py new file mode 100644 index 
0000000..c354610 --- /dev/null +++ b/scripts/worker_watchdog.py @@ -0,0 +1,302 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""Worker watchdog for MES Dashboard. + +Monitors a restart flag file and signals Gunicorn master to gracefully +reload workers when the flag is detected. + +Usage: + python scripts/worker_watchdog.py + +The watchdog: +- Checks for /tmp/mes_dashboard_restart.flag every 5 seconds +- Sends SIGHUP to Gunicorn master process when flag is detected +- Removes the flag file after signaling +- Logs all restart events + +Configuration via environment variables: +- WATCHDOG_CHECK_INTERVAL: Check interval in seconds (default: 5) +- WATCHDOG_RESTART_FLAG: Path to restart flag file +- WATCHDOG_PID_FILE: Path to Gunicorn PID file +""" + +from __future__ import annotations + +import json +import logging +import os +import signal +import sys +import time +from datetime import datetime +from pathlib import Path + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler(sys.stdout), + ] +) +logger = logging.getLogger('mes_dashboard.watchdog') + +# ============================================================ +# Configuration +# ============================================================ + +CHECK_INTERVAL = int(os.getenv('WATCHDOG_CHECK_INTERVAL', '5')) + + +def _env_int(name: str, default: int) -> int: + try: + return int(os.getenv(name, str(default))) + except (TypeError, ValueError): + return default + + +PROJECT_ROOT = Path(__file__).resolve().parents[1] +DEFAULT_RUNTIME_DIR = Path( + os.getenv('WATCHDOG_RUNTIME_DIR', str(PROJECT_ROOT / 'tmp')) +) +RESTART_FLAG_PATH = os.getenv( + 'WATCHDOG_RESTART_FLAG', + str(DEFAULT_RUNTIME_DIR / 'mes_dashboard_restart.flag') +) +GUNICORN_PID_FILE = os.getenv( + 'WATCHDOG_PID_FILE', + str(DEFAULT_RUNTIME_DIR / 'gunicorn.pid') +) +RESTART_STATE_FILE = os.getenv( + 'WATCHDOG_STATE_FILE', + 
str(DEFAULT_RUNTIME_DIR / 'mes_dashboard_restart_state.json') +) +RESTART_HISTORY_MAX = _env_int('WATCHDOG_RESTART_HISTORY_MAX', 50) + + +# ============================================================ +# Watchdog Implementation +# ============================================================ + +def get_gunicorn_pid() -> int | None: + """Get Gunicorn master PID from PID file. + + Returns: + PID of Gunicorn master process, or None if not found. + """ + pid_path = Path(GUNICORN_PID_FILE) + + if not pid_path.exists(): + logger.warning(f"PID file not found: {GUNICORN_PID_FILE}") + return None + + try: + pid = int(pid_path.read_text().strip()) + # Verify process exists + os.kill(pid, 0) + return pid + except (ValueError, ProcessLookupError, PermissionError) as e: + logger.warning(f"Invalid or stale PID file: {e}") + return None + + +def read_restart_flag() -> dict | None: + """Read and parse the restart flag file. + + Returns: + Dictionary with restart metadata, or None if no flag exists. + """ + flag_path = Path(RESTART_FLAG_PATH) + + if not flag_path.exists(): + return None + + try: + content = flag_path.read_text().strip() + if content: + return json.loads(content) + return {"timestamp": datetime.now().isoformat()} + except (json.JSONDecodeError, IOError) as e: + logger.warning(f"Error reading restart flag: {e}") + return {"timestamp": datetime.now().isoformat(), "error": str(e)} + + +def remove_restart_flag() -> bool: + """Remove the restart flag file. + + Returns: + True if file was removed, False otherwise. 
+ """ + flag_path = Path(RESTART_FLAG_PATH) + + try: + if flag_path.exists(): + flag_path.unlink() + return True + return False + except IOError as e: + logger.error(f"Failed to remove restart flag: {e}") + return False + + +def load_restart_state() -> dict: + """Load persisted restart state from disk.""" + state_path = Path(RESTART_STATE_FILE) + if not state_path.exists(): + return {} + try: + return json.loads(state_path.read_text()) + except (json.JSONDecodeError, IOError): + return {} + + +def save_restart_state( + requested_by: str | None = None, + requested_at: str | None = None, + requested_ip: str | None = None, + completed_at: str | None = None, + success: bool = True +) -> None: + """Save restart state for status queries. + + Args: + requested_by: Username who requested the restart. + requested_at: ISO timestamp when restart was requested. + requested_ip: IP address of requester. + completed_at: ISO timestamp when restart was completed. + success: Whether the restart was successful. + """ + state_path = Path(RESTART_STATE_FILE) + + entry = { + "requested_by": requested_by, + "requested_at": requested_at, + "requested_ip": requested_ip, + "completed_at": completed_at, + "success": success + } + current_state = load_restart_state() + history = current_state.get("history", []) + if not isinstance(history, list): + history = [] + history.append(entry) + if len(history) > RESTART_HISTORY_MAX: + history = history[-RESTART_HISTORY_MAX:] + + state = { + "last_restart": entry, + "history": history, + "history_limit": RESTART_HISTORY_MAX, + } + + try: + state_path.parent.mkdir(parents=True, exist_ok=True) + state_path.write_text(json.dumps(state, indent=2)) + except IOError as e: + logger.error(f"Failed to save restart state: {e}") + + +def send_reload_signal(pid: int) -> bool: + """Send SIGHUP to Gunicorn master to reload workers. + + Args: + pid: PID of Gunicorn master process. + + Returns: + True if signal was sent successfully, False otherwise. 
+ """ + try: + os.kill(pid, signal.SIGHUP) + logger.info(f"Sent SIGHUP to Gunicorn master (PID: {pid})") + return True + except ProcessLookupError: + logger.error(f"Process {pid} not found") + return False + except PermissionError: + logger.error(f"Permission denied sending signal to PID {pid}") + return False + + +def process_restart_request() -> bool: + """Process a restart request if flag file exists. + + Returns: + True if restart was processed, False if no restart needed. + """ + flag_data = read_restart_flag() + + if flag_data is None: + return False + + logger.info(f"Restart flag detected: {flag_data}") + + # Get Gunicorn master PID + pid = get_gunicorn_pid() + + if pid is None: + logger.error("Cannot restart: Gunicorn master PID not found") + # Still remove flag to prevent infinite loop + remove_restart_flag() + save_restart_state( + requested_by=flag_data.get("user"), + requested_at=flag_data.get("timestamp"), + requested_ip=flag_data.get("ip"), + completed_at=datetime.now().isoformat(), + success=False + ) + return True + + # Send reload signal + success = send_reload_signal(pid) + + # Remove flag file + remove_restart_flag() + + # Save state + save_restart_state( + requested_by=flag_data.get("user"), + requested_at=flag_data.get("timestamp"), + requested_ip=flag_data.get("ip"), + completed_at=datetime.now().isoformat(), + success=success + ) + + if success: + logger.info( + f"Worker restart completed - " + f"Requested by: {flag_data.get('user', 'unknown')}, " + f"IP: {flag_data.get('ip', 'unknown')}" + ) + + return True + + +def run_watchdog() -> None: + """Main watchdog loop.""" + logger.info( + f"Worker watchdog started - " + f"Check interval: {CHECK_INTERVAL}s, " + f"Flag path: {RESTART_FLAG_PATH}, " + f"PID file: {GUNICORN_PID_FILE}" + ) + + while True: + try: + process_restart_request() + except Exception as e: + logger.exception(f"Error in watchdog loop: {e}") + + time.sleep(CHECK_INTERVAL) + + +def main() -> None: + """Entry point for watchdog 
script.""" + try: + run_watchdog() + except KeyboardInterrupt: + logger.info("Watchdog stopped by user") + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/shared/field_contracts.json b/shared/field_contracts.json new file mode 100644 index 0000000..eba4c5d --- /dev/null +++ b/shared/field_contracts.json @@ -0,0 +1,110 @@ +{ + "job_query": { + "jobs_table": [ + {"api_key": "RESOURCENAME", "ui_label": "設備名稱", "export_header": "設備名稱", "semantic_type": "text"}, + {"api_key": "JOBID", "ui_label": "JOBID", "export_header": "工單ID", "semantic_type": "identifier"}, + {"api_key": "JOBSTATUS", "ui_label": "狀態", "export_header": "工單狀態", "semantic_type": "status"}, + {"api_key": "JOBMODELNAME", "ui_label": "類型", "export_header": "工單類型", "semantic_type": "text"}, + {"api_key": "CREATEDATE", "ui_label": "建立時間", "export_header": "工單建立時間", "semantic_type": "datetime"}, + {"api_key": "COMPLETEDATE", "ui_label": "完成時間", "export_header": "工單完成時間", "semantic_type": "datetime"}, + {"api_key": "CAUSECODENAME", "ui_label": "故障碼", "export_header": "工單故障碼", "semantic_type": "text"}, + {"api_key": "REPAIRCODENAME", "ui_label": "維修碼", "export_header": "工單維修碼", "semantic_type": "text"} + ], + "txn_table": [ + {"api_key": "TXNDATE", "ui_label": "交易時間", "export_header": "交易時間", "semantic_type": "datetime"}, + {"api_key": "FROMJOBSTATUS", "ui_label": "原狀態", "export_header": "原狀態", "semantic_type": "status"}, + {"api_key": "JOBSTATUS", "ui_label": "新狀態", "export_header": "新狀態", "semantic_type": "status"}, + {"api_key": "STAGENAME", "ui_label": "階段", "export_header": "階段", "semantic_type": "text"}, + {"api_key": "CAUSECODENAME", "ui_label": "故障碼", "export_header": "交易故障碼", "semantic_type": "text"}, + {"api_key": "REPAIRCODENAME", "ui_label": "維修碼", "export_header": "交易維修碼", "semantic_type": "text"}, + {"api_key": "USER_NAME", "ui_label": "操作者", "export_header": "使用者帳號", "semantic_type": "text"}, + {"api_key": "COMMENTS", "ui_label": "備註", "export_header": "備註", "semantic_type": 
"text"} + ], + "export": [ + {"api_key": "RESOURCENAME", "ui_label": "設備名稱", "export_header": "設備名稱", "semantic_type": "text"}, + {"api_key": "JOBID", "ui_label": "JOBID", "export_header": "工單ID", "semantic_type": "identifier"}, + {"api_key": "JOB_FINAL_STATUS", "ui_label": "工單最終狀態", "export_header": "工單最終狀態", "semantic_type": "status"}, + {"api_key": "JOBMODELNAME", "ui_label": "類型", "export_header": "工單類型", "semantic_type": "text"}, + {"api_key": "JOBORDERNAME", "ui_label": "工單序號", "export_header": "工單序號", "semantic_type": "identifier"}, + {"api_key": "JOB_CREATEDATE", "ui_label": "建立時間", "export_header": "工單建立時間", "semantic_type": "datetime"}, + {"api_key": "JOB_COMPLETEDATE", "ui_label": "完成時間", "export_header": "工單完成時間", "semantic_type": "datetime"}, + {"api_key": "JOB_CAUSECODENAME", "ui_label": "故障碼", "export_header": "工單故障碼", "semantic_type": "text"}, + {"api_key": "JOB_REPAIRCODENAME", "ui_label": "維修碼", "export_header": "工單維修碼", "semantic_type": "text"}, + {"api_key": "JOB_SYMPTOMCODENAME", "ui_label": "症狀碼", "export_header": "工單症狀碼", "semantic_type": "text"}, + {"api_key": "TXNDATE", "ui_label": "交易時間", "export_header": "交易時間", "semantic_type": "datetime"}, + {"api_key": "FROMJOBSTATUS", "ui_label": "原狀態", "export_header": "原狀態", "semantic_type": "status"}, + {"api_key": "TXN_JOBSTATUS", "ui_label": "新狀態", "export_header": "新狀態", "semantic_type": "status"}, + {"api_key": "STAGENAME", "ui_label": "階段", "export_header": "階段", "semantic_type": "text"}, + {"api_key": "TXN_CAUSECODENAME", "ui_label": "交易故障碼", "export_header": "交易故障碼", "semantic_type": "text"}, + {"api_key": "TXN_REPAIRCODENAME", "ui_label": "交易維修碼", "export_header": "交易維修碼", "semantic_type": "text"}, + {"api_key": "TXN_SYMPTOMCODENAME", "ui_label": "交易症狀碼", "export_header": "交易症狀碼", "semantic_type": "text"}, + {"api_key": "USER_NAME", "ui_label": "使用者帳號", "export_header": "使用者帳號", "semantic_type": "text"}, + {"api_key": "EMP_NAME", "ui_label": "員工姓名", "export_header": "員工姓名", "semantic_type": 
"text"}, + {"api_key": "COMMENTS", "ui_label": "備註", "export_header": "備註", "semantic_type": "text"} + ] + }, + "resource_history": { + "detail_table": [ + {"api_key": "workcenter", "ui_label": "站點", "export_header": "站點", "semantic_type": "category"}, + {"api_key": "family", "ui_label": "型號", "export_header": "型號", "semantic_type": "category"}, + {"api_key": "resource", "ui_label": "機台", "export_header": "機台", "semantic_type": "text"}, + {"api_key": "ou_pct", "ui_label": "OU%", "export_header": "OU%", "semantic_type": "percent"}, + {"api_key": "availability_pct", "ui_label": "Availability%", "export_header": "Availability%", "semantic_type": "percent"}, + {"api_key": "prd_hours", "ui_label": "PRD(h)", "export_header": "PRD(h)", "semantic_type": "hours"}, + {"api_key": "prd_pct", "ui_label": "PRD(%)", "export_header": "PRD(%)", "semantic_type": "percent"}, + {"api_key": "sby_hours", "ui_label": "SBY(h)", "export_header": "SBY(h)", "semantic_type": "hours"}, + {"api_key": "sby_pct", "ui_label": "SBY(%)", "export_header": "SBY(%)", "semantic_type": "percent"}, + {"api_key": "udt_hours", "ui_label": "UDT(h)", "export_header": "UDT(h)", "semantic_type": "hours"}, + {"api_key": "udt_pct", "ui_label": "UDT(%)", "export_header": "UDT(%)", "semantic_type": "percent"}, + {"api_key": "sdt_hours", "ui_label": "SDT(h)", "export_header": "SDT(h)", "semantic_type": "hours"}, + {"api_key": "sdt_pct", "ui_label": "SDT(%)", "export_header": "SDT(%)", "semantic_type": "percent"}, + {"api_key": "egt_hours", "ui_label": "EGT(h)", "export_header": "EGT(h)", "semantic_type": "hours"}, + {"api_key": "egt_pct", "ui_label": "EGT(%)", "export_header": "EGT(%)", "semantic_type": "percent"}, + {"api_key": "nst_hours", "ui_label": "NST(h)", "export_header": "NST(h)", "semantic_type": "hours"}, + {"api_key": "nst_pct", "ui_label": "NST(%)", "export_header": "NST(%)", "semantic_type": "percent"} + ], + "kpi": [ + {"api_key": "ou_pct", "ui_label": "OU%", "export_header": "OU%", "semantic_type": 
"percent"}, + {"api_key": "availability_pct", "ui_label": "AVAIL%", "export_header": "Availability%", "semantic_type": "percent"}, + {"api_key": "machine_count", "ui_label": "機台數", "export_header": "機台數", "semantic_type": "count"} + ], + "export": [ + {"api_key": "workcenter", "ui_label": "站點", "export_header": "站點", "semantic_type": "category"}, + {"api_key": "family", "ui_label": "型號", "export_header": "型號", "semantic_type": "category"}, + {"api_key": "resource", "ui_label": "機台", "export_header": "機台", "semantic_type": "text"}, + {"api_key": "ou_pct", "ui_label": "OU%", "export_header": "OU%", "semantic_type": "percent"}, + {"api_key": "availability_pct", "ui_label": "Availability%", "export_header": "Availability%", "semantic_type": "percent"}, + {"api_key": "prd_hours", "ui_label": "PRD(h)", "export_header": "PRD(h)", "semantic_type": "hours"}, + {"api_key": "prd_pct", "ui_label": "PRD(%)", "export_header": "PRD(%)", "semantic_type": "percent"}, + {"api_key": "sby_hours", "ui_label": "SBY(h)", "export_header": "SBY(h)", "semantic_type": "hours"}, + {"api_key": "sby_pct", "ui_label": "SBY(%)", "export_header": "SBY(%)", "semantic_type": "percent"}, + {"api_key": "udt_hours", "ui_label": "UDT(h)", "export_header": "UDT(h)", "semantic_type": "hours"}, + {"api_key": "udt_pct", "ui_label": "UDT(%)", "export_header": "UDT(%)", "semantic_type": "percent"}, + {"api_key": "sdt_hours", "ui_label": "SDT(h)", "export_header": "SDT(h)", "semantic_type": "hours"}, + {"api_key": "sdt_pct", "ui_label": "SDT(%)", "export_header": "SDT(%)", "semantic_type": "percent"}, + {"api_key": "egt_hours", "ui_label": "EGT(h)", "export_header": "EGT(h)", "semantic_type": "hours"}, + {"api_key": "egt_pct", "ui_label": "EGT(%)", "export_header": "EGT(%)", "semantic_type": "percent"}, + {"api_key": "nst_hours", "ui_label": "NST(h)", "export_header": "NST(h)", "semantic_type": "hours"}, + {"api_key": "nst_pct", "ui_label": "NST(%)", "export_header": "NST(%)", "semantic_type": "percent"} + ] + 
}, + "tables": { + "result_table": [ + {"api_key": "column_name", "ui_label": "欄位", "export_header": "欄位", "semantic_type": "text"}, + {"api_key": "value", "ui_label": "值", "export_header": "值", "semantic_type": "text"} + ] + }, + "excel_query": { + "result_table": [ + {"api_key": "search_column", "ui_label": "查詢欄位", "export_header": "查詢欄位", "semantic_type": "text"}, + {"api_key": "result_column", "ui_label": "回傳欄位", "export_header": "回傳欄位", "semantic_type": "text"} + ] + }, + "resource_status": { + "matrix_summary": [ + {"api_key": "workcenter_group", "ui_label": "站點群組", "export_header": "站點群組", "semantic_type": "category"}, + {"api_key": "resourcefamily", "ui_label": "設備群", "export_header": "設備群", "semantic_type": "category"}, + {"api_key": "equipment_status", "ui_label": "狀態", "export_header": "狀態", "semantic_type": "status"} + ] + } +} diff --git a/src/mes_dashboard/__init__.py b/src/mes_dashboard/__init__.py new file mode 100644 index 0000000..48c9f14 --- /dev/null +++ b/src/mes_dashboard/__init__.py @@ -0,0 +1,5 @@ +"""MES Dashboard package.""" + +from .app import create_app + +__all__ = ["create_app"] diff --git a/src/mes_dashboard/__main__.py b/src/mes_dashboard/__main__.py new file mode 100644 index 0000000..5c68323 --- /dev/null +++ b/src/mes_dashboard/__main__.py @@ -0,0 +1,12 @@ +"""Development entry point for MES Dashboard.""" + +from mes_dashboard.app import create_app + + +def main() -> None: + app = create_app() + app.run(debug=True, use_reloader=True, host="0.0.0.0", port=8080) + + +if __name__ == '__main__': + main() diff --git a/src/mes_dashboard/app.py b/src/mes_dashboard/app.py new file mode 100644 index 0000000..1433684 --- /dev/null +++ b/src/mes_dashboard/app.py @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- +"""Flask application factory for MES Dashboard.""" + +from __future__ import annotations + +import logging +import os +import sys + +from flask import Flask, jsonify, redirect, render_template, request, session, url_for + +from 
mes_dashboard.config.tables import TABLES_CONFIG +from mes_dashboard.config.settings import get_config +from mes_dashboard.core.cache import create_default_cache_backend +from mes_dashboard.core.database import get_table_data, get_table_columns, get_engine, init_db, start_keepalive +from mes_dashboard.core.permissions import is_admin_logged_in, _is_ajax_request +from mes_dashboard.routes import register_routes +from mes_dashboard.routes.auth_routes import auth_bp +from mes_dashboard.routes.admin_routes import admin_bp +from mes_dashboard.routes.health_routes import health_bp +from mes_dashboard.services.page_registry import get_page_status, is_api_public +from mes_dashboard.core.cache_updater import start_cache_updater, stop_cache_updater +from mes_dashboard.services.realtime_equipment_cache import init_realtime_equipment_cache + + +def _configure_logging(app: Flask) -> None: + """Configure application logging. + + Sets up logging to stderr (captured by Gunicorn's --capture-output). + Additionally sets up SQLite log store for admin dashboard queries. 
+ + Log levels: + - DEBUG: Query completion times, connection events + - WARNING: Slow queries (>1s) + - ERROR: Connection failures, query errors with ORA codes + """ + # Configure the mes_dashboard logger + logger = logging.getLogger('mes_dashboard') + logger.setLevel(logging.DEBUG if app.debug else logging.INFO) + + # Only add handler if not already configured (avoid duplicates) + if not logger.handlers: + # Console handler (stderr - captured by Gunicorn) + handler = logging.StreamHandler(sys.stderr) + handler.setLevel(logging.DEBUG) + formatter = logging.Formatter( + '%(asctime)s [%(levelname)s] %(name)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + handler.setFormatter(formatter) + logger.addHandler(handler) + + # SQLite log handler for admin dashboard (INFO level and above) + try: + from mes_dashboard.core.log_store import get_sqlite_log_handler, LOG_STORE_ENABLED + if LOG_STORE_ENABLED: + sqlite_handler = get_sqlite_log_handler() + sqlite_handler.setLevel(logging.INFO) + logger.addHandler(sqlite_handler) + logger.debug("SQLite log handler registered") + except Exception as e: + logger.warning(f"Failed to initialize SQLite log handler: {e}") + + # Prevent propagation to root logger (avoid duplicate logs) + logger.propagate = False + + +def create_app(config_name: str | None = None) -> Flask: + """Create and configure the Flask app instance.""" + app = Flask(__name__, template_folder="templates") + + config_class = get_config(config_name) + app.config.from_object(config_class) + + # Session configuration + app.secret_key = os.environ.get("SECRET_KEY", "dev-secret-key-change-in-prod") + + # Session cookie security settings + # SECURE: Only send cookie over HTTPS (disable for local development) + app.config['SESSION_COOKIE_SECURE'] = os.environ.get("FLASK_ENV") == "production" + # HTTPONLY: Prevent JavaScript access to session cookie (XSS protection) + app.config['SESSION_COOKIE_HTTPONLY'] = True + # SAMESITE: Prevent CSRF by restricting cross-site cookie 
sending + app.config['SESSION_COOKIE_SAMESITE'] = 'Lax' + + # Configure logging first + _configure_logging(app) + + # Route-level cache backend (L1 memory + optional L2 Redis) + app.extensions["cache"] = create_default_cache_backend() + + # Initialize database teardown and pool + init_db(app) + with app.app_context(): + get_engine() + start_keepalive() # Keep database connections alive + start_cache_updater() # Start Redis cache updater + init_realtime_equipment_cache(app) # Start realtime equipment status cache + + # Register API routes + register_routes(app) + + # Register auth, admin, and health routes + app.register_blueprint(auth_bp) + app.register_blueprint(admin_bp) + app.register_blueprint(health_bp) + + # ======================================================== + # Permission Middleware + # ======================================================== + + @app.before_request + def check_page_access(): + """Check page access permissions before each request.""" + # Skip static files + if request.endpoint == "static": + return None + + # Health check endpoint - no auth required + if request.path == "/health": + return None + + # API endpoints check + if request.path.startswith("/api/"): + if is_api_public(): + return None + if not is_admin_logged_in(): + from mes_dashboard.core.response import unauthorized_error + return unauthorized_error() + return None + + # Skip auth-related pages (login/logout) + if request.path.startswith("/admin/login") or request.path.startswith("/admin/logout"): + return None + + # Admin pages require login + if request.path.startswith("/admin/"): + if not is_admin_logged_in(): + # For AJAX requests, return JSON error instead of redirect + if _is_ajax_request(): + return jsonify({"error": "請先登入管理員帳號", "login_required": True}), 401 + return redirect(url_for("auth.login", next=request.url)) + return None + + # Check page status for registered pages only + # Unregistered pages pass through to Flask routing (may return 404) + page_status = 
get_page_status(request.path) + if page_status == "dev" and not is_admin_logged_in(): + return render_template("403.html"), 403 + + return None + + # ======================================================== + # Template Context Processor + # ======================================================== + + @app.context_processor + def inject_admin(): + """Inject admin info into all templates.""" + admin = is_admin_logged_in() + + def can_view_page(route: str) -> bool: + """Check if current user can view a page.""" + status = get_page_status(route) + # Unregistered pages (None) are viewable + if status is None: + return True + # Released pages are viewable by all + if status == "released": + return True + # Dev pages only viewable by admin + return admin + + def frontend_asset(filename: str) -> str | None: + """Resolve built frontend asset from static/dist if available.""" + if not filename: + return None + dist_path = os.path.join(app.static_folder or "", "dist", filename) + if os.path.exists(dist_path): + return url_for("static", filename=f"dist/{filename}") + return None + + return { + "is_admin": admin, + "admin_user": session.get("admin"), + "can_view_page": can_view_page, + "frontend_asset": frontend_asset, + } + + # ======================================================== + # Page Routes + # ======================================================== + + @app.route('/') + def portal_index(): + """Portal home with tabs.""" + return render_template('portal.html') + + @app.route('/tables') + def tables_page(): + """Table viewer page.""" + return render_template('index.html', tables_config=TABLES_CONFIG) + + @app.route('/wip-overview') + def wip_overview_page(): + """WIP Overview Dashboard - for executives.""" + return render_template('wip_overview.html') + + @app.route('/wip-detail') + def wip_detail_page(): + """WIP Detail Dashboard - for production lines.""" + return render_template('wip_detail.html') + + @app.route('/resource') + def resource_page(): + """Resource 
status report page.""" + return render_template('resource_status.html') + + @app.route('/excel-query') + def excel_query_page(): + """Excel batch query tool page.""" + return render_template('excel_query.html') + + @app.route('/resource-history') + def resource_history_page(): + """Resource history analysis page.""" + return render_template('resource_history.html') + + # ======================================================== + # Table Query APIs (for table_data_viewer) + # ======================================================== + + @app.route('/api/query_table', methods=['POST']) + def query_table(): + """API: query table data with optional column filters.""" + data = request.get_json() + table_name = data.get('table_name') + limit = data.get('limit', 1000) + time_field = data.get('time_field') + filters = data.get('filters') + + if not table_name: + return jsonify({'error': '請指定表名'}), 400 + + result = get_table_data(table_name, limit, time_field, filters) + return jsonify(result) + + @app.route('/api/get_table_columns', methods=['POST']) + def api_get_table_columns(): + """API: get column names for a table.""" + data = request.get_json() + table_name = data.get('table_name') + + if not table_name: + return jsonify({'error': '請指定表名'}), 400 + + columns = get_table_columns(table_name) + return jsonify({'columns': columns}) + + @app.route('/api/get_table_info', methods=['GET']) + def get_table_info(): + """API: get tables config.""" + return jsonify(TABLES_CONFIG) + + # ======================================================== + # Global Error Handlers + # ======================================================== + _register_error_handlers(app) + + return app + + +def _register_error_handlers(app: Flask) -> None: + """Register global error handlers with standardized response format.""" + from mes_dashboard.core.response import ( + unauthorized_error, + forbidden_error, + not_found_error, + internal_error, + pool_exhausted_error, + error_response, + INTERNAL_ERROR + ) 
+ from mes_dashboard.core.database import ( + DatabasePoolExhaustedError, + DatabaseCircuitOpenError, + ) + from mes_dashboard.core.response import circuit_breaker_error + + @app.errorhandler(401) + def handle_unauthorized(e): + """Handle 401 Unauthorized errors.""" + return unauthorized_error() + + @app.errorhandler(403) + def handle_forbidden(e): + """Handle 403 Forbidden errors.""" + return forbidden_error() + + @app.errorhandler(404) + def handle_not_found(e): + """Handle 404 Not Found errors.""" + # For API routes, return JSON; for pages, render template + if request.path.startswith('/api/'): + return not_found_error() + return render_template('404.html'), 404 + + def _is_api_request() -> bool: + """Check if the current request is an API request.""" + return (request.path.startswith('/api/') or + '/api/' in request.path or + request.accept_mimetypes.best == 'application/json') + + @app.errorhandler(500) + def handle_internal_error(e): + """Handle 500 Internal Server errors.""" + logger = logging.getLogger('mes_dashboard') + logger.error(f"Internal server error: {e}", exc_info=True) + if _is_api_request(): + return internal_error(str(e) if app.debug else None) + # Fallback to HTML template for non-API requests. 
+ try: + return render_template('500.html'), 500 + except Exception: + return internal_error(str(e) if app.debug else None) + + @app.errorhandler(DatabasePoolExhaustedError) + def handle_pool_exhausted(e: DatabasePoolExhaustedError): + """Handle DB pool exhaustion with degraded response contract.""" + retry_after = max(int(getattr(e, "retry_after_seconds", 5)), 1) + return pool_exhausted_error( + str(e) if app.debug else None, + retry_after_seconds=retry_after, + ) + + @app.errorhandler(DatabaseCircuitOpenError) + def handle_circuit_open(e: DatabaseCircuitOpenError): + """Handle circuit-open condition with degraded response contract.""" + retry_after = max(int(getattr(e, "retry_after_seconds", 30)), 1) + return circuit_breaker_error( + str(e) if app.debug else None, + retry_after_seconds=retry_after, + ) + + @app.errorhandler(Exception) + def handle_exception(e): + """Handle uncaught exceptions.""" + logger = logging.getLogger('mes_dashboard') + logger.error(f"Uncaught exception: {e}", exc_info=True) + if _is_api_request(): + return error_response( + INTERNAL_ERROR, + "伺服器發生未預期的錯誤", + str(e) if app.debug else None, + status_code=500 + ) + # Fallback to JSON if template not found + try: + return render_template('500.html'), 500 + except Exception: + return error_response( + INTERNAL_ERROR, + "伺服器發生未預期的錯誤", + str(e) if app.debug else None, + status_code=500 + ) diff --git a/src/mes_dashboard/config/__init__.py b/src/mes_dashboard/config/__init__.py new file mode 100644 index 0000000..0927541 --- /dev/null +++ b/src/mes_dashboard/config/__init__.py @@ -0,0 +1,47 @@ +"""Configuration modules for MES Dashboard.""" + +from .database import DB_CONFIG, CONNECTION_STRING +from .tables import TABLES_CONFIG +from .constants import ( + EXCLUDED_LOCATIONS, + EXCLUDED_ASSET_STATUSES, + EQUIPMENT_TYPE_FILTER, + CACHE_TTL_DEFAULT, + CACHE_TTL_FILTER_OPTIONS, + CACHE_TTL_PIVOT_COLUMNS, + CACHE_TTL_KPI, + CACHE_TTL_TREND, + DEFAULT_DAYS_BACK, + DEFAULT_WIP_DAYS_BACK, + 
DEFAULT_PAGE_SIZE, + MAX_PAGE_SIZE, + STATUS_DISPLAY_NAMES, + WIP_EXCLUDED_STATUS, +) +from .workcenter_groups import WORKCENTER_GROUPS, get_workcenter_group +from .field_contracts import get_page_contract, get_export_headers, get_export_api_keys + +__all__ = [ + "DB_CONFIG", + "CONNECTION_STRING", + "TABLES_CONFIG", + "EXCLUDED_LOCATIONS", + "EXCLUDED_ASSET_STATUSES", + "EQUIPMENT_TYPE_FILTER", + "CACHE_TTL_DEFAULT", + "CACHE_TTL_FILTER_OPTIONS", + "CACHE_TTL_PIVOT_COLUMNS", + "CACHE_TTL_KPI", + "CACHE_TTL_TREND", + "DEFAULT_DAYS_BACK", + "DEFAULT_WIP_DAYS_BACK", + "DEFAULT_PAGE_SIZE", + "MAX_PAGE_SIZE", + "STATUS_DISPLAY_NAMES", + "WIP_EXCLUDED_STATUS", + "WORKCENTER_GROUPS", + "get_workcenter_group", + "get_page_contract", + "get_export_headers", + "get_export_api_keys", +] diff --git a/src/mes_dashboard/config/constants.py b/src/mes_dashboard/config/constants.py new file mode 100644 index 0000000..99ef155 --- /dev/null +++ b/src/mes_dashboard/config/constants.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +"""Constants and configuration values for MES Dashboard. + +Centralized location for all constant values used across the application. 
+""" + +# ============================================================ +# Location / Area Exclusions +# ============================================================ + +# Locations to exclude from equipment queries +EXCLUDED_LOCATIONS = [ + 'ATEC', + 'F區', + 'F區焊接站', + '報廢', + '實驗室', + '山東', + '成型站_F區', + '焊接F區', + '無錫', + '熒茂', +] + +# Asset statuses to exclude +EXCLUDED_ASSET_STATUSES = ['Disapproved'] + + +# ============================================================ +# Equipment Type Filters +# ============================================================ + +# SQL condition for filtering valid equipment types +EQUIPMENT_TYPE_FILTER = """ +((OBJECTCATEGORY = 'ASSEMBLY' AND OBJECTTYPE = 'ASSEMBLY') + OR (OBJECTCATEGORY = 'WAFERSORT' AND OBJECTTYPE = 'WAFERSORT')) +""" + +# Equipment flag filter templates +EQUIPMENT_FLAG_FILTERS = { + 'isProduction': "NVL(PJ_ISPRODUCTION, 0) = 1", + 'isKey': "NVL(PJ_ISKEY, 0) = 1", + 'isMonitor': "NVL(PJ_ISMONITOR, 0) = 1", +} + + +# ============================================================ +# Cache TTL Settings (in seconds) +# ============================================================ + +CACHE_TTL_DEFAULT = 60 # Default cache TTL: 1 minute +CACHE_TTL_FILTER_OPTIONS = 600 # Filter options: 10 minutes +CACHE_TTL_PIVOT_COLUMNS = 300 # Pivot columns: 5 minutes +CACHE_TTL_KPI = 60 # KPI data: 1 minute +CACHE_TTL_TREND = 300 # Trend data: 5 minutes + + +# ============================================================ +# Query Defaults +# ============================================================ + +DEFAULT_DAYS_BACK = 365 # Default days to look back for queries +DEFAULT_WIP_DAYS_BACK = 90 # Default days for WIP queries +DEFAULT_PAGE_SIZE = 100 # Default pagination size +MAX_PAGE_SIZE = 500 # Maximum allowed page size + + +# ============================================================ +# Status Definitions +# ============================================================ + +# Equipment status codes and their display names 
+STATUS_DISPLAY_NAMES = { + 'PRD': '生產中', + 'SBY': '待機', + 'UDT': '非計畫停機', + 'SDT': '計畫停機', + 'EGT': '工程時間', + 'NST': '未排單', +} + +# WIP status codes to exclude (completed/scrapped) +WIP_EXCLUDED_STATUS = (8, 128) + + +# ============================================================ +# Redis Key Prefixes - Realtime Equipment Status +# ============================================================ + +EQUIPMENT_STATUS_DATA_KEY = "equipment_status:data" +EQUIPMENT_STATUS_INDEX_KEY = "equipment_status:index" +EQUIPMENT_STATUS_META_UPDATED_KEY = "equipment_status:meta:updated" +EQUIPMENT_STATUS_META_COUNT_KEY = "equipment_status:meta:count" + + +# ============================================================ +# Status Category Classification +# ============================================================ + +# Map equipment status to category for grouping/display +STATUS_CATEGORY_MAP = { + 'PRD': 'PRODUCTIVE', + 'SBY': 'STANDBY', + 'UDT': 'DOWN', + 'SDT': 'DOWN', + 'EGT': 'ENGINEERING', + 'NST': 'NOT_SCHEDULED', + 'SCRAP': 'INACTIVE', + '設備-LOST': 'INACTIVE', + '設備-RUN': 'PRODUCTIVE', +} + +# All possible status categories +STATUS_CATEGORIES = [ + 'PRODUCTIVE', + 'STANDBY', + 'DOWN', + 'ENGINEERING', + 'NOT_SCHEDULED', + 'INACTIVE', + 'OTHER', +] diff --git a/src/mes_dashboard/config/database.py b/src/mes_dashboard/config/database.py new file mode 100644 index 0000000..eafbc4c --- /dev/null +++ b/src/mes_dashboard/config/database.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +"""Database configuration for MES Dashboard. + +Centralized database connection settings used by all modules. +Loads credentials from environment variables (.env file). 
+""" + +import os +from pathlib import Path +from urllib.parse import quote_plus + +# Load .env file if python-dotenv is available +try: + from dotenv import load_dotenv + + # Find .env file in project root + env_path = Path(__file__).resolve().parents[3] / '.env' + load_dotenv(env_path) +except ImportError: + pass # python-dotenv not installed, rely on system environment variables + +# Database connection settings from environment variables +# All values MUST be set in .env file - no hardcoded defaults for security +DB_HOST = os.getenv('DB_HOST', '') +DB_PORT = os.getenv('DB_PORT', '1521') +DB_SERVICE = os.getenv('DB_SERVICE', '') +DB_USER = os.getenv('DB_USER', '') +DB_PASSWORD = os.getenv('DB_PASSWORD', '') + +# Oracle Database connection config (for direct oracledb connections) +DB_CONFIG = { + 'user': DB_USER, + 'password': DB_PASSWORD, + 'dsn': f'{DB_HOST}:{DB_PORT}/{DB_SERVICE}' +} + +# SQLAlchemy connection string +# Note: Password is URL-encoded to handle special characters (@:/?# etc.) 
+CONNECTION_STRING = ( + f"oracle+oracledb://{DB_USER}:{quote_plus(DB_PASSWORD)}" + f"@{DB_HOST}:{DB_PORT}/?service_name={DB_SERVICE}" +) diff --git a/src/mes_dashboard/config/field_contracts.py b/src/mes_dashboard/config/field_contracts.py new file mode 100644 index 0000000..f8dd8cd --- /dev/null +++ b/src/mes_dashboard/config/field_contracts.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +"""Shared field contracts for UI/API/export mapping.""" + +from __future__ import annotations + +import json +from functools import lru_cache +from pathlib import Path +from typing import Any + +_CONTRACTS_PATH = Path(__file__).resolve().parents[3] / "shared" / "field_contracts.json" + + +@lru_cache(maxsize=1) +def _load_contracts() -> dict[str, Any]: + with _CONTRACTS_PATH.open("r", encoding="utf-8") as fp: + payload = json.load(fp) + return payload if isinstance(payload, dict) else {} + + +def get_page_contract(page: str, section: str) -> list[dict[str, Any]]: + """Return contract list for a page section. + + Args: + page: Page key, e.g. ``job_query``. + section: Contract section key, e.g. ``export``. 
+ """ + page_contract = _load_contracts().get(page, {}) + fields = page_contract.get(section, []) if isinstance(page_contract, dict) else [] + return fields if isinstance(fields, list) else [] + + +def get_export_headers(page: str) -> list[str]: + """Return export headers in canonical order for a page.""" + return [field.get("export_header", "") for field in get_page_contract(page, "export") if field.get("export_header")] + + +def get_export_api_keys(page: str) -> list[str]: + """Return export API keys in canonical order for a page.""" + return [field.get("api_key", "") for field in get_page_contract(page, "export") if field.get("api_key")] diff --git a/src/mes_dashboard/config/settings.py b/src/mes_dashboard/config/settings.py new file mode 100644 index 0000000..c9c283a --- /dev/null +++ b/src/mes_dashboard/config/settings.py @@ -0,0 +1,115 @@ +"""Application configuration classes for MES Dashboard.""" + +from __future__ import annotations + +import os +from typing import Type + + +def _int_env(name: str, default: int) -> int: + try: + return int(os.getenv(name, str(default))) + except (TypeError, ValueError): + return default + + +def _float_env(name: str, default: float) -> float: + try: + return float(os.getenv(name, str(default))) + except (TypeError, ValueError): + return default + + +class Config: + """Base configuration.""" + + DEBUG = False + TESTING = False + ENV = "production" + + # Database pool defaults (can be overridden by env) + DB_POOL_SIZE = _int_env("DB_POOL_SIZE", 5) + DB_MAX_OVERFLOW = _int_env("DB_MAX_OVERFLOW", 10) + DB_POOL_TIMEOUT = _int_env("DB_POOL_TIMEOUT", 30) + DB_POOL_RECYCLE = _int_env("DB_POOL_RECYCLE", 1800) + DB_TCP_CONNECT_TIMEOUT = _int_env("DB_TCP_CONNECT_TIMEOUT", 10) + DB_CONNECT_RETRY_COUNT = _int_env("DB_CONNECT_RETRY_COUNT", 1) + DB_CONNECT_RETRY_DELAY = _float_env("DB_CONNECT_RETRY_DELAY", 1.0) + DB_CALL_TIMEOUT_MS = _int_env("DB_CALL_TIMEOUT_MS", 55000) + + # Auth configuration - MUST be set in .env file + LDAP_API_URL = 
os.getenv("LDAP_API_URL", "") + ADMIN_EMAILS = os.getenv("ADMIN_EMAILS", "") + SECRET_KEY = os.getenv("SECRET_KEY", "dev-secret-key-change-in-prod") + + # Session configuration + PERMANENT_SESSION_LIFETIME = _int_env("SESSION_LIFETIME", 28800) # 8 hours + + # Realtime Equipment Status Cache + REALTIME_EQUIPMENT_CACHE_ENABLED = os.getenv( + "REALTIME_EQUIPMENT_CACHE_ENABLED", "true" + ).lower() in ("true", "1", "yes") + EQUIPMENT_STATUS_SYNC_INTERVAL = _int_env("EQUIPMENT_STATUS_SYNC_INTERVAL", 300) # 5 minutes + + # Workcenter Mapping Cache + WORKCENTER_MAPPING_SYNC_INTERVAL = _int_env("WORKCENTER_MAPPING_SYNC_INTERVAL", 86400) # 24 hours + + +class DevelopmentConfig(Config): + """Development configuration.""" + + DEBUG = True + ENV = "development" + + # Smaller pool to ensure keep-alive covers all connections + DB_POOL_SIZE = _int_env("DB_POOL_SIZE", 2) + DB_MAX_OVERFLOW = _int_env("DB_MAX_OVERFLOW", 3) + DB_POOL_TIMEOUT = _int_env("DB_POOL_TIMEOUT", 30) + DB_POOL_RECYCLE = _int_env("DB_POOL_RECYCLE", 1800) + DB_TCP_CONNECT_TIMEOUT = _int_env("DB_TCP_CONNECT_TIMEOUT", 10) + DB_CONNECT_RETRY_COUNT = _int_env("DB_CONNECT_RETRY_COUNT", 1) + DB_CONNECT_RETRY_DELAY = _float_env("DB_CONNECT_RETRY_DELAY", 1.0) + DB_CALL_TIMEOUT_MS = _int_env("DB_CALL_TIMEOUT_MS", 55000) + + +class ProductionConfig(Config): + """Production configuration.""" + + DEBUG = False + ENV = "production" + + DB_POOL_SIZE = _int_env("DB_POOL_SIZE", 10) + DB_MAX_OVERFLOW = _int_env("DB_MAX_OVERFLOW", 20) + DB_POOL_TIMEOUT = _int_env("DB_POOL_TIMEOUT", 30) + DB_POOL_RECYCLE = _int_env("DB_POOL_RECYCLE", 1800) + DB_TCP_CONNECT_TIMEOUT = _int_env("DB_TCP_CONNECT_TIMEOUT", 10) + DB_CONNECT_RETRY_COUNT = _int_env("DB_CONNECT_RETRY_COUNT", 1) + DB_CONNECT_RETRY_DELAY = _float_env("DB_CONNECT_RETRY_DELAY", 1.0) + DB_CALL_TIMEOUT_MS = _int_env("DB_CALL_TIMEOUT_MS", 55000) + + +class TestingConfig(Config): + """Testing configuration.""" + + DEBUG = True + TESTING = True + ENV = "testing" + + DB_POOL_SIZE = 1 
+ DB_MAX_OVERFLOW = 0 + DB_POOL_TIMEOUT = 5 + DB_POOL_RECYCLE = 300 + DB_TCP_CONNECT_TIMEOUT = 5 + DB_CONNECT_RETRY_COUNT = 0 + DB_CONNECT_RETRY_DELAY = 0.0 + DB_CALL_TIMEOUT_MS = 5000 + + +def get_config(env: str | None = None) -> Type[Config]: + """Select config class based on environment name.""" + value = (env or os.getenv("FLASK_ENV", "development")).lower() + if value in {"prod", "production"}: + return ProductionConfig + if value in {"test", "testing"}: + return TestingConfig + return DevelopmentConfig diff --git a/src/mes_dashboard/config/tables.py b/src/mes_dashboard/config/tables.py new file mode 100644 index 0000000..88cfc3a --- /dev/null +++ b/src/mes_dashboard/config/tables.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +"""Table configuration metadata for MES Dashboard. + +Row counts updated from data/table_schema_info.json (2026-01-29) +""" + +# 19 core tables config (with categories) +TABLES_CONFIG = { + '即時數據表 (View)': [ + { + 'name': 'DWH.DW_MES_LOT_V', + 'display_name': 'WIP 即時批次 (DWH.DW_MES_LOT_V)', + 'row_count': 9468, # 動態變化,約 9000-12000 + 'time_field': 'SYS_DATE', + 'description': 'MES 即時 WIP View - 每 5 分鐘更新,包含完整批次狀態、工站、設備、Hold 原因等 70 欄位' + }, + { + 'name': 'DWH.DW_MES_EQUIPMENTSTATUS_WIP_V', + 'display_name': '設備狀態+WIP 視圖 (DWH.DW_MES_EQUIPMENTSTATUS_WIP_V)', + 'row_count': 2631, + 'time_field': None, + 'description': '設備即時狀態視圖 - 透過 DB Link 取得即時設備狀態、維修工單、資產狀態等 32 欄位。用於 realtime-equipment-cache(5 分鐘同步)' + }, + { + 'name': 'DWH.DW_MES_SPEC_WORKCENTER_V', + 'display_name': '規格工站對照 (DWH.DW_MES_SPEC_WORKCENTER_V)', + 'row_count': 230, + 'time_field': None, + 'description': '工站分組對照視圖 - WORK_CENTER 到 WORK_CENTER_GROUP 映射,含 WORKCENTERSEQUENCE_GROUP 排序。用於 filter-cache 的工站分組(每日同步)' + } + ], + '現況快照表': [ + { + 'name': 'DWH.DW_MES_WIP', + 'display_name': 'WIP (DWH.DW_MES_WIP)', + 'row_count': 79058085, + 'time_field': 'TXNDATE', + 'description': '在製品現況表(含歷史累積)- 當前 WIP 狀態/數量' + }, + { + 'name': 'DWH.DW_MES_RESOURCE', + 'display_name': 'RESOURCE 
(DWH.DW_MES_RESOURCE)', + 'row_count': 91329, + 'time_field': None, + 'description': '資源表 - 設備/載具等資源基本資料(OBJECTCATEGORY=ASSEMBLY 時,RESOURCENAME 為設備編號)' + }, + { + 'name': 'DWH.DW_MES_CONTAINER', + 'display_name': 'CONTAINER (DWH.DW_MES_CONTAINER)', + 'row_count': 5218406, + 'time_field': 'LASTMOVEOUTTIMESTAMP', + 'description': '容器/批次主檔 - 目前在製容器狀態、數量與流程資訊' + }, + { + 'name': 'DWH.DW_MES_JOB', + 'display_name': 'JOB (DWH.DW_MES_JOB)', + 'row_count': 1248622, + 'time_field': 'CREATEDATE', + 'description': '設備維修工單表 - 維修工單的當前狀態與流程' + } + ], + '歷史累積表': [ + { + 'name': 'DWH.DW_MES_RESOURCESTATUS', + 'display_name': 'RESOURCESTATUS (DWH.DW_MES_RESOURCESTATUS)', + 'row_count': 65742614, + 'time_field': 'OLDLASTSTATUSCHANGEDATE', + 'description': '設備狀態變更歷史表 - 狀態切換與原因' + }, + { + 'name': 'DWH.DW_MES_RESOURCESTATUS_SHIFT', + 'display_name': 'RESOURCESTATUS_SHIFT (DWH.DW_MES_RESOURCESTATUS_SHIFT)', + 'row_count': 74820134, + 'time_field': 'DATADATE', + 'description': '設備狀態班次彙總表 - 班次級狀態/工時' + }, + { + 'name': 'DWH.DW_MES_LOTWIPHISTORY', + 'display_name': 'LOTWIPHISTORY (DWH.DW_MES_LOTWIPHISTORY)', + 'row_count': 53454213, + 'time_field': 'TRACKINTIMESTAMP', + 'description': '在製流轉歷史表 - 批次進出站與流程軌跡' + }, + { + 'name': 'DWH.DW_MES_LOTWIPDATAHISTORY', + 'display_name': 'LOTWIPDATAHISTORY (DWH.DW_MES_LOTWIPDATAHISTORY)', + 'row_count': 77960216, + 'time_field': 'TXNTIMESTAMP', + 'description': '在製數據採集歷史表 - 製程量測/參數紀錄' + }, + { + 'name': 'DWH.DW_MES_HM_LOTMOVEOUT', + 'display_name': 'HM_LOTMOVEOUT (DWH.DW_MES_HM_LOTMOVEOUT)', + 'row_count': 48645692, + 'time_field': 'TXNDATE', + 'description': '批次出站事件歷史表 - 出站/移出交易' + }, + { + 'name': 'DWH.DW_MES_JOBTXNHISTORY', + 'display_name': 'JOBTXNHISTORY (DWH.DW_MES_JOBTXNHISTORY)', + 'row_count': 9554723, + 'time_field': 'TXNDATE', + 'description': '維修工單交易歷史表 - 工單狀態變更紀錄' + }, + { + 'name': 'DWH.DW_MES_LOTREJECTHISTORY', + 'display_name': 'LOTREJECTHISTORY (DWH.DW_MES_LOTREJECTHISTORY)', + 'row_count': 15786025, + 'time_field': 'TXNDATE', + 
'description': '批次不良/報廢歷史表 - 不良原因與數量' + }, + { + 'name': 'DWH.DW_MES_LOTMATERIALSHISTORY', + 'display_name': 'LOTMATERIALSHISTORY (DWH.DW_MES_LOTMATERIALSHISTORY)', + 'row_count': 17829931, + 'time_field': 'TXNDATE', + 'description': '批次物料消耗歷史表 - 用料與批次關聯' + }, + { + 'name': 'DWH.DW_MES_HOLDRELEASEHISTORY', + 'display_name': 'HOLDRELEASEHISTORY (DWH.DW_MES_HOLDRELEASEHISTORY)', + 'row_count': 310737, + 'time_field': 'HOLDTXNDATE', + 'description': 'Hold/Release 歷史表 - 批次停工與解除紀錄' + }, + { + 'name': 'DWH.DW_MES_MAINTENANCE', + 'display_name': 'MAINTENANCE (DWH.DW_MES_MAINTENANCE)', + 'row_count': 52060026, + 'time_field': 'TXNDATE', + 'description': '設備保養/維護紀錄表 - 保養計畫與點檢數據' + } + ], + '輔助表': [ + { + 'name': 'DWH.DW_MES_PARTREQUESTORDER', + 'display_name': 'PARTREQUESTORDER (DWH.DW_MES_PARTREQUESTORDER)', + 'row_count': 61396, + 'time_field': None, + 'description': '維修用料請求表 - 維修/設備零件請領' + }, + { + 'name': 'DWH.DW_MES_PJ_COMBINEDASSYLOTS', + 'display_name': 'PJ_COMBINEDASSYLOTS (DWH.DW_MES_PJ_COMBINEDASSYLOTS)', + 'row_count': 1965425, + 'time_field': None, + 'description': '併批紀錄表 - 合批/合併批次關聯與數量資訊' + } + ] +} diff --git a/src/mes_dashboard/config/workcenter_groups.py b/src/mes_dashboard/config/workcenter_groups.py new file mode 100644 index 0000000..81af6ca --- /dev/null +++ b/src/mes_dashboard/config/workcenter_groups.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +"""Workcenter grouping configuration for MES Dashboard. + +Defines how individual workcenters are grouped and their display order. +This configuration is used across WIP reports, resource status, and dashboard. 
+""" + +from typing import Tuple, Optional + +# ============================================================ +# Workcenter Group Definitions +# ============================================================ +# Order determines display sequence (left to right in tables, top to bottom in charts) +# Patterns are matched case-insensitively +# Exclude patterns take precedence over include patterns + +WORKCENTER_GROUPS = { + '切割': { + 'order': 0, + 'patterns': ['切割'], + 'exclude': ['元件切割', 'PKG_SAW'] # 元件切割 is a separate group + }, + '焊接_DB': { + 'order': 1, + 'patterns': ['焊接_DB', '焊_DB_料', '焊_DB'] + }, + '焊接_WB': { + 'order': 2, + 'patterns': ['焊接_WB', '焊_WB_料', '焊_WB'] + }, + '焊接_DW': { + 'order': 3, + 'patterns': ['焊接_DW', '焊_DW', '焊_DW_料'] + }, + '成型': { + 'order': 4, + 'patterns': ['成型', '成型_料'] + }, + '去膠': { + 'order': 5, + 'patterns': ['去膠'] + }, + '水吹砂': { + 'order': 6, + 'patterns': ['水吹砂'] + }, + '電鍍': { + 'order': 7, + 'patterns': ['掛鍍', '滾鍍', '條鍍', '電鍍', '補鍍', 'TOTAI', 'BANDL'] + }, + '移印': { + 'order': 8, + 'patterns': ['移印'] + }, + '切彎腳': { + 'order': 9, + 'patterns': ['切彎腳'] + }, + '元件切割': { + 'order': 10, + 'patterns': ['元件切割', 'PKG_SAW'] + }, + '測試': { + 'order': 11, + 'patterns': ['TMTT', '測試'] + } +} + +# Group order for sorting (exported for frontend use) +GROUP_ORDER = {name: config['order'] for name, config in WORKCENTER_GROUPS.items()} + + +def get_workcenter_group(workcenter_name: Optional[str]) -> Tuple[Optional[str], int]: + """Map workcenter name to its group name and order. 
+ + Args: + workcenter_name: The original workcenter name from database + + Returns: + Tuple of (group_name, order) where: + - group_name: The merged group name (e.g., '焊接_DB') or None if unmatched + - order: The display order (0-11 for defined groups, 999 for unmatched) + + Examples: + >>> get_workcenter_group('焊接_DB') + ('焊接_DB', 1) + >>> get_workcenter_group('焊_DB_料') + ('焊接_DB', 1) + >>> get_workcenter_group('切割') + ('切割', 0) + >>> get_workcenter_group('元件切割') + ('元件切割', 10) + >>> get_workcenter_group('Unknown_WC') + (None, 999) + """ + if not workcenter_name: + return None, 999 + + wc_upper = workcenter_name.upper() + + for group_name, config in WORKCENTER_GROUPS.items(): + # Check exclusions first (important for '切割' vs '元件切割') + if 'exclude' in config: + excluded = False + for excl in config['exclude']: + if excl.upper() in wc_upper: + excluded = True + break + if excluded: + continue + + # Check patterns + for pattern in config['patterns']: + if pattern.upper() in wc_upper: + return group_name, config['order'] + + return None, 999 # Unmatched workcenters + + +def get_all_group_names() -> list: + """Get all group names in order. + + Returns: + List of group names sorted by their order. + """ + return sorted(WORKCENTER_GROUPS.keys(), key=lambda x: WORKCENTER_GROUPS[x]['order']) + + +def get_group_order(group_name: str) -> int: + """Get the order number for a group name. 
+ + Args: + group_name: The group name to look up + + Returns: + Order number (0-11) or 999 if not found + """ + return GROUP_ORDER.get(group_name, 999) diff --git a/src/mes_dashboard/core/__init__.py b/src/mes_dashboard/core/__init__.py new file mode 100644 index 0000000..65f76d5 --- /dev/null +++ b/src/mes_dashboard/core/__init__.py @@ -0,0 +1,39 @@ +"""Core utilities module for MES Dashboard.""" + +from .database import ( + get_db_connection, + get_engine, + get_db, + read_sql_df, + get_table_data, + get_table_columns, + init_db, +) +from .cache import cache_get, cache_set, make_cache_key, CacheBackend, NoOpCache +from .utils import ( + get_days_back, + build_filter_conditions, + build_equipment_filter_sql, + convert_datetime_fields, + format_api_response, +) + +__all__ = [ + "get_db_connection", + "get_engine", + "get_db", + "read_sql_df", + "get_table_data", + "get_table_columns", + "init_db", + "cache_get", + "cache_set", + "make_cache_key", + "CacheBackend", + "NoOpCache", + "get_days_back", + "build_filter_conditions", + "build_equipment_filter_sql", + "convert_datetime_fields", + "format_api_response", +] diff --git a/src/mes_dashboard/core/cache.py b/src/mes_dashboard/core/cache.py new file mode 100644 index 0000000..f5f5906 --- /dev/null +++ b/src/mes_dashboard/core/cache.py @@ -0,0 +1,437 @@ +# -*- coding: utf-8 -*- +"""Cache abstraction for MES Dashboard. + +Provides table-level caching for WIP data using Redis. +Falls back to Oracle direct query when Redis is unavailable. 
+""" + +from __future__ import annotations + +import io +import json +import logging +import threading +import time +from typing import Any, Optional, Protocol, Tuple + +import pandas as pd +from flask import current_app + +from mes_dashboard.config.constants import CACHE_TTL_DEFAULT +from mes_dashboard.core.redis_client import ( + get_redis_client, + get_key, + redis_available, + REDIS_ENABLED +) + +logger = logging.getLogger('mes_dashboard.cache') + + +# ============================================================ +# Process-Level DataFrame Cache (Prevents redundant JSON parsing) +# ============================================================ + +class ProcessLevelCache: + """Thread-safe process-level cache for parsed DataFrames. + + Prevents redundant JSON parsing across concurrent requests. + Uses a lock to ensure only one thread parses at a time. + """ + + def __init__(self, ttl_seconds: int = 30): + self._cache: dict[str, Tuple[pd.DataFrame, float]] = {} + self._lock = threading.Lock() + self._ttl = ttl_seconds + + def get(self, key: str) -> Optional[pd.DataFrame]: + """Get cached DataFrame if not expired.""" + with self._lock: + if key not in self._cache: + return None + df, timestamp = self._cache[key] + if time.time() - timestamp > self._ttl: + del self._cache[key] + return None + return df + + def set(self, key: str, df: pd.DataFrame) -> None: + """Cache a DataFrame with current timestamp.""" + with self._lock: + self._cache[key] = (df, time.time()) + + def invalidate(self, key: str) -> None: + """Remove a key from cache.""" + with self._lock: + self._cache.pop(key, None) + + def clear(self) -> None: + """Clear all cached data.""" + with self._lock: + self._cache.clear() + + +# Global process-level cache for WIP DataFrame (30s TTL) +_wip_df_cache = ProcessLevelCache(ttl_seconds=30) +_wip_parse_lock = threading.Lock() + +# ============================================================ +# Legacy Cache Backend Interface (for backwards compatibility) +# 
# ============================================================


class CacheBackend(Protocol):
    """Structural interface every route-cache backend must satisfy."""

    def get(self, key: str) -> Optional[Any]:
        ...

    def set(self, key: str, value: Any, ttl: int) -> None:
        ...


class NoOpCache:
    """Backend that caches nothing (the safe default)."""

    def get(self, key: str) -> Optional[Any]:
        return None

    def set(self, key: str, value: Any, ttl: int) -> None:
        return None


class MemoryTTLCache:
    """Thread-safe in-memory TTL cache backend.

    Serves as the L1 cache for route-level API responses. Expired entries
    are evicted lazily when read (never proactively).
    """

    def __init__(self) -> None:
        # key -> (value, absolute expiry timestamp)
        self._store: dict[str, tuple[Any, float]] = {}
        self._lock = threading.Lock()

    def get(self, key: str) -> Optional[Any]:
        now = time.time()
        with self._lock:
            entry = self._store.get(key)
            if entry is None:
                return None
            value, expires_at = entry
            if expires_at <= now:
                # Stale: evict on read and report a miss.
                self._store.pop(key, None)
                return None
            return value

    def set(self, key: str, value: Any, ttl: int) -> None:
        # Clamp TTL to at least one second so entries are always readable.
        deadline = time.time() + max(ttl, 1)
        with self._lock:
            self._store[key] = (value, deadline)

    def size(self) -> int:
        """Return live key count (best effort; includes not-yet-evicted expired keys)."""
        with self._lock:
            return len(self._store)


class RedisJSONCache:
    """Redis-backed cache for JSON-serializable API responses (the L2 layer).

    All Redis failures are swallowed and surfaced only via telemetry, so a
    broken Redis never breaks a request.
    """

    def __init__(self, namespace: str = "route_cache") -> None:
        self._namespace = namespace
        self._error_count = 0
        self._last_error: str | None = None
        self._last_error_at: float | None = None

    def _full_key(self, key: str) -> str:
        # Prefix with the namespace, then apply the app-wide key scheme.
        return get_key(f"{self._namespace}:{key}")

    def get(self, key: str) -> Optional[Any]:
        if not REDIS_ENABLED:
            return None

        client = get_redis_client()
        if client is None:
            return None

        try:
            payload = client.get(self._full_key(key))
            if payload is None:
                return None
            return json.loads(payload)
        except Exception as exc:
            logger.warning("Failed to read route cache from Redis: %s", exc)
            self._error_count += 1
            self._last_error = str(exc)
            self._last_error_at = time.time()
            return None

    def set(self, key: str, value: Any, ttl: int) -> None:
        if not REDIS_ENABLED:
            return

        client = get_redis_client()
        if client is None:
            return

        try:
            # default=str keeps datetimes and similar objects serializable.
            payload = json.dumps(value, ensure_ascii=False, default=str)
            client.setex(self._full_key(key), max(ttl, 1), payload)
        except Exception as exc:
            logger.warning("Failed to write route cache to Redis: %s", exc)
            self._error_count += 1
            self._last_error = str(exc)
            self._last_error_at = time.time()

    def telemetry(self) -> dict[str, Any]:
        """Expose error counters for the observability endpoint."""
        return {
            "namespace": self._namespace,
            "error_count": self._error_count,
            "last_error": self._last_error,
            "last_error_at": self._last_error_at,
        }


class LayeredCache:
    """Composite backend: L1 in-memory cache in front of an optional L2 Redis."""

    def __init__(
        self,
        l1: MemoryTTLCache,
        l2: Optional[RedisJSONCache] = None,
        redis_expected: bool = False,
    ):
        self._l1 = l1
        self._l2 = l2
        # True when configuration says Redis should exist; lets telemetry
        # distinguish "L1-only by design" from "degraded".
        self._redis_expected = redis_expected
        self._l1_hits = 0
        self._l2_hits = 0
        self._misses = 0
        self._writes = 0

    def get(self, key: str) -> Optional[Any]:
        hit = self._l1.get(key)
        if hit is not None:
            self._l1_hits += 1
            return hit

        if self._l2 is None:
            self._misses += 1
            return None

        hit = self._l2.get(key)
        if hit is not None:
            # Keep warm in memory for fast subsequent reads.
            self._l1.set(key, hit, CACHE_TTL_DEFAULT)
            self._l2_hits += 1
            return hit

        self._misses += 1
        return hit

    def set(self, key: str, value: Any, ttl: int) -> None:
        self._writes += 1
        self._l1.set(key, value, ttl)
        if self._l2 is not None:
            self._l2.set(key, value, ttl)

    def telemetry(self) -> dict[str, Any]:
        """Aggregate hit/miss counters for both layers."""
        mode = "l1+l2" if self._l2 is not None else "l1-only"
        degraded = self._redis_expected and self._l2 is None
        total_reads = self._l1_hits + self._l2_hits + self._misses
        l1_hit_rate = round(self._l1_hits / total_reads, 4) if total_reads else 0
        l2_hit_rate = round(self._l2_hits / total_reads, 4) if total_reads else 0
        miss_rate = round(self._misses / total_reads, 4) if total_reads else 0
        return {
            "mode": mode,
            "degraded": degraded,
            "redis_expected": self._redis_expected,
            "l1_size": self._l1.size(),
            "reads_total": total_reads,
            "writes_total": self._writes,
            "l1_hits": self._l1_hits,
            "l2_hits": self._l2_hits,
            "misses": self._misses,
            "l1_hit_rate": l1_hit_rate,
            "l2_hit_rate": l2_hit_rate,
            "miss_rate": miss_rate,
            "l2_telemetry": self._l2.telemetry() if self._l2 is not None else None,
        }


def create_default_cache_backend() -> CacheBackend:
    """Create the default route cache backend.

    Always uses the in-memory TTL cache as L1 and layers Redis on top as L2
    whenever a Redis connection is available.
    """
    l1_cache = MemoryTTLCache()
    l2_cache = RedisJSONCache() if redis_available() else None
    return LayeredCache(l1=l1_cache, l2=l2_cache, redis_expected=REDIS_ENABLED)
+ """ + l1_cache = MemoryTTLCache() + l2_cache = RedisJSONCache() if redis_available() else None + return LayeredCache(l1=l1_cache, l2=l2_cache, redis_expected=REDIS_ENABLED) + + +def get_cache() -> CacheBackend: + """Return the configured cache backend or a no-op default.""" + try: + cache = current_app.extensions.get("cache") + except RuntimeError: + cache = None + return cache if cache is not None else NoOpCache() + + +def cache_get(key: str) -> Optional[Any]: + """Get value from cache backend.""" + return get_cache().get(key) + + +def cache_set(key: str, value: Any, ttl: int = CACHE_TTL_DEFAULT) -> None: + """Set value on cache backend.""" + get_cache().set(key, value, ttl) + + +def make_cache_key(prefix: str, days_back: Optional[int] = None, filters: Optional[dict] = None) -> str: + """Generate a cache key from prefix and parameters.""" + filters_key = json.dumps(filters, sort_keys=True, ensure_ascii=False) if filters else "" + return f"{prefix}:{days_back}:{filters_key}" + + +# ============================================================ +# WIP Table-Level Cache Functions +# ============================================================ + + +def get_cached_wip_data() -> Optional[pd.DataFrame]: + """Get cached WIP data from Redis with process-level caching. + + Uses a two-tier cache strategy: + 1. Process-level cache: Parsed DataFrame (30s TTL) - fast, no parsing + 2. Redis cache: Raw JSON data - shared across workers + + This prevents redundant JSON parsing of 14+ MB data across + concurrent requests, significantly improving response times. + + Returns: + DataFrame with full DWH.DW_MES_LOT_V data, or None if cache miss. 
+ """ + cache_key = "wip_dataframe" + + # Tier 1: Check process-level cache first (fast path) + cached_df = _wip_df_cache.get(cache_key) + if cached_df is not None: + logger.debug(f"Process cache hit: {len(cached_df)} rows") + return cached_df + + # Tier 2: Parse from Redis (slow path - needs lock) + if not REDIS_ENABLED: + return None + + client = get_redis_client() + if client is None: + return None + + # Use lock to prevent multiple threads from parsing simultaneously + with _wip_parse_lock: + # Double-check after acquiring lock (another thread may have parsed) + cached_df = _wip_df_cache.get(cache_key) + if cached_df is not None: + logger.debug(f"Process cache hit (after lock): {len(cached_df)} rows") + return cached_df + + try: + start_time = time.time() + data_json = client.get(get_key("data")) + if data_json is None: + logger.debug("Cache miss: no data in Redis") + return None + + # Parse JSON to DataFrame + df = pd.read_json(io.StringIO(data_json), orient='records') + parse_time = time.time() - start_time + + # Store in process-level cache + _wip_df_cache.set(cache_key, df) + + logger.debug(f"Cache hit: loaded {len(df)} rows from Redis (parsed in {parse_time:.2f}s)") + return df + except Exception as e: + logger.warning(f"Failed to read cache: {e}") + return None + + +def get_cached_sys_date() -> Optional[str]: + """Get cached SYS_DATE from Redis. + + Returns: + SYS_DATE string or None if not cached. + """ + if not REDIS_ENABLED: + return None + + client = get_redis_client() + if client is None: + return None + + try: + return client.get(get_key("meta:sys_date")) + except Exception as e: + logger.warning(f"Failed to get cached SYS_DATE: {e}") + return None + + +def get_cache_updated_at() -> Optional[str]: + """Get cache update timestamp from Redis. + + Returns: + ISO 8601 timestamp string or None. 
+ """ + if not REDIS_ENABLED: + return None + + client = get_redis_client() + if client is None: + return None + + try: + return client.get(get_key("meta:updated_at")) + except Exception as e: + logger.warning(f"Failed to get cache updated_at: {e}") + return None + + +def is_cache_available() -> bool: + """Check if WIP cache is available and populated. + + Returns: + True if Redis has cached data. + """ + if not REDIS_ENABLED: + return False + + client = get_redis_client() + if client is None: + return False + + try: + return client.exists(get_key("data")) > 0 + except Exception as e: + logger.warning(f"Failed to check cache availability: {e}") + return False + + +def get_wip_data_with_fallback(fallback_fn) -> pd.DataFrame: + """Get WIP data from cache, falling back to Oracle if needed. + + Args: + fallback_fn: Function to call for Oracle direct query. + Should return a DataFrame. + + Returns: + DataFrame with WIP data (from cache or Oracle). + """ + # Try cache first + df = get_cached_wip_data() + if df is not None: + return df + + # Fallback to Oracle + logger.info("Cache miss or unavailable, falling back to Oracle query") + return fallback_fn() diff --git a/src/mes_dashboard/core/cache_updater.py b/src/mes_dashboard/core/cache_updater.py new file mode 100644 index 0000000..b4033cd --- /dev/null +++ b/src/mes_dashboard/core/cache_updater.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +"""Background task for updating WIP and Resource cache from Oracle to Redis.""" + +from __future__ import annotations + +import json +import logging +import os +import threading +import time +from datetime import datetime +from typing import Optional + +import pandas as pd + +from mes_dashboard.core.redis_client import ( + get_redis_client, + get_key, + redis_available, + REDIS_ENABLED, + try_acquire_lock, + release_lock, +) +from mes_dashboard.core.database import read_sql_df + +logger = logging.getLogger('mes_dashboard.cache_updater') + +# 
# ============================================================
# Configuration
# ============================================================

CACHE_CHECK_INTERVAL = int(os.getenv('CACHE_CHECK_INTERVAL', '600'))  # 10 minutes
WIP_VIEW = "DWH.DW_MES_LOT_V"

# Resource cache sync interval (default: 4 hours)
RESOURCE_SYNC_INTERVAL = int(os.getenv('RESOURCE_SYNC_INTERVAL', '14400'))

# ============================================================
# Cache Updater Class
# ============================================================


class CacheUpdater:
    """Background task that periodically checks SYS_DATE and updates cache."""

    def __init__(self, interval: int = CACHE_CHECK_INTERVAL):
        """Initialize cache updater.

        Args:
            interval: Check interval in seconds (default: 600).
        """
        self.interval = interval
        self.resource_sync_interval = RESOURCE_SYNC_INTERVAL
        self._stop_event = threading.Event()
        self._thread: Optional[threading.Thread] = None
        self._is_running = False
        self._last_resource_sync: Optional[float] = None

    def start(self) -> None:
        """Start the background update thread (no-op when Redis is disabled)."""
        if not REDIS_ENABLED:
            logger.info("Redis is disabled, cache updater will not start")
            return

        if self._thread is not None and self._thread.is_alive():
            logger.warning("Cache updater is already running")
            return

        self._stop_event.clear()
        self._thread = threading.Thread(
            target=self._worker,
            daemon=True,  # never block interpreter shutdown
            name="cache-updater"
        )
        self._thread.start()
        self._is_running = True
        logger.info(f"Cache updater started (interval: {self.interval}s)")

    def stop(self) -> None:
        """Stop the background update thread (no-op if never started)."""
        if self._thread is None or not self._thread.is_alive():
            return

        self._stop_event.set()
        self._thread.join(timeout=5)
        self._is_running = False
        logger.info("Cache updater stopped")

    def is_running(self) -> bool:
        """Check if the updater thread is alive and running."""
        return self._is_running and self._thread is not None and self._thread.is_alive()

    def force_update(self) -> bool:
        """Force an immediate cache update.

        Returns:
            True if update was successful.
        """
        return self._check_and_update(force=True)

    def _worker(self) -> None:
        """Background worker that runs the update loop."""
        # Initial update on startup. Guarded so a transient Oracle/Redis
        # failure (or an import error in the resource-cache service) at boot
        # cannot kill the daemon thread before the periodic loop starts.
        try:
            logger.info("Performing initial cache load...")
            self._check_and_update(force=True)
            self._check_resource_update(force=True)
        except Exception as e:
            logger.error(f"Initial cache load failed: {e}", exc_info=True)

        # Periodic updates until stop() sets the event.
        while not self._stop_event.wait(self.interval):
            try:
                self._check_and_update()
                self._check_resource_update()
            except Exception as e:
                logger.error(f"Cache update failed: {e}", exc_info=True)

    def _check_and_update(self, force: bool = False) -> bool:
        """Check SYS_DATE and update cache if needed.

        Uses a distributed lock to prevent multiple workers from updating
        simultaneously.

        Args:
            force: If True, update regardless of SYS_DATE.

        Returns:
            True if cache was updated.
        """
        if not redis_available():
            logger.warning("Redis not available, skipping cache update")
            return False

        # Try to acquire distributed lock (non-blocking).
        if not try_acquire_lock("wip_cache_update", ttl_seconds=120):
            logger.debug("Another worker is updating WIP cache, skipping")
            return False

        try:
            # Get current SYS_DATE from Oracle.
            oracle_sys_date = self._check_sys_date()
            if oracle_sys_date is None:
                logger.error("Failed to get SYS_DATE from Oracle")
                return False

            # Compare against the cached SYS_DATE to decide whether to update.
            cached_sys_date = self._get_cached_sys_date()
            if not force and cached_sys_date == oracle_sys_date:
                logger.debug(f"SYS_DATE unchanged ({oracle_sys_date}), skipping update")
                return False

            logger.info(f"SYS_DATE changed: {cached_sys_date} -> {oracle_sys_date}, updating cache...")

            # Load full table and push it into Redis.
            df = self._load_full_table()
            if df is None or df.empty:
                logger.error("Failed to load data from Oracle")
                return False

            success = self._update_redis_cache(df, oracle_sys_date)
            if success:
                logger.info(f"Cache updated successfully ({len(df)} rows)")
            return success

        except Exception as e:
            logger.error(f"Error in cache update: {e}", exc_info=True)
            return False
        finally:
            release_lock("wip_cache_update")

    def _check_sys_date(self) -> Optional[str]:
        """Query Oracle for MAX(SYS_DATE).

        Returns:
            SYS_DATE string or None if query failed.
        """
        sql = f"SELECT MAX(SYS_DATE) as SYS_DATE FROM {WIP_VIEW}"
        try:
            df = read_sql_df(sql)
            if df is not None and not df.empty:
                sys_date = df.iloc[0]['SYS_DATE']
                return str(sys_date) if sys_date else None
            return None
        except Exception as e:
            logger.error(f"Failed to query SYS_DATE: {e}")
            return None

    def _get_cached_sys_date(self) -> Optional[str]:
        """Get cached SYS_DATE from Redis.

        Returns:
            Cached SYS_DATE string or None.
        """
        client = get_redis_client()
        if client is None:
            return None

        try:
            return client.get(get_key("meta:sys_date"))
        except Exception as e:
            logger.warning(f"Failed to get cached SYS_DATE: {e}")
            return None

    def _load_full_table(self) -> Optional[pd.DataFrame]:
        """Load the entire DWH.DW_MES_LOT_V view from Oracle.

        Returns:
            DataFrame with all rows, or None if the query failed.
        """
        sql = f"""
            SELECT *
            FROM {WIP_VIEW}
            WHERE WORKORDER IS NOT NULL
        """
        try:
            df = read_sql_df(sql)
            return df
        except Exception as e:
            logger.error(f"Failed to load full table: {e}")
            return None

    def _update_redis_cache(self, df: pd.DataFrame, sys_date: str) -> bool:
        """Update Redis cache with new data using a pipeline for atomicity.

        Args:
            df: DataFrame with full table data (not modified by this call).
            sys_date: Current SYS_DATE from Oracle.

        Returns:
            True if update was successful.
        """
        client = get_redis_client()
        if client is None:
            return False

        try:
            # Stringify datetime columns for JSON. Work on a copy so the
            # caller's DataFrame is not mutated, and include tz-aware
            # ('datetimetz') columns, which plain 'datetime64' misses.
            datetime_cols = df.select_dtypes(include=['datetime64', 'datetimetz']).columns
            if len(datetime_cols) > 0:
                df = df.copy()
                for col in datetime_cols:
                    df[col] = df[col].astype(str)

            data_json = df.to_json(orient='records', force_ascii=False)

            # Atomic update: payload + metadata in one pipeline round trip.
            now = datetime.now().isoformat()
            pipe = client.pipeline()
            pipe.set(get_key("data"), data_json)
            pipe.set(get_key("meta:sys_date"), sys_date)
            pipe.set(get_key("meta:updated_at"), now)
            pipe.execute()

            return True
        except Exception as e:
            logger.error(f"Failed to update Redis cache: {e}")
            return False

    def _check_resource_update(self, force: bool = False) -> bool:
        """Check and update the resource cache if needed.

        Uses a distributed lock to prevent multiple workers from updating
        simultaneously.

        Args:
            force: If True, update regardless of the sync interval.

        Returns:
            True if cache was updated.
        """
        # Imported lazily to avoid a circular import at module load time.
        from mes_dashboard.services.resource_cache import (
            refresh_cache as refresh_resource_cache,
            RESOURCE_CACHE_ENABLED,
        )

        if not RESOURCE_CACHE_ENABLED:
            return False

        # Check if a sync is due based on the configured interval.
        now = time.time()
        if not force and self._last_resource_sync is not None:
            elapsed = now - self._last_resource_sync
            if elapsed < self.resource_sync_interval:
                logger.debug(
                    f"Resource sync not due yet ({elapsed:.0f}s < {self.resource_sync_interval}s)"
                )
                return False

        # Try to acquire distributed lock (non-blocking).
        if not try_acquire_lock("resource_cache_update", ttl_seconds=300):
            logger.debug("Another worker is updating resource cache, skipping")
            return False

        logger.info("Checking resource cache for updates...")
        try:
            updated = refresh_resource_cache(force=force)
            self._last_resource_sync = now
            return updated
        except Exception as e:
            logger.error(f"Resource cache update failed: {e}", exc_info=True)
            return False
        finally:
            release_lock("resource_cache_update")


# ============================================================
# Global Instance
# ============================================================

_CACHE_UPDATER: Optional[CacheUpdater] = None


def get_cache_updater() -> CacheUpdater:
    """Get or create the global cache updater instance."""
    global _CACHE_UPDATER
    if _CACHE_UPDATER is None:
        _CACHE_UPDATER = CacheUpdater()
    return _CACHE_UPDATER


def start_cache_updater() -> None:
    """Start the global cache updater."""
    get_cache_updater().start()


def stop_cache_updater() -> None:
    """Stop the global cache updater (no-op if never created)."""
    if _CACHE_UPDATER is not None:
        _CACHE_UPDATER.stop()
# States:
#   CLOSED:    Normal operation, requests pass through
#   OPEN:      Failures exceeded threshold, requests are rejected immediately
#   HALF_OPEN: Testing if service has recovered, limited requests allowed

import logging
import os
import threading
import time
from collections import deque
from dataclasses import dataclass
from enum import Enum
from typing import Deque, Optional

logger = logging.getLogger('mes_dashboard.circuit_breaker')

# ============================================================
# Configuration
# ============================================================

# The breaker defaults to ON only in production-like environments.
_env_name = os.getenv('FLASK_ENV', 'development').lower()
_prod_default_enabled = _env_name in {'prod', 'production'}
CIRCUIT_BREAKER_ENABLED = os.getenv(
    'CIRCUIT_BREAKER_ENABLED',
    'true' if _prod_default_enabled else 'false',
).lower() == 'true'

# Minimum failures before circuit can open
FAILURE_THRESHOLD = int(os.getenv('CIRCUIT_BREAKER_FAILURE_THRESHOLD', '5'))

# Failure rate threshold (0.0 - 1.0)
FAILURE_RATE_THRESHOLD = float(os.getenv('CIRCUIT_BREAKER_FAILURE_RATE', '0.5'))

# Seconds to wait in OPEN state before trying HALF_OPEN
RECOVERY_TIMEOUT = int(os.getenv('CIRCUIT_BREAKER_RECOVERY_TIMEOUT', '30'))

# Sliding window size for counting successes/failures
WINDOW_SIZE = int(os.getenv('CIRCUIT_BREAKER_WINDOW_SIZE', '10'))


# ============================================================
# Types
# ============================================================

class CircuitState(Enum):
    """Circuit breaker states."""
    CLOSED = "CLOSED"
    OPEN = "OPEN"
    HALF_OPEN = "HALF_OPEN"


@dataclass
class CircuitBreakerStatus:
    """Snapshot of a circuit breaker's counters and state."""
    state: str
    failure_count: int
    success_count: int
    total_count: int
    failure_rate: float
    last_failure_time: Optional[str]
    open_until: Optional[str]
    enabled: bool


# ============================================================
# Circuit Breaker Implementation
# ============================================================

class CircuitBreaker:
    """Circuit breaker for protecting database operations.

    Thread-safe implementation using a sliding window of recent outcomes.

    Usage:
        cb = CircuitBreaker("database")

        if not cb.allow_request():
            return error_response(CIRCUIT_BREAKER_OPEN, "Service degraded")

        try:
            result = execute_query()
            cb.record_success()
            return result
        except Exception as e:
            cb.record_failure()
            raise
    """

    def __init__(
        self,
        name: str,
        failure_threshold: int = FAILURE_THRESHOLD,
        failure_rate_threshold: float = FAILURE_RATE_THRESHOLD,
        recovery_timeout: int = RECOVERY_TIMEOUT,
        window_size: int = WINDOW_SIZE
    ):
        """Initialize circuit breaker.

        Args:
            name: Identifier for this circuit breaker.
            failure_threshold: Minimum failures before opening.
            failure_rate_threshold: Failure rate to trigger opening (0.0-1.0).
            recovery_timeout: Seconds to wait before half-open.
            window_size: Size of the sliding outcome window.
        """
        self.name = name
        self.failure_threshold = failure_threshold
        self.failure_rate_threshold = failure_rate_threshold
        self.recovery_timeout = recovery_timeout
        self.window_size = window_size

        self._state = CircuitState.CLOSED
        self._lock = threading.Lock()

        # Sliding window of outcomes: True = success, False = failure.
        self._results: Deque[bool] = deque(maxlen=window_size)

        self._last_failure_time: Optional[float] = None
        self._open_time: Optional[float] = None

    @property
    def state(self) -> CircuitState:
        """Current circuit state; lazily transitions OPEN -> HALF_OPEN."""
        with self._lock:
            if self._state == CircuitState.OPEN:
                # After the recovery timeout, allow a probe via HALF_OPEN.
                if self._open_time and time.time() - self._open_time >= self.recovery_timeout:
                    self._transition_to(CircuitState.HALF_OPEN)
            return self._state

    def allow_request(self) -> bool:
        """Check whether a request should be allowed through.

        Returns:
            True if the request should proceed, False if the circuit is open.
        """
        if not CIRCUIT_BREAKER_ENABLED:
            return True

        current_state = self.state
        if current_state == CircuitState.OPEN:
            return False
        # CLOSED passes; HALF_OPEN also passes (recovery probes).
        return True

    def record_success(self) -> None:
        """Record a successful operation."""
        if not CIRCUIT_BREAKER_ENABLED:
            return

        with self._lock:
            self._results.append(True)

            if self._state == CircuitState.HALF_OPEN:
                # A success during the probe phase closes the circuit.
                self._transition_to(CircuitState.CLOSED)

    def record_failure(self) -> None:
        """Record a failed operation."""
        if not CIRCUIT_BREAKER_ENABLED:
            return

        with self._lock:
            self._results.append(False)
            self._last_failure_time = time.time()

            if self._state == CircuitState.HALF_OPEN:
                # A failure during the probe phase re-opens the circuit.
                self._transition_to(CircuitState.OPEN)
            elif self._state == CircuitState.CLOSED:
                self._check_and_open()

    def _check_and_open(self) -> None:
        """Open the circuit when the failure window crosses both thresholds.

        Must be called with the lock held.
        """
        if len(self._results) < self.failure_threshold:
            return

        failure_count = sum(1 for r in self._results if not r)
        failure_rate = failure_count / len(self._results)

        if (failure_count >= self.failure_threshold and
                failure_rate >= self.failure_rate_threshold):
            self._transition_to(CircuitState.OPEN)

    def _transition_to(self, new_state: CircuitState) -> None:
        """Transition to a new state with logging.

        Must be called with the lock held.
        """
        old_state = self._state
        self._state = new_state

        if new_state == CircuitState.OPEN:
            self._open_time = time.time()
            logger.warning(
                f"Circuit breaker '{self.name}' OPENED: "
                f"state {old_state.value} -> {new_state.value}, "
                f"failures: {sum(1 for r in self._results if not r)}/{len(self._results)}"
            )
        elif new_state == CircuitState.HALF_OPEN:
            logger.info(
                f"Circuit breaker '{self.name}' entering HALF_OPEN: "
                f"testing service recovery..."
            )
        elif new_state == CircuitState.CLOSED:
            self._open_time = None
            self._results.clear()
            logger.info(
                f"Circuit breaker '{self.name}' CLOSED: "
                f"service recovered"
            )

    def get_status(self) -> CircuitBreakerStatus:
        """Get a snapshot of the current status."""
        from datetime import datetime

        with self._lock:
            # Read _state directly: the `state` property would try to
            # re-acquire this non-reentrant lock and deadlock.
            current_state = self._state
            failure_count = sum(1 for r in self._results if not r)
            success_count = sum(1 for r in self._results if r)
            total = len(self._results)
            failure_rate = failure_count / total if total > 0 else 0.0

            open_until = None
            if current_state == CircuitState.OPEN and self._open_time:
                open_until_time = self._open_time + self.recovery_timeout
                open_until = datetime.fromtimestamp(open_until_time).isoformat()

            last_failure = None
            if self._last_failure_time:
                last_failure = datetime.fromtimestamp(self._last_failure_time).isoformat()

            return CircuitBreakerStatus(
                state=current_state.value,
                failure_count=failure_count,
                success_count=success_count,
                total_count=total,
                failure_rate=failure_rate,
                last_failure_time=last_failure,
                open_until=open_until,
                enabled=CIRCUIT_BREAKER_ENABLED
            )

    def reset(self) -> None:
        """Reset the circuit breaker to its initial state."""
        with self._lock:
            self._state = CircuitState.CLOSED
            self._results.clear()
            self._last_failure_time = None
            self._open_time = None
            logger.info(f"Circuit breaker '{self.name}' reset")


# ============================================================
# Global Database Circuit Breaker
# ============================================================

_DATABASE_CIRCUIT_BREAKER: Optional[CircuitBreaker] = None


def get_database_circuit_breaker() -> CircuitBreaker:
    """Get or create the global database circuit breaker."""
    global _DATABASE_CIRCUIT_BREAKER
    if _DATABASE_CIRCUIT_BREAKER is None:
        _DATABASE_CIRCUIT_BREAKER = CircuitBreaker("database")
    return _DATABASE_CIRCUIT_BREAKER
def get_circuit_breaker_status() -> dict:
    """Get current circuit breaker status as a dictionary.

    Returns:
        Dictionary with circuit breaker status information.
    """
    breaker = get_database_circuit_breaker()
    snapshot = breaker.get_status()
    payload = {
        "state": snapshot.state,
        "failure_count": snapshot.failure_count,
        "success_count": snapshot.success_count,
        "total_count": snapshot.total_count,
        "window_size": breaker.window_size,
        "failure_rate": round(snapshot.failure_rate, 2),
        "last_failure_time": snapshot.last_failure_time,
        "open_until": snapshot.open_until,
        "enabled": snapshot.enabled,
    }
    return payload
+""" + +from __future__ import annotations + +import logging +import os +import re +import threading +import time +from typing import Optional, Dict, Any + +import oracledb +import pandas as pd +from flask import g, current_app +from sqlalchemy import create_engine, event, text +from sqlalchemy.exc import TimeoutError as SQLAlchemyTimeoutError +from sqlalchemy.pool import QueuePool + +from mes_dashboard.config.database import DB_CONFIG, CONNECTION_STRING +from mes_dashboard.config.settings import get_config + +# Configure module logger +logger = logging.getLogger('mes_dashboard.database') + +# ============================================================ +# SQLAlchemy Engine (QueuePool - connection pooling) +# ============================================================ +# Using QueuePool for better performance and connection reuse. +# pool_pre_ping ensures connections are valid before use. +# pool_recycle prevents stale connections from firewalls/NAT. + +_ENGINE = None +_DB_RUNTIME_CONFIG: Optional[Dict[str, Any]] = None + + +class DatabaseDegradedError(RuntimeError): + """Base class for degraded database conditions.""" + + def __init__(self, message: str, retry_after_seconds: int = 5): + super().__init__(message) + self.retry_after_seconds = max(int(retry_after_seconds), 1) + + +class DatabasePoolExhaustedError(DatabaseDegradedError): + """Raised when DB connection pool is exhausted.""" + + +class DatabaseCircuitOpenError(DatabaseDegradedError): + """Raised when circuit breaker blocks DB access.""" + + +def _from_app_or_env_int(name: str, fallback: int) -> int: + try: + app_value = current_app.config.get(name) + if app_value is not None: + return int(app_value) + except RuntimeError: + pass + + env_value = os.getenv(name) + if env_value is not None: + try: + return int(env_value) + except (TypeError, ValueError): + pass + return int(fallback) + + +def _from_app_or_env_float(name: str, fallback: float) -> float: + try: + app_value = current_app.config.get(name) + 
if app_value is not None: + return float(app_value) + except RuntimeError: + pass + + env_value = os.getenv(name) + if env_value is not None: + try: + return float(env_value) + except (TypeError, ValueError): + pass + return float(fallback) + + +def get_db_runtime_config(refresh: bool = False) -> Dict[str, Any]: + """Get effective DB runtime configuration used by pool and direct connections.""" + global _DB_RUNTIME_CONFIG + if _DB_RUNTIME_CONFIG is not None and not refresh: + return _DB_RUNTIME_CONFIG.copy() + + config_class = get_config(os.getenv("FLASK_ENV")) + + _DB_RUNTIME_CONFIG = { + "pool_size": _from_app_or_env_int("DB_POOL_SIZE", config_class.DB_POOL_SIZE), + "max_overflow": _from_app_or_env_int("DB_MAX_OVERFLOW", config_class.DB_MAX_OVERFLOW), + "pool_timeout": _from_app_or_env_int("DB_POOL_TIMEOUT", config_class.DB_POOL_TIMEOUT), + "pool_recycle": _from_app_or_env_int("DB_POOL_RECYCLE", config_class.DB_POOL_RECYCLE), + "tcp_connect_timeout": _from_app_or_env_int( + "DB_TCP_CONNECT_TIMEOUT", + config_class.DB_TCP_CONNECT_TIMEOUT, + ), + "retry_count": _from_app_or_env_int("DB_CONNECT_RETRY_COUNT", config_class.DB_CONNECT_RETRY_COUNT), + "retry_delay": _from_app_or_env_float("DB_CONNECT_RETRY_DELAY", config_class.DB_CONNECT_RETRY_DELAY), + "call_timeout_ms": _from_app_or_env_int("DB_CALL_TIMEOUT_MS", config_class.DB_CALL_TIMEOUT_MS), + } + return _DB_RUNTIME_CONFIG.copy() + + +def get_pool_runtime_config() -> Dict[str, Any]: + """Expose effective DB pool configuration for health diagnostics.""" + return get_db_runtime_config().copy() + + +def get_pool_status() -> Dict[str, Any]: + """Expose current DB pool state for health diagnostics.""" + runtime = get_db_runtime_config() + engine = get_engine() + pool = engine.pool + pool_size = int(runtime["pool_size"]) + max_overflow = int(runtime["max_overflow"]) + max_capacity = max(pool_size + max_overflow, 1) + checked_out = int(pool.checkedout()) + overflow = int(pool.overflow()) + saturation = 
round(min(max(checked_out / max_capacity, 0.0), 1.0), 4) + return { + "size": int(pool.size()), + "checked_out": checked_out, + "overflow": overflow, + "checked_in": int(pool.checkedin()), + "max_capacity": max_capacity, + "saturation": saturation, + } + + +def get_engine(): + """Get SQLAlchemy engine with connection pooling. + + Uses QueuePool for connection reuse and better performance. + - pool_size: Base number of persistent connections + - max_overflow: Additional connections during peak load + - pool_timeout: Max wait time for available connection + - pool_recycle: Recycle connections after 30 minutes + - pool_pre_ping: Validate connection before checkout + """ + global _ENGINE + if _ENGINE is None: + runtime = get_db_runtime_config() + _ENGINE = create_engine( + CONNECTION_STRING, + poolclass=QueuePool, + pool_size=runtime["pool_size"], + max_overflow=runtime["max_overflow"], + pool_timeout=runtime["pool_timeout"], + pool_recycle=runtime["pool_recycle"], + pool_pre_ping=True, # Validate connection before use + connect_args={ + "tcp_connect_timeout": runtime["tcp_connect_timeout"], + "retry_count": runtime["retry_count"], + "retry_delay": runtime["retry_delay"], + } + ) + # Register pool event listeners for monitoring + _register_pool_events(_ENGINE, runtime["call_timeout_ms"]) + logger.info( + "Database engine created with QueuePool " + f"(pool_size={runtime['pool_size']}, " + f"max_overflow={runtime['max_overflow']}, " + f"pool_timeout={runtime['pool_timeout']}, " + f"pool_recycle={runtime['pool_recycle']}, " + f"call_timeout_ms={runtime['call_timeout_ms']})" + ) + return _ENGINE + + +def _register_pool_events(engine, call_timeout_ms: int): + """Register event listeners for connection pool monitoring.""" + + @event.listens_for(engine, "checkout") + def on_checkout(dbapi_conn, connection_record, connection_proxy): + # Keep DB call timeout below worker timeout to avoid wedged workers. 
def _register_pool_events(engine, call_timeout_ms: int):
    """Attach pool lifecycle listeners used for monitoring and timeouts."""

    @event.listens_for(engine, "checkout")
    def _on_checkout(dbapi_conn, connection_record, connection_proxy):
        # Keep DB call timeout below worker timeout to avoid wedged workers.
        dbapi_conn.call_timeout = call_timeout_ms
        logger.debug("Connection checked out from pool (call_timeout_ms=%s)", call_timeout_ms)

    @event.listens_for(engine, "checkin")
    def _on_checkin(dbapi_conn, connection_record):
        logger.debug("Connection returned to pool")

    @event.listens_for(engine, "invalidate")
    def _on_invalidate(dbapi_conn, connection_record, exception):
        # A soft invalidation carries no exception.
        if not exception:
            logger.debug("Connection invalidated (soft)")
        else:
            logger.warning(f"Connection invalidated due to: {exception}")

    @event.listens_for(engine, "connect")
    def _on_connect(dbapi_conn, connection_record):
        logger.info("New database connection established")


# ============================================================
# Request-scoped Connection
# ============================================================


def get_db():
    """Return the request-scoped connection, creating it on first use."""
    if "db" not in g:
        g.db = get_engine().connect()
    return g.db


def close_db(_exc: Optional[BaseException] = None) -> None:
    """Close and discard the request-scoped connection, if any."""
    conn = g.pop("db", None)
    if conn is not None:
        conn.close()


def init_db(app) -> None:
    """Register database teardown handlers on the Flask app."""
    app.teardown_appcontext(close_db)
# ============================================================
# Keep-Alive for Connection Pool
# ============================================================
# Periodic keep-alive prevents idle connections from being dropped
# by firewalls/NAT. Runs in a daemon background thread.

_KEEPALIVE_THREAD = None
_KEEPALIVE_STOP = threading.Event()
KEEPALIVE_INTERVAL = 300  # seconds between pings (5 minutes)


def _keepalive_worker():
    """Background worker that pings the database periodically.

    Event.wait doubles as the sleep, so shutdown does not block for a
    full interval. A failed ping is logged and never fatal.
    """
    while not _KEEPALIVE_STOP.wait(KEEPALIVE_INTERVAL):
        try:
            engine = get_engine()
            with engine.connect() as conn:
                conn.execute(text("SELECT 1 FROM DUAL"))
            logger.debug("Keep-alive ping successful")
        except Exception as exc:
            logger.warning(f"Keep-alive ping failed: {exc}")


def start_keepalive():
    """Start the background keep-alive thread if it is not already running."""
    global _KEEPALIVE_THREAD
    if _KEEPALIVE_THREAD is None or not _KEEPALIVE_THREAD.is_alive():
        _KEEPALIVE_STOP.clear()
        _KEEPALIVE_THREAD = threading.Thread(
            target=_keepalive_worker,
            daemon=True,
            name="db-keepalive"
        )
        _KEEPALIVE_THREAD.start()
        logger.info(f"Keep-alive thread started (interval: {KEEPALIVE_INTERVAL}s)")


def stop_keepalive():
    """Stop the keep-alive background thread.

    Improvement over the original: warns if the thread did not exit within
    the join timeout and always clears the stale thread reference.
    """
    global _KEEPALIVE_THREAD
    if _KEEPALIVE_THREAD and _KEEPALIVE_THREAD.is_alive():
        _KEEPALIVE_STOP.set()
        _KEEPALIVE_THREAD.join(timeout=5)
        if _KEEPALIVE_THREAD.is_alive():
            # join() timed out; the thread is daemonic so it cannot block
            # interpreter exit, but this should not happen in practice.
            logger.warning("Keep-alive thread did not stop within 5s")
        else:
            logger.info("Keep-alive thread stopped")
    _KEEPALIVE_THREAD = None


def dispose_engine():
    """Dispose the database engine and all pooled connections.

    Call this during application shutdown to cleanly release resources.
    Also stops the keep-alive thread and drops the cached runtime config so
    a later restart re-reads configuration.
    """
    global _ENGINE, _DB_RUNTIME_CONFIG
    stop_keepalive()
    if _ENGINE is not None:
        _ENGINE.dispose()
        logger.info("Database engine disposed, all connections closed")
        _ENGINE = None
    _DB_RUNTIME_CONFIG = None
+ """ + runtime = get_db_runtime_config() + try: + conn = oracledb.connect( + **DB_CONFIG, + tcp_connect_timeout=runtime["tcp_connect_timeout"], + retry_count=runtime["retry_count"], + retry_delay=runtime["retry_delay"], + ) + conn.call_timeout = runtime["call_timeout_ms"] + logger.debug( + "Direct oracledb connection established (call_timeout_ms=%s)", + runtime["call_timeout_ms"], + ) + return conn + except Exception as exc: + ora_code = _extract_ora_code(exc) + logger.error(f"Database connection failed - ORA-{ora_code}: {exc}") + return None + + +def _extract_ora_code(exc: Exception) -> str: + """Extract ORA error code from exception message.""" + match = re.search(r'ORA-(\d+)', str(exc)) + return match.group(1) if match else 'UNKNOWN' + + +def read_sql_df(sql: str, params: Optional[Dict[str, Any]] = None) -> pd.DataFrame: + """Execute SQL query and return results as a DataFrame. + + Args: + sql: SQL query string. Can include Oracle bind variables (:param_name) + for parameterized queries. Use SQLLoader to load SQL from files. + params: Optional dict of parameter values to bind to the query. + Use QueryBuilder to construct safe parameterized conditions. + + Returns: + DataFrame with query results. Column names are uppercased. + + Raises: + Exception: If query execution fails. ORA code is logged. + RuntimeError: If circuit breaker is open (service degraded). 
+ + Example: + >>> sql = "SELECT * FROM users WHERE status = :status" + >>> df = read_sql_df(sql, {"status": "active"}) + + Note: + - Slow queries (>1s) are logged as warnings + - All queries use connection pooling via SQLAlchemy + - Call timeout is set to 55s to prevent worker blocking + - Circuit breaker protects against cascading failures + - Query latency is recorded for metrics + """ + from mes_dashboard.core.circuit_breaker import ( + get_database_circuit_breaker, + CIRCUIT_BREAKER_ENABLED + ) + from mes_dashboard.core.metrics import record_query_latency + + # Check circuit breaker before executing + circuit_breaker = get_database_circuit_breaker() + if not circuit_breaker.allow_request(): + logger.warning("Circuit breaker OPEN - rejecting database query") + retry_after = max(int(getattr(circuit_breaker, "recovery_timeout", 30)), 1) + raise DatabaseCircuitOpenError( + "Database service is temporarily unavailable (circuit breaker open)", + retry_after_seconds=retry_after, + ) + + start_time = time.time() + engine = get_engine() + + try: + with engine.connect() as conn: + df = pd.read_sql(text(sql), conn, params=params) + df.columns = [str(c).upper() for c in df.columns] + + elapsed = time.time() - start_time + + # Record metrics + record_query_latency(elapsed) + + # Record success to circuit breaker + if CIRCUIT_BREAKER_ENABLED: + circuit_breaker.record_success() + + # Log slow queries (>1 second) as warnings + if elapsed > 1.0: + # Truncate SQL for logging (first 100 chars) + sql_preview = sql.strip().replace('\n', ' ')[:100] + logger.warning(f"Slow query ({elapsed:.2f}s): {sql_preview}...") + else: + logger.debug(f"Query completed in {elapsed:.3f}s, rows={len(df)}") + + return df + + except SQLAlchemyTimeoutError as exc: + elapsed = time.time() - start_time + + # Record metrics even for failed queries + record_query_latency(elapsed) + + if CIRCUIT_BREAKER_ENABLED: + circuit_breaker.record_failure() + + logger.error( + "Connection pool exhausted after %.2fs - 
%s", + elapsed, + exc, + ) + raise DatabasePoolExhaustedError( + "Database connection pool exhausted", + retry_after_seconds=5, + ) from exc + except Exception as exc: + elapsed = time.time() - start_time + + # Record metrics even for failed queries + record_query_latency(elapsed) + + # Record failure to circuit breaker + if CIRCUIT_BREAKER_ENABLED: + circuit_breaker.record_failure() + + ora_code = _extract_ora_code(exc) + sql_preview = sql.strip().replace('\n', ' ')[:100] + logger.error( + f"Query failed after {elapsed:.2f}s - ORA-{ora_code}: {exc} | SQL: {sql_preview}..." + ) + raise + + +# ============================================================ +# Table Utilities +# ============================================================ + + +def get_table_columns(table_name: str) -> list: + """Get column names for a table.""" + connection = get_db_connection() + if not connection: + return [] + + try: + cursor = connection.cursor() + cursor.execute(f"SELECT * FROM {table_name} WHERE ROWNUM <= 1") + columns = [desc[0] for desc in cursor.description] + cursor.close() + connection.close() + return columns + except Exception: + if connection: + connection.close() + return [] + + +def get_table_data( + table_name: str, + limit: int = 1000, + time_field: Optional[str] = None, + filters: Optional[Dict[str, str]] = None, +) -> Dict[str, Any]: + """Fetch rows from a table with optional filtering and sorting.""" + from datetime import datetime + + connection = get_db_connection() + if not connection: + return {'error': 'Database connection failed'} + + try: + cursor = connection.cursor() + + where_conditions = [] + bind_params = {} + + if filters: + for col, val in filters.items(): + if val and val.strip(): + safe_col = ''.join(c for c in col if c.isalnum() or c == '_') + param_name = f"p_{safe_col}" + where_conditions.append( + f"UPPER(TO_CHAR({safe_col})) LIKE UPPER(:{param_name})" + ) + bind_params[param_name] = f"%{val.strip()}%" + + if time_field: + time_condition = 
f"{time_field} IS NOT NULL" + if where_conditions: + all_conditions = " AND ".join([time_condition] + where_conditions) + else: + all_conditions = time_condition + + sql = f""" + SELECT * FROM ( + SELECT * FROM {table_name} + WHERE {all_conditions} + ORDER BY {time_field} DESC + ) WHERE ROWNUM <= :row_limit + """ + else: + if where_conditions: + all_conditions = " AND ".join(where_conditions) + sql = f""" + SELECT * FROM ( + SELECT * FROM {table_name} + WHERE {all_conditions} + ) WHERE ROWNUM <= :row_limit + """ + else: + sql = f""" + SELECT * FROM {table_name} + WHERE ROWNUM <= :row_limit + """ + + bind_params['row_limit'] = limit + cursor.execute(sql, bind_params) + columns = [desc[0] for desc in cursor.description] + rows = cursor.fetchall() + + data = [] + for row in rows: + row_dict = {} + for i, col in enumerate(columns): + value = row[i] + if isinstance(value, datetime): + row_dict[col] = value.strftime('%Y-%m-%d %H:%M:%S') + else: + row_dict[col] = value + data.append(row_dict) + + cursor.close() + connection.close() + + return { + 'columns': columns, + 'data': data, + 'row_count': len(data) + } + except Exception as exc: + ora_code = _extract_ora_code(exc) + logger.error(f"get_table_data failed - ORA-{ora_code}: {exc}") + if connection: + connection.close() + return {'error': f'查詢失敗: {str(exc)}'} + + +def get_table_column_metadata(table_name: str) -> Dict[str, Any]: + """Get column metadata from Oracle ALL_TAB_COLUMNS. + + Args: + table_name: Table name in format 'SCHEMA.TABLE' or 'TABLE' + + Returns: + Dict with 'columns' list containing column info: + - name: Column name + - data_type: Oracle data type (VARCHAR2, NUMBER, DATE, etc.) 
+ - data_length: Max length for character types + - data_precision: Precision for numeric types + - data_scale: Scale for numeric types + - is_date: True if column is DATE or TIMESTAMP type + - is_number: True if column is NUMBER type + """ + connection = get_db_connection() + if not connection: + return {'error': 'Database connection failed', 'columns': []} + + try: + cursor = connection.cursor() + + # Parse schema and table name + parts = table_name.split('.') + if len(parts) == 2: + owner, tbl_name = parts[0].upper(), parts[1].upper() + else: + owner = None + tbl_name = parts[0].upper() + + # Query ALL_TAB_COLUMNS for metadata + if owner: + sql = """ + SELECT COLUMN_NAME, DATA_TYPE, DATA_LENGTH, + DATA_PRECISION, DATA_SCALE, COLUMN_ID + FROM ALL_TAB_COLUMNS + WHERE OWNER = :owner AND TABLE_NAME = :table_name + ORDER BY COLUMN_ID + """ + cursor.execute(sql, {'owner': owner, 'table_name': tbl_name}) + else: + sql = """ + SELECT COLUMN_NAME, DATA_TYPE, DATA_LENGTH, + DATA_PRECISION, DATA_SCALE, COLUMN_ID + FROM ALL_TAB_COLUMNS + WHERE TABLE_NAME = :table_name + ORDER BY COLUMN_ID + """ + cursor.execute(sql, {'table_name': tbl_name}) + + rows = cursor.fetchall() + cursor.close() + connection.close() + + if not rows: + # Fallback to basic column detection if no metadata found + logger.warning( + f"No metadata found for {table_name}, falling back to basic detection" + ) + basic_columns = get_table_columns(table_name) + return { + 'columns': [ + { + 'name': col, + 'data_type': 'UNKNOWN', + 'data_length': None, + 'data_precision': None, + 'data_scale': None, + 'is_date': False, + 'is_number': False + } + for col in basic_columns + ] + } + + # Process results + columns = [] + date_types = {'DATE', 'TIMESTAMP', 'TIMESTAMP WITH TIME ZONE', + 'TIMESTAMP WITH LOCAL TIME ZONE'} + number_types = {'NUMBER', 'FLOAT', 'BINARY_FLOAT', 'BINARY_DOUBLE', + 'INTEGER', 'SMALLINT'} + + for row in rows: + col_name, data_type, data_length, data_precision, data_scale, _ = row + 
columns.append({ + 'name': col_name, + 'data_type': data_type, + 'data_length': data_length, + 'data_precision': data_precision, + 'data_scale': data_scale, + 'is_date': data_type in date_types, + 'is_number': data_type in number_types + }) + + logger.debug(f"Retrieved metadata for {table_name}: {len(columns)} columns") + return {'columns': columns} + + except Exception as exc: + ora_code = _extract_ora_code(exc) + logger.warning( + f"get_table_column_metadata failed - ORA-{ora_code}: {exc}, " + f"falling back to basic detection" + ) + if connection: + connection.close() + + # Fallback to basic column detection + basic_columns = get_table_columns(table_name) + return { + 'columns': [ + { + 'name': col, + 'data_type': 'UNKNOWN', + 'data_length': None, + 'data_precision': None, + 'data_scale': None, + 'is_date': False, + 'is_number': False + } + for col in basic_columns + ] + } diff --git a/src/mes_dashboard/core/log_store.py b/src/mes_dashboard/core/log_store.py new file mode 100644 index 0000000..78ddae8 --- /dev/null +++ b/src/mes_dashboard/core/log_store.py @@ -0,0 +1,529 @@ +# -*- coding: utf-8 -*- +"""SQLite-based log store for admin dashboard. + +Stores structured logs in a local SQLite database for admin querying. +Maintains existing file/STDERR logs for operations. 
+""" + +from __future__ import annotations + +import logging +import os +import sqlite3 +import threading +import time +from contextlib import contextmanager +from datetime import datetime, timedelta +from pathlib import Path +from typing import Any, Dict, Generator, List, Optional + +logger = logging.getLogger('mes_dashboard.log_store') + +# ============================================================ +# Configuration +# ============================================================ + +# SQLite database path +LOG_SQLITE_PATH = os.getenv( + 'LOG_SQLITE_PATH', + 'logs/admin_logs.sqlite' +) + +# Retention policy +LOG_SQLITE_RETENTION_DAYS = int(os.getenv('LOG_SQLITE_RETENTION_DAYS', '7')) +LOG_SQLITE_MAX_ROWS = int(os.getenv('LOG_SQLITE_MAX_ROWS', '100000')) + +# Enable/disable log store +LOG_STORE_ENABLED = os.getenv('LOG_STORE_ENABLED', 'true').lower() == 'true' + + +# ============================================================ +# Database Schema +# ============================================================ + +CREATE_TABLE_SQL = """ +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + timestamp TEXT NOT NULL, + level TEXT NOT NULL, + logger_name TEXT NOT NULL, + message TEXT NOT NULL, + request_id TEXT, + user TEXT, + ip TEXT, + extra TEXT +); +""" + +CREATE_INDEXES_SQL = [ + "CREATE INDEX IF NOT EXISTS idx_logs_timestamp ON logs(timestamp);", + "CREATE INDEX IF NOT EXISTS idx_logs_level ON logs(level);", + "CREATE INDEX IF NOT EXISTS idx_logs_logger ON logs(logger_name);", +] + + +# ============================================================ +# Log Store Implementation +# ============================================================ + +class LogStore: + """SQLite-based log storage for admin dashboard queries. + + Thread-safe implementation with connection pooling per thread. + Supports retention policy to prevent unbounded growth. 
    def __init__(self, db_path: str = LOG_SQLITE_PATH):
        """Initialize log store.

        Args:
            db_path: Path to SQLite database file.
        """
        self.db_path = db_path
        # Per-thread connection cache; each thread gets its own sqlite3
        # connection via _get_connection().
        self._local = threading.local()
        # Serializes writers; reads go through the thread-local connection.
        self._write_lock = threading.Lock()
        self._initialized = False

    def initialize(self) -> None:
        """Initialize the database schema.

        Creates tables and indexes if they don't exist. Idempotent: repeat
        calls after the first are no-ops.
        """
        if self._initialized:
            return

        # Ensure directory exists
        db_dir = Path(self.db_path).parent
        db_dir.mkdir(parents=True, exist_ok=True)

        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(CREATE_TABLE_SQL)
            for index_sql in CREATE_INDEXES_SQL:
                cursor.execute(index_sql)
            conn.commit()

        self._initialized = True
        logger.info(f"Log store initialized at {self.db_path}")

    @contextmanager
    def _get_connection(self) -> Generator[sqlite3.Connection, None, None]:
        """Get a thread-local database connection.

        The connection is created lazily, kept open, and reused by the
        owning thread; it is only discarded after an sqlite3 error so the
        next call reconnects cleanly.

        Yields:
            SQLite connection for the current thread.
        """
        if not hasattr(self._local, 'connection') or self._local.connection is None:
            self._local.connection = sqlite3.connect(
                self.db_path,
                timeout=10.0,
                # check_same_thread=False: the connection object outlives a
                # single call, but it is still only used by its owning thread
                # (thread-local storage).
                check_same_thread=False
            )
            # Row factory lets query_logs turn rows into dicts directly.
            self._local.connection.row_factory = sqlite3.Row

        try:
            yield self._local.connection
        except sqlite3.Error as e:
            logger.error(f"Database error: {e}")
            # Reset connection on error
            try:
                self._local.connection.close()
            except Exception:
                pass
            self._local.connection = None
            raise
+ """, + (timestamp, level, logger_name, message, request_id, user, ip, extra_str) + ) + conn.commit() + return True + except Exception as e: + # Don't let log store errors propagate + logger.debug(f"Failed to write log to SQLite: {e}") + return False + + def query_logs( + self, + level: Optional[str] = None, + q: Optional[str] = None, + limit: int = 200, + offset: int = 0, + since: Optional[str] = None, + logger_name: Optional[str] = None + ) -> List[Dict[str, Any]]: + """Query logs from the database. + + Args: + level: Filter by log level (e.g., "ERROR", "WARNING"). + q: Search query for message content (case-insensitive). + limit: Maximum number of logs to return (default: 200). + offset: Number of logs to skip (for pagination). + since: ISO timestamp to filter logs after this time. + logger_name: Filter by logger name prefix. + + Returns: + List of log entries as dictionaries. + """ + if not LOG_STORE_ENABLED: + return [] + + if not self._initialized: + self.initialize() + + query = "SELECT * FROM logs WHERE 1=1" + params: List[Any] = [] + + if level: + query += " AND level = ?" + params.append(level.upper()) + + if q: + query += " AND message LIKE ?" + params.append(f"%{q}%") + + if since: + query += " AND timestamp >= ?" + params.append(since) + + if logger_name: + query += " AND logger_name LIKE ?" + params.append(f"{logger_name}%") + + query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?" + params.append(limit) + params.append(offset) + + try: + with self._get_connection() as conn: + cursor = conn.cursor() + cursor.execute(query, params) + rows = cursor.fetchall() + + return [dict(row) for row in rows] + except Exception as e: + logger.error(f"Failed to query logs: {e}") + return [] + + def count_logs( + self, + level: Optional[str] = None, + q: Optional[str] = None, + since: Optional[str] = None, + logger_name: Optional[str] = None + ) -> int: + """Count logs matching the given filters. + + Args: + level: Filter by log level (e.g., "ERROR", "WARNING"). 
+ q: Search query for message content (case-insensitive). + since: ISO timestamp to filter logs after this time. + logger_name: Filter by logger name prefix. + + Returns: + Number of matching logs. + """ + if not LOG_STORE_ENABLED: + return 0 + + if not self._initialized: + self.initialize() + + query = "SELECT COUNT(*) FROM logs WHERE 1=1" + params: List[Any] = [] + + if level: + query += " AND level = ?" + params.append(level.upper()) + + if q: + query += " AND message LIKE ?" + params.append(f"%{q}%") + + if since: + query += " AND timestamp >= ?" + params.append(since) + + if logger_name: + query += " AND logger_name LIKE ?" + params.append(f"{logger_name}%") + + try: + with self._get_connection() as conn: + cursor = conn.cursor() + cursor.execute(query, params) + row = cursor.fetchone() + return row[0] if row else 0 + except Exception as e: + logger.error(f"Failed to count logs: {e}") + return 0 + + def cleanup_old_logs(self) -> int: + """Remove logs older than retention period or exceeding max rows. + + Returns: + Number of logs deleted. + """ + if not LOG_STORE_ENABLED or not self._initialized: + return 0 + + deleted = 0 + + try: + with self._write_lock: + with self._get_connection() as conn: + cursor = conn.cursor() + + # Delete logs older than retention days + cutoff_date = ( + datetime.now() - timedelta(days=LOG_SQLITE_RETENTION_DAYS) + ).isoformat() + + cursor.execute( + "DELETE FROM logs WHERE timestamp < ?", + (cutoff_date,) + ) + deleted += cursor.rowcount + + # Delete excess logs if over max rows + cursor.execute("SELECT COUNT(*) FROM logs") + count = cursor.fetchone()[0] + + if count > LOG_SQLITE_MAX_ROWS: + excess = count - LOG_SQLITE_MAX_ROWS + cursor.execute( + """ + DELETE FROM logs WHERE id IN ( + SELECT id FROM logs ORDER BY timestamp ASC LIMIT ? 
+ ) + """, + (excess,) + ) + deleted += cursor.rowcount + + conn.commit() + + if deleted > 0: + logger.info(f"Cleaned up {deleted} old log entries") + + except Exception as e: + logger.error(f"Failed to cleanup logs: {e}") + + return deleted + + def get_stats(self) -> Dict[str, Any]: + """Get log store statistics. + + Returns: + Dictionary with stats (count, oldest, newest, size_bytes). + """ + if not LOG_STORE_ENABLED or not self._initialized: + return { + "enabled": LOG_STORE_ENABLED, + "count": 0, + "oldest": None, + "newest": None, + "size_bytes": 0 + } + + try: + with self._get_connection() as conn: + cursor = conn.cursor() + + cursor.execute("SELECT COUNT(*) FROM logs") + count = cursor.fetchone()[0] + + cursor.execute("SELECT MIN(timestamp), MAX(timestamp) FROM logs") + row = cursor.fetchone() + oldest = row[0] + newest = row[1] + + # Get file size + size_bytes = 0 + if Path(self.db_path).exists(): + size_bytes = Path(self.db_path).stat().st_size + + return { + "enabled": True, + "count": count, + "oldest": oldest, + "newest": newest, + "size_bytes": size_bytes, + "retention_days": LOG_SQLITE_RETENTION_DAYS, + "max_rows": LOG_SQLITE_MAX_ROWS + } + + except Exception as e: + logger.error(f"Failed to get log stats: {e}") + return { + "enabled": True, + "count": 0, + "oldest": None, + "newest": None, + "size_bytes": 0, + "error": str(e) + } + + def close(self) -> None: + """Close database connections.""" + if hasattr(self._local, 'connection') and self._local.connection: + try: + self._local.connection.close() + except Exception: + pass + self._local.connection = None + + +# ============================================================ +# SQLite Log Handler +# ============================================================ + +class SQLiteLogHandler(logging.Handler): + """Logging handler that writes to SQLite log store. + + Integrates with Python's logging framework to automatically + capture logs for admin dashboard. 
+ + Usage: + handler = SQLiteLogHandler(log_store) + handler.setLevel(logging.INFO) + logging.getLogger().addHandler(handler) + """ + + def __init__(self, log_store: LogStore): + """Initialize the handler. + + Args: + log_store: LogStore instance to write to. + """ + super().__init__() + self.log_store = log_store + + def emit(self, record: logging.LogRecord) -> None: + """Write a log record to the store. + + Args: + record: Log record to write. + """ + try: + # Get extra context from request if available + request_id = getattr(record, 'request_id', None) + user = getattr(record, 'user', None) + ip = getattr(record, 'ip', None) + + # Try to get from Flask's g object if not in record + try: + from flask import g, has_request_context + if has_request_context(): + if not request_id: + request_id = getattr(g, 'request_id', None) + if not user: + user = getattr(g, 'user_email', None) + if not ip: + from flask import request + ip = request.remote_addr + except ImportError: + pass + + self.log_store.write_log( + level=record.levelname, + logger_name=record.name, + message=self.format(record), + request_id=request_id, + user=user, + ip=ip + ) + except Exception: + # Never let handler errors propagate + self.handleError(record) + + +# ============================================================ +# Global Log Store Instance +# ============================================================ + +_LOG_STORE: Optional[LogStore] = None + + +def get_log_store() -> LogStore: + """Get or create the global log store instance.""" + global _LOG_STORE + if _LOG_STORE is None: + _LOG_STORE = LogStore() + if LOG_STORE_ENABLED: + _LOG_STORE.initialize() + return _LOG_STORE + + +def get_sqlite_log_handler() -> SQLiteLogHandler: + """Get a configured SQLite log handler. + + Returns: + Configured SQLiteLogHandler instance. 
+ """ + handler = SQLiteLogHandler(get_log_store()) + handler.setLevel(logging.INFO) + handler.setFormatter(logging.Formatter('%(message)s')) + return handler diff --git a/src/mes_dashboard/core/metrics.py b/src/mes_dashboard/core/metrics.py new file mode 100644 index 0000000..cd01da7 --- /dev/null +++ b/src/mes_dashboard/core/metrics.py @@ -0,0 +1,232 @@ +# -*- coding: utf-8 -*- +"""Performance metrics collection for MES Dashboard. + +Collects query latency metrics using an in-memory sliding window. +Each worker maintains independent statistics. +""" + +from __future__ import annotations + +import logging +import os +import threading +import time +from collections import deque +from dataclasses import dataclass +from datetime import datetime +from typing import Deque, List, Optional + +logger = logging.getLogger('mes_dashboard.metrics') + +# ============================================================ +# Configuration +# ============================================================ + +# Maximum number of latency samples to keep +METRICS_WINDOW_SIZE = int(os.getenv('METRICS_WINDOW_SIZE', '1000')) + +# Threshold for "slow" queries (seconds) +SLOW_QUERY_THRESHOLD = float(os.getenv('SLOW_QUERY_THRESHOLD', '1.0')) + + +# ============================================================ +# Types +# ============================================================ + +@dataclass +class MetricsSummary: + """Summary of collected metrics.""" + p50_ms: float + p95_ms: float + p99_ms: float + count: int + slow_count: int + slow_rate: float + worker_pid: int + collected_at: str + + +# ============================================================ +# Query Metrics Implementation +# ============================================================ + +class QueryMetrics: + """Collects and summarizes query latency metrics. + + Uses a thread-safe sliding window to track the most recent + query latencies. Provides percentile calculations for + monitoring and alerting. 
+ + Usage: + metrics = QueryMetrics() + + # Record a query + start = time.time() + execute_query() + metrics.record_latency(time.time() - start) + + # Get summary + summary = metrics.get_summary() + """ + + def __init__(self, window_size: int = METRICS_WINDOW_SIZE): + """Initialize query metrics collector. + + Args: + window_size: Maximum number of samples to keep. + """ + self.window_size = window_size + self._latencies: Deque[float] = deque(maxlen=window_size) + self._lock = threading.Lock() + self._worker_pid = os.getpid() + + def record_latency(self, latency_seconds: float) -> None: + """Record a query latency. + + Args: + latency_seconds: Query execution time in seconds. + """ + with self._lock: + self._latencies.append(latency_seconds) + + # Log slow queries + if latency_seconds > SLOW_QUERY_THRESHOLD: + logger.warning( + f"Slow query detected: {latency_seconds:.2f}s " + f"(threshold: {SLOW_QUERY_THRESHOLD}s)" + ) + + def get_percentile(self, percentile: float) -> float: + """Calculate a specific percentile from the latency data. + + Args: + percentile: Percentile to calculate (0-100). + + Returns: + Latency value at the given percentile in seconds. + """ + with self._lock: + if not self._latencies: + return 0.0 + + sorted_latencies = sorted(self._latencies) + index = int((percentile / 100.0) * len(sorted_latencies)) + # Clamp index to valid range + index = min(index, len(sorted_latencies) - 1) + return sorted_latencies[index] + + def get_percentiles(self) -> dict: + """Calculate P50, P95, and P99 percentiles. + + Returns: + Dictionary with percentile values in milliseconds. 
+ """ + with self._lock: + if not self._latencies: + return { + "p50": 0.0, + "p95": 0.0, + "p99": 0.0, + "count": 0, + "slow_count": 0 + } + + sorted_latencies = sorted(self._latencies) + count = len(sorted_latencies) + + def get_percentile_value(p: float) -> float: + index = int((p / 100.0) * count) + index = min(index, count - 1) + return sorted_latencies[index] + + slow_count = sum(1 for l in sorted_latencies if l > SLOW_QUERY_THRESHOLD) + + return { + "p50": get_percentile_value(50), + "p95": get_percentile_value(95), + "p99": get_percentile_value(99), + "count": count, + "slow_count": slow_count + } + + def get_summary(self) -> MetricsSummary: + """Get a complete metrics summary. + + Returns: + MetricsSummary with all collected metrics. + """ + percentiles = self.get_percentiles() + + slow_rate = 0.0 + if percentiles["count"] > 0: + slow_rate = percentiles["slow_count"] / percentiles["count"] + + return MetricsSummary( + p50_ms=round(percentiles["p50"] * 1000, 2), + p95_ms=round(percentiles["p95"] * 1000, 2), + p99_ms=round(percentiles["p99"] * 1000, 2), + count=percentiles["count"], + slow_count=percentiles["slow_count"], + slow_rate=round(slow_rate, 4), + worker_pid=self._worker_pid, + collected_at=datetime.now().isoformat() + ) + + def get_latencies(self) -> List[float]: + """Get a copy of all recorded latencies. + + Returns: + List of latencies in seconds. 
+ """ + with self._lock: + return list(self._latencies) + + def clear(self) -> None: + """Clear all recorded metrics.""" + with self._lock: + self._latencies.clear() + logger.info(f"Metrics cleared for worker {self._worker_pid}") + + +# ============================================================ +# Global Query Metrics Instance +# ============================================================ + +_QUERY_METRICS: Optional[QueryMetrics] = None + + +def get_query_metrics() -> QueryMetrics: + """Get or create the global query metrics instance.""" + global _QUERY_METRICS + if _QUERY_METRICS is None: + _QUERY_METRICS = QueryMetrics() + return _QUERY_METRICS + + +def get_metrics_summary() -> dict: + """Get current metrics summary as a dictionary. + + Returns: + Dictionary with metrics summary information. + """ + metrics = get_query_metrics() + summary = metrics.get_summary() + return { + "p50_ms": summary.p50_ms, + "p95_ms": summary.p95_ms, + "p99_ms": summary.p99_ms, + "count": summary.count, + "slow_count": summary.slow_count, + "slow_rate": summary.slow_rate, + "worker_pid": summary.worker_pid, + "collected_at": summary.collected_at + } + + +def record_query_latency(latency_seconds: float) -> None: + """Record a query latency to the global metrics. + + Args: + latency_seconds: Query execution time in seconds. + """ + get_query_metrics().record_latency(latency_seconds) diff --git a/src/mes_dashboard/core/permissions.py b/src/mes_dashboard/core/permissions.py new file mode 100644 index 0000000..b1d92fc --- /dev/null +++ b/src/mes_dashboard/core/permissions.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +"""Permission checking utilities.""" + +from __future__ import annotations + +from functools import wraps +from typing import TYPE_CHECKING, Callable + +from flask import jsonify, redirect, request, session, url_for + +if TYPE_CHECKING: + from typing import Any + + +def is_admin_logged_in() -> bool: + """Check if an admin is currently logged in. 
+ + Returns: + True if 'admin' key exists in session + """ + return "admin" in session + + +def get_current_admin() -> dict | None: + """Get current logged-in admin info. + + Returns: + Admin info dict or None if not logged in + """ + return session.get("admin") + + +def _is_ajax_request() -> bool: + """Check if the current request is an AJAX request. + + Returns: + True if request appears to be AJAX (fetch/XHR) + """ + # Check X-Requested-With header (jQuery style) + if request.headers.get("X-Requested-With") == "XMLHttpRequest": + return True + # Check Accept header for JSON + accept = request.headers.get("Accept", "") + if "application/json" in accept: + return True + # Check Content-Type for JSON POST requests + content_type = request.headers.get("Content-Type", "") + if "application/json" in content_type: + return True + return False + + +def admin_required(f: Callable) -> Callable: + """Decorator to require admin login for a route. + + For regular requests: Redirects to login page if not logged in. + For AJAX requests: Returns JSON error with 401 status. 
+ """ + @wraps(f) + def decorated(*args: Any, **kwargs: Any) -> Any: + if not is_admin_logged_in(): + if _is_ajax_request(): + return jsonify({"error": "請先登入管理員帳號", "login_required": True}), 401 + return redirect(url_for("auth.login", next=request.url)) + return f(*args, **kwargs) + return decorated diff --git a/src/mes_dashboard/core/redis_client.py b/src/mes_dashboard/core/redis_client.py new file mode 100644 index 0000000..a0ad179 --- /dev/null +++ b/src/mes_dashboard/core/redis_client.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- +"""Redis client management for MES Dashboard WIP cache.""" + +from __future__ import annotations + +import logging +import os +from typing import Optional + +import redis + +logger = logging.getLogger('mes_dashboard.redis') + +# ============================================================ +# Configuration from environment variables +# ============================================================ + +REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379/0') +REDIS_ENABLED = os.getenv('REDIS_ENABLED', 'true').lower() == 'true' +REDIS_KEY_PREFIX = os.getenv('REDIS_KEY_PREFIX', 'mes_wip') + +# ============================================================ +# Redis Client Singleton +# ============================================================ + +_REDIS_CLIENT: Optional[redis.Redis] = None + + +def get_redis_client() -> Optional[redis.Redis]: + """Get Redis client with connection pooling and health check. + + Returns: + Redis client instance, or None if Redis is disabled or unavailable. 
+ """ + global _REDIS_CLIENT + + if not REDIS_ENABLED: + logger.debug("Redis is disabled via REDIS_ENABLED=false") + return None + + if _REDIS_CLIENT is None: + try: + _REDIS_CLIENT = redis.Redis.from_url( + REDIS_URL, + decode_responses=True, + socket_timeout=5, + socket_connect_timeout=5, + retry_on_timeout=True, + health_check_interval=30 + ) + # Test connection + _REDIS_CLIENT.ping() + logger.info(f"Redis client connected to {REDIS_URL}") + except redis.RedisError as e: + logger.warning(f"Failed to connect to Redis: {e}") + _REDIS_CLIENT = None + return None + + return _REDIS_CLIENT + + +def redis_available() -> bool: + """Check if Redis connection is available. + + Returns: + True if Redis is enabled and responding to PING. + """ + if not REDIS_ENABLED: + return False + + client = get_redis_client() + if client is None: + return False + + try: + client.ping() + return True + except redis.RedisError as e: + logger.warning(f"Redis health check failed: {e}") + return False + + +def get_key(key: str) -> str: + """Get full Redis key with prefix. + + Args: + key: Key name without prefix (e.g., "meta:sys_date") + + Returns: + Full key with prefix (e.g., "mes_wip:meta:sys_date") + """ + return f"{REDIS_KEY_PREFIX}:{key}" + + +def get_key_prefix() -> str: + """Get the Redis key prefix. + + Returns: + The configured key prefix (e.g., "mes_wip") + """ + return REDIS_KEY_PREFIX + + +def close_redis() -> None: + """Close Redis connection. + + Call this during application shutdown. + """ + global _REDIS_CLIENT + + if _REDIS_CLIENT is not None: + try: + _REDIS_CLIENT.close() + logger.info("Redis connection closed") + except Exception as e: + logger.warning(f"Error closing Redis connection: {e}") + finally: + _REDIS_CLIENT = None + + +def try_acquire_lock(lock_name: str, ttl_seconds: int = 60) -> bool: + """Try to acquire a distributed lock using Redis SET NX. + + This is a non-blocking lock acquisition. 
If the lock is already held, + returns False immediately without waiting. + + Args: + lock_name: Name of the lock (will be prefixed with key prefix). + ttl_seconds: Lock expiration time in seconds (prevents deadlocks). + + Returns: + True if lock was acquired, False if already held by another process. + """ + client = get_redis_client() + if client is None: + # Redis unavailable - allow operation to proceed (fail-open) + logger.warning(f"Redis unavailable, skipping lock for {lock_name}") + return True + + try: + lock_key = f"{REDIS_KEY_PREFIX}:lock:{lock_name}" + # SET NX EX is atomic: only sets if key doesn't exist + acquired = client.set(lock_key, str(os.getpid()), nx=True, ex=ttl_seconds) + if acquired: + logger.debug(f"Acquired lock: {lock_name}") + else: + logger.debug(f"Lock already held: {lock_name}") + return bool(acquired) + except Exception as e: + logger.warning(f"Failed to acquire lock {lock_name}: {e}") + # Fail-open: allow operation if Redis has issues + return True + + +def release_lock(lock_name: str) -> None: + """Release a distributed lock. + + Args: + lock_name: Name of the lock to release. 
+ """ + client = get_redis_client() + if client is None: + return + + try: + lock_key = f"{REDIS_KEY_PREFIX}:lock:{lock_name}" + client.delete(lock_key) + logger.debug(f"Released lock: {lock_name}") + except Exception as e: + logger.warning(f"Failed to release lock {lock_name}: {e}") diff --git a/src/mes_dashboard/core/resilience.py b/src/mes_dashboard/core/resilience.py new file mode 100644 index 0000000..df753ac --- /dev/null +++ b/src/mes_dashboard/core/resilience.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +"""Runtime resilience thresholds and operator recommendation helpers.""" + +from __future__ import annotations + +import os +from datetime import datetime, timezone +from typing import Any + + +def _env_int(name: str, default: int) -> int: + try: + return int(os.getenv(name, str(default))) + except (TypeError, ValueError): + return default + + +def _env_float(name: str, default: float) -> float: + try: + return float(os.getenv(name, str(default))) + except (TypeError, ValueError): + return default + + +def _parse_iso(ts: str | None) -> datetime | None: + if not ts: + return None + try: + value = datetime.fromisoformat(ts) + except (TypeError, ValueError): + return None + + if value.tzinfo is None: + value = value.replace(tzinfo=timezone.utc) + return value + + +def _utc_now() -> datetime: + return datetime.now(timezone.utc) + + +def get_resilience_thresholds() -> dict[str, Any]: + """Return effective resilience thresholds from environment config.""" + return { + "degraded_alert_seconds": _env_int("RESILIENCE_DEGRADED_ALERT_SECONDS", 300), + "pool_saturation_warning": _env_float("RESILIENCE_POOL_SATURATION_WARNING", 0.90), + "pool_saturation_critical": _env_float("RESILIENCE_POOL_SATURATION_CRITICAL", 1.0), + "restart_churn_window_seconds": _env_int("RESILIENCE_RESTART_CHURN_WINDOW_SECONDS", 600), + "restart_churn_threshold": _env_int("RESILIENCE_RESTART_CHURN_THRESHOLD", 3), + } + + +def summarize_restart_history( + history: list[dict[str, Any]] | None, + 
*, + now: datetime | None = None, + window_seconds: int | None = None, + threshold: int | None = None, +) -> dict[str, Any]: + """Summarize restart churn for recent watchdog-triggered restarts.""" + values = history or [] + thresholds = get_resilience_thresholds() + active_window = int( + window_seconds + if window_seconds is not None + else thresholds["restart_churn_window_seconds"] + ) + active_threshold = int( + threshold + if threshold is not None + else thresholds["restart_churn_threshold"] + ) + now_dt = now or _utc_now() + + in_window_count = 0 + last_completed = None + for item in values: + completed_at = _parse_iso(item.get("completed_at")) + if completed_at is None: + continue + last_completed = max(last_completed, completed_at) if last_completed else completed_at + age = (now_dt - completed_at).total_seconds() + if age <= active_window: + in_window_count += 1 + + return { + "window_seconds": active_window, + "threshold": active_threshold, + "count": in_window_count, + "exceeded": in_window_count >= active_threshold, + "last_completed_at": last_completed.isoformat() if last_completed else None, + } + + +def build_recovery_recommendation( + *, + degraded_reason: str | None, + pool_saturation: float | None, + circuit_state: str | None, + restart_churn_exceeded: bool, + cooldown_active: bool = False, +) -> dict[str, Any]: + """Build machine-readable operator recommendation for degraded conditions.""" + if degraded_reason is None: + return { + "action": "none", + "reason": "healthy", + } + + if degraded_reason == "database_unreachable": + return { + "action": "check_database_connectivity", + "reason": "database_unreachable", + } + + if degraded_reason == "redis_unavailable": + return { + "action": "continue_degraded_mode", + "reason": "redis_unavailable", + } + + if circuit_state == "OPEN": + return { + "action": "wait_for_circuit_half_open", + "reason": "circuit_breaker_open", + } + + if degraded_reason == "db_pool_saturated": + if restart_churn_exceeded: + 
return { + "action": "throttle_and_investigate_queries", + "reason": "restart_churn_exceeded", + } + if cooldown_active: + return { + "action": "wait_for_restart_cooldown", + "reason": "restart_cooldown_active", + } + if pool_saturation is not None and pool_saturation >= 1.0: + return { + "action": "consider_controlled_worker_restart", + "reason": "db_pool_saturated", + } + + return { + "action": "observe_and_retry", + "reason": degraded_reason, + } diff --git a/src/mes_dashboard/core/response.py b/src/mes_dashboard/core/response.py new file mode 100644 index 0000000..3a070e2 --- /dev/null +++ b/src/mes_dashboard/core/response.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +"""Standard API response format utilities for MES Dashboard. + +Provides consistent response envelope for all API endpoints. +""" + +from __future__ import annotations + +import os +from datetime import datetime +from typing import Any, Dict, Optional + +from flask import jsonify, request + +# ============================================================ +# Standard Error Codes +# ============================================================ + +# Database errors +DB_CONNECTION_FAILED = "DB_CONNECTION_FAILED" +DB_QUERY_TIMEOUT = "DB_QUERY_TIMEOUT" +DB_QUERY_ERROR = "DB_QUERY_ERROR" +DB_POOL_EXHAUSTED = "DB_POOL_EXHAUSTED" + +# Service errors +SERVICE_UNAVAILABLE = "SERVICE_UNAVAILABLE" +CIRCUIT_BREAKER_OPEN = "CIRCUIT_BREAKER_OPEN" + +# Client errors +VALIDATION_ERROR = "VALIDATION_ERROR" +UNAUTHORIZED = "UNAUTHORIZED" +FORBIDDEN = "FORBIDDEN" +NOT_FOUND = "NOT_FOUND" +TOO_MANY_REQUESTS = "TOO_MANY_REQUESTS" + +# Server errors +INTERNAL_ERROR = "INTERNAL_ERROR" + + +# ============================================================ +# Response Functions +# ============================================================ + +def success_response( + data: Any, + meta: Optional[Dict[str, Any]] = None, + status_code: int = 200 +): + """Create a standardized success response. 
+ + Args: + data: The response data payload. + meta: Optional metadata (timestamp, request_id, etc.). + status_code: HTTP status code (default: 200). + + Returns: + Flask response tuple (response, status_code). + + Example: + >>> return success_response({"users": [...]}) + >>> return success_response({"id": 1}, meta={"cached": True}) + """ + response = { + "success": True, + "data": data, + } + + # Add metadata if provided + if meta is not None: + response["meta"] = meta + else: + # Add default metadata + response["meta"] = { + "timestamp": datetime.now().isoformat(), + } + + return jsonify(response), status_code + + +def error_response( + code: str, + message: str, + details: Optional[str] = None, + status_code: int = 500, + meta: Optional[Dict[str, Any]] = None, + headers: Optional[Dict[str, str]] = None, +): + """Create a standardized error response. + + Args: + code: Machine-readable error code (e.g., DB_CONNECTION_FAILED). + message: User-friendly error message. + details: Technical details (only shown in development mode). + status_code: HTTP status code (default: 500). + + Returns: + Flask response tuple (response, status_code). + + Example: + >>> return error_response( + ... DB_CONNECTION_FAILED, + ... "資料庫連線失敗,請稍後再試", + ... "ORA-12541: TNS:no listener", + ... status_code=503 + ... 
) + """ + error_obj = { + "code": code, + "message": message, + } + + # Only include details in development mode + if details and _is_development_mode(): + error_obj["details"] = details + + response_meta: Dict[str, Any] = { + "timestamp": datetime.now().isoformat(), + } + if meta: + response_meta.update(meta) + + response = { + "success": False, + "error": error_obj, + "meta": response_meta + } + + resp = jsonify(response) + if headers: + for key, value in headers.items(): + resp.headers[key] = value + return resp, status_code + + +def _is_development_mode() -> bool: + """Check if the application is running in development mode.""" + flask_env = os.getenv("FLASK_ENV", "production") + flask_debug = os.getenv("FLASK_DEBUG", "0") + return flask_env == "development" or flask_debug == "1" + + +# ============================================================ +# Convenience Functions for Common Errors +# ============================================================ + +def db_connection_error(details: Optional[str] = None): + """Return a database connection error response.""" + return error_response( + DB_CONNECTION_FAILED, + "資料庫連線失敗,請稍後再試", + details, + status_code=503 + ) + + +def db_query_timeout_error(details: Optional[str] = None): + """Return a database query timeout error response.""" + return error_response( + DB_QUERY_TIMEOUT, + "資料庫查詢逾時,請稍後再試", + details, + status_code=504 + ) + + +def service_unavailable_error(details: Optional[str] = None): + """Return a service unavailable error response.""" + return error_response( + SERVICE_UNAVAILABLE, + "服務暫時無法使用,請稍後再試", + details, + status_code=503 + ) + + +def circuit_breaker_error( + details: Optional[str] = None, + retry_after_seconds: int = 30, +): + """Return a circuit breaker open error response.""" + retry_after_seconds = max(int(retry_after_seconds), 1) + return error_response( + CIRCUIT_BREAKER_OPEN, + "服務暫時降級中,請稍後再試", + details, + status_code=503, + meta={"retry_after_seconds": retry_after_seconds}, + 
headers={"Retry-After": str(retry_after_seconds)}, + ) + + +def pool_exhausted_error( + details: Optional[str] = None, + retry_after_seconds: int = 5, +): + """Return a pool exhausted error response.""" + retry_after_seconds = max(int(retry_after_seconds), 1) + return error_response( + DB_POOL_EXHAUSTED, + "目前查詢流量較高,請稍後再試", + details, + status_code=503, + meta={"retry_after_seconds": retry_after_seconds}, + headers={"Retry-After": str(retry_after_seconds)}, + ) + + +def validation_error(message: str, details: Optional[str] = None): + """Return a validation error response.""" + return error_response( + VALIDATION_ERROR, + message, + details, + status_code=400 + ) + + +def unauthorized_error(message: str = "請先登入"): + """Return an unauthorized error response.""" + return error_response( + UNAUTHORIZED, + message, + status_code=401 + ) + + +def forbidden_error(message: str = "權限不足"): + """Return a forbidden error response.""" + return error_response( + FORBIDDEN, + message, + status_code=403 + ) + + +def not_found_error(message: str = "找不到請求的資源"): + """Return a not found error response.""" + return error_response( + NOT_FOUND, + message, + status_code=404 + ) + + +def too_many_requests_error(message: str = "請求過於頻繁,請稍後再試"): + """Return a too many requests error response.""" + return error_response( + TOO_MANY_REQUESTS, + message, + status_code=429 + ) + + +def internal_error(details: Optional[str] = None): + """Return an internal server error response.""" + return error_response( + INTERNAL_ERROR, + "伺服器內部錯誤", + details, + status_code=500 + ) diff --git a/src/mes_dashboard/core/utils.py b/src/mes_dashboard/core/utils.py new file mode 100644 index 0000000..f744208 --- /dev/null +++ b/src/mes_dashboard/core/utils.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +"""Utility functions for MES Dashboard. + +Common helper functions used across services. + +Note: SQL filter building functions in this module are DEPRECATED. 
+Use mes_dashboard.sql.CommonFilters with QueryBuilder instead. +""" + +import warnings +from datetime import datetime +from typing import Any, Dict, List, Optional + +import pandas as pd + +from mes_dashboard.config.constants import ( + DEFAULT_DAYS_BACK, + EQUIPMENT_FLAG_FILTERS, + EXCLUDED_LOCATIONS, + EXCLUDED_ASSET_STATUSES, +) + + +# ============================================================ +# Parameter Extraction +# ============================================================ + + +def get_days_back(filters: Optional[Dict] = None, default: int = DEFAULT_DAYS_BACK) -> int: + """Extract days_back parameter from filters dict.""" + if filters: + return int(filters.get('days_back', default)) + return default + + +# ============================================================ +# SQL Filter Building (DEPRECATED) +# Use mes_dashboard.sql.CommonFilters with QueryBuilder instead. +# ============================================================ + + +def build_filter_conditions( + filters: Optional[Dict], + field_mapping: Optional[Dict[str, str]] = None, +) -> List[str]: + """Build SQL WHERE conditions from filters dict. + + .. deprecated:: + Use QueryBuilder with add_in_condition() or add_param_condition() instead. + This function uses string formatting which may be vulnerable to SQL injection. + """ + warnings.warn( + "build_filter_conditions is deprecated. 
Use QueryBuilder with add_in_condition() instead.", + DeprecationWarning, + stacklevel=2 + ) + if not filters: + return [] + + conditions = [] + + if field_mapping: + for filter_key, column_name in field_mapping.items(): + values = filters.get(filter_key) + if values and len(values) > 0: + if isinstance(values, list): + value_list = "', '".join(str(v) for v in values) + conditions.append(f"{column_name} IN ('{value_list}')") + else: + conditions.append(f"{column_name} = '{values}'") + + return conditions + + +def build_equipment_filter_sql(filters: Optional[Dict]) -> List[str]: + """Build SQL conditions for equipment flag filters. + + Note: This function is safe as it uses static conditions from config, + but consider migrating to CommonFilters.add_equipment_flag_filters() + for consistency with the new architecture. + """ + if not filters: + return [] + + conditions = [] + + for flag_key, sql_condition in EQUIPMENT_FLAG_FILTERS.items(): + if filters.get(flag_key): + conditions.append(sql_condition) + + return conditions + + +def build_location_filter_sql( + filters: Optional[Dict], + column_name: str = 'LOCATIONNAME', +) -> Optional[str]: + """Build SQL condition for location filtering. + + .. deprecated:: + Use QueryBuilder.add_in_condition() instead. + This function uses string formatting which may be vulnerable to SQL injection. + """ + warnings.warn( + "build_location_filter_sql is deprecated. Use QueryBuilder.add_in_condition() instead.", + DeprecationWarning, + stacklevel=2 + ) + if not filters: + return None + + locations = filters.get('locations') + if locations and len(locations) > 0: + loc_list = "', '".join(locations) + return f"{column_name} IN ('{loc_list}')" + + return None + + +def build_asset_status_filter_sql( + filters: Optional[Dict], + column_name: str = 'PJ_ASSETSSTATUS', +) -> Optional[str]: + """Build SQL condition for asset status filtering. + + .. deprecated:: + Use QueryBuilder.add_in_condition() instead. 
+ This function uses string formatting which may be vulnerable to SQL injection. + """ + warnings.warn( + "build_asset_status_filter_sql is deprecated. Use QueryBuilder.add_in_condition() instead.", + DeprecationWarning, + stacklevel=2 + ) + if not filters: + return None + + statuses = filters.get('assetsStatuses') + if statuses and len(statuses) > 0: + status_list = "', '".join(statuses) + return f"{column_name} IN ('{status_list}')" + + return None + + +def build_exclusion_sql( + locations: List[str] = None, + asset_statuses: List[str] = None, + location_column: str = 'LOCATIONNAME', + status_column: str = 'PJ_ASSETSSTATUS', +) -> List[str]: + """Build SQL conditions for excluding specific locations and statuses. + + .. deprecated:: + Use CommonFilters.add_location_exclusion() and + CommonFilters.add_asset_status_exclusion() instead. + """ + warnings.warn( + "build_exclusion_sql is deprecated. Use CommonFilters with QueryBuilder instead.", + DeprecationWarning, + stacklevel=2 + ) + conditions = [] + + loc_list = locations if locations is not None else EXCLUDED_LOCATIONS + if loc_list: + locs = "', '".join(loc_list) + conditions.append(f"{location_column} NOT IN ('{locs}')") + + status_list = asset_statuses if asset_statuses is not None else EXCLUDED_ASSET_STATUSES + if status_list: + stats = "', '".join(status_list) + conditions.append(f"{status_column} NOT IN ('{stats}')") + + return conditions + + +# ============================================================ +# Data Transformation +# ============================================================ + + +def convert_datetime_fields( + df: pd.DataFrame, + columns: Optional[List[str]] = None, + format_str: str = '%Y-%m-%d %H:%M:%S', +) -> pd.DataFrame: + """Convert datetime columns in DataFrame to formatted strings.""" + if df.empty: + return df + + if columns is None: + columns = df.select_dtypes(include=['datetime64']).columns.tolist() + + for col in columns: + if col in df.columns: + df[col] = df[col].apply( + 
lambda x: x.strftime(format_str) if pd.notna(x) and hasattr(x, 'strftime') else None + ) + + return df + + +def row_to_dict(row: Any, columns: List[str]) -> Dict[str, Any]: + """Convert a database row to dictionary with datetime handling.""" + row_dict = {} + for i, col in enumerate(columns): + value = row[i] + if isinstance(value, datetime): + row_dict[col] = value.strftime('%Y-%m-%d %H:%M:%S') + else: + row_dict[col] = value + return row_dict + + +# ============================================================ +# API Response Formatting +# ============================================================ + + +def format_api_response( + success: bool, + data: Any = None, + error: Optional[str] = None, + count: Optional[int] = None, + **extra, +) -> Dict[str, Any]: + """Create standardized API response dict.""" + response = {'success': success} + + if data is not None: + response['data'] = data + + if error is not None: + response['error'] = error + + if count is not None: + response['count'] = count + + response.update(extra) + + return response + + +def safe_int(value: Any, default: int = 0) -> int: + """Safely convert value to int.""" + try: + return int(value) if value is not None else default + except (ValueError, TypeError): + return default + + +def safe_float(value: Any, default: float = 0.0) -> float: + """Safely convert value to float.""" + try: + return float(value) if value is not None else default + except (ValueError, TypeError): + return default diff --git a/src/mes_dashboard/routes/__init__.py b/src/mes_dashboard/routes/__init__.py new file mode 100644 index 0000000..0d16e5e --- /dev/null +++ b/src/mes_dashboard/routes/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +"""API routes module for MES Dashboard. + +Contains Flask Blueprints for different API endpoints. 
+""" + +from .wip_routes import wip_bp +from .resource_routes import resource_bp +from .dashboard_routes import dashboard_bp +from .excel_query_routes import excel_query_bp +from .hold_routes import hold_bp +from .auth_routes import auth_bp +from .admin_routes import admin_bp +from .resource_history_routes import resource_history_bp +from .job_query_routes import job_query_bp + + +def register_routes(app) -> None: + """Register all API blueprints on the Flask app.""" + app.register_blueprint(wip_bp) + app.register_blueprint(resource_bp) + app.register_blueprint(dashboard_bp) + app.register_blueprint(excel_query_bp) + app.register_blueprint(hold_bp) + app.register_blueprint(resource_history_bp) + app.register_blueprint(job_query_bp) + +__all__ = [ + 'wip_bp', + 'resource_bp', + 'dashboard_bp', + 'excel_query_bp', + 'hold_bp', + 'auth_bp', + 'admin_bp', + 'resource_history_bp', + 'job_query_bp', + 'register_routes', +] diff --git a/src/mes_dashboard/routes/admin_routes.py b/src/mes_dashboard/routes/admin_routes.py new file mode 100644 index 0000000..e50cf85 --- /dev/null +++ b/src/mes_dashboard/routes/admin_routes.py @@ -0,0 +1,538 @@ +# -*- coding: utf-8 -*- +"""Admin routes for page management and performance monitoring.""" + +from __future__ import annotations + +import json +import logging +import os +import time +from datetime import datetime +from pathlib import Path + +from flask import Blueprint, g, jsonify, render_template, request + +from mes_dashboard.core.permissions import admin_required +from mes_dashboard.core.response import error_response, TOO_MANY_REQUESTS +from mes_dashboard.core.resilience import ( + build_recovery_recommendation, + get_resilience_thresholds, + summarize_restart_history, +) +from mes_dashboard.services.page_registry import get_all_pages, set_page_status + +admin_bp = Blueprint("admin", __name__, url_prefix="/admin") +logger = logging.getLogger("mes_dashboard.admin") + +# ============================================================ 
+# Worker Restart Configuration +# ============================================================ + +WATCHDOG_RUNTIME_DIR = os.getenv("WATCHDOG_RUNTIME_DIR", "/tmp") +RESTART_FLAG_PATH = os.getenv( + "WATCHDOG_RESTART_FLAG", + f"{WATCHDOG_RUNTIME_DIR}/mes_dashboard_restart.flag" +) +RESTART_STATE_PATH = os.getenv( + "WATCHDOG_STATE_FILE", + f"{WATCHDOG_RUNTIME_DIR}/mes_dashboard_restart_state.json" +) +WATCHDOG_PID_PATH = os.getenv( + "WATCHDOG_PID_FILE", + f"{WATCHDOG_RUNTIME_DIR}/gunicorn.pid" +) +GUNICORN_BIND = os.getenv("GUNICORN_BIND", "0.0.0.0:8080") +RESTART_COOLDOWN_SECONDS = int(os.getenv("WORKER_RESTART_COOLDOWN", "60")) + +# Track last restart request time (in-memory for this worker) +_last_restart_request: float = 0.0 + + +# ============================================================ +# Performance Monitoring Routes +# ============================================================ + +@admin_bp.route("/performance") +@admin_required +def performance(): + """Performance monitoring dashboard.""" + return render_template("admin/performance.html") + + +@admin_bp.route("/api/system-status", methods=["GET"]) +@admin_required +def api_system_status(): + """API: Get system status for performance dashboard.""" + from mes_dashboard.core.database import get_pool_runtime_config, get_pool_status + from mes_dashboard.core.redis_client import REDIS_ENABLED + from mes_dashboard.core.circuit_breaker import get_circuit_breaker_status + from mes_dashboard.routes.health_routes import ( + check_database, + check_redis, + get_route_cache_status, + ) + + # Database status + db_status, db_error = check_database() + + # Redis status + redis_status = 'disabled' + if REDIS_ENABLED: + redis_status, _ = check_redis() + + # Circuit breaker status + circuit_breaker = get_circuit_breaker_status() + route_cache = get_route_cache_status() + pool_runtime = get_pool_runtime_config() + try: + pool_state = get_pool_status() + except Exception: + pool_state = None + thresholds = 
get_resilience_thresholds() + restart_state = _get_restart_state() + restart_churn = _get_restart_churn_summary(restart_state) + in_cooldown, remaining = _check_restart_cooldown() + + degraded_reason = None + if db_status == "error": + degraded_reason = "database_unreachable" + elif circuit_breaker.get("state") == "OPEN": + degraded_reason = "circuit_breaker_open" + elif (pool_state or {}).get("saturation", 0.0) >= 1.0: + degraded_reason = "db_pool_saturated" + elif redis_status == "error": + degraded_reason = "redis_unavailable" + elif route_cache.get("degraded"): + degraded_reason = "route_cache_degraded" + recommendation = build_recovery_recommendation( + degraded_reason=degraded_reason, + pool_saturation=(pool_state or {}).get("saturation"), + circuit_state=circuit_breaker.get("state"), + restart_churn_exceeded=bool(restart_churn.get("exceeded")), + cooldown_active=in_cooldown, + ) + + # Cache status + from mes_dashboard.routes.health_routes import ( + get_cache_status, + get_resource_cache_status, + get_equipment_status_cache_status + ) + + return jsonify({ + "success": True, + "data": { + "database": { + "status": db_status, + "error": db_error + }, + "redis": { + "status": redis_status, + "enabled": REDIS_ENABLED + }, + "circuit_breaker": circuit_breaker, + "cache": { + "wip": get_cache_status(), + "resource": get_resource_cache_status(), + "equipment": get_equipment_status_cache_status() + }, + "runtime_resilience": { + "degraded_reason": degraded_reason, + "pool_runtime": pool_runtime, + "pool_state": pool_state, + "route_cache": route_cache, + "thresholds": thresholds, + "restart_churn": restart_churn, + "recovery_recommendation": recommendation, + "restart_cooldown": { + "active": in_cooldown, + "remaining_seconds": int(remaining) if in_cooldown else 0, + }, + }, + "single_port_bind": GUNICORN_BIND, + "worker_pid": os.getpid() + } + }) + + +@admin_bp.route("/api/metrics", methods=["GET"]) +@admin_required +def api_metrics(): + """API: Get performance 
metrics for dashboard.""" + from mes_dashboard.core.metrics import get_metrics_summary, get_query_metrics + + summary = get_metrics_summary() + metrics = get_query_metrics() + + return jsonify({ + "success": True, + "data": { + "p50_ms": summary.get("p50_ms"), + "p95_ms": summary.get("p95_ms"), + "p99_ms": summary.get("p99_ms"), + "count": summary.get("count"), + "slow_count": summary.get("slow_count"), + "slow_rate": summary.get("slow_rate"), + "worker_pid": summary.get("worker_pid"), + "collected_at": summary.get("collected_at"), + # Include latency distribution for charts + "latencies": metrics.get_latencies()[-100:] # Last 100 for chart + } + }) + + +@admin_bp.route("/api/logs", methods=["GET"]) +@admin_required +def api_logs(): + """API: Get recent logs from SQLite log store.""" + from mes_dashboard.core.log_store import get_log_store, LOG_STORE_ENABLED + + if not LOG_STORE_ENABLED: + return jsonify({ + "success": True, + "data": { + "logs": [], + "enabled": False, + "total": 0 + } + }) + + # Query parameters + level = request.args.get("level") + q = request.args.get("q") + limit = request.args.get("limit", 50, type=int) + offset = request.args.get("offset", 0, type=int) + since = request.args.get("since") + + log_store = get_log_store() + + # Get total count for pagination + total = log_store.count_logs(level=level, q=q, since=since) + + # Get paginated logs + logs = log_store.query_logs( + level=level, + q=q, + limit=min(limit, 100), # Cap at 100 per page + offset=offset, + since=since + ) + + return jsonify({ + "success": True, + "data": { + "logs": logs, + "count": len(logs), + "total": total, + "enabled": True, + "stats": log_store.get_stats() + } + }) + + +@admin_bp.route("/api/logs/cleanup", methods=["POST"]) +@admin_required +def api_logs_cleanup(): + """API: Manually trigger log cleanup. 
+ + Supports optional parameters: + - older_than_days: Delete logs older than N days (default: use configured retention) + - keep_count: Keep only the most recent N logs (optional) + """ + from mes_dashboard.core.log_store import get_log_store, LOG_STORE_ENABLED + + if not LOG_STORE_ENABLED: + return jsonify({ + "success": False, + "error": "Log store is disabled" + }), 400 + + log_store = get_log_store() + + # Get current stats before cleanup + stats_before = log_store.get_stats() + + # Perform cleanup + deleted = log_store.cleanup_old_logs() + + # Get stats after cleanup + stats_after = log_store.get_stats() + + user = getattr(g, "username", "unknown") + logger.info(f"Log cleanup triggered by {user}: deleted {deleted} entries") + + return jsonify({ + "success": True, + "data": { + "deleted": deleted, + "before": { + "count": stats_before.get("count", 0), + "size_bytes": stats_before.get("size_bytes", 0) + }, + "after": { + "count": stats_after.get("count", 0), + "size_bytes": stats_after.get("size_bytes", 0) + } + } + }) + + +# ============================================================ +# Worker Restart Control Routes +# ============================================================ + +def _get_restart_state() -> dict: + """Read worker restart state from file.""" + state_path = Path(RESTART_STATE_PATH) + if not state_path.exists(): + return {} + try: + return json.loads(state_path.read_text()) + except (json.JSONDecodeError, IOError): + return {} + + +def _check_restart_cooldown() -> tuple[bool, float]: + """Check if restart is in cooldown. + + Returns: + Tuple of (is_in_cooldown, remaining_seconds). 
+ """ + global _last_restart_request + + # Check in-memory cooldown first + now = time.time() + elapsed = now - _last_restart_request + if elapsed < RESTART_COOLDOWN_SECONDS: + return True, RESTART_COOLDOWN_SECONDS - elapsed + + # Check file-based state (for cross-worker coordination) + state = _get_restart_state() + last_restart = state.get("last_restart", {}) + requested_at = last_restart.get("requested_at") + + if requested_at: + try: + request_time = datetime.fromisoformat(requested_at).timestamp() + elapsed = now - request_time + if elapsed < RESTART_COOLDOWN_SECONDS: + return True, RESTART_COOLDOWN_SECONDS - elapsed + except (ValueError, TypeError): + pass + + return False, 0.0 + + +def _get_restart_history(state: dict | None = None) -> list[dict]: + """Return bounded restart history for admin telemetry.""" + payload = state if state is not None else _get_restart_state() + raw_history = payload.get("history") or [] + if not isinstance(raw_history, list): + return [] + return raw_history[-20:] + + +def _get_restart_churn_summary(state: dict | None = None) -> dict: + """Summarize restart churn within active resilience window.""" + history = _get_restart_history(state) + return summarize_restart_history(history) + + +def _worker_recovery_hint(churn: dict, cooldown_active: bool) -> dict: + """Build worker control recommendation from churn/cooldown state.""" + if churn.get("exceeded"): + return { + "action": "throttle_and_investigate_queries", + "reason": "restart_churn_exceeded", + } + if cooldown_active: + return { + "action": "wait_for_restart_cooldown", + "reason": "restart_cooldown_active", + } + return { + "action": "restart_available", + "reason": "no_churn_or_cooldown", + } + + +@admin_bp.route("/api/worker/restart", methods=["POST"]) +@admin_required +def api_worker_restart(): + """API: Request worker restart. + + Writes a restart flag file that the watchdog process monitors. + Enforces a 60-second cooldown between restart requests. 
+ """ + global _last_restart_request + + # Check cooldown + in_cooldown, remaining = _check_restart_cooldown() + if in_cooldown: + return error_response( + TOO_MANY_REQUESTS, + f"Restart in cooldown. Please wait {int(remaining)} seconds.", + status_code=429 + ) + + # Get request metadata + user = getattr(g, "username", "unknown") + ip = request.remote_addr or "unknown" + timestamp = datetime.now().isoformat() + + # Write restart flag file + flag_path = Path(RESTART_FLAG_PATH) + flag_data = { + "user": user, + "ip": ip, + "timestamp": timestamp, + "worker_pid": os.getpid() + } + + try: + flag_path.write_text(json.dumps(flag_data)) + except IOError as e: + logger.error(f"Failed to write restart flag: {e}") + return error_response( + "RESTART_FAILED", + f"Failed to request restart: {e}", + status_code=500 + ) + + # Update in-memory cooldown + _last_restart_request = time.time() + + logger.info( + f"Worker restart requested by {user} from {ip}" + ) + + return jsonify({ + "success": True, + "data": { + "message": "Restart requested. 
Workers will reload shortly.", + "requested_by": user, + "requested_at": timestamp, + "single_port_bind": GUNICORN_BIND, + "watchdog": { + "runtime_dir": WATCHDOG_RUNTIME_DIR, + "flag_path": RESTART_FLAG_PATH, + "pid_path": WATCHDOG_PID_PATH, + "state_path": RESTART_STATE_PATH, + }, + } + }) + + +@admin_bp.route("/api/worker/status", methods=["GET"]) +@admin_required +def api_worker_status(): + """API: Get worker status and restart information.""" + # Check cooldown + in_cooldown, remaining = _check_restart_cooldown() + + # Get last restart info + state = _get_restart_state() + last_restart = state.get("last_restart", {}) + history = _get_restart_history(state) + churn = _get_restart_churn_summary(state) + thresholds = get_resilience_thresholds() + recommendation = _worker_recovery_hint(churn, in_cooldown) + + # Get worker start time (psutil is optional) + worker_start_time = None + try: + import psutil + process = psutil.Process(os.getpid()) + worker_start_time = datetime.fromtimestamp( + process.create_time() + ).isoformat() + except ImportError: + # psutil not installed, try /proc on Linux + try: + stat_path = f"/proc/{os.getpid()}/stat" + with open(stat_path) as f: + stat = f.read().split() + # Field 22 is starttime in clock ticks since boot + # This is a simplified fallback + pass + except Exception: + pass + except Exception: + pass + + return jsonify({ + "success": True, + "data": { + "worker_pid": os.getpid(), + "worker_start_time": worker_start_time, + "runtime_contract": { + "single_port_bind": GUNICORN_BIND, + "watchdog": { + "runtime_dir": WATCHDOG_RUNTIME_DIR, + "flag_path": RESTART_FLAG_PATH, + "flag_exists": Path(RESTART_FLAG_PATH).exists(), + "pid_path": WATCHDOG_PID_PATH, + "pid_exists": Path(WATCHDOG_PID_PATH).exists(), + "state_path": RESTART_STATE_PATH, + "state_exists": Path(RESTART_STATE_PATH).exists(), + }, + }, + "cooldown": { + "active": in_cooldown, + "remaining_seconds": int(remaining) if in_cooldown else 0 + }, + "resilience": { + 
"thresholds": thresholds, + "restart_churn": churn, + "recovery_recommendation": recommendation, + }, + "restart_history": history, + "last_restart": { + "requested_by": last_restart.get("requested_by"), + "requested_at": last_restart.get("requested_at"), + "requested_ip": last_restart.get("requested_ip"), + "completed_at": last_restart.get("completed_at"), + "success": last_restart.get("success") + } + } + }) + + +# ============================================================ +# Page Management Routes +# ============================================================ + +@admin_bp.route("/pages") +@admin_required +def pages(): + """Page management interface.""" + return render_template("admin/pages.html") + + +@admin_bp.route("/api/pages", methods=["GET"]) +@admin_required +def api_get_pages(): + """API: Get all page configurations.""" + return jsonify({"success": True, "pages": get_all_pages()}) + + +@admin_bp.route("/api/pages/", methods=["PUT"]) +@admin_required +def api_update_page(route: str): + """API: Update page status.""" + data = request.get_json() + status = data.get("status") + name = data.get("name") + + if status not in ("released", "dev"): + return jsonify({"success": False, "error": "Invalid status"}), 400 + + # Ensure route starts with / + if not route.startswith("/"): + route = "/" + route + + try: + set_page_status(route, status, name) + return jsonify({"success": True}) + except Exception as e: + return jsonify({"success": False, "error": str(e)}), 500 diff --git a/src/mes_dashboard/routes/auth_routes.py b/src/mes_dashboard/routes/auth_routes.py new file mode 100644 index 0000000..a320701 --- /dev/null +++ b/src/mes_dashboard/routes/auth_routes.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +"""Authentication routes for admin login/logout.""" + +from __future__ import annotations + +import logging +import time +from collections import defaultdict +from datetime import datetime +from threading import Lock + +from flask import Blueprint, flash, 
redirect, render_template, request, session, url_for + +from mes_dashboard.services.auth_service import authenticate, is_admin + +logger = logging.getLogger('mes_dashboard.auth_routes') +auth_bp = Blueprint("auth", __name__, url_prefix="/admin") + + +# ============================================================ +# Rate Limiting for Login Endpoint +# ============================================================ +# Simple in-memory rate limiter to prevent brute force attacks +# Configuration: max 5 attempts per IP per 5 minutes + +_rate_limit_lock = Lock() +_login_attempts: dict = defaultdict(list) # IP -> list of timestamps +RATE_LIMIT_MAX_ATTEMPTS = 5 +RATE_LIMIT_WINDOW_SECONDS = 300 # 5 minutes + + +def _is_rate_limited(ip: str) -> bool: + """Check if an IP address is rate limited. + + Args: + ip: Client IP address. + + Returns: + True if rate limited, False otherwise. + """ + current_time = time.time() + window_start = current_time - RATE_LIMIT_WINDOW_SECONDS + + with _rate_limit_lock: + # Clean up old attempts + _login_attempts[ip] = [ + ts for ts in _login_attempts[ip] if ts > window_start + ] + + # Check if limit exceeded + if len(_login_attempts[ip]) >= RATE_LIMIT_MAX_ATTEMPTS: + return True + + return False + + +def _record_login_attempt(ip: str) -> None: + """Record a login attempt for rate limiting. + + Args: + ip: Client IP address. 
+ """ + with _rate_limit_lock: + _login_attempts[ip].append(time.time()) + + +@auth_bp.route("/login", methods=["GET", "POST"]) +def login(): + """Admin login page.""" + error = None + + if request.method == "POST": + # Rate limiting check + client_ip = request.remote_addr or "unknown" + if _is_rate_limited(client_ip): + logger.warning(f"Rate limit exceeded for IP: {client_ip}") + error = "登入嘗試過於頻繁,請稍後再試" + return render_template("login.html", error=error) + + # Record this attempt + _record_login_attempt(client_ip) + + username = request.form.get("username", "").strip() + password = request.form.get("password", "") + + if not username or not password: + error = "請輸入帳號和密碼" + else: + user = authenticate(username, password) + if user is None: + error = "帳號或密碼錯誤" + elif not is_admin(user): + error = "您不是管理員,無法登入後台" + else: + # Login successful + session["admin"] = { + "username": user.get("username"), + "displayName": user.get("displayName"), + "mail": user.get("mail"), + "department": user.get("department"), + "login_time": datetime.now().isoformat(), + } + next_url = request.args.get("next", url_for("portal_index")) + return redirect(next_url) + + return render_template("login.html", error=error) + + +@auth_bp.route("/logout") +def logout(): + """Admin logout.""" + session.pop("admin", None) + return redirect(url_for("portal_index")) diff --git a/src/mes_dashboard/routes/dashboard_routes.py b/src/mes_dashboard/routes/dashboard_routes.py new file mode 100644 index 0000000..e56d1bd --- /dev/null +++ b/src/mes_dashboard/routes/dashboard_routes.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +"""Dashboard API routes for MES Dashboard. + +Contains Flask Blueprint for dashboard/KPI-related API endpoints. 
+""" + +from flask import Blueprint, jsonify, request + +from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key +from mes_dashboard.core.utils import get_days_back +from mes_dashboard.services.dashboard_service import ( + query_dashboard_kpi, + query_workcenter_cards, + query_resource_detail_with_job, + query_ou_trend, + query_utilization_heatmap, +) + +# Create Blueprint +dashboard_bp = Blueprint('dashboard', __name__, url_prefix='/api/dashboard') + + +@dashboard_bp.route('/kpi', methods=['POST']) +def api_dashboard_kpi(): + """API: Dashboard KPI data.""" + data = request.get_json() or {} + filters = data.get('filters') + + days_back = get_days_back(filters) + cache_key = make_cache_key("dashboard_kpi", days_back, filters) + kpi = cache_get(cache_key) + if kpi is None: + kpi = query_dashboard_kpi(filters) + if kpi: + cache_set(cache_key, kpi) + if kpi: + return jsonify({'success': True, 'data': kpi}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@dashboard_bp.route('/workcenter_cards', methods=['POST']) +def api_dashboard_workcenter_cards(): + """API: Workcenter cards data.""" + data = request.get_json() or {} + filters = data.get('filters') + + days_back = get_days_back(filters) + cache_key = make_cache_key("dashboard_workcenter_cards", days_back, filters) + cards = cache_get(cache_key) + if cards is None: + cards = query_workcenter_cards(filters) + if cards is not None: + cache_set(cache_key, cards) + if cards is not None: + return jsonify({'success': True, 'data': cards}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@dashboard_bp.route('/detail', methods=['POST']) +def api_dashboard_detail(): + """API: Resource detail with JOB info.""" + data = request.get_json() or {} + filters = data.get('filters') + limit = data.get('limit', 200) + offset = data.get('offset', 0) + + df, max_status_time = query_resource_detail_with_job(filters, limit, offset) + if df is not None: + records = df.to_dict(orient='records') 
+ return jsonify({ + 'success': True, + 'data': records, + 'count': len(records), + 'offset': offset, + 'max_status_time': max_status_time + }) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@dashboard_bp.route('/ou_trend', methods=['POST']) +def api_dashboard_ou_trend(): + """API: OU% trend data for line chart.""" + data = request.get_json() or {} + filters = data.get('filters') + days = data.get('days', 7) + + cache_key = make_cache_key("dashboard_ou_trend", days, filters) + trend = cache_get(cache_key) + if trend is None: + trend = query_ou_trend(days, filters) + if trend is not None: + cache_set(cache_key, trend, ttl=300) # 5 min cache + if trend is not None: + return jsonify({'success': True, 'data': trend}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@dashboard_bp.route('/utilization_heatmap', methods=['POST']) +def api_dashboard_utilization_heatmap(): + """API: Utilization heatmap data.""" + data = request.get_json() or {} + filters = data.get('filters') + days = data.get('days', 7) + + cache_key = make_cache_key("dashboard_heatmap", days, filters) + heatmap = cache_get(cache_key) + if heatmap is None: + heatmap = query_utilization_heatmap(days, filters) + if heatmap is not None: + cache_set(cache_key, heatmap, ttl=300) # 5 min cache + if heatmap is not None: + return jsonify({'success': True, 'data': heatmap}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 diff --git a/src/mes_dashboard/routes/excel_query_routes.py b/src/mes_dashboard/routes/excel_query_routes.py new file mode 100644 index 0000000..ca10761 --- /dev/null +++ b/src/mes_dashboard/routes/excel_query_routes.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +"""API routes for Excel batch query functionality. 
+ +Provides endpoints for: +- Excel file upload and parsing +- Column value extraction +- Batch query execution +- CSV export +""" + +from flask import Blueprint, jsonify, request, Response + +from mes_dashboard.config.tables import TABLES_CONFIG +from mes_dashboard.core.database import get_table_columns, get_table_column_metadata +from mes_dashboard.services.excel_query_service import ( + parse_excel, + get_column_unique_values, + execute_batch_query, + execute_advanced_batch_query, + generate_csv_content, + detect_excel_column_type, + LARGE_TABLE_THRESHOLD, +) + + +excel_query_bp = Blueprint('excel_query', __name__, url_prefix='/api/excel-query') + +# Store uploaded Excel data in memory (session-based in production) +_uploaded_excel_cache = {} + + +@excel_query_bp.route('/upload', methods=['POST']) +def upload_excel(): + """Upload and parse Excel file. + + Returns column list and preview data. + """ + if 'file' not in request.files: + return jsonify({'error': '未選擇檔案'}), 400 + + file = request.files['file'] + if file.filename == '': + return jsonify({'error': '未選擇檔案'}), 400 + + # Check file extension + allowed_extensions = {'.xlsx', '.xls'} + import os + ext = os.path.splitext(file.filename)[1].lower() + if ext not in allowed_extensions: + return jsonify({'error': '只支援 .xlsx 或 .xls 檔案'}), 400 + + # Parse Excel + result = parse_excel(file) + if 'error' in result: + return jsonify(result), 400 + + # Cache the file content for later use + file.seek(0) + _uploaded_excel_cache['current'] = file.read() + _uploaded_excel_cache['filename'] = file.filename + + return jsonify(result) + + +@excel_query_bp.route('/column-values', methods=['POST']) +def get_column_values(): + """Get unique values from selected Excel column.""" + data = request.get_json() + column_name = data.get('column_name') + + if not column_name: + return jsonify({'error': '請指定欄位名稱'}), 400 + + if 'current' not in _uploaded_excel_cache: + return jsonify({'error': '請先上傳 Excel 檔案'}), 400 + + # Create 
file-like object from cached content + import io + file_like = io.BytesIO(_uploaded_excel_cache['current']) + + result = get_column_unique_values(file_like, column_name) + if 'error' in result: + return jsonify(result), 400 + + return jsonify(result) + + +@excel_query_bp.route('/tables', methods=['GET']) +def get_tables(): + """Get available tables for querying.""" + tables = [] + for category, table_list in TABLES_CONFIG.items(): + for table in table_list: + tables.append({ + 'name': table['name'], + 'display_name': table['display_name'], + 'category': category + }) + return jsonify({'tables': tables}) + + +@excel_query_bp.route('/table-columns', methods=['POST']) +def get_table_cols(): + """Get columns for a specific table.""" + data = request.get_json() + table_name = data.get('table_name') + + if not table_name: + return jsonify({'error': '請指定資料表名稱'}), 400 + + columns = get_table_columns(table_name) + if not columns: + return jsonify({'error': f'無法取得資料表 {table_name} 的欄位'}), 400 + + return jsonify({'columns': columns}) + + +@excel_query_bp.route('/table-metadata', methods=['POST']) +def get_table_metadata(): + """Get enriched table metadata including column types. 
+ + Returns: + - columns: List of column info with data types + - time_field: Configured time field from TABLES_CONFIG (or null) + - description: Table description from TABLES_CONFIG + - row_count: Approximate row count from TABLES_CONFIG + - performance_warning: Warning message if table is large + """ + data = request.get_json() + table_name = data.get('table_name') + + if not table_name: + return jsonify({'error': '請指定資料表名稱'}), 400 + + # Get column metadata from Oracle + metadata = get_table_column_metadata(table_name) + if 'error' in metadata and not metadata.get('columns'): + return jsonify({'error': f'無法取得資料表 {table_name} 的欄位資訊'}), 400 + + # Find table config for additional info + table_config = None + for category, table_list in TABLES_CONFIG.items(): + for table in table_list: + if table['name'] == table_name: + table_config = table + break + if table_config: + break + + # Build response + result = { + 'columns': metadata.get('columns', []), + 'time_field': table_config.get('time_field') if table_config else None, + 'description': table_config.get('description', '') if table_config else '', + 'row_count': table_config.get('row_count', 0) if table_config else 0, + 'performance_warning': None + } + + # Add performance warning for large tables + if result['row_count'] and result['row_count'] > LARGE_TABLE_THRESHOLD: + result['performance_warning'] = ( + f'此資料表超過 {LARGE_TABLE_THRESHOLD // 1_000_000} 千萬筆,' + '包含查詢可能較慢,建議配合日期範圍縮小查詢範圍' + ) + + return jsonify(result) + + +@excel_query_bp.route('/column-type', methods=['POST']) +def get_excel_column_type(): + """Detect Excel column data type from cached file. + + Expects JSON body: + {"column_name": "LOT_ID"} + + Returns column type info. 
+ """ + data = request.get_json() + column_name = data.get('column_name') + + if not column_name: + return jsonify({'error': '請指定欄位名稱'}), 400 + + if 'current' not in _uploaded_excel_cache: + return jsonify({'error': '請先上傳 Excel 檔案'}), 400 + + import io + file_like = io.BytesIO(_uploaded_excel_cache['current']) + + # Get unique values first + from mes_dashboard.services.excel_query_service import get_column_unique_values + values_result = get_column_unique_values(file_like, column_name) + if 'error' in values_result: + return jsonify(values_result), 400 + + # Detect type from values + type_info = detect_excel_column_type(values_result['values']) + + return jsonify({ + 'column_name': column_name, + **type_info + }) + + +@excel_query_bp.route('/execute-advanced', methods=['POST']) +def execute_advanced_query(): + """Execute advanced batch query with multiple condition types. + + Expects JSON body: + { + "table_name": "DWH.DW_MES_WIP", + "search_column": "LOT_ID", + "return_columns": ["LOT_ID", "SPEC", "QTY"], + "search_values": ["val1", "val2", ...], + "query_type": "in" | "like_contains" | "like_prefix" | "like_suffix", + "date_column": "TXNDATE", // optional + "date_from": "2024-01-01", // optional (YYYY-MM-DD) + "date_to": "2024-12-31" // optional (YYYY-MM-DD) + } + """ + data = request.get_json() + + table_name = data.get('table_name') + search_column = data.get('search_column') + return_columns = data.get('return_columns') + search_values = data.get('search_values') + query_type = data.get('query_type', 'in') + date_column = data.get('date_column') + date_from = data.get('date_from') + date_to = data.get('date_to') + + # Validation + if not table_name: + return jsonify({'error': '請指定資料表'}), 400 + if not search_column: + return jsonify({'error': '請指定查詢欄位'}), 400 + if not return_columns or not isinstance(return_columns, list): + return jsonify({'error': '請指定回傳欄位'}), 400 + if not search_values or not isinstance(search_values, list): + return jsonify({'error': 
'無查詢值'}), 400 + + # Validate query_type + valid_types = {'in', 'like_contains', 'like_prefix', 'like_suffix'} + if query_type not in valid_types: + return jsonify({'error': f'無效的查詢類型: {query_type}'}), 400 + + # Validate date range if provided + if date_from and date_to: + try: + from datetime import datetime + d_from = datetime.strptime(date_from, '%Y-%m-%d') + d_to = datetime.strptime(date_to, '%Y-%m-%d') + if d_from > d_to: + return jsonify({'error': '起始日期不可晚於結束日期'}), 400 + if (d_to - d_from).days > 365: + return jsonify({'error': '日期範圍不可超過 365 天'}), 400 + except ValueError: + return jsonify({'error': '日期格式錯誤,請使用 YYYY-MM-DD'}), 400 + + result = execute_advanced_batch_query( + table_name=table_name, + search_column=search_column, + return_columns=return_columns, + search_values=search_values, + query_type=query_type, + date_column=date_column, + date_from=date_from, + date_to=date_to + ) + + if 'error' in result: + return jsonify(result), 400 + + return jsonify(result) + + +@excel_query_bp.route('/execute', methods=['POST']) +def execute_query(): + """Execute batch query with Excel values. + + Expects JSON body: + { + "table_name": "DWH.DW_MES_WIP", + "search_column": "LOT_ID", + "return_columns": ["LOT_ID", "SPEC", "QTY"], + "search_values": ["val1", "val2", ...] 
+ } + """ + data = request.get_json() + + table_name = data.get('table_name') + search_column = data.get('search_column') + return_columns = data.get('return_columns') + search_values = data.get('search_values') + + # Validation + if not table_name: + return jsonify({'error': '請指定資料表'}), 400 + if not search_column: + return jsonify({'error': '請指定查詢欄位'}), 400 + if not return_columns or not isinstance(return_columns, list): + return jsonify({'error': '請指定回傳欄位'}), 400 + if not search_values or not isinstance(search_values, list): + return jsonify({'error': '無查詢值'}), 400 + + result = execute_batch_query( + table_name=table_name, + search_column=search_column, + return_columns=return_columns, + search_values=search_values + ) + + if 'error' in result: + return jsonify(result), 400 + + return jsonify(result) + + +@excel_query_bp.route('/export-csv', methods=['POST']) +def export_csv(): + """Export query results as CSV file. + + Same parameters as /execute endpoint. + """ + data = request.get_json() + + table_name = data.get('table_name') + search_column = data.get('search_column') + return_columns = data.get('return_columns') + search_values = data.get('search_values') + + # Validation + if not all([table_name, search_column, return_columns, search_values]): + return jsonify({'error': '缺少必要參數'}), 400 + + result = execute_batch_query( + table_name=table_name, + search_column=search_column, + return_columns=return_columns, + search_values=search_values + ) + + if 'error' in result: + return jsonify(result), 400 + + # Generate CSV + csv_content = generate_csv_content(result['data'], result['columns']) + + return Response( + csv_content, + mimetype='text/csv; charset=utf-8', + headers={ + 'Content-Disposition': 'attachment; filename=query_result.csv' + } + ) diff --git a/src/mes_dashboard/routes/health_routes.py b/src/mes_dashboard/routes/health_routes.py new file mode 100644 index 0000000..2f34db3 --- /dev/null +++ b/src/mes_dashboard/routes/health_routes.py @@ -0,0 +1,472 
@@ +# -*- coding: utf-8 -*- +"""Health check endpoints for MES Dashboard. + +Provides /health and /health/deep endpoints for monitoring service status. +""" + +from __future__ import annotations + +import logging +import time +from datetime import datetime, timedelta +from flask import Blueprint, jsonify, make_response + +from mes_dashboard.core.database import ( + get_engine, + get_pool_runtime_config, + get_pool_status, +) +from mes_dashboard.core.redis_client import ( + get_redis_client, + redis_available, + REDIS_ENABLED +) +from mes_dashboard.core.cache import ( + get_cached_sys_date, + get_cache_updated_at +) +from mes_dashboard.core.resilience import ( + build_recovery_recommendation, + get_resilience_thresholds, +) +from sqlalchemy import text + +logger = logging.getLogger('mes_dashboard.health') + +health_bp = Blueprint('health', __name__) + +# ============================================================ +# Warning Thresholds +# ============================================================ + +DB_LATENCY_WARNING_MS = 100 # Database latency > 100ms is slow +CACHE_STALE_MINUTES = 2 # Cache update > 2 minutes is stale + + +def _classify_degraded_reason( + db_status: str, + redis_status: str, + route_cache_degraded: bool, + circuit_breaker_state: str | None = None, + pool_saturation: float | None = None, +) -> str | None: + if db_status == 'error': + return 'database_unreachable' + if circuit_breaker_state == 'OPEN': + return 'circuit_breaker_open' + if pool_saturation is not None and pool_saturation >= 1.0: + return 'db_pool_saturated' + if redis_status == 'error': + return 'redis_unavailable' + if route_cache_degraded: + return 'route_cache_degraded' + return None + + +def check_database() -> tuple[str, str | None]: + """Check database connectivity. + + Returns: + Tuple of (status, error_message). + status is 'ok' or 'error'. 
+ """ + try: + engine = get_engine() + with engine.connect() as conn: + conn.execute(text("SELECT 1 FROM DUAL")) + return 'ok', None + except Exception as e: + logger.error(f"Database health check failed: {e}") + return 'error', str(e) + + +def check_redis() -> tuple[str, str | None]: + """Check Redis connectivity. + + Returns: + Tuple of (status, error_message). + status is 'ok', 'error', or 'disabled'. + """ + if not REDIS_ENABLED: + return 'disabled', None + + try: + client = get_redis_client() + if client is None: + return 'error', 'Failed to get Redis client' + + client.ping() + return 'ok', None + except Exception as e: + logger.warning(f"Redis health check failed: {e}") + return 'error', str(e) + + +def get_cache_status() -> dict: + """Get current WIP cache status. + + Returns: + Dict with WIP cache status information. + """ + status = { + 'enabled': REDIS_ENABLED, + 'sys_date': get_cached_sys_date(), + 'updated_at': get_cache_updated_at() + } + try: + from mes_dashboard.services.wip_service import get_wip_search_index_status + status['derived_search_index'] = get_wip_search_index_status() + except Exception: + status['derived_search_index'] = {} + return status + + +def get_route_cache_status() -> dict: + """Get route-cache telemetry for operational diagnostics.""" + from flask import current_app + + cache_backend = current_app.extensions.get("cache") + if cache_backend is None: + return { + 'mode': 'none', + 'degraded': False, + 'available': False, + } + + telemetry_fn = getattr(cache_backend, "telemetry", None) + if callable(telemetry_fn): + telemetry = telemetry_fn() + telemetry['available'] = True + return telemetry + + return { + 'mode': cache_backend.__class__.__name__, + 'degraded': False, + 'available': True, + } + + +def get_resource_cache_status() -> dict: + """Get current resource cache status. + + Returns: + Dict with resource cache status information. 
+ """ + from mes_dashboard.services.resource_cache import ( + get_cache_status as get_res_cache_status, + RESOURCE_CACHE_ENABLED, + ) + + if not RESOURCE_CACHE_ENABLED: + return {'enabled': False} + + return get_res_cache_status() + + +def get_equipment_status_cache_status() -> dict: + """Get current realtime equipment status cache status. + + Returns: + Dict with equipment status cache information. + """ + from flask import current_app + from mes_dashboard.services.realtime_equipment_cache import ( + get_equipment_status_cache_status as get_eq_cache_status, + ) + + enabled = current_app.config.get('REALTIME_EQUIPMENT_CACHE_ENABLED', True) + if not enabled: + return {'enabled': False} + + return get_eq_cache_status() + + +def get_workcenter_mapping_status() -> dict: + """Get current workcenter mapping cache status. + + Returns: + Dict with workcenter mapping cache information. + """ + from mes_dashboard.services.filter_cache import get_cache_status + + status = get_cache_status() + return { + 'loaded': status.get('loaded', False), + 'workcenter_count': status.get('workcenter_mapping_count', 0), + 'group_count': status.get('workcenter_groups_count', 0), + } + + +@health_bp.route('/health', methods=['GET']) +def health_check(): + """Health check endpoint. 
+ + Returns: + - 200 OK: All services healthy or degraded (Redis down but DB ok) + - 503 Service Unavailable: Database unhealthy + """ + from mes_dashboard.core.circuit_breaker import get_circuit_breaker_status + + db_status, db_error = check_database() + redis_status, redis_error = check_redis() + circuit_breaker = get_circuit_breaker_status() + + services = { + 'database': db_status, + 'redis': redis_status + } + route_cache = get_route_cache_status() + pool_runtime = get_pool_runtime_config() + try: + pool_status = get_pool_status() + except Exception: + pool_status = None + + errors = [] + warnings = [] + pool_saturation = (pool_status or {}).get('saturation') + degraded_reason = _classify_degraded_reason( + db_status=db_status, + redis_status=redis_status, + route_cache_degraded=bool(route_cache.get('degraded')), + circuit_breaker_state=circuit_breaker.get('state'), + pool_saturation=pool_saturation, + ) + + # Determine overall status + if db_status == 'error': + status = 'unhealthy' + http_code = 503 + if db_error: + errors.append(f"Database connection failed: {db_error}") + elif redis_status == 'error': + # Redis down is degraded, not unhealthy (fallback available) + status = 'degraded' + http_code = 200 + warnings.append("Redis unavailable, running in fallback mode") + elif circuit_breaker.get('state') == 'OPEN': + status = 'degraded' + http_code = 200 + warnings.append("Circuit breaker is OPEN") + else: + status = 'healthy' + http_code = 200 + + # Check resource cache status + resource_cache = get_resource_cache_status() + if resource_cache.get('enabled') and not resource_cache.get('loaded'): + warnings.append("Resource cache not loaded") + + if route_cache.get('degraded'): + warnings.append("Route cache is running in degraded L1-only mode") + + if pool_status is not None: + saturation = pool_saturation if pool_saturation is not None else 0.0 + if saturation >= 0.9: + warnings.append(f"Database pool saturation is high ({saturation:.0%})") + + thresholds = 
get_resilience_thresholds() + recommendation = build_recovery_recommendation( + degraded_reason=degraded_reason, + pool_saturation=pool_saturation, + circuit_state=circuit_breaker.get('state'), + restart_churn_exceeded=False, + cooldown_active=False, + ) + + # Check equipment status cache + equipment_status_cache = get_equipment_status_cache_status() + if equipment_status_cache.get('enabled') and not equipment_status_cache.get('loaded'): + warnings.append("Equipment status cache not loaded") + + # Check workcenter mapping + workcenter_mapping = get_workcenter_mapping_status() + + response = { + 'status': status, + 'services': services, + 'degraded_reason': degraded_reason, + 'circuit_breaker': circuit_breaker, + 'database_pool': { + 'runtime': pool_runtime, + 'state': pool_status, + }, + 'resilience': { + 'thresholds': thresholds, + 'recovery_recommendation': recommendation, + }, + 'cache': get_cache_status(), + 'route_cache': route_cache, + 'resource_cache': resource_cache, + 'equipment_status_cache': equipment_status_cache, + 'workcenter_mapping': workcenter_mapping, + } + + if errors: + response['errors'] = errors + if warnings: + response['warnings'] = warnings + + # Add no-cache headers to prevent browser caching + resp = make_response(jsonify(response), http_code) + resp.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate' + resp.headers['Pragma'] = 'no-cache' + resp.headers['Expires'] = '0' + return resp + + +@health_bp.route('/health/deep', methods=['GET']) +def deep_health_check(): + """Deep health check endpoint with detailed metrics. + + Requires admin authentication. 
+ + Returns: + - 200 OK with detailed health information + - 503 if database is unhealthy + """ + from mes_dashboard.core.permissions import is_admin_logged_in + from mes_dashboard.core.circuit_breaker import get_circuit_breaker_status + from mes_dashboard.core.metrics import get_metrics_summary + from flask import redirect, url_for, request + + # Require admin authentication - redirect to login for consistency + if not is_admin_logged_in(): + return redirect(url_for("auth.login", next=request.url)) + + # Check database with latency measurement + db_start = time.time() + db_status, db_error = check_database() + db_latency_ms = round((time.time() - db_start) * 1000, 2) + + # Check Redis with latency measurement + redis_latency_ms = None + if REDIS_ENABLED: + redis_start = time.time() + redis_status, redis_error = check_redis() + redis_latency_ms = round((time.time() - redis_start) * 1000, 2) + else: + redis_status = 'disabled' + + # Get circuit breaker status + circuit_breaker = get_circuit_breaker_status() + pool_runtime = get_pool_runtime_config() + + # Get performance metrics + metrics = get_metrics_summary() + + # Get cache freshness + cache_status = get_cache_status() + route_cache = get_route_cache_status() + cache_updated_at = cache_status.get('updated_at') + cache_is_stale = False + if cache_updated_at: + try: + updated_time = datetime.fromisoformat(cache_updated_at) + cache_is_stale = datetime.now() - updated_time > timedelta(minutes=CACHE_STALE_MINUTES) + except (ValueError, TypeError): + pass + + # Get connection pool status + try: + pool_status = get_pool_status() + except Exception: + pool_status = None + + # Determine overall status with thresholds + warnings = [] + status = 'healthy' + http_code = 200 + + if db_status == 'error': + status = 'unhealthy' + http_code = 503 + elif circuit_breaker.get('state') == 'OPEN': + status = 'degraded' + warnings.append("Circuit breaker is OPEN") + elif redis_status == 'error': + status = 'degraded' + 
warnings.append("Redis unavailable") + + if route_cache.get('degraded'): + status = 'degraded' + warnings.append("Route cache degraded (L1-only fallback)") + + pool_saturation = (pool_status or {}).get('saturation') + if pool_saturation is not None and pool_saturation >= 0.9: + warnings.append(f"Database pool saturation is high ({pool_saturation:.0%})") + + thresholds = get_resilience_thresholds() + degraded_reason = _classify_degraded_reason( + db_status=db_status, + redis_status=redis_status, + route_cache_degraded=bool(route_cache.get('degraded')), + circuit_breaker_state=circuit_breaker.get('state'), + pool_saturation=pool_saturation, + ) + recommendation = build_recovery_recommendation( + degraded_reason=degraded_reason, + pool_saturation=pool_saturation, + circuit_state=circuit_breaker.get('state'), + restart_churn_exceeded=False, + cooldown_active=False, + ) + + # Check latency thresholds + db_latency_status = 'healthy' + if db_latency_ms > DB_LATENCY_WARNING_MS: + db_latency_status = 'slow' + warnings.append(f"Database latency is slow ({db_latency_ms}ms)") + + # Check cache staleness + cache_freshness = 'fresh' + if cache_is_stale: + cache_freshness = 'stale' + warnings.append("Cache data may be stale") + + response = { + 'status': status, + 'degraded_reason': degraded_reason, + 'resilience': { + 'thresholds': thresholds, + 'recovery_recommendation': recommendation, + }, + 'checks': { + 'database': { + 'status': db_latency_status if db_status == 'ok' else 'error', + 'latency_ms': db_latency_ms, + 'pool': pool_status, + 'pool_runtime': pool_runtime, + }, + 'redis': { + 'status': 'healthy' if redis_status == 'ok' else redis_status, + 'latency_ms': redis_latency_ms + }, + 'circuit_breaker': circuit_breaker, + 'cache': { + 'freshness': cache_freshness, + 'updated_at': cache_updated_at, + 'sys_date': cache_status.get('sys_date') + }, + 'route_cache': route_cache + }, + 'metrics': { + 'query_p50_ms': metrics.get('p50_ms'), + 'query_p95_ms': metrics.get('p95_ms'), 
+ 'query_p99_ms': metrics.get('p99_ms'), + 'query_count': metrics.get('count'), + 'slow_query_count': metrics.get('slow_count'), + 'slow_query_rate': metrics.get('slow_rate'), + 'worker_pid': metrics.get('worker_pid') + } + } + + if warnings: + response['warnings'] = warnings + + # Add no-cache headers + resp = make_response(jsonify(response), http_code) + resp.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate' + resp.headers['Pragma'] = 'no-cache' + resp.headers['Expires'] = '0' + return resp diff --git a/src/mes_dashboard/routes/hold_routes.py b/src/mes_dashboard/routes/hold_routes.py new file mode 100644 index 0000000..dc0f9bc --- /dev/null +++ b/src/mes_dashboard/routes/hold_routes.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +"""Hold Detail API routes for MES Dashboard. + +Contains Flask Blueprint for Hold Detail page and API endpoints. +""" + +from flask import Blueprint, jsonify, request, render_template, redirect, url_for + +from mes_dashboard.services.wip_service import ( + get_hold_detail_summary, + get_hold_detail_distribution, + get_hold_detail_lots, + is_quality_hold, +) + +# Create Blueprint +hold_bp = Blueprint('hold', __name__) + + +def _parse_bool(value: str) -> bool: + """Parse boolean from query string.""" + return value.lower() in ('true', '1', 'yes') if value else False + + +# ============================================================ +# Page Route +# ============================================================ + +@hold_bp.route('/hold-detail') +def hold_detail_page(): + """Render the Hold Detail page. 
+ + Query Parameters: + reason: Hold reason name (required) + + Returns: + Rendered HTML template + """ + reason = request.args.get('reason', '').strip() + if not reason: + # Redirect to WIP Overview when reason is missing + return redirect('/wip-overview') + + hold_type = 'quality' if is_quality_hold(reason) else 'non-quality' + return render_template('hold_detail.html', reason=reason, hold_type=hold_type) + + +# ============================================================ +# Hold Detail APIs +# ============================================================ + +@hold_bp.route('/api/wip/hold-detail/summary') +def api_hold_detail_summary(): + """API: Get summary statistics for a specific hold reason. + + Query Parameters: + reason: Hold reason name (required) + include_dummy: Include DUMMY lots (default: false) + + Returns: + JSON with totalLots, totalQty, avgAge, maxAge, workcenterCount + """ + reason = request.args.get('reason', '').strip() + if not reason: + return jsonify({'success': False, 'error': '缺少必要參數: reason'}), 400 + + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + + result = get_hold_detail_summary( + reason=reason, + include_dummy=include_dummy + ) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@hold_bp.route('/api/wip/hold-detail/distribution') +def api_hold_detail_distribution(): + """API: Get distribution statistics for a specific hold reason. 
+ + Query Parameters: + reason: Hold reason name (required) + include_dummy: Include DUMMY lots (default: false) + + Returns: + JSON with byWorkcenter, byPackage, byAge distributions + """ + reason = request.args.get('reason', '').strip() + if not reason: + return jsonify({'success': False, 'error': '缺少必要參數: reason'}), 400 + + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + + result = get_hold_detail_distribution( + reason=reason, + include_dummy=include_dummy + ) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@hold_bp.route('/api/wip/hold-detail/lots') +def api_hold_detail_lots(): + """API: Get paginated lot details for a specific hold reason. + + Query Parameters: + reason: Hold reason name (required) + workcenter: Optional WORKCENTER_GROUP filter + package: Optional PRODUCTLINENAME filter + age_range: Optional age range filter ('0-1', '1-3', '3-7', '7+') + include_dummy: Include DUMMY lots (default: false) + page: Page number (default 1) + per_page: Records per page (default 50, max 200) + + Returns: + JSON with lots list, pagination info, and active filters + """ + reason = request.args.get('reason', '').strip() + if not reason: + return jsonify({'success': False, 'error': '缺少必要參數: reason'}), 400 + + workcenter = request.args.get('workcenter', '').strip() or None + package = request.args.get('package', '').strip() or None + age_range = request.args.get('age_range', '').strip() or None + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + page = request.args.get('page', 1, type=int) + per_page = min(request.args.get('per_page', 50, type=int), 200) + + if page < 1: + page = 1 + + # Validate age_range parameter + if age_range and age_range not in ('0-1', '1-3', '3-7', '7+'): + return jsonify({ + 'success': False, + 'error': 'Invalid age_range. 
Use 0-1, 1-3, 3-7, or 7+' + }), 400 + + result = get_hold_detail_lots( + reason=reason, + workcenter=workcenter, + package=package, + age_range=age_range, + include_dummy=include_dummy, + page=page, + page_size=per_page + ) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 diff --git a/src/mes_dashboard/routes/job_query_routes.py b/src/mes_dashboard/routes/job_query_routes.py new file mode 100644 index 0000000..981cdb9 --- /dev/null +++ b/src/mes_dashboard/routes/job_query_routes.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +"""Job Query API routes. + +Contains Flask Blueprint for maintenance job query endpoints: +- Job list query by resources +- Job transaction history detail +- CSV export with full history +""" + +from flask import Blueprint, jsonify, request, Response, render_template + +from mes_dashboard.services.job_query_service import ( + get_jobs_by_resources, + get_job_txn_history, + export_jobs_with_history, + validate_date_range, +) + +# Create Blueprint +job_query_bp = Blueprint('job_query', __name__) + + +# ============================================================ +# Page Route +# ============================================================ + +@job_query_bp.route('/job-query') +def job_query_page(): + """Render the job query page.""" + return render_template('job_query.html') + + +# ============================================================ +# API Routes +# ============================================================ + +@job_query_bp.route('/api/job-query/resources', methods=['GET']) +def get_resources(): + """Get available resources for selection. + + Returns resources from cache for equipment selection. 
+ """ + from mes_dashboard.services.resource_cache import get_all_resources + + try: + resources = get_all_resources() + if not resources: + return jsonify({'error': '無法載入設備資料'}), 500 + + # Return minimal data for selection UI + data = [] + for r in resources: + data.append({ + 'RESOURCEID': r.get('RESOURCEID'), + 'RESOURCENAME': r.get('RESOURCENAME'), + 'WORKCENTERNAME': r.get('WORKCENTERNAME'), + 'RESOURCEFAMILYNAME': r.get('RESOURCEFAMILYNAME'), + }) + + # Sort by WORKCENTERNAME, then RESOURCENAME + data.sort(key=lambda x: (x.get('WORKCENTERNAME', ''), x.get('RESOURCENAME', ''))) + + return jsonify({ + 'data': data, + 'total': len(data) + }) + + except Exception as exc: + return jsonify({'error': f'載入設備資料失敗: {str(exc)}'}), 500 + + +@job_query_bp.route('/api/job-query/jobs', methods=['POST']) +def query_jobs(): + """Query jobs for selected resources. + + Expects JSON body: + { + "resource_ids": ["id1", "id2", ...], + "start_date": "2024-01-01", + "end_date": "2024-12-31" + } + + Returns job list. + """ + data = request.get_json() + + resource_ids = data.get('resource_ids', []) + start_date = data.get('start_date') + end_date = data.get('end_date') + + # Validation + if not resource_ids: + return jsonify({'error': '請選擇至少一台設備'}), 400 + if not start_date or not end_date: + return jsonify({'error': '請指定日期範圍'}), 400 + + validation_error = validate_date_range(start_date, end_date) + if validation_error: + return jsonify({'error': validation_error}), 400 + + result = get_jobs_by_resources(resource_ids, start_date, end_date) + + if 'error' in result: + return jsonify(result), 400 + + return jsonify(result) + + +@job_query_bp.route('/api/job-query/txn/', methods=['GET']) +def query_job_txn_history(job_id: str): + """Query transaction history for a single job. + + Args: + job_id: The JOBID to query + + Returns transaction history list. 
+ """ + if not job_id: + return jsonify({'error': '請指定工單 ID'}), 400 + + result = get_job_txn_history(job_id) + + if 'error' in result: + return jsonify(result), 400 + + return jsonify(result) + + +@job_query_bp.route('/api/job-query/export', methods=['POST']) +def export_jobs(): + """Export jobs with full transaction history as CSV. + + Expects JSON body: + { + "resource_ids": ["id1", "id2", ...], + "start_date": "2024-01-01", + "end_date": "2024-12-31" + } + + Returns streaming CSV response. + """ + data = request.get_json() + + resource_ids = data.get('resource_ids', []) + start_date = data.get('start_date') + end_date = data.get('end_date') + + # Validation + if not resource_ids: + return jsonify({'error': '請選擇至少一台設備'}), 400 + if not start_date or not end_date: + return jsonify({'error': '請指定日期範圍'}), 400 + + validation_error = validate_date_range(start_date, end_date) + if validation_error: + return jsonify({'error': validation_error}), 400 + + # Stream CSV response + return Response( + export_jobs_with_history(resource_ids, start_date, end_date), + mimetype='text/csv; charset=utf-8', + headers={ + 'Content-Disposition': 'attachment; filename=job_history_export.csv' + } + ) diff --git a/src/mes_dashboard/routes/resource_history_routes.py b/src/mes_dashboard/routes/resource_history_routes.py new file mode 100644 index 0000000..54056d7 --- /dev/null +++ b/src/mes_dashboard/routes/resource_history_routes.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +"""Resource History Analysis API routes. + +Contains Flask Blueprint for historical equipment performance analysis endpoints. 
+""" + +from flask import Blueprint, jsonify, request, render_template, Response + +from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key +from mes_dashboard.config.constants import CACHE_TTL_FILTER_OPTIONS, CACHE_TTL_TREND +from mes_dashboard.services.resource_history_service import ( + get_filter_options, + query_summary, + query_detail, + export_csv, +) + +# Create Blueprint +resource_history_bp = Blueprint( + 'resource_history', + __name__, + url_prefix='/api/resource/history' +) + + +# ============================================================ +# Page Route (for template rendering) +# ============================================================ + +@resource_history_bp.route('/page', methods=['GET'], endpoint='page_alias') +def api_resource_history_page(): + """Render the resource history analysis page. + + Note: The actual page route /resource-history is registered separately + in the main app initialization. + """ + return render_template('resource_history.html') + + +# ============================================================ +# API Endpoints +# ============================================================ + +@resource_history_bp.route('/options', methods=['GET']) +def api_resource_history_options(): + """API: Get filter options (workcenters and families). + + Returns: + JSON with workcenters and families lists. + """ + cache_key = make_cache_key("resource_history_options") + options = cache_get(cache_key) + + if options is None: + options = get_filter_options() + if options is not None: + cache_set(cache_key, options, ttl=CACHE_TTL_FILTER_OPTIONS) + + if options is not None: + return jsonify({'success': True, 'data': options}) + return jsonify({'success': False, 'error': '查詢篩選選項失敗'}), 500 + + +@resource_history_bp.route('/summary', methods=['GET']) +def api_resource_history_summary(): + """API: Get summary data (KPI, trend, heatmap, workcenter comparison). 
+ + Query Parameters: + start_date: Start date (YYYY-MM-DD) + end_date: End date (YYYY-MM-DD) + granularity: day|week|month|year (default: day) + workcenter_groups: Optional workcenter group filter (multi-select) + families: Optional resource family filter (multi-select) + is_production: 1 to filter production equipment + is_key: 1 to filter key equipment + is_monitor: 1 to filter monitored equipment + + Returns: + JSON with kpi, trend, heatmap, workcenter_comparison sections. + """ + # Parse query parameters + start_date = request.args.get('start_date') + end_date = request.args.get('end_date') + granularity = request.args.get('granularity', 'day') + workcenter_groups = request.args.getlist('workcenter_groups') or None + families = request.args.getlist('families') or None + is_production = request.args.get('is_production') == '1' + is_key = request.args.get('is_key') == '1' + is_monitor = request.args.get('is_monitor') == '1' + + # Validate required parameters + if not start_date or not end_date: + return jsonify({ + 'success': False, + 'error': '必須提供 start_date 和 end_date 參數' + }), 400 + + # Build cache key with filters dict + cache_filters = { + 'start_date': start_date, + 'end_date': end_date, + 'granularity': granularity, + 'workcenter_groups': sorted(workcenter_groups) if workcenter_groups else None, + 'families': sorted(families) if families else None, + 'is_production': is_production, + 'is_key': is_key, + 'is_monitor': is_monitor, + } + cache_key = make_cache_key("resource_history_summary", filters=cache_filters) + result = cache_get(cache_key) + + if result is None: + result = query_summary( + start_date=start_date, + end_date=end_date, + granularity=granularity, + workcenter_groups=workcenter_groups, + families=families, + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + if result is not None and 'error' not in result: + cache_set(cache_key, result, ttl=CACHE_TTL_TREND) + + if result is not None: + if 'error' in result: + 
return jsonify({'success': False, 'error': result['error']}), 400 + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢摘要資料失敗'}), 500 + + +@resource_history_bp.route('/detail', methods=['GET']) +def api_resource_history_detail(): + """API: Get hierarchical detail data. + + Query Parameters: + start_date: Start date (YYYY-MM-DD) + end_date: End date (YYYY-MM-DD) + granularity: day|week|month|year (default: day) + workcenter_groups: Optional workcenter group filter (multi-select) + families: Optional resource family filter (multi-select) + is_production: 1 to filter production equipment + is_key: 1 to filter key equipment + is_monitor: 1 to filter monitored equipment + + Returns: + JSON with data array, total count, truncated flag. + """ + # Parse query parameters + start_date = request.args.get('start_date') + end_date = request.args.get('end_date') + granularity = request.args.get('granularity', 'day') + workcenter_groups = request.args.getlist('workcenter_groups') or None + families = request.args.getlist('families') or None + is_production = request.args.get('is_production') == '1' + is_key = request.args.get('is_key') == '1' + is_monitor = request.args.get('is_monitor') == '1' + + # Validate required parameters + if not start_date or not end_date: + return jsonify({ + 'success': False, + 'error': '必須提供 start_date 和 end_date 參數' + }), 400 + + result = query_detail( + start_date=start_date, + end_date=end_date, + granularity=granularity, + workcenter_groups=workcenter_groups, + families=families, + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + + if result is not None: + if 'error' in result: + return jsonify({'success': False, 'error': result['error']}), 400 + return jsonify({'success': True, **result}) + return jsonify({'success': False, 'error': '查詢明細資料失敗'}), 500 + + +@resource_history_bp.route('/export', methods=['GET']) +def api_resource_history_export(): + """API: Export detail data 
as CSV. + +    Query Parameters: +        start_date: Start date (YYYY-MM-DD) +        end_date: End date (YYYY-MM-DD) +        granularity: day|week|month|year (default: day) +        workcenter_groups: Optional workcenter group filter (multi-select) +        families: Optional resource family filter (multi-select) +        is_production: 1 to filter production equipment +        is_key: 1 to filter key equipment +        is_monitor: 1 to filter monitored equipment + +    Returns: +        CSV file download. +    """ +    # Parse query parameters +    start_date = request.args.get('start_date') +    end_date = request.args.get('end_date') +    granularity = request.args.get('granularity', 'day') +    workcenter_groups = request.args.getlist('workcenter_groups') or None +    families = request.args.getlist('families') or None +    is_production = request.args.get('is_production') == '1' +    is_key = request.args.get('is_key') == '1' +    is_monitor = request.args.get('is_monitor') == '1' + +    # Validate required parameters +    if not start_date or not end_date: +        return jsonify({ +            'success': False, +            'error': '必須提供 start_date 和 end_date 參數' +        }), 400 + +    # Generate filename +    filename = f"resource_history_{start_date}_to_{end_date}.csv" + +    # Stream CSV response +    return Response( +        export_csv( +            start_date=start_date, +            end_date=end_date, +            granularity=granularity, +            workcenter_groups=workcenter_groups, +            families=families, +            is_production=is_production, +            is_key=is_key, +            is_monitor=is_monitor, +        ), +        mimetype='text/csv', +        headers={ +            'Content-Disposition': f'attachment; filename={filename}', +            'Content-Type': 'text/csv; charset=utf-8-sig' +        } +    ) diff --git a/src/mes_dashboard/routes/resource_routes.py b/src/mes_dashboard/routes/resource_routes.py new file mode 100644 index 0000000..84610aa --- /dev/null +++ b/src/mes_dashboard/routes/resource_routes.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +"""Resource (Equipment) API routes for MES Dashboard. + +Contains Flask Blueprint for resource/equipment-related API endpoints.
+""" + +import math +from flask import Blueprint, jsonify, request + +from mes_dashboard.core.database import ( + get_db_connection, + DatabasePoolExhaustedError, + DatabaseCircuitOpenError, +) +from mes_dashboard.core.cache import cache_get, cache_set, make_cache_key + + +def _clean_nan_values(data): + """Convert NaN and NaT values to None for JSON serialization. + + Args: + data: List of dicts or single dict. + + Returns: + Cleaned data with NaN/NaT replaced by None. + """ + if isinstance(data, list): + return [_clean_nan_values(item) for item in data] + elif isinstance(data, dict): + cleaned = {} + for key, value in data.items(): + if isinstance(value, float) and math.isnan(value): + cleaned[key] = None + elif isinstance(value, str) and value == 'NaT': + cleaned[key] = None + elif value != value: # NaN check (NaN != NaN) + cleaned[key] = None + elif isinstance(value, list): + # Recursively clean nested lists (e.g., LOT_DETAILS) + cleaned[key] = _clean_nan_values(value) + elif isinstance(value, dict): + # Recursively clean nested dicts + cleaned[key] = _clean_nan_values(value) + else: + cleaned[key] = value + return cleaned + return data +from mes_dashboard.core.utils import get_days_back +from mes_dashboard.services.resource_service import ( + query_resource_by_status, + query_resource_by_workcenter, + query_resource_detail, + query_resource_workcenter_status_matrix, + query_resource_filter_options, + get_merged_resource_status, + get_resource_status_summary, + get_workcenter_status_matrix, +) +from mes_dashboard.services.filter_cache import get_workcenter_groups +from mes_dashboard.config.constants import STATUS_CATEGORIES + +# Create Blueprint +resource_bp = Blueprint('resource', __name__, url_prefix='/api/resource') + + +@resource_bp.route('/by_status') +def api_resource_by_status(): + """API: Resource count by status.""" + days_back = request.args.get('days_back', 30, type=int) + cache_key = make_cache_key("resource_by_status", days_back) + data = 
cache_get(cache_key) + if data is None: + df = query_resource_by_status(days_back) + if df is not None: + data = df.to_dict(orient='records') + cache_set(cache_key, data) + else: + data = None + if data is not None: + return jsonify({'success': True, 'data': data}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@resource_bp.route('/by_workcenter') +def api_resource_by_workcenter(): + """API: Resource count by workcenter.""" + days_back = request.args.get('days_back', 30, type=int) + cache_key = make_cache_key("resource_by_workcenter", days_back) + data = cache_get(cache_key) + if data is None: + df = query_resource_by_workcenter(days_back) + if df is not None: + data = df.to_dict(orient='records') + cache_set(cache_key, data) + else: + data = None + if data is not None: + return jsonify({'success': True, 'data': data}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@resource_bp.route('/workcenter_status_matrix') +def api_resource_workcenter_status_matrix(): + """API: Resource count matrix by workcenter and status category.""" + days_back = request.args.get('days_back', 30, type=int) + cache_key = make_cache_key("resource_workcenter_matrix", days_back) + data = cache_get(cache_key) + if data is None: + df = query_resource_workcenter_status_matrix(days_back) + if df is not None: + data = df.to_dict(orient='records') + cache_set(cache_key, data) + else: + data = None + if data is not None: + return jsonify({'success': True, 'data': data}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@resource_bp.route('/detail', methods=['POST']) +def api_resource_detail(): + """API: Resource detail with filters.""" + data = request.get_json() or {} + filters = data.get('filters') + limit = data.get('limit', 500) + offset = data.get('offset', 0) + days_back = get_days_back(filters) + + df = query_resource_detail(filters, limit, offset, days_back) + if df is not None: + records = df.to_dict(orient='records') + return 
jsonify({'success': True, 'data': records, 'count': len(records), 'offset': offset}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@resource_bp.route('/filter_options') +def api_resource_filter_options(): + """API: Get filter options.""" + days_back = request.args.get('days_back', 30, type=int) + cache_key = make_cache_key("resource_filter_options", days_back) + options = cache_get(cache_key) + if options is None: + options = query_resource_filter_options(days_back) + if options: + cache_set(cache_key, options) + if options: + return jsonify({'success': True, 'data': options}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@resource_bp.route('/status_values') +def api_resource_status_values(): + """API: Get all distinct status values with counts (for verification).""" + connection = get_db_connection() + if not connection: + return jsonify({'success': False, 'error': '數據庫連接失敗'}), 500 + + try: + sql = """ + SELECT DISTINCT NEWSTATUSNAME, COUNT(*) as CNT + FROM DWH.DW_MES_RESOURCESTATUS + WHERE NEWSTATUSNAME IS NOT NULL + AND LASTSTATUSCHANGEDATE >= SYSDATE - 30 + GROUP BY NEWSTATUSNAME + ORDER BY CNT DESC + """ + cursor = connection.cursor() + cursor.execute(sql) + rows = cursor.fetchall() + cursor.close() + connection.close() + + data = [{'status': row[0], 'count': row[1]} for row in rows] + return jsonify({'success': True, 'data': data}) + except Exception as exc: + if connection: + connection.close() + return jsonify({'success': False, 'error': str(exc)}), 500 + + +# ============================================================ +# Realtime Equipment Status APIs (New) +# ============================================================ + +@resource_bp.route('/status') +def api_resource_status(): + """API: Get merged resource status from realtime cache. 
+ + Query params: + workcenter_groups: Comma-separated group names (e.g., '焊接,成型') + is_production: '1' or 'true' to filter production equipment + is_key: '1' or 'true' to filter key equipment + is_monitor: '1' or 'true' to filter monitor equipment + status_categories: Comma-separated categories (e.g., 'PRODUCTIVE,DOWN') + """ + # Parse filters + wc_groups_param = request.args.get('workcenter_groups') + workcenter_groups = wc_groups_param.split(',') if wc_groups_param else None + + is_production = None + is_prod_param = request.args.get('is_production') + if is_prod_param: + is_production = is_prod_param.lower() in ('1', 'true', 'yes') + + is_key = None + is_key_param = request.args.get('is_key') + if is_key_param: + is_key = is_key_param.lower() in ('1', 'true', 'yes') + + is_monitor = None + is_monitor_param = request.args.get('is_monitor') + if is_monitor_param: + is_monitor = is_monitor_param.lower() in ('1', 'true', 'yes') + + status_cats_param = request.args.get('status_categories') + status_categories = status_cats_param.split(',') if status_cats_param else None + + try: + data = get_merged_resource_status( + workcenter_groups=workcenter_groups, + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + status_categories=status_categories, + ) + # Clean NaN/NaT values for valid JSON + cleaned_data = _clean_nan_values(data) + return jsonify({ + 'success': True, + 'data': cleaned_data, + 'count': len(cleaned_data), + }) + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + return jsonify({'success': False, 'error': str(exc)}), 500 + + +@resource_bp.route('/status/options') +def api_resource_status_options(): + """API: Get filter options for realtime status queries. + + Returns workcenter_groups, status_categories, and other filter options. 
+ """ + try: + # Get workcenter groups from cache + wc_groups = get_workcenter_groups() or [] + + return jsonify({ + 'success': True, + 'data': { + 'workcenter_groups': [g['name'] for g in wc_groups], + 'status_categories': STATUS_CATEGORIES, + } + }) + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + return jsonify({'success': False, 'error': str(exc)}), 500 + + +@resource_bp.route('/status/summary') +def api_resource_status_summary(): + """API: Get resource status summary statistics. + + Query params: same as /status + """ + # Parse filters (same as /status) + wc_groups_param = request.args.get('workcenter_groups') + workcenter_groups = wc_groups_param.split(',') if wc_groups_param else None + + is_production = None + is_prod_param = request.args.get('is_production') + if is_prod_param: + is_production = is_prod_param.lower() in ('1', 'true', 'yes') + + is_key = None + is_key_param = request.args.get('is_key') + if is_key_param: + is_key = is_key_param.lower() in ('1', 'true', 'yes') + + is_monitor = None + is_monitor_param = request.args.get('is_monitor') + if is_monitor_param: + is_monitor = is_monitor_param.lower() in ('1', 'true', 'yes') + + try: + data = get_resource_status_summary( + workcenter_groups=workcenter_groups, + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + # Clean NaN/NaT values for valid JSON + cleaned_data = _clean_nan_values(data) + return jsonify({'success': True, 'data': cleaned_data}) + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + return jsonify({'success': False, 'error': str(exc)}), 500 + + +@resource_bp.route('/status/matrix') +def api_resource_status_matrix(): + """API: Get workcenter × status matrix. 
+ + Query params: + is_production: Filter by production equipment + is_key: Filter by key equipment + is_monitor: Filter by monitor equipment + """ + is_production = None + is_prod_param = request.args.get('is_production') + if is_prod_param: + is_production = is_prod_param.lower() in ('1', 'true', 'yes') + + is_key = None + is_key_param = request.args.get('is_key') + if is_key_param: + is_key = is_key_param.lower() in ('1', 'true', 'yes') + + is_monitor = None + is_monitor_param = request.args.get('is_monitor') + if is_monitor_param: + is_monitor = is_monitor_param.lower() in ('1', 'true', 'yes') + + try: + data = get_workcenter_status_matrix( + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + # Clean NaN/NaT values for valid JSON + cleaned_data = _clean_nan_values(data) + return jsonify({'success': True, 'data': cleaned_data}) + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + return jsonify({'success': False, 'error': str(exc)}), 500 diff --git a/src/mes_dashboard/routes/wip_routes.py b/src/mes_dashboard/routes/wip_routes.py new file mode 100644 index 0000000..4b1a2a9 --- /dev/null +++ b/src/mes_dashboard/routes/wip_routes.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +"""WIP API routes for MES Dashboard. + +Contains Flask Blueprint for WIP-related API endpoints. +Uses DWH.DW_MES_LOT_V view for real-time WIP data. 
+""" + +from flask import Blueprint, jsonify, request + +from mes_dashboard.services.wip_service import ( + get_wip_summary, + get_wip_matrix, + get_wip_hold_summary, + get_wip_detail, + get_workcenters, + get_packages, + search_workorders, + search_lot_ids, + search_packages, + search_types, + get_lot_detail, +) + +# Create Blueprint +wip_bp = Blueprint('wip', __name__, url_prefix='/api/wip') + + +def _parse_bool(value: str) -> bool: + """Parse boolean from query string.""" + return value.lower() in ('true', '1', 'yes') if value else False + + +# ============================================================ +# Overview APIs +# ============================================================ + +@wip_bp.route('/overview/summary') +def api_overview_summary(): + """API: Get WIP KPI summary for overview dashboard. + + Query Parameters: + workorder: Optional WORKORDER filter (fuzzy match) + lotid: Optional LOTID filter (fuzzy match) + package: Optional PACKAGE_LEF filter (exact match) + pj_type: Optional PJ_TYPE filter (exact match) + include_dummy: Include DUMMY lots (default: false) + + Returns: + JSON with totalLots, totalQtyPcs, byWipStatus, dataUpdateDate + """ + workorder = request.args.get('workorder', '').strip() or None + lotid = request.args.get('lotid', '').strip() or None + package = request.args.get('package', '').strip() or None + pj_type = request.args.get('type', '').strip() or None + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + + result = get_wip_summary( + include_dummy=include_dummy, + workorder=workorder, + lotid=lotid, + package=package, + pj_type=pj_type + ) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@wip_bp.route('/overview/matrix') +def api_overview_matrix(): + """API: Get workcenter x product line matrix for overview dashboard. 
+ + Query Parameters: + workorder: Optional WORKORDER filter (fuzzy match) + lotid: Optional LOTID filter (fuzzy match) + package: Optional PACKAGE_LEF filter (exact match) + pj_type: Optional PJ_TYPE filter (exact match) + include_dummy: Include DUMMY lots (default: false) + status: Optional WIP status filter ('RUN', 'QUEUE', 'HOLD') + hold_type: Optional hold type filter ('quality', 'non-quality') + Only effective when status='HOLD' + + Returns: + JSON with workcenters, packages, matrix, workcenter_totals, + package_totals, grand_total + """ + workorder = request.args.get('workorder', '').strip() or None + lotid = request.args.get('lotid', '').strip() or None + package = request.args.get('package', '').strip() or None + pj_type = request.args.get('type', '').strip() or None + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + status = request.args.get('status', '').strip().upper() or None + hold_type = request.args.get('hold_type', '').strip().lower() or None + + # Validate status parameter + if status and status not in ('RUN', 'QUEUE', 'HOLD'): + return jsonify({ + 'success': False, + 'error': 'Invalid status. Use RUN, QUEUE, or HOLD' + }), 400 + + # Validate hold_type parameter + if hold_type and hold_type not in ('quality', 'non-quality'): + return jsonify({ + 'success': False, + 'error': 'Invalid hold_type. Use quality or non-quality' + }), 400 + + result = get_wip_matrix( + include_dummy=include_dummy, + workorder=workorder, + lotid=lotid, + status=status, + hold_type=hold_type, + package=package, + pj_type=pj_type + ) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@wip_bp.route('/overview/hold') +def api_overview_hold(): + """API: Get hold summary grouped by hold reason. 
+ + Query Parameters: + workorder: Optional WORKORDER filter (fuzzy match) + lotid: Optional LOTID filter (fuzzy match) + include_dummy: Include DUMMY lots (default: false) + + Returns: + JSON with items list containing reason, lots, qty + """ + workorder = request.args.get('workorder', '').strip() or None + lotid = request.args.get('lotid', '').strip() or None + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + + result = get_wip_hold_summary( + include_dummy=include_dummy, + workorder=workorder, + lotid=lotid + ) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +# ============================================================ +# Detail APIs +# ============================================================ + +@wip_bp.route('/detail/') +def api_detail(workcenter: str): + """API: Get WIP detail for a specific workcenter group. + + Args: + workcenter: WORKCENTER_GROUP name (URL path parameter) + + Query Parameters: + package: Optional PRODUCTLINENAME filter + status: Optional WIP status filter ('RUN', 'QUEUE', 'HOLD') + hold_type: Optional hold type filter ('quality', 'non-quality') + Only effective when status='HOLD' + workorder: Optional WORKORDER filter (fuzzy match) + lotid: Optional LOTID filter (fuzzy match) + include_dummy: Include DUMMY lots (default: false) + page: Page number (default 1) + page_size: Records per page (default 100, max 500) + + Returns: + JSON with workcenter, summary, specs, lots, pagination, sys_date + """ + package = request.args.get('package', '').strip() or None + status = request.args.get('status', '').strip().upper() or None + hold_type = request.args.get('hold_type', '').strip().lower() or None + workorder = request.args.get('workorder', '').strip() or None + lotid = request.args.get('lotid', '').strip() or None + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + page = request.args.get('page', 1, type=int) + 
page_size = min(request.args.get('page_size', 100, type=int), 500) + + if page < 1: + page = 1 + + # Validate status parameter + if status and status not in ('RUN', 'QUEUE', 'HOLD'): + return jsonify({ + 'success': False, + 'error': 'Invalid status. Use RUN, QUEUE, or HOLD' + }), 400 + + # Validate hold_type parameter + if hold_type and hold_type not in ('quality', 'non-quality'): + return jsonify({ + 'success': False, + 'error': 'Invalid hold_type. Use quality or non-quality' + }), 400 + + result = get_wip_detail( + workcenter=workcenter, + package=package, + status=status, + hold_type=hold_type, + workorder=workorder, + lotid=lotid, + include_dummy=include_dummy, + page=page, + page_size=page_size + ) + + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@wip_bp.route('/lot/') +def api_lot_detail(lotid: str): + """API: Get detailed information for a specific lot. + + Args: + lotid: LOTID (URL path parameter) + + Returns: + JSON with lot details including all fields from DW_MES_LOT_V + """ + result = get_lot_detail(lotid) + + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '找不到此批號'}), 404 + + +# ============================================================ +# Meta APIs +# ============================================================ + +@wip_bp.route('/meta/workcenters') +def api_meta_workcenters(): + """API: Get list of workcenter groups with lot counts. 
+ + Query Parameters: + include_dummy: Include DUMMY lots (default: false) + + Returns: + JSON with list of {name, lot_count} sorted by sequence + """ + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + + result = get_workcenters(include_dummy=include_dummy) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@wip_bp.route('/meta/packages') +def api_meta_packages(): + """API: Get list of packages (product lines) with lot counts. + + Query Parameters: + include_dummy: Include DUMMY lots (default: false) + + Returns: + JSON with list of {name, lot_count} sorted by count desc + """ + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + + result = get_packages(include_dummy=include_dummy) + if result is not None: + return jsonify({'success': True, 'data': result}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 + + +@wip_bp.route('/meta/search') +def api_meta_search(): + """API: Search for WORKORDER, LOTID, PACKAGE, or PJ_TYPE values. 
+ + Query Parameters: + field: Field to search ('workorder', 'lotid', 'package', or 'pj_type') + q: Search query (minimum 2 characters) + limit: Maximum results (default: 20, max: 50) + include_dummy: Include DUMMY lots (default: false) + + Cross-filter parameters (for interdependent filter suggestions): + workorder: Optional WORKORDER cross-filter (fuzzy match) + lotid: Optional LOTID cross-filter (fuzzy match) + package: Optional PACKAGE_LEF cross-filter (exact match) + type: Optional PJ_TYPE cross-filter (exact match) + + Returns: + JSON with items list containing matching values + """ + search_field = request.args.get('field', '').strip().lower() + q = request.args.get('q', '').strip() + limit = min(request.args.get('limit', 20, type=int), 50) + include_dummy = _parse_bool(request.args.get('include_dummy', '')) + + # Cross-filter parameters + workorder = request.args.get('workorder', '').strip() or None + lotid = request.args.get('lotid', '').strip() or None + package = request.args.get('package', '').strip() or None + pj_type = request.args.get('type', '').strip() or None + + # Validate search field + if search_field not in ('workorder', 'lotid', 'package', 'pj_type'): + return jsonify({ + 'success': False, + 'error': 'Invalid field. 
Use "workorder", "lotid", "package", or "pj_type"' + }), 400 + + # Validate query length + if len(q) < 2: + return jsonify({'success': True, 'data': {'items': []}}) + + # Perform search with cross-filters (exclude the field being searched) + if search_field == 'workorder': + result = search_workorders( + q=q, limit=limit, include_dummy=include_dummy, + lotid=lotid, package=package, pj_type=pj_type + ) + elif search_field == 'lotid': + result = search_lot_ids( + q=q, limit=limit, include_dummy=include_dummy, + workorder=workorder, package=package, pj_type=pj_type + ) + elif search_field == 'package': + result = search_packages( + q=q, limit=limit, include_dummy=include_dummy, + workorder=workorder, lotid=lotid, pj_type=pj_type + ) + else: # pj_type + result = search_types( + q=q, limit=limit, include_dummy=include_dummy, + workorder=workorder, lotid=lotid, package=package + ) + + if result is not None: + return jsonify({'success': True, 'data': {'items': result}}) + return jsonify({'success': False, 'error': '查詢失敗'}), 500 diff --git a/src/mes_dashboard/services/__init__.py b/src/mes_dashboard/services/__init__.py new file mode 100644 index 0000000..2415213 --- /dev/null +++ b/src/mes_dashboard/services/__init__.py @@ -0,0 +1 @@ +"""Service modules for MES Dashboard.""" diff --git a/src/mes_dashboard/services/auth_service.py b/src/mes_dashboard/services/auth_service.py new file mode 100644 index 0000000..a4ee1a8 --- /dev/null +++ b/src/mes_dashboard/services/auth_service.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +"""Authentication service using LDAP API or local credentials.""" + +from __future__ import annotations + +import logging +import os + +import requests + +logger = logging.getLogger(__name__) + +# Configuration - MUST be set in .env file +LDAP_API_BASE = os.environ.get("LDAP_API_URL", "") +ADMIN_EMAILS = os.environ.get("ADMIN_EMAILS", "").lower().split(",") + +# Timeout for LDAP API requests +LDAP_TIMEOUT = 10 + +# Local authentication configuration (for 
development/testing) +LOCAL_AUTH_ENABLED = os.environ.get("LOCAL_AUTH_ENABLED", "false").lower() in ("true", "1", "yes") +LOCAL_AUTH_USERNAME = os.environ.get("LOCAL_AUTH_USERNAME", "") +LOCAL_AUTH_PASSWORD = os.environ.get("LOCAL_AUTH_PASSWORD", "") + + +def _authenticate_local(username: str, password: str) -> dict | None: + """Authenticate using local environment credentials. + + Args: + username: User provided username + password: User provided password + + Returns: + User info dict on success, None on failure + """ + if not LOCAL_AUTH_ENABLED: + return None + + if not LOCAL_AUTH_USERNAME or not LOCAL_AUTH_PASSWORD: + logger.warning("Local auth enabled but credentials not configured") + return None + + if username == LOCAL_AUTH_USERNAME and password == LOCAL_AUTH_PASSWORD: + logger.info("Local auth success for user: %s", username) + return { + "username": username, + "displayName": f"Local User ({username})", + "mail": f"{username}@local.dev", + "department": "Development", + } + + logger.warning("Local auth failed for user: %s", username) + return None + + +def authenticate(username: str, password: str, domain: str = "PANJIT") -> dict | None: + """Authenticate user via local credentials or LDAP API. + + If LOCAL_AUTH_ENABLED is set, tries local authentication first. + Falls back to LDAP API if local auth is disabled or fails. 
+ + Args: + username: Employee ID or email + password: User password + domain: Domain name (default: PANJIT) + + Returns: + User info dict on success: {username, displayName, mail, department} + None on failure + """ + # Try local authentication first if enabled + if LOCAL_AUTH_ENABLED: + local_result = _authenticate_local(username, password) + if local_result: + return local_result + # If local auth is enabled but failed, don't fall back to LDAP + # This ensures local-only mode when LOCAL_AUTH_ENABLED is true + return None + + # LDAP authentication + try: + response = requests.post( + f"{LDAP_API_BASE}/api/v1/ldap/auth", + json={"username": username, "password": password, "domain": domain}, + timeout=LDAP_TIMEOUT, + ) + data = response.json() + + if data.get("success"): + user = data.get("user", {}) + logger.info("LDAP auth success for user: %s", user.get("username")) + return user + + logger.warning("LDAP auth failed for user: %s", username) + return None + + except requests.Timeout: + logger.error("LDAP API timeout for user: %s", username) + return None + except requests.RequestException as e: + logger.error("LDAP API error for user %s: %s", username, e) + return None + except (ValueError, KeyError) as e: + logger.error("LDAP API response parse error: %s", e) + return None + + +def is_admin(user: dict) -> bool: + """Check if user is an admin. 
+ + Args: + user: User info dict with 'mail' field + + Returns: + True if user email is in ADMIN_EMAILS list, or if local auth is enabled + """ + # Local auth users are automatically admins (for development/testing) + if LOCAL_AUTH_ENABLED: + user_mail = user.get("mail", "") + if user_mail.endswith("@local.dev"): + return True + + user_mail = user.get("mail", "").lower().strip() + return user_mail in [e.strip() for e in ADMIN_EMAILS] diff --git a/src/mes_dashboard/services/dashboard_service.py b/src/mes_dashboard/services/dashboard_service.py new file mode 100644 index 0000000..1fd9712 --- /dev/null +++ b/src/mes_dashboard/services/dashboard_service.py @@ -0,0 +1,507 @@ +# -*- coding: utf-8 -*- +"""Dashboard and KPI query services for MES Dashboard. + +Provides functions to query dashboard KPIs, workcenter cards, +resource details with job info, OU trends, and utilization heatmap. +""" + +import logging +import pandas as pd +from typing import Optional, Dict, List, Any, Tuple + +logger = logging.getLogger('mes_dashboard.dashboard_service') + +from mes_dashboard.core.database import ( + get_db_connection, + read_sql_df, + DatabasePoolExhaustedError, + DatabaseCircuitOpenError, +) +from mes_dashboard.core.utils import get_days_back, build_equipment_filter_sql +from mes_dashboard.config.constants import ( + EXCLUDED_LOCATIONS, + EXCLUDED_ASSET_STATUSES, + DEFAULT_DAYS_BACK, +) +from mes_dashboard.config.workcenter_groups import WORKCENTER_GROUPS, get_workcenter_group +from mes_dashboard.services.resource_service import ( + get_resource_latest_status_subquery, + get_resource_status_summary, + get_workcenter_status_matrix, +) +from mes_dashboard.sql import SQLLoader, QueryBuilder +from mes_dashboard.sql.filters import CommonFilters + + +# ============================================================ +# Dashboard KPI Queries +# ============================================================ + +def query_dashboard_kpi(filters: Optional[Dict] = None) -> Optional[Dict]: + 
"""Query overall KPI for dashboard header using cached resource data. + + Status categories: + - RUN: PRD (Production) + - DOWN: UDT + SDT (Down Time) + - IDLE: SBY + NST (Idle) + - ENG: EGT (Engineering Time) + + OU% = PRD / (PRD + SBY + EGT + SDT + UDT) * 100 + + Uses get_resource_status_summary() for fast, cached data from Redis. + + Args: + filters: Optional filter values (is_production, is_key, is_monitor) + + Returns: + Dict with KPI data or None if query fails. + """ + try: + # Extract flag filters for cached query + is_production = None + is_key = None + is_monitor = None + if filters: + if filters.get('isProduction'): + is_production = True + if filters.get('isKey'): + is_key = True + if filters.get('isMonitor'): + is_monitor = True + + # Use cached resource status summary for fast response + summary = get_resource_status_summary( + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + + if not summary or summary.get('total_count', 0) == 0: + return None + + # Extract counts from summary + by_status = summary.get('by_status', {}) + total = summary.get('total_count', 0) + prd = by_status.get('PRD', 0) + sby = by_status.get('SBY', 0) + udt = by_status.get('UDT', 0) + sdt = by_status.get('SDT', 0) + egt = by_status.get('EGT', 0) + nst = by_status.get('NST', 0) + other = by_status.get('OTHER', 0) + + # Status categories + run_count = prd # RUN = PRD + down_count = udt + sdt # DOWN = UDT + SDT + idle_count = sby + nst # IDLE = SBY + NST + eng_count = egt # ENG = EGT + + # OU% from cached summary (already calculated) + ou_pct = summary.get('ou_pct', 0) + + # Run% = PRD / Total * 100 + run_pct = round(prd / total * 100, 1) if total > 0 else 0 + + return { + 'total': total, + 'prd': prd, + 'sby': sby, + 'udt': udt, + 'sdt': sdt, + 'egt': egt, + 'nst': nst, + 'other': other, + # Four main indicators + 'run': run_count, + 'down': down_count, + 'idle': idle_count, + 'eng': eng_count, + # Percentages + 'ou_pct': ou_pct, + 'run_pct': run_pct + } 
+ except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"KPI query failed: {exc}") + return None + + +# ============================================================ +# Workcenter Cards +# ============================================================ + +def query_workcenter_cards(filters: Optional[Dict] = None) -> Optional[List[Dict]]: + """Query workcenter status cards for dashboard with grouping. + + Uses cached resource data from Redis for fast response times. + Data is pre-grouped by workcenter group in the cache. + + Args: + filters: Optional filter values (isProduction, isKey, isMonitor) + + Returns: + List of workcenter card data or None if query fails. + """ + try: + # Extract flag filters for cached query + is_production = None + is_key = None + is_monitor = None + if filters: + if filters.get('isProduction'): + is_production = True + if filters.get('isKey'): + is_key = True + if filters.get('isMonitor'): + is_monitor = True + + # Use cached workcenter matrix for fast response + matrix = get_workcenter_status_matrix( + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + + if not matrix: + return None + + # Transform matrix data to expected card format + result = [] + for row in matrix: + group_name = row['workcenter_group'] + order = row['workcenter_sequence'] + total = row['total'] + prd = row['PRD'] + sby = row['SBY'] + udt = row['UDT'] + sdt = row['SDT'] + egt = row['EGT'] + nst = row['NST'] + + # OU% = PRD / (PRD + SBY + EGT + SDT + UDT) * 100 + operational = prd + sby + egt + sdt + udt + ou_pct = round(prd / operational * 100, 1) if operational > 0 else 0 + run_pct = round(prd / total * 100, 1) if total > 0 else 0 + + result.append({ + 'workcenter': group_name, + 'original_wcs': [], # Not available from cache (aggregated by group) + 'order': order, + 'total': total, + 'prd': prd, + 'sby': sby, + 'udt': udt, + 'sdt': sdt, + 'egt': egt, + 'nst': nst, + 'ou_pct': ou_pct, + 
'run_pct': run_pct, + 'down': udt + sdt, + 'idle': sby + nst, + 'eng': egt + }) + + # Sort by order (already sorted by sequence, but ensure consistency) + result.sort(key=lambda x: (x['order'], -x['total'])) + + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Workcenter cards query failed: {exc}") + return None + + +# ============================================================ +# Resource Detail with Job Info +# ============================================================ + +def query_resource_detail_with_job( + filters: Optional[Dict] = None, + limit: int = 200, + offset: int = 0 +) -> Tuple[Optional[pd.DataFrame], Optional[str]]: + """Query resource detail with JOB info for SDT/UDT drill-down. + + Field sources: + - PJ_LOTID: From DWH.DW_MES_RESOURCE.PJ_LOTID + - SYMPTOMCODENAME: From DWH.DW_MES_JOB via JOBID + - CAUSECODENAME: From DWH.DW_MES_JOB via JOBID + - DOWN_MINUTES: Calculated from MAX(LASTSTATUSCHANGEDATE) - resource's LASTSTATUSCHANGEDATE + + Args: + filters: Optional filter values + limit: Maximum rows to return + offset: Offset for pagination + + Returns: + Tuple of (DataFrame with detail records, max_status_time string) or (None, None) if query fails. 
+ """ + try: + days_back = get_days_back(filters) + + # Build exclusion filters using CommonFilters (legacy format for SQL placeholders) + location_filter = CommonFilters.build_location_filter_legacy( + excluded_locations=list(EXCLUDED_LOCATIONS) if EXCLUDED_LOCATIONS else None + ) + if location_filter: + location_filter = f"AND {location_filter.replace('LOCATIONNAME', 'r.LOCATIONNAME')}" + + asset_status_filter = CommonFilters.build_asset_status_filter_legacy( + excluded_statuses=list(EXCLUDED_ASSET_STATUSES) if EXCLUDED_ASSET_STATUSES else None + ) + if asset_status_filter: + asset_status_filter = f"AND {asset_status_filter.replace('PJ_ASSETSSTATUS', 'r.PJ_ASSETSSTATUS')}" + + # Build filter conditions using QueryBuilder for safety + builder = QueryBuilder() + if filters: + # Support workcenter group filter + if filters.get('workcenter'): + wc_filter = filters['workcenter'] + # Check if it's a merged group + if wc_filter in WORKCENTER_GROUPS: + patterns = WORKCENTER_GROUPS[wc_filter]['patterns'] + # Use parameterized OR LIKE conditions (safe escaping) + builder.add_or_like_conditions( + 'rs.WORKCENTERNAME', + patterns, + case_insensitive=True, + ) + else: + builder.add_param_condition('rs.WORKCENTERNAME', wc_filter) + + if filters.get('original_wcs'): + # If original workcenter list provided, use IN query + builder.add_in_condition('rs.WORKCENTERNAME', list(filters['original_wcs'])) + + if filters.get('status'): + builder.add_param_condition('rs.NEWSTATUSNAME', filters['status']) + + # Equipment flag filters (safe - boolean values) + if filters.get('isProduction'): + builder.add_condition("NVL(rs.PJ_ISPRODUCTION, 0) = 1") + if filters.get('isKey'): + builder.add_condition("NVL(rs.PJ_ISKEY, 0) = 1") + if filters.get('isMonitor'): + builder.add_condition("NVL(rs.PJ_ISMONITOR, 0) = 1") + + # Multi-select location filter (parameterized) + if filters.get('locations') and len(filters['locations']) > 0: + builder.add_in_condition('rs.LOCATIONNAME', 
list(filters['locations'])) + + # Multi-select asset status filter (parameterized) + if filters.get('assetsStatuses') and len(filters['assetsStatuses']) > 0: + builder.add_in_condition('rs.PJ_ASSETSSTATUS', list(filters['assetsStatuses'])) + + # Default to showing only DOWN status (UDT, SDT) + builder.add_in_condition('rs.NEWSTATUSNAME', ['UDT', 'SDT']) + + conditions_sql = builder.get_conditions_sql() + params = builder.params.copy() + where_clause = conditions_sql if conditions_sql else "1=1" + + # Add pagination parameters + start_row = offset + 1 + end_row = offset + limit + params['start_row'] = start_row + params['end_row'] = end_row + + # Load SQL from file and replace placeholders + sql = SQLLoader.load("dashboard/resource_detail_with_job") + sql = sql.replace("{{ DAYS_BACK }}", str(days_back)) + sql = sql.replace("{{ LOCATION_FILTER }}", location_filter if location_filter else "") + sql = sql.replace("{{ ASSET_STATUS_FILTER }}", asset_status_filter if asset_status_filter else "") + sql = sql.replace("{{ WHERE_CLAUSE }}", where_clause) + df = read_sql_df(sql, params) + + # Get max_status_time for Last Update display + max_status_time = None + if 'MAX_STATUS_TIME' in df.columns and len(df) > 0: + max_status_time = df['MAX_STATUS_TIME'].iloc[0] + if pd.notna(max_status_time): + max_status_time = max_status_time.strftime('%Y-%m-%d %H:%M:%S') + + # Convert datetime columns + datetime_cols = ['LASTSTATUSCHANGEDATE', 'JOB_CREATEDATE', 'FIRSTCLOCKONDATE', 'MAX_STATUS_TIME'] + for col in datetime_cols: + if col in df.columns: + df[col] = df[col].apply( + lambda x: x.strftime('%Y-%m-%d %H:%M:%S') if pd.notna(x) else None + ) + + return df, max_status_time + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Detail query failed: {exc}") + return None, None + + +# ============================================================ +# OU Trend +# ============================================================ + +def 
def query_ou_trend(days: int = 7, filters: Optional[Dict] = None) -> Optional[List[Dict]]:
    """Query OU% trend by date using RESOURCESTATUS_SHIFT table.

    Uses the HOURS field to calculate actual time-based OU%:
    OU% = PRD_HOURS / (PRD + SBY + EGT + SDT + UDT) * 100

    Args:
        days: Number of days to query (default 7)
        filters: Optional filters (isProduction, isKey, isMonitor)

    Returns:
        List of {date, ou_pct, prd_hours, total_hours} records,
        or None if the query fails.
    """
    try:
        # Exclusion filters (legacy placeholder format), re-scoped to alias ss.
        loc_clause = CommonFilters.build_location_filter_legacy(
            excluded_locations=list(EXCLUDED_LOCATIONS) if EXCLUDED_LOCATIONS else None
        )
        if loc_clause:
            loc_clause = f"AND {loc_clause.replace('LOCATIONNAME', 'ss.LOCATIONNAME')}"

        asset_clause = CommonFilters.build_asset_status_filter_legacy(
            excluded_statuses=list(EXCLUDED_ASSET_STATUSES) if EXCLUDED_ASSET_STATUSES else None
        )
        if asset_clause:
            asset_clause = f"AND {asset_clause.replace('PJ_ASSETSSTATUS', 'ss.PJ_ASSETSSTATUS')}"

        # Equipment flag filters are boolean-valued and safe to inline.
        flag_clauses = []
        if filters:
            if filters.get('isProduction'):
                flag_clauses.append("r.PJ_ISPRODUCTION = 1")
            if filters.get('isKey'):
                flag_clauses.append("r.PJ_ISKEY = 1")
            if filters.get('isMonitor'):
                flag_clauses.append("r.PJ_ISMONITOR = 1")
        flag_filter = f"AND {' AND '.join(flag_clauses)}" if flag_clauses else ""

        # Load SQL from file and substitute the template placeholders.
        sql = SQLLoader.load("dashboard/ou_trend")
        sql = sql.replace("{{ LOCATION_FILTER }}", loc_clause if loc_clause else "")
        sql = sql.replace("{{ ASSET_STATUS_FILTER }}", asset_clause if asset_clause else "")
        sql = sql.replace("{{ FLAG_FILTER }}", flag_filter)

        df = read_sql_df(sql, {'days': days})

        trend = []
        for _, row in df.iterrows():
            hours = {
                key: float(row[f'{key}_HOURS'] or 0)
                for key in ('PRD', 'SBY', 'UDT', 'SDT', 'EGT')
            }

            # OU% denominator: PRD + SBY + EGT + SDT + UDT (NST excluded on purpose).
            denominator = sum(hours.values())
            ou_pct = round((hours['PRD'] / denominator * 100), 2) if denominator > 0 else 0

            trend.append({
                'date': row['DATA_DATE'].strftime('%Y-%m-%d') if pd.notna(row['DATA_DATE']) else None,
                'ou_pct': ou_pct,
                'prd_hours': round(hours['PRD'], 1),
                'sby_hours': round(hours['SBY'], 1),
                'udt_hours': round(hours['UDT'], 1),
                'sdt_hours': round(hours['SDT'], 1),
                'egt_hours': round(hours['EGT'], 1),
                'total_hours': round(float(row['TOTAL_HOURS'] or 0), 1)
            })

        return trend
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"OU trend query failed: {exc}", exc_info=True)
        return None


# ============================================================
# Utilization Heatmap
# ============================================================
+ """ + try: + # Build exclusion filters using CommonFilters (legacy format for SQL placeholders) + location_filter = CommonFilters.build_location_filter_legacy( + excluded_locations=list(EXCLUDED_LOCATIONS) if EXCLUDED_LOCATIONS else None + ) + if location_filter: + location_filter = f"AND {location_filter.replace('LOCATIONNAME', 'r.LOCATIONNAME')}" + else: + location_filter = "" + + asset_status_filter = CommonFilters.build_asset_status_filter_legacy( + excluded_statuses=list(EXCLUDED_ASSET_STATUSES) if EXCLUDED_ASSET_STATUSES else None + ) + if asset_status_filter: + asset_status_filter = f"AND {asset_status_filter.replace('PJ_ASSETSSTATUS', 'r.PJ_ASSETSSTATUS')}" + else: + asset_status_filter = "" + + # Build filter conditions for equipment flags (safe - boolean values) + flag_conditions = [] + if filters: + if filters.get('isProduction'): + flag_conditions.append("r.PJ_ISPRODUCTION = 1") + if filters.get('isKey'): + flag_conditions.append("r.PJ_ISKEY = 1") + if filters.get('isMonitor'): + flag_conditions.append("r.PJ_ISMONITOR = 1") + + flag_filter = "" + if flag_conditions: + flag_filter = "AND " + " AND ".join(flag_conditions) + + # Load SQL from file and replace placeholders + sql = SQLLoader.load("dashboard/heatmap") + sql = sql.replace("{{ LOCATION_FILTER }}", location_filter) + sql = sql.replace("{{ ASSET_STATUS_FILTER }}", asset_status_filter) + sql = sql.replace("{{ FLAG_FILTER }}", flag_filter) + + df = read_sql_df(sql, {'days': days}) + + # Group by workcenter for heatmap format + result = [] + for _, row in df.iterrows(): + prd = float(row['PRD_HOURS'] or 0) + avail = float(row['AVAIL_HOURS'] or 0) + prd_pct = round((prd / avail * 100), 2) if avail > 0 else 0 + + wc_name = row['WORKCENTERNAME'] + # Apply workcenter grouping + group_name, _ = get_workcenter_group(wc_name) + + result.append({ + 'workcenter': wc_name, + 'group': group_name, + 'date': row['DATA_DATE'].strftime('%Y-%m-%d') if pd.notna(row['DATA_DATE']) else None, + 'prd_pct': prd_pct, + 
'prd_hours': round(prd, 1), + 'avail_hours': round(avail, 1) + }) + + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Utilization heatmap query failed: {exc}", exc_info=True) + return None diff --git a/src/mes_dashboard/services/excel_query_service.py b/src/mes_dashboard/services/excel_query_service.py new file mode 100644 index 0000000..24fcd22 --- /dev/null +++ b/src/mes_dashboard/services/excel_query_service.py @@ -0,0 +1,557 @@ +# -*- coding: utf-8 -*- +"""Excel batch query service for MES Dashboard. + +Provides Excel parsing, batch query execution, and CSV export functions. +Supports large datasets (7000+ rows) by splitting queries into batches. +""" + +import re +from datetime import datetime +from typing import Any, Dict, List, Tuple + +import pandas as pd + +from mes_dashboard.core.database import get_db_connection + + +# Oracle IN clause limit +BATCH_SIZE = 1000 + +# LIKE query keyword limit +LIKE_KEYWORD_LIMIT = 100 + +# Large table threshold for performance warning (10 million rows) +LARGE_TABLE_THRESHOLD = 10_000_000 + + +def parse_excel(file_storage) -> Dict[str, Any]: + """Parse uploaded Excel file and return column info. + + Args: + file_storage: Flask FileStorage object + + Returns: + Dict with 'columns' list and 'preview' data, or 'error' if failed. + """ + try: + df = pd.read_excel(file_storage) + columns = [str(col) for col in df.columns.tolist()] + preview_df = df.head(5).copy() + preview_df.columns = columns + preview = preview_df.to_dict('records') + + return { + 'columns': columns, + 'preview': preview, + 'total_rows': len(df) + } + except Exception as exc: + return {'error': f'Excel 解析失敗: {str(exc)}'} + + +def get_column_unique_values(file_storage, column_name: str) -> Dict[str, Any]: + """Get unique values from a specific Excel column. 
def get_column_unique_values(file_storage, column_name: str) -> Dict[str, Any]:
    """Get unique values from a specific Excel column.

    Args:
        file_storage: Flask FileStorage object
        column_name: Name of the column to extract

    Returns:
        Dict with 'values' list and 'count', or 'error' if failed.
    """
    try:
        df = pd.read_excel(file_storage)
        df.columns = [str(col) for col in df.columns]

        if column_name not in df.columns:
            return {'error': f'欄位 {column_name} 不存在'}

        uniques = df[column_name].dropna().drop_duplicates()
        # Stringify, trim, and drop values that become empty after trimming.
        cleaned = [str(v).strip() for v in uniques.tolist() if str(v).strip()]

        return {'values': cleaned, 'count': len(cleaned)}
    except Exception as exc:
        return {'error': f'讀取欄位失敗: {str(exc)}'}


def detect_excel_column_type(values: List[str]) -> Dict[str, Any]:
    """Detect the data type of Excel column values.

    A type wins when at least 80% of the sampled values match its pattern;
    otherwise the column is treated as free text.

    Args:
        values: List of string values from Excel column

    Returns:
        Dict with:
        - detected_type: 'text', 'number', 'date', 'datetime', or 'id'
        - type_label: Display label in Chinese
        - sample_values: First 5 sample values
    """
    # Sample the first 100 non-empty values for analysis.
    sample = [str(v).strip() for v in values[:100] if str(v).strip()] if values else []
    if not sample:
        return {'detected_type': 'text', 'type_label': '文字', 'sample_values': []}

    # Regex patterns for type detection.
    date_re = re.compile(r'^\d{4}[-/]\d{1,2}[-/]\d{1,2}$')
    datetime_re = re.compile(r'^\d{4}[-/]\d{1,2}[-/]\d{1,2}[T ]\d{1,2}:\d{2}')
    number_re = re.compile(r'^-?\d+\.?\d*$')
    # NOTE: IGNORECASE, so lowercase alphanumeric codes count as IDs too.
    id_re = re.compile(r'^[A-Z0-9_-]+$', re.IGNORECASE)

    # First matching pattern wins for each value; plain text is the fallback.
    matchers = (
        ('datetime', lambda v: datetime_re.match(v) is not None),
        ('date', lambda v: date_re.match(v) is not None),
        ('number', lambda v: number_re.match(v) is not None),
        # ID pattern: alphanumeric with -/_ and at least 3 chars.
        ('id', lambda v: id_re.match(v) is not None and len(v) >= 3),
    )
    counts = {'datetime': 0, 'date': 0, 'number': 0, 'id': 0, 'text': 0}
    for val in sample:
        for type_name, matches in matchers:
            if matches(val):
                counts[type_name] += 1
                break
        else:
            counts['text'] += 1

    # A type must cover at least 80% of the sample to be selected.
    threshold = len(sample) * 0.8
    labels = {'datetime': '日期時間', 'date': '日期', 'number': '數值', 'id': '識別碼'}
    detected_type, type_label = 'text', '文字'
    for type_name in ('datetime', 'date', 'number', 'id'):
        if counts[type_name] >= threshold:
            detected_type, type_label = type_name, labels[type_name]
            break

    return {
        'detected_type': detected_type,
        'type_label': type_label,
        'sample_values': sample[:5]
    }


def sanitize_column_name(name: str) -> str:
    """Strip every character that is not alphanumeric or underscore."""
    return re.sub(r'[^a-zA-Z0-9_]', '', name)


def validate_table_name(table_name: str) -> bool:
    """Validate table name format (supports schema.table format)."""
    return bool(re.match(r'^[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?$', table_name))


def escape_like_pattern(value: str) -> str:
    """Escape special characters in a LIKE pattern.

    Oracle LIKE special characters: % (any chars), _ (single char).
    These are escaped with backslash for literal matching; backslash itself
    is doubled because it serves as the ESCAPE character.

    Args:
        value: Raw search value

    Returns:
        Escaped value safe for a LIKE pattern with ESCAPE '\\'.
    """
    # Single-pass translation; equivalent to replacing backslash first,
    # then % and _.
    return value.translate(str.maketrans({'\\': '\\\\', '%': '\\%', '_': '\\_'}))
def build_like_condition(
    column: str,
    values: List[str],
    mode: str = 'contains'
) -> Tuple[str, Dict[str, str]]:
    """Build LIKE query condition with multiple OR clauses.

    Args:
        column: Column name to search (must be sanitized)
        values: List of search keywords
        mode: 'contains' (%val%), 'prefix' (val%), or 'suffix' (%val)

    Returns:
        Tuple of (WHERE clause string, params dict)
    """
    if not values:
        return '', {}

    conditions = []
    params = {}

    for i, val in enumerate(values):
        param_name = f'like_{i}'
        escaped_val = escape_like_pattern(val)

        if mode == 'prefix':
            params[param_name] = f'{escaped_val}%'
        elif mode == 'suffix':
            params[param_name] = f'%{escaped_val}'
        else:
            # 'contains' and any unknown mode both mean substring match.
            params[param_name] = f'%{escaped_val}%'

        conditions.append(f"{column} LIKE :{param_name} ESCAPE '\\'")

    return f"({' OR '.join(conditions)})", params


def build_date_range_condition(
    column: str,
    date_from: str = None,
    date_to: str = None
) -> Tuple[str, Dict[str, str]]:
    """Build date range condition for Oracle.

    Args:
        column: Date column name (must be sanitized)
        date_from: Start date in YYYY-MM-DD format, or None
        date_to: End date in YYYY-MM-DD format, or None

    Returns:
        Tuple of (WHERE clause string, params dict); ('', {}) when neither
        bound is given.
    """
    conditions = []
    params = {}

    if date_from:
        conditions.append(f"{column} >= TO_DATE(:date_from, 'YYYY-MM-DD')")
        params['date_from'] = date_from

    if date_to:
        # Use a strict < on (date_to + 1 day) so the entire end date is included.
        conditions.append(f"{column} < TO_DATE(:date_to, 'YYYY-MM-DD') + 1")
        params['date_to'] = date_to

    if not conditions:
        return '', {}

    return ' AND '.join(conditions), params


def validate_like_keywords(values: List[str]) -> Dict[str, Any]:
    """Validate LIKE query keyword count.

    Args:
        values: List of search keywords

    Returns:
        Dict with 'valid' boolean and optional 'error' message
    """
    if len(values) > LIKE_KEYWORD_LIMIT:
        return {
            'valid': False,
            'error': f'LIKE 查詢最多支援 {LIKE_KEYWORD_LIMIT} 個關鍵字,目前有 {len(values)} 個'
        }
    return {'valid': True}


def execute_batch_query(
    table_name: str,
    search_column: str,
    return_columns: List[str],
    search_values: List[str]
) -> Dict[str, Any]:
    """Execute batch query with IN clause, splitting into batches for large datasets.

    Handles Oracle's 1000-value limit by executing multiple queries and
    merging results.

    Args:
        table_name: Target table name
        search_column: Column to search (WHERE ... IN)
        return_columns: Columns to return in SELECT
        search_values: Values to search for (can be 7000+)

    Returns:
        Dict with 'columns', 'data', 'row_count', or 'error' if failed.
    """
    # Identifier validation: table/column names cannot be bound parameters,
    # so they are validated/sanitized before being interpolated into SQL.
    if not validate_table_name(table_name):
        return {'error': f'無效的資料表名稱: {table_name}'}

    safe_search_col = sanitize_column_name(search_column)
    safe_return_cols = [sanitize_column_name(col) for col in return_columns]

    if not safe_search_col:
        return {'error': '查詢欄位名稱無效'}
    if not safe_return_cols:
        return {'error': '回傳欄位名稱無效'}

    connection = get_db_connection()
    if not connection:
        return {'error': '資料庫連接失敗'}

    cursor = None
    try:
        cursor = connection.cursor()
        all_data = []
        columns = None
        columns_str = ', '.join(safe_return_cols)

        # Number of IN-clause batches (reported back for progress info).
        total_batches = (len(search_values) + BATCH_SIZE - 1) // BATCH_SIZE

        # Process in batches of at most BATCH_SIZE values.
        for batch_idx in range(0, len(search_values), BATCH_SIZE):
            batch_values = search_values[batch_idx:batch_idx + BATCH_SIZE]

            # Fully parameterized IN list for this batch.
            placeholders = ', '.join([f':v{j}' for j in range(len(batch_values))])
            params = {f'v{j}': str(v) for j, v in enumerate(batch_values)}

            sql = f"""
                SELECT {columns_str}
                FROM {table_name}
                WHERE {safe_search_col} IN ({placeholders})
            """
            cursor.execute(sql, params)

            # Get column names from the first batch only.
            if columns is None:
                columns = [desc[0] for desc in cursor.description]

            for row in cursor.fetchall():
                row_dict = {}
                for i, col in enumerate(columns):
                    value = row[i]
                    # Normalize datetimes so the payload is JSON-serializable.
                    row_dict[col] = (
                        value.strftime('%Y-%m-%d %H:%M:%S')
                        if isinstance(value, datetime) else value
                    )
                all_data.append(row_dict)

        return {
            'columns': columns or safe_return_cols,
            'data': all_data,
            'row_count': len(all_data),
            'search_count': len(search_values),
            'batch_count': total_batches
        }
    except Exception as exc:
        return {'error': f'查詢失敗: {str(exc)}'}
    finally:
        # Bugfix: always release cursor and connection, even on error
        # (the previous version leaked the cursor when an exception occurred).
        if cursor is not None:
            try:
                cursor.close()
            except Exception:
                pass
        try:
            connection.close()
        except Exception:
            pass
def execute_advanced_batch_query(
    table_name: str,
    search_column: str,
    return_columns: List[str],
    search_values: List[str],
    query_type: str = 'in',
    date_column: str = None,
    date_from: str = None,
    date_to: str = None
) -> Dict[str, Any]:
    """Execute advanced batch query with multiple condition types.

    Args:
        table_name: Target table name
        search_column: Column to search
        return_columns: Columns to return in SELECT
        search_values: Values to search for
        query_type: 'in', 'like_contains', 'like_prefix', or 'like_suffix'
        date_column: Optional date column for range filter
        date_from: Optional start date (YYYY-MM-DD)
        date_to: Optional end date (YYYY-MM-DD)

    Returns:
        Dict with 'columns', 'data', 'row_count', or 'error' if failed.
    """
    # Identifier validation: names cannot be bound parameters, so they are
    # validated/sanitized before being interpolated into SQL.
    if not validate_table_name(table_name):
        return {'error': f'無效的資料表名稱: {table_name}'}

    safe_search_col = sanitize_column_name(search_column)
    safe_return_cols = [sanitize_column_name(col) for col in return_columns]

    if not safe_search_col:
        return {'error': '查詢欄位名稱無效'}
    if not safe_return_cols:
        return {'error': '回傳欄位名稱無效'}

    # LIKE queries are capped at LIKE_KEYWORD_LIMIT keywords.
    if query_type.startswith('like_'):
        validation = validate_like_keywords(search_values)
        if not validation['valid']:
            return {'error': validation['error']}

    connection = get_db_connection()
    if not connection:
        return {'error': '資料庫連接失敗'}

    def _append_rows(cur, cols, sink):
        # Convert fetched rows into JSON-safe dicts (datetimes -> strings).
        for row in cur.fetchall():
            entry = {}
            for i, col in enumerate(cols):
                value = row[i]
                entry[col] = (
                    value.strftime('%Y-%m-%d %H:%M:%S')
                    if isinstance(value, datetime) else value
                )
            sink.append(entry)

    cursor = None
    try:
        cursor = connection.cursor()
        all_data = []
        columns = None
        columns_str = ', '.join(safe_return_cols)

        # Optional date range condition, shared by all query types.
        date_condition = ''
        date_params = {}
        if date_column:
            safe_date_col = sanitize_column_name(date_column)
            if safe_date_col:
                date_condition, date_params = build_date_range_condition(
                    safe_date_col, date_from, date_to
                )

        if query_type == 'in':
            # IN clause with batching (Oracle limits IN lists to 1000 values).
            total_batches = (len(search_values) + BATCH_SIZE - 1) // BATCH_SIZE

            for batch_idx in range(0, len(search_values), BATCH_SIZE):
                batch_values = search_values[batch_idx:batch_idx + BATCH_SIZE]
                placeholders = ', '.join([f':v{j}' for j in range(len(batch_values))])
                params = {f'v{j}': str(v) for j, v in enumerate(batch_values)}
                params.update(date_params)

                where_parts = [f'{safe_search_col} IN ({placeholders})']
                if date_condition:
                    where_parts.append(date_condition)

                sql = f"""
                    SELECT {columns_str}
                    FROM {table_name}
                    WHERE {' AND '.join(where_parts)}
                """
                cursor.execute(sql, params)

                if columns is None:
                    columns = [desc[0] for desc in cursor.description]

                _append_rows(cursor, columns, all_data)
        else:
            # LIKE query — one round trip (already limited to 100 keywords).
            mode_map = {
                'like_contains': 'contains',
                'like_prefix': 'prefix',
                'like_suffix': 'suffix'
            }
            mode = mode_map.get(query_type, 'contains')
            like_condition, like_params = build_like_condition(
                safe_search_col, search_values, mode
            )

            params = {**like_params, **date_params}

            where_parts = [like_condition]
            if date_condition:
                where_parts.append(date_condition)

            sql = f"""
                SELECT {columns_str}
                FROM {table_name}
                WHERE {' AND '.join(where_parts)}
            """
            cursor.execute(sql, params)
            columns = [desc[0] for desc in cursor.description]

            _append_rows(cursor, columns, all_data)

            total_batches = 1

        return {
            'columns': columns or safe_return_cols,
            'data': all_data,
            'row_count': len(all_data),
            'search_count': len(search_values),
            'batch_count': total_batches,
            'query_type': query_type
        }
    except Exception as exc:
        return {'error': f'查詢失敗: {str(exc)}'}
    finally:
        # Bugfix: always release cursor and connection, even on error
        # (the previous version leaked the cursor when an exception occurred).
        if cursor is not None:
            try:
                cursor.close()
            except Exception:
                pass
        try:
            connection.close()
        except Exception:
            pass
+ + Args: + data: List of row dictionaries + columns: Column names for header + + Returns: + CSV content as string (UTF-8 with BOM for Excel compatibility) + """ + import csv + import io + + output = io.StringIO() + # Add BOM for Excel UTF-8 compatibility + output.write('\ufeff') + + writer = csv.DictWriter(output, fieldnames=columns, extrasaction='ignore') + writer.writeheader() + writer.writerows(data) + + return output.getvalue() diff --git a/src/mes_dashboard/services/filter_cache.py b/src/mes_dashboard/services/filter_cache.py new file mode 100644 index 0000000..d5dbba2 --- /dev/null +++ b/src/mes_dashboard/services/filter_cache.py @@ -0,0 +1,397 @@ +# -*- coding: utf-8 -*- +"""Cached filter options for MES Dashboard. + +Provides cached workcenter groups and resource families for filter dropdowns. +Data is loaded from database and cached in memory with periodic refresh. +""" + +import logging +import threading +from datetime import datetime, timedelta +from typing import Optional, Dict, List, Any + +from mes_dashboard.core.database import read_sql_df + +logger = logging.getLogger('mes_dashboard.filter_cache') + +# ============================================================ +# Cache Configuration +# ============================================================ + +CACHE_TTL_SECONDS = 3600 # 1 hour cache TTL +WIP_VIEW = "DWH.DW_MES_LOT_V" +SPEC_WORKCENTER_VIEW = "DWH.DW_MES_SPEC_WORKCENTER_V" + +# ============================================================ +# Cache Storage +# ============================================================ + +_CACHE = { + 'workcenter_groups': None, # List of {name, sequence} + 'workcenter_mapping': None, # Dict {workcentername: {group, sequence}} + 'workcenter_to_short': None, # Dict {workcentername: short_name} + 'last_refresh': None, + 'is_loading': False, +} + +_CACHE_LOCK = threading.Lock() + + +# ============================================================ +# Workcenter Group Functions +# 
# ============================================================
# Workcenter Group Functions
# ============================================================

def get_workcenter_groups(force_refresh: bool = False) -> Optional[List[Dict[str, Any]]]:
    """Get list of workcenter groups with sequence order.

    Returns:
        List of {name, sequence} sorted by sequence, or None if loading fails.
    """
    _ensure_cache_loaded(force_refresh)
    return _CACHE.get('workcenter_groups')


def get_workcenter_mapping(force_refresh: bool = False) -> Optional[Dict[str, Dict[str, Any]]]:
    """Get workcenter name to group mapping.

    Returns:
        Dict mapping workcentername to {group, sequence}, or None if loading fails.
    """
    _ensure_cache_loaded(force_refresh)
    return _CACHE.get('workcenter_mapping')


def get_workcenters_for_groups(groups: List[str]) -> List[str]:
    """Get list of workcenter names that belong to specified groups.

    Args:
        groups: List of WORKCENTER_GROUP names

    Returns:
        List of WORKCENTERNAME values belonging to those groups
    """
    mapping = get_workcenter_mapping()
    if not mapping:
        return []

    wanted = set(groups)
    return [name for name, info in mapping.items() if info.get('group') in wanted]


def get_workcenter_group(workcenter_name: str) -> Optional[str]:
    """Get workcenter group for a workcenter name.

    Args:
        workcenter_name: The workcenter name to look up.

    Returns:
        The WORK_CENTER_GROUP, or None if not found.
    """
    mapping = get_workcenter_mapping()
    if not mapping or workcenter_name not in mapping:
        return None
    return mapping[workcenter_name].get('group')


def get_workcenter_group_sequence(workcenter_name: str) -> Optional[int]:
    """Get workcenter group sequence for a workcenter name.

    Args:
        workcenter_name: The workcenter name to look up.

    Returns:
        The WORKCENTERSEQUENCE_GROUP, or None if not found.
    """
    mapping = get_workcenter_mapping()
    if not mapping or workcenter_name not in mapping:
        return None
    return mapping[workcenter_name].get('sequence')


def get_workcenter_short(workcenter_name: str) -> Optional[str]:
    """Get workcenter short name for a workcenter name.

    Args:
        workcenter_name: The workcenter name to look up.

    Returns:
        The WORK_CENTER_SHORT (e.g., DB, WB, Mold), or None if not found.
    """
    _ensure_cache_loaded()
    short_mapping = _CACHE.get('workcenter_to_short')
    if not short_mapping or workcenter_name not in short_mapping:
        return None
    return short_mapping.get(workcenter_name)


def get_workcenters_by_group(group_name: str) -> List[str]:
    """Get all workcenter names that belong to a specific group.

    Args:
        group_name: The WORKCENTER_GROUP name.

    Returns:
        List of workcenter names in that group.
    """
    mapping = get_workcenter_mapping()
    if not mapping:
        return []
    return [name for name, info in mapping.items() if info.get('group') == group_name]


# ============================================================
# Cache Management
# ============================================================

def get_cache_status() -> Dict[str, Any]:
    """Get current cache status.

    Returns:
        Dict with cache status information
    """
    with _CACHE_LOCK:
        last_refresh = _CACHE.get('last_refresh')
        return {
            'loaded': last_refresh is not None,
            'last_refresh': last_refresh.isoformat() if last_refresh else None,
            'is_loading': _CACHE.get('is_loading', False),
            'workcenter_groups_count': len(_CACHE.get('workcenter_groups') or []),
            'workcenter_mapping_count': len(_CACHE.get('workcenter_mapping') or {}),
        }


def refresh_cache() -> bool:
    """Force refresh the cache.

    Returns:
        True if refresh succeeded, False otherwise
    """
    return _load_cache()


def _ensure_cache_loaded(force_refresh: bool = False):
    """Ensure the cache is loaded and not past its TTL."""
    with _CACHE_LOCK:
        now = datetime.now()
        last_refresh = _CACHE.get('last_refresh')
        is_loading = _CACHE.get('is_loading', False)

        cache_valid = (
            last_refresh is not None
            and (now - last_refresh).total_seconds() < CACHE_TTL_SECONDS
        )
        if cache_valid and not force_refresh:
            return
        if is_loading:
            # Another thread is already refreshing; serve stale data meanwhile.
            return

    # Load outside the lock: _load_cache() re-acquires _CACHE_LOCK and a
    # plain threading.Lock is not reentrant.
    _load_cache()


def _load_cache() -> bool:
    """Load all cache data from database.

    Returns:
        True if loading succeeded, False otherwise
    """
    with _CACHE_LOCK:
        if _CACHE.get('is_loading'):
            return False
        _CACHE['is_loading'] = True

    try:
        # Workcenter groups: SPEC_WORKCENTER_V preferred, WIP as fallback.
        wc_groups, wc_mapping, wc_short = _load_workcenter_data()

        with _CACHE_LOCK:
            _CACHE['workcenter_groups'] = wc_groups
            _CACHE['workcenter_mapping'] = wc_mapping
            _CACHE['workcenter_to_short'] = wc_short
            _CACHE['last_refresh'] = datetime.now()
            _CACHE['is_loading'] = False

        logger.info(
            f"Filter cache refreshed: {len(wc_groups or [])} groups, "
            f"{len(wc_mapping or {})} workcenters"
        )
        return True

    except Exception as exc:
        logger.error(f"Failed to load filter cache: {exc}")
        with _CACHE_LOCK:
            _CACHE['is_loading'] = False
        return False
+ + Returns: + Tuple of (groups_list, mapping_dict, short_mapping_dict) + """ + # Try to load from SPEC_WORKCENTER_V first (authoritative source) + result = _load_workcenter_mapping_from_spec() + if result[0]: # If groups are loaded + logger.debug("Loaded workcenter groups from SPEC_WORKCENTER_V") + return result + + # Fallback to WIP cache + logger.warning("Falling back to WIP source for workcenter groups") + try: + from mes_dashboard.core.cache import get_cached_wip_data + + df = get_cached_wip_data() + if df is not None and not df.empty: + logger.debug("Loading workcenter groups from WIP cache") + groups, mapping = _extract_workcenter_data_from_df(df) + return groups, mapping, {} + except Exception as exc: + logger.warning(f"Failed to load from WIP cache: {exc}") + + # Fallback to Oracle WIP view direct query + logger.debug("Falling back to Oracle WIP view for workcenter groups") + try: + sql = f""" + SELECT DISTINCT + WORKCENTERNAME, + WORKCENTERID, + WORKCENTER_GROUP, + WORKCENTERSEQUENCE_GROUP + FROM {WIP_VIEW} + WHERE WORKCENTER_GROUP IS NOT NULL + AND WORKCENTERNAME IS NOT NULL + """ + df = read_sql_df(sql) + + if df is None or df.empty: + logger.warning("No workcenter data found in DWH.DW_MES_LOT_V") + return [], {}, {} + + groups, mapping = _extract_workcenter_data_from_df(df) + return groups, mapping, {} + + except Exception as exc: + logger.error(f"Failed to load workcenter data: {exc}") + return [], {}, {} + + +def _load_workcenter_mapping_from_spec(): + """Load workcenter mapping from DW_MES_SPEC_WORKCENTER_V. + + This is the authoritative source for workcenter -> group mapping. 
+ + Returns: + Tuple of (groups_list, mapping_dict, short_mapping_dict) + """ + try: + sql = f""" + SELECT DISTINCT + WORK_CENTER, + WORK_CENTER_GROUP, + WORKCENTERSEQUENCE_GROUP, + WORK_CENTER_SHORT + FROM {SPEC_WORKCENTER_VIEW} + WHERE WORK_CENTER IS NOT NULL + """ + df = read_sql_df(sql) + + if df is None or df.empty: + logger.warning("No data found in SPEC_WORKCENTER_V") + return [], {}, {} + + # Build groups list (unique groups, take minimum sequence for each group) + groups_df = df.groupby('WORK_CENTER_GROUP')['WORKCENTERSEQUENCE_GROUP'].min().reset_index() + groups_df = groups_df.sort_values('WORKCENTERSEQUENCE_GROUP') + + groups = [] + for _, row in groups_df.iterrows(): + group_name = row['WORK_CENTER_GROUP'] + if group_name: + groups.append({ + 'name': group_name, + 'sequence': int(row['WORKCENTERSEQUENCE_GROUP'] or 999) + }) + + # Build mapping dict (WORK_CENTER -> group info) + mapping = {} + short_mapping = {} + for _, row in df.iterrows(): + wc_name = row['WORK_CENTER'] + if wc_name: + mapping[wc_name] = { + 'group': row['WORK_CENTER_GROUP'], + 'sequence': int(row['WORKCENTERSEQUENCE_GROUP'] or 999) + } + if row.get('WORK_CENTER_SHORT'): + short_mapping[wc_name] = row['WORK_CENTER_SHORT'] + + logger.info(f"Loaded {len(mapping)} workcenters from SPEC_WORKCENTER_V") + return groups, mapping, short_mapping + + except Exception as exc: + logger.error(f"Failed to load from SPEC_WORKCENTER_V: {exc}") + return [], {}, {} + + +def _extract_workcenter_data_from_df(df): + """Extract workcenter groups and mapping from DataFrame. 
+ + Args: + df: DataFrame with WORKCENTERNAME, WORKCENTER_GROUP, WORKCENTERSEQUENCE_GROUP columns + + Returns: + Tuple of (groups_list, mapping_dict) + """ + # Filter to rows with valid workcenter group + df = df[df['WORKCENTER_GROUP'].notna() & df['WORKCENTERNAME'].notna()] + + if df.empty: + return [], {} + + # Build groups list (unique groups, take minimum sequence for each group) + groups_df = df.groupby('WORKCENTER_GROUP')['WORKCENTERSEQUENCE_GROUP'].min().reset_index() + groups_df = groups_df.sort_values('WORKCENTERSEQUENCE_GROUP') + + groups = [] + for _, row in groups_df.iterrows(): + groups.append({ + 'name': row['WORKCENTER_GROUP'], + 'sequence': int(row['WORKCENTERSEQUENCE_GROUP'] or 999) + }) + + # Build mapping dict + mapping = {} + for _, row in df.iterrows(): + wc_name = row['WORKCENTERNAME'] + mapping[wc_name] = { + 'id': row.get('WORKCENTERID'), + 'group': row['WORKCENTER_GROUP'], + 'sequence': int(row['WORKCENTERSEQUENCE_GROUP'] or 999) + } + + return groups, mapping + + +# ============================================================ +# Initialization +# ============================================================ + +def init_cache(): + """Initialize the cache on application startup. + + Should be called during app initialization. + """ + logger.info("Initializing filter cache...") + _load_cache() diff --git a/src/mes_dashboard/services/job_query_service.py b/src/mes_dashboard/services/job_query_service.py new file mode 100644 index 0000000..3b7a340 --- /dev/null +++ b/src/mes_dashboard/services/job_query_service.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +"""Job Query Service. 
+ +Provides functions for querying maintenance job data: +- Job list by resource IDs +- Job transaction history detail +- CSV export with full history + +Architecture: +- Uses resource_cache as the source for equipment master data +- Queries DW_MES_JOB for job current status +- Queries DW_MES_JOBTXNHISTORY for transaction history +- Supports batching for large resource lists (Oracle IN clause limit) +""" + +import csv +import io +import logging +from datetime import datetime +from typing import Dict, List, Any, Optional, Generator + +import pandas as pd + +from mes_dashboard.core.database import read_sql_df, get_db_connection +from mes_dashboard.sql import SQLLoader +from mes_dashboard.config.field_contracts import get_export_headers, get_export_api_keys + +logger = logging.getLogger('mes_dashboard.job_query') + +# Constants +BATCH_SIZE = 1000 # Oracle IN clause limit +MAX_DATE_RANGE_DAYS = 365 + + +# ============================================================ +# Validation Functions +# ============================================================ + +def validate_date_range(start_date: str, end_date: str) -> Optional[str]: + """Validate date range. + + Args: + start_date: Start date in YYYY-MM-DD format + end_date: End date in YYYY-MM-DD format + + Returns: + Error message if validation fails, None if valid. + """ + try: + start = datetime.strptime(start_date, '%Y-%m-%d') + end = datetime.strptime(end_date, '%Y-%m-%d') + + if end < start: + return '結束日期不可早於起始日期' + + diff = (end - start).days + if diff > MAX_DATE_RANGE_DAYS: + return f'日期範圍不可超過 {MAX_DATE_RANGE_DAYS} 天' + + return None + except ValueError as e: + return f'日期格式錯誤: {e}' + + +# ============================================================ +# Resource Filter Helpers +# ============================================================ + +def _build_resource_filter(resource_ids: List[str], max_chunk_size: int = BATCH_SIZE) -> List[str]: + """Build SQL IN clause lists for resource IDs. 
+ + Oracle has a limit of ~1000 items per IN clause, so we chunk if needed. + + Args: + resource_ids: List of resource IDs. + max_chunk_size: Maximum items per IN clause. + + Returns: + List of SQL IN clause strings (e.g., "'ID1', 'ID2', 'ID3'"). + """ + if not resource_ids: + return [] + + # Escape single quotes + escaped_ids = [rid.replace("'", "''") for rid in resource_ids] + + # Chunk into groups + chunks = [] + for i in range(0, len(escaped_ids), max_chunk_size): + chunk = escaped_ids[i:i + max_chunk_size] + chunks.append("'" + "', '".join(chunk) + "'") + + return chunks + + +def _build_resource_filter_sql(resource_ids: List[str], column: str = 'j.RESOURCEID') -> str: + """Build SQL WHERE clause for resource ID filtering. + + Handles chunking for large resource lists. + + Args: + resource_ids: List of resource IDs. + column: Column name to filter on. + + Returns: + SQL condition string (e.g., "j.RESOURCEID IN ('ID1', 'ID2')"). + """ + chunks = _build_resource_filter(resource_ids) + if not chunks: + return "1=0" # No resources = no results + + if len(chunks) == 1: + return f"{column} IN ({chunks[0]})" + + # Multiple chunks need OR + conditions = [f"{column} IN ({chunk})" for chunk in chunks] + return "(" + " OR ".join(conditions) + ")" + + +# ============================================================ +# Query Functions +# ============================================================ + +def get_jobs_by_resources( + resource_ids: List[str], + start_date: str, + end_date: str +) -> Dict[str, Any]: + """Query jobs for selected resources within date range. + + Args: + resource_ids: List of RESOURCEID values to query + start_date: Start date in YYYY-MM-DD format + end_date: End date in YYYY-MM-DD format + + Returns: + Dict with 'data' (list of job records) and 'total' (count), + or 'error' if query fails. 
+ """ + # Validate inputs + if not resource_ids: + return {'error': '請選擇至少一台設備'} + + validation_error = validate_date_range(start_date, end_date) + if validation_error: + return {'error': validation_error} + + try: + # Build resource filter + resource_filter = _build_resource_filter_sql(resource_ids) + + # Load SQL template + sql = SQLLoader.load("job_query/job_list") + sql = sql.replace("{{ RESOURCE_FILTER }}", resource_filter) + + # Execute query + params = {'start_date': start_date, 'end_date': end_date} + df = read_sql_df(sql, params) + + # Convert to records + data = [] + for _, row in df.iterrows(): + record = {} + for col in df.columns: + value = row[col] + if pd.isna(value): + record[col] = None + elif isinstance(value, datetime): + record[col] = value.strftime('%Y-%m-%d %H:%M:%S') + else: + record[col] = value + data.append(record) + + logger.info(f"Job query returned {len(data)} records for {len(resource_ids)} resources") + + return { + 'data': data, + 'total': len(data), + 'resource_count': len(resource_ids) + } + + except Exception as exc: + logger.error(f"Job query failed: {exc}") + return {'error': f'查詢失敗: {str(exc)}'} + + +def get_job_txn_history(job_id: str) -> Dict[str, Any]: + """Query transaction history for a single job. + + Args: + job_id: The JOBID to query + + Returns: + Dict with 'data' (list of transaction records) and 'total' (count), + or 'error' if query fails. 
+ """ + if not job_id: + return {'error': '請指定工單 ID'} + + try: + # Load SQL template + sql = SQLLoader.load("job_query/job_txn_detail") + + # Execute query + params = {'job_id': job_id} + df = read_sql_df(sql, params) + + # Convert to records + data = [] + for _, row in df.iterrows(): + record = {} + for col in df.columns: + value = row[col] + if pd.isna(value): + record[col] = None + elif isinstance(value, datetime): + record[col] = value.strftime('%Y-%m-%d %H:%M:%S') + else: + record[col] = value + data.append(record) + + logger.debug(f"Transaction history query returned {len(data)} records for job {job_id}") + + return { + 'data': data, + 'total': len(data), + 'job_id': job_id + } + + except Exception as exc: + logger.error(f"Transaction history query failed for job {job_id}: {exc}") + return {'error': f'查詢失敗: {str(exc)}'} + + +# ============================================================ +# Export Functions +# ============================================================ + +def export_jobs_with_history( + resource_ids: List[str], + start_date: str, + end_date: str +) -> Generator[str, None, None]: + """Generate CSV content for jobs with full transaction history. + + Uses streaming to handle large datasets without memory issues. 
+ + Args: + resource_ids: List of RESOURCEID values to export + start_date: Start date in YYYY-MM-DD format + end_date: End date in YYYY-MM-DD format + + Yields: + CSV rows as strings (including header row first) + """ + # Validate inputs + if not resource_ids: + yield "Error: 請選擇至少一台設備\n" + return + + validation_error = validate_date_range(start_date, end_date) + if validation_error: + yield f"Error: {validation_error}\n" + return + + try: + # Build resource filter + resource_filter = _build_resource_filter_sql(resource_ids) + + # Load SQL template + sql = SQLLoader.load("job_query/job_txn_export") + sql = sql.replace("{{ RESOURCE_FILTER }}", resource_filter) + + # Execute query + params = {'start_date': start_date, 'end_date': end_date} + df = read_sql_df(sql, params) + + if df is None or len(df) == 0: + yield "Error: 無符合條件的資料\n" + return + + # Write CSV header with BOM for Excel UTF-8 compatibility + output = io.StringIO() + output.write('\ufeff') # UTF-8 BOM + + export_keys = get_export_api_keys('job_query') + headers = get_export_headers('job_query') + + if not export_keys or not headers or len(export_keys) != len(headers): + export_keys = [ + 'RESOURCENAME', 'JOBID', 'JOB_FINAL_STATUS', 'JOBMODELNAME', 'JOBORDERNAME', + 'JOB_CREATEDATE', 'JOB_COMPLETEDATE', 'JOB_CAUSECODENAME', 'JOB_REPAIRCODENAME', 'JOB_SYMPTOMCODENAME', + 'TXNDATE', 'FROMJOBSTATUS', 'TXN_JOBSTATUS', 'STAGENAME', + 'TXN_CAUSECODENAME', 'TXN_REPAIRCODENAME', 'TXN_SYMPTOMCODENAME', + 'USER_NAME', 'EMP_NAME', 'COMMENTS' + ] + headers = export_keys + + writer = csv.writer(output) + writer.writerow(headers) + yield output.getvalue() + output.truncate(0) + output.seek(0) + + # Write data rows + for _, row in df.iterrows(): + csv_row = [] + for col in export_keys: + value = row.get(col) + if pd.isna(value): + csv_row.append('') + elif isinstance(value, datetime): + csv_row.append(value.strftime('%Y-%m-%d %H:%M:%S')) + else: + csv_row.append(str(value)) + + writer.writerow(csv_row) + yield 
output.getvalue() + output.truncate(0) + output.seek(0) + + logger.info(f"CSV export completed: {len(df)} records") + + except Exception as exc: + logger.error(f"CSV export failed: {exc}") + yield f"Error: 匯出失敗 - {str(exc)}\n" + + +def get_export_data( + resource_ids: List[str], + start_date: str, + end_date: str +) -> Dict[str, Any]: + """Get export data as a dict (for non-streaming use cases). + + Args: + resource_ids: List of RESOURCEID values to export + start_date: Start date in YYYY-MM-DD format + end_date: End date in YYYY-MM-DD format + + Returns: + Dict with 'data', 'columns', 'total', or 'error' if query fails. + """ + # Validate inputs + if not resource_ids: + return {'error': '請選擇至少一台設備'} + + validation_error = validate_date_range(start_date, end_date) + if validation_error: + return {'error': validation_error} + + try: + # Build resource filter + resource_filter = _build_resource_filter_sql(resource_ids) + + # Load SQL template + sql = SQLLoader.load("job_query/job_txn_export") + sql = sql.replace("{{ RESOURCE_FILTER }}", resource_filter) + + # Execute query + params = {'start_date': start_date, 'end_date': end_date} + df = read_sql_df(sql, params) + + # Convert to records + data = [] + for _, row in df.iterrows(): + record = {} + for col in df.columns: + value = row[col] + if pd.isna(value): + record[col] = None + elif isinstance(value, datetime): + record[col] = value.strftime('%Y-%m-%d %H:%M:%S') + else: + record[col] = value + data.append(record) + + return { + 'data': data, + 'columns': list(df.columns), + 'total': len(data) + } + + except Exception as exc: + logger.error(f"Export data query failed: {exc}") + return {'error': f'查詢失敗: {str(exc)}'} diff --git a/src/mes_dashboard/services/page_registry.py b/src/mes_dashboard/services/page_registry.py new file mode 100644 index 0000000..91e7448 --- /dev/null +++ b/src/mes_dashboard/services/page_registry.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +"""Page registry service for managing page access 
status.""" + +from __future__ import annotations + +import json +import logging +from pathlib import Path +from threading import Lock + +logger = logging.getLogger(__name__) + +# Data file path (relative to project root) +# Path: src/mes_dashboard/services/page_registry.py -> project root/data/ +DATA_FILE = Path(__file__).parent.parent.parent.parent / "data" / "page_status.json" +_lock = Lock() +_cache: dict | None = None + + +def _load() -> dict: + """Load page status configuration.""" + global _cache + if _cache is None: + if DATA_FILE.exists(): + try: + _cache = json.loads(DATA_FILE.read_text(encoding="utf-8")) + logger.debug("Loaded page status from %s", DATA_FILE) + except (json.JSONDecodeError, OSError) as e: + logger.warning("Failed to load page status: %s", e) + _cache = {"pages": [], "api_public": True} + else: + logger.info("Page status file not found, using defaults") + _cache = {"pages": [], "api_public": True} + return _cache + + +def _save(data: dict) -> None: + """Save page status configuration.""" + global _cache + try: + DATA_FILE.parent.mkdir(parents=True, exist_ok=True) + DATA_FILE.write_text( + json.dumps(data, ensure_ascii=False, indent=2), + encoding="utf-8" + ) + _cache = data + logger.debug("Saved page status to %s", DATA_FILE) + except OSError as e: + logger.error("Failed to save page status: %s", e) + raise + + +def get_page_status(route: str) -> str | None: + """Get page status ('released' or 'dev'). + + Args: + route: Page route path (e.g., '/wip-overview') + + Returns: + 'released', 'dev', or None if page is not registered. + """ + with _lock: + data = _load() + for page in data.get("pages", []): + if page["route"] == route: + return page.get("status", "dev") + return None # Not registered - let Flask handle it + + +def is_page_registered(route: str) -> bool: + """Check if a page is registered in the page registry. + + Args: + route: Page route path (e.g., '/wip-overview') + + Returns: + True if page is registered, False otherwise. 
+ """ + return get_page_status(route) is not None + + +def set_page_status(route: str, status: str, name: str | None = None) -> None: + """Set page status. + + Args: + route: Page route path + status: 'released' or 'dev' + name: Optional page display name + """ + if status not in ("released", "dev"): + raise ValueError(f"Invalid status: {status}") + + with _lock: + data = _load() + pages = data.setdefault("pages", []) + + # Update existing page + for page in pages: + if page["route"] == route: + page["status"] = status + if name: + page["name"] = name + _save(data) + logger.info("Updated page status: %s -> %s", route, status) + return + + # Add new page + pages.append({ + "route": route, + "name": name or route, + "status": status + }) + _save(data) + logger.info("Added new page: %s (%s)", route, status) + + +def get_all_pages() -> list[dict]: + """Get all page configurations. + + Returns: + List of page dicts: [{route, name, status}, ...] + """ + with _lock: + return _load().get("pages", []) + + +def is_api_public() -> bool: + """Check if API endpoints are publicly accessible. + + Returns: + True if API endpoints bypass permission checks + """ + with _lock: + return _load().get("api_public", True) + + +def reload_cache() -> None: + """Force reload of page status from disk.""" + global _cache + with _lock: + _cache = None + _load() + logger.info("Reloaded page status cache") diff --git a/src/mes_dashboard/services/realtime_equipment_cache.py b/src/mes_dashboard/services/realtime_equipment_cache.py new file mode 100644 index 0000000..40980e9 --- /dev/null +++ b/src/mes_dashboard/services/realtime_equipment_cache.py @@ -0,0 +1,753 @@ +# -*- coding: utf-8 -*- +"""Realtime Equipment Status Cache for MES Dashboard. + +Provides cached equipment status from DW_MES_EQUIPMENTSTATUS_WIP_V. +Data is synced periodically (default 5 minutes) and stored in Redis. 
+""" + +import json +import logging +import threading +import time +from datetime import datetime +from typing import Any, Dict, List, Optional, Tuple + +from mes_dashboard.core.database import read_sql_df +from mes_dashboard.core.redis_client import ( + get_redis_client, + get_key_prefix, + try_acquire_lock, + release_lock, +) +from mes_dashboard.config.constants import ( + EQUIPMENT_STATUS_DATA_KEY, + EQUIPMENT_STATUS_INDEX_KEY, + EQUIPMENT_STATUS_META_UPDATED_KEY, + EQUIPMENT_STATUS_META_COUNT_KEY, + STATUS_CATEGORY_MAP, +) + +logger = logging.getLogger('mes_dashboard.realtime_equipment_cache') + +# ============================================================ +# Process-Level Cache (Prevents redundant JSON parsing) +# ============================================================ + +class _ProcessLevelCache: + """Thread-safe process-level cache for parsed equipment status data.""" + + def __init__(self, ttl_seconds: int = 30): + self._cache: Dict[str, Tuple[List[Dict[str, Any]], float]] = {} + self._lock = threading.Lock() + self._ttl = ttl_seconds + + def get(self, key: str) -> Optional[List[Dict[str, Any]]]: + """Get cached data if not expired.""" + with self._lock: + if key not in self._cache: + return None + data, timestamp = self._cache[key] + if time.time() - timestamp > self._ttl: + del self._cache[key] + return None + return data + + def set(self, key: str, data: List[Dict[str, Any]]) -> None: + """Cache data with current timestamp.""" + with self._lock: + self._cache[key] = (data, time.time()) + + def invalidate(self, key: str) -> None: + """Remove a key from cache.""" + with self._lock: + self._cache.pop(key, None) + + +# Global process-level cache for equipment status (30s TTL) +_equipment_status_cache = _ProcessLevelCache(ttl_seconds=30) +_equipment_status_parse_lock = threading.Lock() +_equipment_lookup_lock = threading.Lock() +_equipment_status_lookup: Dict[str, Dict[str, Any]] = {} +_equipment_status_lookup_built_at: Optional[str] = None 
+_equipment_status_lookup_ts: float = 0.0 +LOOKUP_TTL_SECONDS = 30 + +# ============================================================ +# Module State +# ============================================================ + +_SYNC_THREAD: Optional[threading.Thread] = None +_STOP_EVENT = threading.Event() +_SYNC_LOCK = threading.Lock() + + +# ============================================================ +# Oracle Query +# ============================================================ + +def _load_equipment_status_from_oracle() -> Optional[List[Dict[str, Any]]]: + """Query DW_MES_EQUIPMENTSTATUS_WIP_V from Oracle. + + Returns: + List of equipment status records, or None if query fails. + """ + sql = """ + SELECT + RESOURCEID, + EQUIPMENTID, + OBJECTCATEGORY, + EQUIPMENTASSETSSTATUS, + EQUIPMENTASSETSSTATUSREASON, + JOBORDER, + JOBMODEL, + JOBSTAGE, + JOBID, + JOBSTATUS, + CREATEDATE, + CREATEUSERNAME, + CREATEUSER, + TECHNICIANUSERNAME, + TECHNICIANUSER, + SYMPTOMCODE, + CAUSECODE, + REPAIRCODE, + RUNCARDLOTID, + LOTTRACKINQTY_PCS, + LOTTRACKINTIME, + LOTTRACKINEMPLOYEE + FROM DWH.DW_MES_EQUIPMENTSTATUS_WIP_V + """ + try: + df = read_sql_df(sql) + if df is None or df.empty: + logger.warning("No data returned from DW_MES_EQUIPMENTSTATUS_WIP_V") + return [] + + # Convert DataFrame to list of dicts + records = df.to_dict('records') + + # Convert datetime columns to ISO format strings + for record in records: + for key in ['CREATEDATE', 'LOTTRACKINTIME']: + if record.get(key) is not None: + try: + record[key] = record[key].isoformat() + except (AttributeError, TypeError): + pass + + logger.info(f"Loaded {len(records)} records from DW_MES_EQUIPMENTSTATUS_WIP_V") + return records + + except Exception as exc: + logger.error(f"Failed to load equipment status from Oracle: {exc}") + return None + + +# ============================================================ +# Data Aggregation +# ============================================================ + +def _classify_status(status: 
Optional[str]) -> str: + """Classify equipment status into category. + + Args: + status: Equipment status code (e.g., 'PRD', 'SBY') + + Returns: + Status category string. + """ + if not status: + return 'OTHER' + return STATUS_CATEGORY_MAP.get(status, 'OTHER') + + +def _is_valid_value(value) -> bool: + """Check if a value is valid (not None, not NaN, not empty string). + + Args: + value: The value to check. + + Returns: + True if valid, False otherwise. + """ + if value is None: + return False + if isinstance(value, str) and (not value.strip() or value == 'NaT'): + return False + # Check for NaN (pandas NaN or float NaN) + try: + if value != value: # NaN != NaN is True + return False + except (TypeError, ValueError): + pass + return True + + +def _aggregate_by_resourceid(records: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """Aggregate equipment status records by RESOURCEID. + + For each RESOURCEID: + - Status fields: take first (should be same for all records) + - LOT_COUNT: count of distinct RUNCARDLOTID values + - LOT_DETAILS: list of LOT information for tooltip display + - TOTAL_TRACKIN_QTY: sum of LOTTRACKINQTY_PCS + - LATEST_TRACKIN_TIME: max of LOTTRACKINTIME + + Args: + records: Raw records from Oracle query. + + Returns: + Aggregated records, one per RESOURCEID. 
+ """ + if not records: + return [] + + # Group by RESOURCEID + grouped: Dict[str, List[Dict[str, Any]]] = {} + for record in records: + resource_id = record.get('RESOURCEID') + if resource_id: + if resource_id not in grouped: + grouped[resource_id] = [] + grouped[resource_id].append(record) + + # Aggregate each group + aggregated = [] + for resource_id, group in grouped.items(): + first = group[0] + + # Collect unique LOTs by RUNCARDLOTID + seen_lots = set() + lot_details = [] + total_qty = 0 + + for r in group: + lot_id = r.get('RUNCARDLOTID') + qty = r.get('LOTTRACKINQTY_PCS') + # Sum only valid quantities + if _is_valid_value(qty): + total_qty += qty + + # Only add unique LOTs with valid RUNCARDLOTID + if _is_valid_value(lot_id) and lot_id not in seen_lots: + seen_lots.add(lot_id) + trackin_time = r.get('LOTTRACKINTIME') + trackin_employee = r.get('LOTTRACKINEMPLOYEE') + lot_details.append({ + 'RUNCARDLOTID': lot_id, + 'LOTTRACKINQTY_PCS': qty if _is_valid_value(qty) else None, + 'LOTTRACKINTIME': trackin_time if _is_valid_value(trackin_time) else None, + 'LOTTRACKINEMPLOYEE': trackin_employee if _is_valid_value(trackin_employee) else None, + }) + + # Find latest trackin time + trackin_times = [ + r.get('LOTTRACKINTIME') + for r in group + if r.get('LOTTRACKINTIME') + ] + latest_trackin = max(trackin_times) if trackin_times else None + + # Build aggregated record + status = first.get('EQUIPMENTASSETSSTATUS') + aggregated.append({ + 'RESOURCEID': resource_id, + 'EQUIPMENTID': first.get('EQUIPMENTID'), + 'OBJECTCATEGORY': first.get('OBJECTCATEGORY'), + 'EQUIPMENTASSETSSTATUS': status, + 'EQUIPMENTASSETSSTATUSREASON': first.get('EQUIPMENTASSETSSTATUSREASON'), + 'STATUS_CATEGORY': _classify_status(status), + # JOB related fields + 'JOBORDER': first.get('JOBORDER'), + 'JOBMODEL': first.get('JOBMODEL'), + 'JOBSTAGE': first.get('JOBSTAGE'), + 'JOBID': first.get('JOBID'), + 'JOBSTATUS': first.get('JOBSTATUS'), + 'CREATEDATE': first.get('CREATEDATE'), + 
'CREATEUSERNAME': first.get('CREATEUSERNAME'), + 'CREATEUSER': first.get('CREATEUSER'), + 'TECHNICIANUSERNAME': first.get('TECHNICIANUSERNAME'), + 'TECHNICIANUSER': first.get('TECHNICIANUSER'), + 'SYMPTOMCODE': first.get('SYMPTOMCODE'), + 'CAUSECODE': first.get('CAUSECODE'), + 'REPAIRCODE': first.get('REPAIRCODE'), + # LOT related fields + 'LOT_COUNT': len(seen_lots), # Count distinct RUNCARDLOTID + 'LOT_DETAILS': lot_details, # LOT details for tooltip + 'TOTAL_TRACKIN_QTY': total_qty, + 'LATEST_TRACKIN_TIME': latest_trackin, + }) + + logger.debug(f"Aggregated {len(records)} records into {len(aggregated)} unique resources") + return aggregated + + +# ============================================================ +# Redis Storage +# ============================================================ + +def _save_to_redis(aggregated: List[Dict[str, Any]]) -> bool: + """Save aggregated equipment status to Redis. + + Uses pipeline for atomic update of all keys. + + Args: + aggregated: Aggregated equipment status records. + + Returns: + True if save succeeded, False otherwise. 
+ """ + redis_client = get_redis_client() + if not redis_client: + logger.error("Redis client not available") + return False + + try: + prefix = get_key_prefix() + data_key = f"{prefix}:{EQUIPMENT_STATUS_DATA_KEY}" + index_key = f"{prefix}:{EQUIPMENT_STATUS_INDEX_KEY}" + updated_key = f"{prefix}:{EQUIPMENT_STATUS_META_UPDATED_KEY}" + count_key = f"{prefix}:{EQUIPMENT_STATUS_META_COUNT_KEY}" + + # Build index mapping: RESOURCEID -> array index + index_mapping = { + record['RESOURCEID']: str(idx) + for idx, record in enumerate(aggregated) + } + + # Serialize data + data_json = json.dumps(aggregated, ensure_ascii=False, default=str) + updated_at = datetime.now().isoformat() + count = len(aggregated) + + # Atomic update using pipeline + pipe = redis_client.pipeline() + pipe.set(data_key, data_json) + pipe.delete(index_key) + if index_mapping: + pipe.hset(index_key, mapping=index_mapping) + pipe.set(updated_key, updated_at) + pipe.set(count_key, str(count)) + pipe.execute() + + # Invalidate process-level cache so next request picks up new data + _equipment_status_cache.invalidate("equipment_status_all") + _invalidate_equipment_status_lookup() + + logger.info(f"Saved {count} equipment status records to Redis") + return True + + except Exception as exc: + logger.error(f"Failed to save equipment status to Redis: {exc}") + return False + + +# ============================================================ +# Query API +# ============================================================ + +def _invalidate_equipment_status_lookup() -> None: + global _equipment_status_lookup, _equipment_status_lookup_built_at, _equipment_status_lookup_ts + with _equipment_lookup_lock: + _equipment_status_lookup = {} + _equipment_status_lookup_built_at = None + _equipment_status_lookup_ts = 0.0 + + +def get_equipment_status_lookup() -> Dict[str, Dict[str, Any]]: + """Get RESOURCEID -> status record lookup with process-level caching.""" + global _equipment_status_lookup, 
_equipment_status_lookup_built_at, _equipment_status_lookup_ts + + with _equipment_lookup_lock: + if _equipment_status_lookup and (time.time() - _equipment_status_lookup_ts) <= LOOKUP_TTL_SECONDS: + return _equipment_status_lookup + + records = get_all_equipment_status() + lookup = { + str(record.get("RESOURCEID")): record + for record in records + if record.get("RESOURCEID") is not None + } + + with _equipment_lookup_lock: + _equipment_status_lookup = lookup + _equipment_status_lookup_built_at = datetime.now().isoformat() + _equipment_status_lookup_ts = time.time() + return _equipment_status_lookup + +def get_all_equipment_status() -> List[Dict[str, Any]]: + """Get all equipment status from cache with process-level caching. + + Uses a two-tier cache strategy: + 1. Process-level cache: Parsed data (30s TTL) - fast, no parsing + 2. Redis cache: Raw JSON data - shared across workers + + This prevents redundant JSON parsing across concurrent requests. + + Returns: + List of equipment status records, or empty list if unavailable. 
+ """ + cache_key = "equipment_status_all" + + # Tier 1: Check process-level cache first (fast path) + cached_data = _equipment_status_cache.get(cache_key) + if cached_data is not None: + logger.debug(f"Process cache hit: {len(cached_data)} records") + return cached_data + + # Tier 2: Parse from Redis (slow path - needs lock) + redis_client = get_redis_client() + if not redis_client: + logger.warning("Redis client not available for equipment status query") + return [] + + # Use lock to prevent multiple threads from parsing simultaneously + with _equipment_status_parse_lock: + # Double-check after acquiring lock + cached_data = _equipment_status_cache.get(cache_key) + if cached_data is not None: + logger.debug(f"Process cache hit (after lock): {len(cached_data)} records") + return cached_data + + try: + start_time = time.time() + prefix = get_key_prefix() + data_key = f"{prefix}:{EQUIPMENT_STATUS_DATA_KEY}" + + data_json = redis_client.get(data_key) + if not data_json: + logger.debug("No equipment status data in cache") + return [] + + data = json.loads(data_json) + parse_time = time.time() - start_time + + # Store in process-level cache + _equipment_status_cache.set(cache_key, data) + + logger.debug(f"Equipment status cache hit: {len(data)} records (parsed in {parse_time:.2f}s)") + return data + + except Exception as exc: + logger.error(f"Failed to get equipment status from cache: {exc}") + return [] + + +def get_equipment_status_by_id(resource_id: str) -> Optional[Dict[str, Any]]: + """Get equipment status by RESOURCEID. + + Uses index hash for O(1) lookup. + + Args: + resource_id: The RESOURCEID to look up. + + Returns: + Equipment status record, or None if not found. 
+ """ + if not resource_id: + return None + + lookup = get_equipment_status_lookup() + if lookup: + cached = lookup.get(str(resource_id)) + if cached is not None: + return cached + + redis_client = get_redis_client() + if not redis_client: + return None + + try: + prefix = get_key_prefix() + index_key = f"{prefix}:{EQUIPMENT_STATUS_INDEX_KEY}" + data_key = f"{prefix}:{EQUIPMENT_STATUS_DATA_KEY}" + + # Get index from hash + idx_str = redis_client.hget(index_key, resource_id) + if idx_str is None: + return None + + idx = int(idx_str) + + # Get data array + data_json = redis_client.get(data_key) + if not data_json: + return None + + data = json.loads(data_json) + if 0 <= idx < len(data): + return data[idx] + + return None + + except Exception as exc: + logger.error(f"Failed to get equipment status by ID: {exc}") + return None + + +def get_equipment_status_by_ids(resource_ids: List[str]) -> List[Dict[str, Any]]: + """Get equipment status for multiple RESOURCEIDs. + + Args: + resource_ids: List of RESOURCEIDs to look up. + + Returns: + List of equipment status records (only existing ones). 
+ """ + if not resource_ids: + return [] + + lookup = get_equipment_status_lookup() + if lookup: + result = [] + for resource_id in resource_ids: + row = lookup.get(str(resource_id)) + if row is not None: + result.append(row) + return result + + redis_client = get_redis_client() + if not redis_client: + return [] + + try: + prefix = get_key_prefix() + index_key = f"{prefix}:{EQUIPMENT_STATUS_INDEX_KEY}" + data_key = f"{prefix}:{EQUIPMENT_STATUS_DATA_KEY}" + + # Get all indices at once + indices = redis_client.hmget(index_key, resource_ids) + + # Get data array + data_json = redis_client.get(data_key) + if not data_json: + return [] + + data = json.loads(data_json) + + # Collect matching records + results = [] + for idx_str in indices: + if idx_str is not None: + idx = int(idx_str) + if 0 <= idx < len(data): + results.append(data[idx]) + + return results + + except Exception as exc: + logger.error(f"Failed to get equipment status by IDs: {exc}") + return [] + + +def get_equipment_status_cache_status() -> Dict[str, Any]: + """Get equipment status cache status. + + Returns: + Dict with cache status information. 
+ """ + from flask import current_app + + enabled = current_app.config.get('REALTIME_EQUIPMENT_CACHE_ENABLED', True) + + with _equipment_lookup_lock: + lookup_ready = bool(_equipment_status_lookup) + lookup_count = len(_equipment_status_lookup) + lookup_built_at = _equipment_status_lookup_built_at + + lookup_age_seconds = None + if lookup_built_at: + try: + lookup_age_seconds = max( + (datetime.now() - datetime.fromisoformat(lookup_built_at)).total_seconds(), + 0.0, + ) + except Exception: + lookup_age_seconds = None + + lookup_meta = { + 'ready': lookup_ready, + 'count': lookup_count, + 'built_at': lookup_built_at, + 'age_seconds': round(lookup_age_seconds, 3) if lookup_age_seconds is not None else None, + } + + if not enabled: + return { + 'enabled': False, + 'loaded': False, + 'count': 0, + 'updated_at': None, + 'lookup': lookup_meta, + } + + redis_client = get_redis_client() + if not redis_client: + return { + 'enabled': True, + 'loaded': False, + 'count': 0, + 'updated_at': None, + 'lookup': lookup_meta, + } + + try: + prefix = get_key_prefix() + updated_key = f"{prefix}:{EQUIPMENT_STATUS_META_UPDATED_KEY}" + count_key = f"{prefix}:{EQUIPMENT_STATUS_META_COUNT_KEY}" + + updated_at = redis_client.get(updated_key) + count_str = redis_client.get(count_key) + + return { + 'enabled': True, + 'loaded': updated_at is not None, + 'count': int(count_str) if count_str else 0, + 'updated_at': updated_at, + 'lookup': lookup_meta, + } + + except Exception as exc: + logger.error(f"Failed to get equipment status cache status: {exc}") + return { + 'enabled': True, + 'loaded': False, + 'count': 0, + 'updated_at': None, + 'lookup': lookup_meta, + } + + +# ============================================================ +# Background Sync +# ============================================================ + +def refresh_equipment_status_cache(force: bool = False) -> bool: + """Refresh equipment status cache. 

    Uses distributed lock to prevent multiple workers from refreshing simultaneously.

    Args:
        force: If True, refresh immediately regardless of state.
            NOTE(review): this flag is currently not consulted anywhere in
            the body — every call that wins the lock refreshes; confirm
            whether throttling based on ``force`` was intended.

    Returns:
        True if refresh succeeded, False otherwise.
    """
    # Try to acquire distributed lock (non-blocking); another worker holding
    # it means a refresh is already in flight, so we simply skip.
    if not try_acquire_lock("equipment_status_cache_update", ttl_seconds=120):
        logger.debug("Another worker is refreshing equipment status cache, skipping")
        return False

    try:
        # _SYNC_LOCK serializes refreshes within this process as well.
        with _SYNC_LOCK:
            logger.info("Refreshing equipment status cache...")
            start_time = time.time()

            # Load from Oracle
            records = _load_equipment_status_from_oracle()
            if records is None:
                logger.error("Failed to load equipment status from Oracle")
                return False

            # Aggregate
            aggregated = _aggregate_by_resourceid(records)

            # Save to Redis
            success = _save_to_redis(aggregated)

            elapsed = time.time() - start_time
            if success:
                logger.info(f"Equipment status cache refreshed in {elapsed:.2f}s")
            else:
                logger.error(f"Equipment status cache refresh failed after {elapsed:.2f}s")

            return success
    finally:
        # Always release the distributed lock, even on early return or error.
        release_lock("equipment_status_cache_update")


def _sync_worker(interval: int):
    """Background worker that periodically syncs equipment status.

    Runs until the module-level stop event is set.

    Args:
        interval: Sync interval in seconds.
    """
    logger.info(f"Equipment status sync worker started (interval: {interval}s)")

    while not _STOP_EVENT.is_set():
        try:
            refresh_equipment_status_cache()
        except Exception as exc:
            # Never let one failed cycle kill the worker thread.
            logger.error(f"Equipment status sync error: {exc}")

        # Wait for next sync or stop signal (wakes immediately on stop).
        _STOP_EVENT.wait(timeout=interval)

    logger.info("Equipment status sync worker stopped")


def _start_equipment_status_sync_worker(interval: int):
    """Start the background sync worker thread.

    Idempotent: does nothing if a worker thread is already alive.

    Args:
        interval: Sync interval in seconds.
    """
    global _SYNC_THREAD

    if _SYNC_THREAD is not None and _SYNC_THREAD.is_alive():
        logger.warning("Equipment status sync worker already running")
        return

    _STOP_EVENT.clear()
    # Daemon thread so it never blocks interpreter shutdown.
    _SYNC_THREAD = threading.Thread(
        target=_sync_worker,
        args=(interval,),
        daemon=True,
        name="equipment-status-sync"
    )
    _SYNC_THREAD.start()


def stop_equipment_status_sync_worker():
    """Stop the background sync worker thread.

    Waits up to 5 seconds for the worker to exit, then clears the handle.
    """
    global _SYNC_THREAD

    if _SYNC_THREAD is None or not _SYNC_THREAD.is_alive():
        return

    logger.info("Stopping equipment status sync worker...")
    _STOP_EVENT.set()
    # Bounded join: the worker may be mid-refresh; don't hang shutdown.
    _SYNC_THREAD.join(timeout=5)
    _SYNC_THREAD = None


# ============================================================
# Initialization
# ============================================================

def init_realtime_equipment_cache(app=None):
    """Initialize the realtime equipment status cache.

    Should be called during app initialization. Performs one synchronous
    refresh, then starts the periodic background worker.

    Args:
        app: Flask application instance (optional, uses current_app if None).
    """
    from flask import current_app

    config = app.config if app else current_app.config

    enabled = config.get('REALTIME_EQUIPMENT_CACHE_ENABLED', True)
    if not enabled:
        logger.info("Realtime equipment cache is disabled")
        return

    interval = config.get('EQUIPMENT_STATUS_SYNC_INTERVAL', 300)

    logger.info("Initializing realtime equipment cache...")

    # Initial sync
    refresh_equipment_status_cache()

    # Start background worker
    _start_equipment_status_sync_worker(interval)
diff --git a/src/mes_dashboard/services/resource_cache.py b/src/mes_dashboard/services/resource_cache.py
new file mode 100644
index 0000000..2ce26b9
--- /dev/null
+++ b/src/mes_dashboard/services/resource_cache.py
@@ -0,0 +1,858 @@
# -*- coding: utf-8 -*-
"""Resource Cache - full-table cache module for DWH.DW_MES_RESOURCE.
+ +全表快取套用全域篩選後的設備主檔資料至 Redis。 +提供統一 API 供各模組取用設備資料和篩選器選項。 +""" + +from __future__ import annotations + +import io +import json +import logging +import os +import threading +import time +from datetime import datetime +from typing import Any, Dict, List, Optional, Tuple + +import pandas as pd + +from mes_dashboard.core.redis_client import ( + get_redis_client, + redis_available, + REDIS_ENABLED, + REDIS_KEY_PREFIX, +) +from mes_dashboard.core.database import read_sql_df +from mes_dashboard.config.constants import ( + EXCLUDED_LOCATIONS, + EXCLUDED_ASSET_STATUSES, + EQUIPMENT_TYPE_FILTER, +) +from mes_dashboard.sql import QueryBuilder + +logger = logging.getLogger('mes_dashboard.resource_cache') + +# ============================================================ +# Process-Level Cache (Prevents redundant JSON parsing) +# ============================================================ + +class _ProcessLevelCache: + """Thread-safe process-level cache for parsed DataFrames.""" + + def __init__(self, ttl_seconds: int = 30): + self._cache: Dict[str, Tuple[pd.DataFrame, float]] = {} + self._lock = threading.Lock() + self._ttl = ttl_seconds + + def get(self, key: str) -> Optional[pd.DataFrame]: + """Get cached DataFrame if not expired.""" + with self._lock: + if key not in self._cache: + return None + df, timestamp = self._cache[key] + if time.time() - timestamp > self._ttl: + del self._cache[key] + return None + return df + + def set(self, key: str, df: pd.DataFrame) -> None: + """Cache a DataFrame with current timestamp.""" + with self._lock: + self._cache[key] = (df, time.time()) + + def invalidate(self, key: str) -> None: + """Remove a key from cache.""" + with self._lock: + self._cache.pop(key, None) + + +# Global process-level cache for resource data (30s TTL) +_resource_df_cache = _ProcessLevelCache(ttl_seconds=30) +_resource_parse_lock = threading.Lock() +_resource_index_lock = threading.Lock() +_resource_index: Dict[str, Any] = { + "ready": False, + "source": None, + 
"version": None, + "updated_at": None, + "built_at": None, + "version_checked_at": 0.0, + "count": 0, + "records": [], + "by_resource_id": {}, + "by_workcenter": {}, + "by_family": {}, + "by_department": {}, + "by_location": {}, + "by_is_production": {"1": [], "0": []}, + "by_is_key": {"1": [], "0": []}, + "by_is_monitor": {"1": [], "0": []}, +} + + +def _new_empty_index() -> Dict[str, Any]: + return { + "ready": False, + "source": None, + "version": None, + "updated_at": None, + "built_at": None, + "version_checked_at": 0.0, + "count": 0, + "records": [], + "by_resource_id": {}, + "by_workcenter": {}, + "by_family": {}, + "by_department": {}, + "by_location": {}, + "by_is_production": {"1": [], "0": []}, + "by_is_key": {"1": [], "0": []}, + "by_is_monitor": {"1": [], "0": []}, + } + + +def _invalidate_resource_index() -> None: + with _resource_index_lock: + global _resource_index + _resource_index = _new_empty_index() + + +def _is_truthy_flag(value: Any) -> bool: + if value is True: + return True + if value in (1, "1"): + return True + if isinstance(value, str): + return value.strip().lower() in {"true", "yes", "y"} + return False + + +def _bucket_append(bucket: Dict[str, List[Dict[str, Any]]], key: Any, record: Dict[str, Any]) -> None: + if key is None: + return + if isinstance(key, float) and pd.isna(key): + return + key_str = str(key) + bucket.setdefault(key_str, []).append(record) + + +def _build_resource_index( + df: pd.DataFrame, + *, + source: str, + version: Optional[str], + updated_at: Optional[str], +) -> Dict[str, Any]: + records = df.to_dict(orient='records') + index = _new_empty_index() + index["ready"] = True + index["source"] = source + index["version"] = version + index["updated_at"] = updated_at + index["built_at"] = datetime.now().isoformat() + index["version_checked_at"] = time.time() + index["count"] = len(records) + index["records"] = records + + for record in records: + resource_id = record.get("RESOURCEID") + if resource_id is not None and 
not (isinstance(resource_id, float) and pd.isna(resource_id)): + index["by_resource_id"][str(resource_id)] = record + + _bucket_append(index["by_workcenter"], record.get("WORKCENTERNAME"), record) + _bucket_append(index["by_family"], record.get("RESOURCEFAMILYNAME"), record) + _bucket_append(index["by_department"], record.get("PJ_DEPARTMENT"), record) + _bucket_append(index["by_location"], record.get("LOCATIONNAME"), record) + + index["by_is_production"]["1" if _is_truthy_flag(record.get("PJ_ISPRODUCTION")) else "0"].append(record) + index["by_is_key"]["1" if _is_truthy_flag(record.get("PJ_ISKEY")) else "0"].append(record) + index["by_is_monitor"]["1" if _is_truthy_flag(record.get("PJ_ISMONITOR")) else "0"].append(record) + + return index + + +def _index_matches( + current: Dict[str, Any], + *, + source: str, + version: Optional[str], + row_count: int, +) -> bool: + if not current.get("ready"): + return False + if current.get("source") != source: + return False + if version and current.get("version") != version: + return False + return int(current.get("count", 0)) == int(row_count) + + +def _ensure_resource_index( + df: pd.DataFrame, + *, + source: str, + version: Optional[str] = None, + updated_at: Optional[str] = None, +) -> None: + global _resource_index + with _resource_index_lock: + current = _resource_index + if _index_matches(current, source=source, version=version, row_count=len(df)): + return + + new_index = _build_resource_index( + df, + source=source, + version=version, + updated_at=updated_at, + ) + with _resource_index_lock: + _resource_index = new_index + + +def _get_resource_index() -> Dict[str, Any]: + with _resource_index_lock: + return _resource_index + + +def _get_cache_meta(client=None) -> Tuple[Optional[str], Optional[str]]: + redis_client = client or get_redis_client() + if redis_client is None: + return None, None + + try: + version, updated_at = redis_client.mget([ + _get_key("meta:version"), + _get_key("meta:updated"), + ]) + return 
version, updated_at + except Exception: + return None, None + + +def _redis_data_available(client=None) -> bool: + """Check whether Redis currently has resource payload.""" + redis_client = client or get_redis_client() + if redis_client is None: + return False + + try: + return redis_client.get(_get_key("data")) is not None + except Exception: + return False + + +def _pick_bucket_records( + bucket: Dict[str, List[Dict[str, Any]]], + keys: List[Any], +) -> List[Dict[str, Any]]: + seen: set[str] = set() + result: List[Dict[str, Any]] = [] + for key in keys: + for record in bucket.get(str(key), []): + rid = record.get("RESOURCEID") + rid_key = str(rid) if rid is not None else str(id(record)) + if rid_key in seen: + continue + seen.add(rid_key) + result.append(record) + return result + +# ============================================================ +# Configuration +# ============================================================ + +RESOURCE_CACHE_ENABLED = os.getenv('RESOURCE_CACHE_ENABLED', 'true').lower() == 'true' +RESOURCE_SYNC_INTERVAL = int(os.getenv('RESOURCE_SYNC_INTERVAL', '14400')) # 4 hours +RESOURCE_INDEX_VERSION_CHECK_INTERVAL = int( + os.getenv('RESOURCE_INDEX_VERSION_CHECK_INTERVAL', '5') +) # seconds + +# Redis key helpers +def _get_key(key: str) -> str: + """Get full Redis key with resource prefix.""" + return f"{REDIS_KEY_PREFIX}:resource:{key}" + + +# ============================================================ +# Internal: Oracle Load Functions +# ============================================================ + +def _build_filter_builder() -> QueryBuilder: + """Build QueryBuilder with global filter conditions. + + Returns: + QueryBuilder instance with filter conditions applied. 
+ """ + builder = QueryBuilder() + + # Equipment type filter - raw SQL condition from config + builder.add_condition(EQUIPMENT_TYPE_FILTER.strip()) + + # Workcenter filter - exclude resources without WORKCENTERNAME + builder.add_is_not_null("WORKCENTERNAME") + + # Location filter - exclude locations, allow NULL + if EXCLUDED_LOCATIONS: + builder.add_not_in_condition( + "LOCATIONNAME", + list(EXCLUDED_LOCATIONS), + allow_null=True + ) + + # Asset status filter - exclude statuses, allow NULL + if EXCLUDED_ASSET_STATUSES: + builder.add_not_in_condition( + "PJ_ASSETSSTATUS", + list(EXCLUDED_ASSET_STATUSES), + allow_null=True + ) + + return builder + + +def _load_from_oracle() -> Optional[pd.DataFrame]: + """從 Oracle 載入全表資料(套用全域篩選). + + Returns: + DataFrame with all columns, or None if query failed. + """ + builder = _build_filter_builder() + builder.base_sql = "SELECT * FROM DWH.DW_MES_RESOURCE {{ WHERE_CLAUSE }}" + sql, params = builder.build() + + try: + df = read_sql_df(sql, params) + if df is not None: + logger.info(f"Loaded {len(df)} resources from Oracle") + return df + except Exception as e: + logger.error(f"Failed to load resources from Oracle: {e}") + return None + + +def _get_version_from_oracle() -> Optional[str]: + """取得 Oracle 資料版本(MAX(LASTCHANGEDATE)). + + Returns: + Version string (ISO format), or None if query failed. 
+ """ + builder = _build_filter_builder() + builder.base_sql = "SELECT MAX(LASTCHANGEDATE) as VERSION FROM DWH.DW_MES_RESOURCE {{ WHERE_CLAUSE }}" + sql, params = builder.build() + + try: + df = read_sql_df(sql, params) + if df is not None and not df.empty: + version = df.iloc[0]['VERSION'] + if version is not None: + if hasattr(version, 'isoformat'): + return version.isoformat() + return str(version) + return None + except Exception as e: + logger.error(f"Failed to get version from Oracle: {e}") + return None + + +# ============================================================ +# Internal: Redis Functions +# ============================================================ + +def _get_version_from_redis() -> Optional[str]: + """取得 Redis 快取版本. + + Returns: + Cached version string, or None. + """ + client = get_redis_client() + if client is None: + return None + + try: + return client.get(_get_key("meta:version")) + except Exception as e: + logger.warning(f"Failed to get version from Redis: {e}") + return None + + +def _sync_to_redis(df: pd.DataFrame, version: str) -> bool: + """同步至 Redis(使用 pipeline 確保原子性). + + Args: + df: DataFrame with resource data. + version: Version string (MAX(LASTCHANGEDATE)). + + Returns: + True if sync was successful. 
+ """ + client = get_redis_client() + if client is None: + return False + + try: + # Convert DataFrame to JSON + # Handle datetime columns + df_copy = df.copy() + for col in df_copy.select_dtypes(include=['datetime64']).columns: + df_copy[col] = df_copy[col].astype(str) + + data_json = df_copy.to_json(orient='records', force_ascii=False) + + # Atomic update using pipeline + now = datetime.now().isoformat() + pipe = client.pipeline() + pipe.set(_get_key("data"), data_json) + pipe.set(_get_key("meta:version"), version) + pipe.set(_get_key("meta:updated"), now) + pipe.set(_get_key("meta:count"), str(len(df))) + pipe.execute() + + # Invalidate process-level cache so next request picks up new data + _resource_df_cache.invalidate("resource_data") + _invalidate_resource_index() + + logger.info(f"Resource cache synced: {len(df)} rows, version={version}") + return True + except Exception as e: + logger.error(f"Failed to sync to Redis: {e}") + return False + + +def _get_cached_data() -> Optional[pd.DataFrame]: + """Get cached resource data from Redis with process-level caching. + + Uses a two-tier cache strategy: + 1. Process-level cache: Parsed DataFrame (30s TTL) - fast, no parsing + 2. Redis cache: Raw JSON data - shared across workers + + This prevents redundant JSON parsing across concurrent requests. + + Returns: + DataFrame with resource data, or None if cache miss. 
+ """ + cache_key = "resource_data" + + # Tier 1: Check process-level cache first (fast path) + cached_df = _resource_df_cache.get(cache_key) + if cached_df is not None: + if not _get_resource_index().get("ready"): + version, updated_at = _get_cache_meta() + _ensure_resource_index( + cached_df, + source="redis", + version=version, + updated_at=updated_at, + ) + logger.debug(f"Process cache hit: {len(cached_df)} rows") + return cached_df + + # Tier 2: Parse from Redis (slow path - needs lock) + if not REDIS_ENABLED or not RESOURCE_CACHE_ENABLED: + return None + + client = get_redis_client() + if client is None: + return None + + # Use lock to prevent multiple threads from parsing simultaneously + with _resource_parse_lock: + # Double-check after acquiring lock + cached_df = _resource_df_cache.get(cache_key) + if cached_df is not None: + logger.debug(f"Process cache hit (after lock): {len(cached_df)} rows") + return cached_df + + try: + start_time = time.time() + data_json = client.get(_get_key("data")) + if data_json is None: + logger.debug("Resource cache miss: no data in Redis") + return None + + df = pd.read_json(io.StringIO(data_json), orient='records') + parse_time = time.time() - start_time + version, updated_at = _get_cache_meta(client) + + # Store in process-level cache + _resource_df_cache.set(cache_key, df) + _ensure_resource_index( + df, + source="redis", + version=version, + updated_at=updated_at, + ) + + logger.debug(f"Resource cache hit: loaded {len(df)} rows from Redis (parsed in {parse_time:.2f}s)") + return df + except Exception as e: + logger.warning(f"Failed to read resource cache: {e}") + return None + + +# ============================================================ +# Cache Management API +# ============================================================ + +def refresh_cache(force: bool = False) -> bool: + """手動刷新快取. + + Args: + force: 強制刷新,忽略版本檢查. + + Returns: + True if cache was refreshed. 
+ """ + if not REDIS_ENABLED or not RESOURCE_CACHE_ENABLED: + logger.info("Resource cache is disabled") + return False + + if not redis_available(): + logger.warning("Redis not available, cannot refresh resource cache") + return False + + try: + # Get versions + oracle_version = _get_version_from_oracle() + if oracle_version is None: + logger.error("Failed to get version from Oracle") + return False + + redis_version = _get_version_from_redis() + + # Check if update needed + if not force and redis_version == oracle_version: + logger.debug(f"Resource cache version unchanged ({oracle_version}), skipping") + return False + + logger.info(f"Resource cache version changed: {redis_version} -> {oracle_version}") + + # Load and sync + df = _load_from_oracle() + if df is None or df.empty: + logger.error("Failed to load resources from Oracle") + return False + + return _sync_to_redis(df, oracle_version) + + except Exception as e: + logger.error(f"Failed to refresh resource cache: {e}", exc_info=True) + return False + + +def init_cache() -> None: + """初始化快取(應用啟動時呼叫).""" + if not REDIS_ENABLED or not RESOURCE_CACHE_ENABLED: + logger.info("Resource cache is disabled, skipping init") + return + + if not redis_available(): + logger.warning("Redis not available during resource cache init") + return + + # Check if cache exists + client = get_redis_client() + if client is None: + return + + try: + exists = client.exists(_get_key("data")) + if not exists: + logger.info("Resource cache empty, performing initial load...") + refresh_cache(force=True) + else: + logger.info("Resource cache already populated") + except Exception as e: + logger.error(f"Failed to init resource cache: {e}") + + +def get_cache_status() -> Dict[str, Any]: + """取得快取狀態資訊. + + Returns: + Dict with cache status. 
+ """ + status = { + 'enabled': REDIS_ENABLED and RESOURCE_CACHE_ENABLED, + 'loaded': False, + 'count': 0, + 'version': None, + 'updated_at': None, + } + + if not status['enabled']: + return status + + client = get_redis_client() + if client is None: + return status + + try: + status['loaded'] = client.exists(_get_key("data")) > 0 + if status['loaded']: + count_str = client.get(_get_key("meta:count")) + status['count'] = int(count_str) if count_str else 0 + status['version'] = client.get(_get_key("meta:version")) + status['updated_at'] = client.get(_get_key("meta:updated")) + except Exception as e: + logger.warning(f"Failed to get resource cache status: {e}") + + derived = get_resource_index_status() + derived_version = derived.get("version") + derived["is_fresh"] = bool(status.get("version")) and derived_version == status.get("version") + status["derived_index"] = derived + + return status + + +# ============================================================ +# Query API +# ============================================================ + +def get_resource_index_status() -> Dict[str, Any]: + """Get process-level derived index telemetry.""" + index = _get_resource_index() + built_at = index.get("built_at") + age_seconds = None + if built_at: + try: + age_seconds = max((datetime.now() - datetime.fromisoformat(built_at)).total_seconds(), 0.0) + except Exception: + age_seconds = None + + return { + "ready": bool(index.get("ready")), + "source": index.get("source"), + "version": index.get("version"), + "updated_at": index.get("updated_at"), + "built_at": built_at, + "count": int(index.get("count", 0)), + "age_seconds": round(age_seconds, 3) if age_seconds is not None else None, + } + + +def get_resource_index_snapshot() -> Dict[str, Any]: + """Get derived resource index snapshot, rebuilding if needed.""" + index = _get_resource_index() + if index.get("ready"): + if index.get("source") == "redis": + # If Redis metadata version is missing, verify payload existence on every 
call. + # This avoids serving stale in-process index when Redis payload is evicted. + if not index.get("version"): + if not _redis_data_available(): + _resource_df_cache.invalidate("resource_data") + _invalidate_resource_index() + index = _get_resource_index() + else: + with _resource_index_lock: + _resource_index["version_checked_at"] = time.time() + return _get_resource_index() + + if index.get("ready"): + checked_at = float(index.get("version_checked_at") or 0.0) + if time.time() - checked_at >= RESOURCE_INDEX_VERSION_CHECK_INTERVAL: + latest_version = _get_version_from_redis() + current_version = index.get("version") + if latest_version and current_version and latest_version != current_version: + logger.info( + "Resource cache version changed (%s -> %s), rebuilding derived index", + current_version, + latest_version, + ) + _resource_df_cache.invalidate("resource_data") + _invalidate_resource_index() + index = _get_resource_index() + else: + with _resource_index_lock: + _resource_index["version_checked_at"] = time.time() + return _get_resource_index() + else: + return index + else: + # Oracle fallback snapshot should be treated as ephemeral to avoid serving + # stale process data indefinitely if subsequent fallback query fails. + _invalidate_resource_index() + index = _get_resource_index() + + df = _get_cached_data() + if df is not None: + version, updated_at = _get_cache_meta() + _ensure_resource_index( + df, + source="redis", + version=version, + updated_at=updated_at, + ) + return _get_resource_index() + + logger.info("Resource cache miss while building index, falling back to Oracle") + oracle_df = _load_from_oracle() + if oracle_df is None: + return _new_empty_index() + + _ensure_resource_index( + oracle_df, + source="oracle", + version=None, + updated_at=datetime.now().isoformat(), + ) + return _get_resource_index() + +def get_all_resources() -> List[Dict]: + """取得所有快取中的設備資料(全欄位). + + Falls back to Oracle if cache unavailable. 
+ + Returns: + List of resource dicts. + """ + index = get_resource_index_snapshot() + records = index.get("records", []) + return list(records) + + +def get_resource_by_id(resource_id: str) -> Optional[Dict]: + """依 RESOURCEID 取得單筆設備資料. + + Args: + resource_id: The RESOURCEID to look up. + + Returns: + Resource dict, or None if not found. + """ + if not resource_id: + return None + index = get_resource_index_snapshot() + by_id = index.get("by_resource_id", {}) + row = by_id.get(str(resource_id)) + if row is not None: + return row + + # Backward-compatible fallback for call sites/tests that patch get_all_resources. + target = str(resource_id) + for resource in get_all_resources(): + if str(resource.get("RESOURCEID")) == target: + return resource + return None + + +def get_resources_by_ids(resource_ids: List[str]) -> List[Dict]: + """依 RESOURCEID 清單批次取得設備資料. + + Args: + resource_ids: List of RESOURCEIDs to look up. + + Returns: + List of matching resource dicts. + """ + id_set = set(resource_ids) + resources = get_all_resources() + return [r for r in resources if r.get('RESOURCEID') in id_set] + + +def get_resources_by_filter( + workcenters: Optional[List[str]] = None, + families: Optional[List[str]] = None, + departments: Optional[List[str]] = None, + locations: Optional[List[str]] = None, + is_production: Optional[bool] = None, + is_key: Optional[bool] = None, + is_monitor: Optional[bool] = None, +) -> List[Dict]: + """依條件篩選設備資料(在 Python 端篩選). + + Args: + workcenters: Filter by WORKCENTERNAME values. + families: Filter by RESOURCEFAMILYNAME values. + departments: Filter by PJ_DEPARTMENT values. + locations: Filter by LOCATIONNAME values. + is_production: Filter by PJ_ISPRODUCTION flag. + is_key: Filter by PJ_ISKEY flag. + is_monitor: Filter by PJ_ISMONITOR flag. + + Returns: + List of matching resource dicts. 
+ """ + resources = get_all_resources() + + result = [] + for r in resources: + # Apply filters + if workcenters and r.get('WORKCENTERNAME') not in workcenters: + continue + if families and r.get('RESOURCEFAMILYNAME') not in families: + continue + if departments and r.get('PJ_DEPARTMENT') not in departments: + continue + if locations and r.get('LOCATIONNAME') not in locations: + continue + if is_production is not None: + val = r.get('PJ_ISPRODUCTION') + if (val == 1) != is_production: + continue + if is_key is not None: + val = r.get('PJ_ISKEY') + if (val == 1) != is_key: + continue + if is_monitor is not None: + val = r.get('PJ_ISMONITOR') + if (val == 1) != is_monitor: + continue + + result.append(r) + + return result + + +# ============================================================ +# Distinct Values API (for filters) +# ============================================================ + +def get_distinct_values(column: str) -> List[str]: + """取得指定欄位的唯一值清單(排序後). + + Args: + column: Column name (e.g., 'RESOURCEFAMILYNAME'). + + Returns: + Sorted list of unique values (excluding None, NaN, and empty strings). 
+ """ + resources = get_all_resources() + values = set() + for r in resources: + val = r.get(column) + # Skip None, empty strings, and NaN (pandas converts NaN to float) + if val is None or val == '': + continue + # Check for NaN (float type and is NaN) + if isinstance(val, float) and pd.isna(val): + continue + values.add(str(val) if not isinstance(val, str) else val) + return sorted(values) + + +def get_resource_families() -> List[str]: + """取得型號清單(便捷方法).""" + return get_distinct_values('RESOURCEFAMILYNAME') + + +def get_workcenters() -> List[str]: + """取得站點清單(便捷方法).""" + return get_distinct_values('WORKCENTERNAME') + + +def get_departments() -> List[str]: + """取得部門清單(便捷方法).""" + return get_distinct_values('PJ_DEPARTMENT') + + +def get_locations() -> List[str]: + """取得區域清單(便捷方法).""" + return get_distinct_values('LOCATIONNAME') + + +def get_vendors() -> List[str]: + """取得供應商清單(便捷方法).""" + return get_distinct_values('VENDORNAME') diff --git a/src/mes_dashboard/services/resource_history_service.py b/src/mes_dashboard/services/resource_history_service.py new file mode 100644 index 0000000..c0dd3c0 --- /dev/null +++ b/src/mes_dashboard/services/resource_history_service.py @@ -0,0 +1,953 @@ +# -*- coding: utf-8 -*- +"""Resource History Analysis Service. + +Provides functions for querying historical equipment performance data including: +- Filter options (workcenters, families) +- Summary data (KPI, trend, heatmap, workcenter comparison) +- Hierarchical detail data (workcenter → family → resource) +- CSV export with streaming + +Architecture: +- Uses resource_cache as the single source of truth for equipment master data +- Queries DW_MES_RESOURCESTATUS_SHIFT only for valid cached resource IDs +- Merges dimension data (WORKCENTERNAME, RESOURCEFAMILYNAME, etc.) 
from cache +""" + +import io +import csv +import logging +from concurrent.futures import ThreadPoolExecutor, as_completed +from datetime import datetime +from typing import Optional, Dict, List, Any, Generator + +import pandas as pd + +from mes_dashboard.core.database import read_sql_df +from mes_dashboard.sql import QueryBuilder, SQLLoader +from mes_dashboard.config.field_contracts import get_export_headers, get_export_api_keys + +logger = logging.getLogger('mes_dashboard.resource_history') + +# Maximum allowed query range in days +MAX_QUERY_DAYS = 730 + +# E10 Status definitions +E10_STATUSES = ['PRD', 'SBY', 'UDT', 'SDT', 'EGT', 'NST'] + + +# ============================================================ +# Resource Cache Integration +# ============================================================ + +def _get_filtered_resources( + workcenter_groups: Optional[List[str]] = None, + families: Optional[List[str]] = None, + is_production: bool = False, + is_key: bool = False, + is_monitor: bool = False, +) -> List[Dict[str, Any]]: + """Get filtered resources from resource_cache. + + Applies additional filters on top of the cache's pre-applied global filters. + + Args: + workcenter_groups: Optional list of WORKCENTER_GROUP names + families: Optional list of RESOURCEFAMILYNAME values + is_production: Filter by production flag + is_key: Filter by key equipment flag + is_monitor: Filter by monitor flag + + Returns: + List of resource dicts matching the filters. 
+ """ + from mes_dashboard.services.resource_cache import get_all_resources + from mes_dashboard.services.filter_cache import get_workcenter_mapping + + resources = get_all_resources() + if not resources: + logger.warning("No resources available from cache") + return [] + + # Get workcenter mapping for group filtering + wc_mapping = get_workcenter_mapping() or {} + + # Build set of workcenters if filtering by groups + allowed_workcenters = None + if workcenter_groups: + allowed_workcenters = set() + for wc_name, info in wc_mapping.items(): + if info.get('group') in workcenter_groups: + allowed_workcenters.add(wc_name) + + # Apply filters + filtered = [] + for r in resources: + # Workcenter group filter + if allowed_workcenters is not None: + if r.get('WORKCENTERNAME') not in allowed_workcenters: + continue + + # Family filter + if families and r.get('RESOURCEFAMILYNAME') not in families: + continue + + # Equipment flags filter + if is_production and r.get('PJ_ISPRODUCTION') != 1: + continue + if is_key and r.get('PJ_ISKEY') != 1: + continue + if is_monitor and r.get('PJ_ISMONITOR') != 1: + continue + + filtered.append(r) + + logger.debug(f"Filtered {len(resources)} resources to {len(filtered)}") + return filtered + + +def _build_resource_lookup(resources: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]: + """Build a lookup dict from RESOURCEID to resource info. + + Args: + resources: List of resource dicts from cache. + + Returns: + Dict mapping RESOURCEID to resource dict. + """ + return {r['RESOURCEID']: r for r in resources if r.get('RESOURCEID')} + + +def _get_resource_ids_sql_list(resources: List[Dict[str, Any]], max_chunk_size: int = 1000) -> List[str]: + """Build SQL IN clause lists for resource IDs. + + Oracle has a limit of ~1000 items per IN clause, so we chunk if needed. + + Args: + resources: List of resource dicts. + max_chunk_size: Maximum items per IN clause. + + Returns: + List of SQL IN clause strings (e.g., "'ID1', 'ID2', 'ID3'"). 
+ """ + resource_ids = [r['RESOURCEID'] for r in resources if r.get('RESOURCEID')] + if not resource_ids: + return [] + + # Escape single quotes + escaped_ids = [rid.replace("'", "''") for rid in resource_ids] + + # Chunk into groups + chunks = [] + for i in range(0, len(escaped_ids), max_chunk_size): + chunk = escaped_ids[i:i + max_chunk_size] + chunks.append("'" + "', '".join(chunk) + "'") + + return chunks + + +def _build_historyid_filter(resources: List[Dict[str, Any]]) -> str: + """Build SQL WHERE clause for HISTORYID filtering. + + Handles chunking for large resource lists. + + Args: + resources: List of resource dicts. + + Returns: + SQL condition string (e.g., "HISTORYID IN ('ID1', 'ID2') OR HISTORYID IN ('ID3', 'ID4')"). + """ + chunks = _get_resource_ids_sql_list(resources) + if not chunks: + return "1=0" # No resources = no results + + if len(chunks) == 1: + return f"HISTORYID IN ({chunks[0]})" + + # Multiple chunks need OR + conditions = [f"HISTORYID IN ({chunk})" for chunk in chunks] + return "(" + " OR ".join(conditions) + ")" + + +# ============================================================ +# Filter Options +# ============================================================ + +def get_filter_options() -> Optional[Dict[str, Any]]: + """Get filter options from cache. + + Uses cached workcenter groups from DWH.DW_MES_LOT_V and resource families from resource_cache. + + Returns: + Dict with: + - 'workcenter_groups': List of {name, sequence} sorted by sequence + - 'families': List of family names sorted alphabetically + Or None if cache loading fails. 
def query_summary(
    start_date: str,
    end_date: str,
    granularity: str = 'day',
    workcenter_groups: Optional[List[str]] = None,
    families: Optional[List[str]] = None,
    is_production: bool = False,
    is_key: bool = False,
    is_monitor: bool = False,
) -> Optional[Dict[str, Any]]:
    """Query summary data including KPI, trend, heatmap, and workcenter comparison.

    Uses resource_cache as the source for equipment master data.
    Queries only DW_MES_RESOURCESTATUS_SHIFT for SHIFT data.

    Args:
        start_date: Start date in YYYY-MM-DD format
        end_date: End date in YYYY-MM-DD format
        granularity: Time granularity ('day', 'week', 'month', 'year')
        workcenter_groups: Optional list of WORKCENTER_GROUP names to filter
        families: Optional list of RESOURCEFAMILYNAME values to filter
        is_production: Filter by production flag
        is_key: Filter by key equipment flag
        is_monitor: Filter by monitor flag

    Returns:
        Dict with 'kpi', 'trend', 'heatmap', 'workcenter_comparison' sections,
        a dict with an 'error' key when the date range is invalid,
        or None if the query fails.
    """
    # Validate date range before touching the cache or the database.
    validation = _validate_date_range(start_date, end_date)
    if validation:
        return {'error': validation}

    try:
        # Get filtered resources from cache
        resources = _get_filtered_resources(
            workcenter_groups=workcenter_groups,
            families=families,
            is_production=is_production,
            is_key=is_key,
            is_monitor=is_monitor,
        )

        if not resources:
            logger.warning("No resources match the filter criteria")
            # Return an empty-but-valid payload so the frontend can render
            # empty widgets instead of an error state.
            return {
                'kpi': _build_kpi_from_df(pd.DataFrame()),
                'trend': [],
                'heatmap': [],
                'workcenter_comparison': []
            }

        # Build resource lookup for dimension merging
        resource_lookup = _build_resource_lookup(resources)
        historyid_filter = _build_historyid_filter(resources)

        # Build SQL components
        date_trunc = _get_date_trunc(granularity)

        # Common parameters for all queries (dates are parameterized for safety)
        params = {'start_date': start_date, 'end_date': end_date}

        # Load SQL templates and replace placeholders. The HISTORYID filter is
        # spliced as text (built from cached, quote-escaped IDs); the dates go
        # through bind parameters.
        kpi_sql = SQLLoader.load("resource_history/kpi")
        kpi_sql = kpi_sql.replace("{{ HISTORYID_FILTER }}", historyid_filter)

        trend_sql = SQLLoader.load("resource_history/trend")
        trend_sql = trend_sql.replace("{{ HISTORYID_FILTER }}", historyid_filter)
        trend_sql = trend_sql.replace("{{ DATE_TRUNC }}", date_trunc)

        heatmap_raw_sql = SQLLoader.load("resource_history/heatmap")
        heatmap_raw_sql = heatmap_raw_sql.replace("{{ HISTORYID_FILTER }}", historyid_filter)
        heatmap_raw_sql = heatmap_raw_sql.replace("{{ DATE_TRUNC }}", date_trunc)

        # Execute the three independent queries in parallel.
        results = {}
        with ThreadPoolExecutor(max_workers=3) as executor:
            futures = {
                executor.submit(read_sql_df, kpi_sql, params): 'kpi',
                executor.submit(read_sql_df, trend_sql, params): 'trend',
                executor.submit(read_sql_df, heatmap_raw_sql, params): 'heatmap_raw',
            }
            for future in as_completed(futures):
                query_name = futures[future]
                try:
                    results[query_name] = future.result()
                except Exception as exc:
                    # A single failed sub-query degrades to an empty section
                    # instead of failing the whole summary.
                    logger.error(f"{query_name} query failed: {exc}")
                    results[query_name] = pd.DataFrame()

        # Build response from results
        kpi = _build_kpi_from_df(results.get('kpi', pd.DataFrame()))
        trend = _build_trend_from_df(results.get('trend', pd.DataFrame()), granularity)

        # Heatmap and workcenter comparison are both derived from the same
        # raw per-resource DataFrame, merged with cached dimension data.
        heatmap_raw_df = results.get('heatmap_raw', pd.DataFrame())
        heatmap = _build_heatmap_from_raw_df(heatmap_raw_df, resource_lookup, granularity)
        workcenter_comparison = _build_comparison_from_raw_df(heatmap_raw_df, resource_lookup)

        return {
            'kpi': kpi,
            'trend': trend,
            'heatmap': heatmap,
            'workcenter_comparison': workcenter_comparison
        }
    except Exception as exc:
        logger.error(f"Summary query failed: {exc}")
        import traceback
        traceback.print_exc()
        return None


def query_detail(
    start_date: str,
    end_date: str,
    granularity: str = 'day',
    workcenter_groups: Optional[List[str]] = None,
    families: Optional[List[str]] = None,
    is_production: bool = False,
    is_key: bool = False,
    is_monitor: bool = False,
) -> Optional[Dict[str, Any]]:
    """Query hierarchical detail data.

    Uses resource_cache as the source for equipment master data.
    Returns flat data with workcenter, family, resource dimensions.
    Frontend handles hierarchy assembly.

    Args:
        start_date: Start date in YYYY-MM-DD format
        end_date: End date in YYYY-MM-DD format
        granularity: Time granularity ('day', 'week', 'month', 'year')
        workcenter_groups: Optional list of WORKCENTER_GROUP names to filter
        families: Optional list of RESOURCEFAMILYNAME values to filter
        is_production: Filter by production flag
        is_key: Filter by key equipment flag
        is_monitor: Filter by monitor flag

    Returns:
        Dict with 'data', 'total', 'truncated' fields, a dict with an 'error'
        key when the date range is invalid, or None if the query fails.
    """
    # Validate date range before touching the cache or the database.
    validation = _validate_date_range(start_date, end_date)
    if validation:
        return {'error': validation}

    try:
        # Get filtered resources from cache
        resources = _get_filtered_resources(
            workcenter_groups=workcenter_groups,
            families=families,
            is_production=is_production,
            is_key=is_key,
            is_monitor=is_monitor,
        )

        if not resources:
            logger.warning("No resources match the filter criteria")
            return {
                'data': [],
                'total': 0,
                'truncated': False,
                'max_records': None
            }

        # Build resource lookup for dimension merging
        resource_lookup = _build_resource_lookup(resources)
        historyid_filter = _build_historyid_filter(resources)

        # Query SHIFT data grouped by HISTORYID (dates parameterized for safety)
        params = {'start_date': start_date, 'end_date': end_date}

        # Load SQL template and replace placeholder
        detail_sql = SQLLoader.load("resource_history/detail")
        detail_sql = detail_sql.replace("{{ HISTORYID_FILTER }}", historyid_filter)

        detail_df = read_sql_df(detail_sql, params)

        # Build detail data with dimension merge from cache
        data = _build_detail_from_raw_df(detail_df, resource_lookup)
        total = len(data)

        # 'truncated'/'max_records' are kept for API shape compatibility;
        # this endpoint never truncates.
        return {
            'data': data,
            'total': total,
            'truncated': False,
            'max_records': None
        }
    except Exception as exc:
        logger.error(f"Detail query failed: {exc}")
        import traceback
        traceback.print_exc()
        return None
def export_csv(
    start_date: str,
    end_date: str,
    granularity: str = 'day',
    workcenter_groups: Optional[List[str]] = None,
    families: Optional[List[str]] = None,
    is_production: bool = False,
    is_key: bool = False,
    is_monitor: bool = False,
) -> Generator[str, None, None]:
    """Generate CSV data as a stream for export.

    Uses resource_cache as the source for equipment master data.
    Yields CSV rows one at a time to avoid memory issues with large datasets.

    Args:
        start_date: Start date in YYYY-MM-DD format
        end_date: End date in YYYY-MM-DD format
        granularity: Time granularity
        workcenter_groups: Optional list of WORKCENTER_GROUP names to filter
        families: Optional list of RESOURCEFAMILYNAME values to filter
        is_production: Filter by production flag
        is_key: Filter by key equipment flag
        is_monitor: Filter by monitor flag

    Yields:
        CSV rows as strings; an "Error: ..." line on validation or query failure.
    """
    # Validate date range
    validation = _validate_date_range(start_date, end_date)
    if validation:
        yield f"Error: {validation}\n"
        return

    try:
        # Get filtered resources from cache
        resources = _get_filtered_resources(
            workcenter_groups=workcenter_groups,
            families=families,
            is_production=is_production,
            is_key=is_key,
            is_monitor=is_monitor,
        )

        if not resources:
            yield "Error: No resources match the filter criteria\n"
            return

        # Build resource lookup for dimension merging
        resource_lookup = _build_resource_lookup(resources)
        historyid_filter = _build_historyid_filter(resources)

        # Get workcenter mapping for WORKCENTER_GROUP
        from mes_dashboard.services.filter_cache import get_workcenter_mapping
        wc_mapping = get_workcenter_mapping() or {}

        # Query SHIFT data grouped by HISTORYID (dates parameterized for safety)
        params = {'start_date': start_date, 'end_date': end_date}

        # Load SQL template and replace placeholder (reuse detail.sql)
        sql = SQLLoader.load("resource_history/detail")
        sql = sql.replace("{{ HISTORYID_FILTER }}", historyid_filter)

        df = read_sql_df(sql, params)

        # Column order and headers come from the field contract; fall back to
        # the hard-coded defaults when the contract is missing or inconsistent.
        export_keys = get_export_api_keys('resource_history')
        headers = get_export_headers('resource_history')
        if not export_keys or not headers or len(export_keys) != len(headers):
            export_keys = [
                'workcenter',
                'family',
                'resource',
                'ou_pct',
                'availability_pct',
                'prd_hours',
                'prd_pct',
                'sby_hours',
                'sby_pct',
                'udt_hours',
                'udt_pct',
                'sdt_hours',
                'sdt_pct',
                'egt_hours',
                'egt_pct',
                'nst_hours',
                'nst_pct',
            ]
            headers = [
                '站點', '型號', '機台', 'OU%', 'Availability%',
                'PRD(h)', 'PRD(%)', 'SBY(h)', 'SBY(%)',
                'UDT(h)', 'UDT(%)', 'SDT(h)', 'SDT(%)',
                'EGT(h)', 'EGT(%)', 'NST(h)', 'NST(%)'
            ]

        # Write CSV header
        output = io.StringIO()
        writer = csv.writer(output)
        writer.writerow(headers)
        yield output.getvalue()
        output.truncate(0)
        output.seek(0)

        # Write data rows
        if df is not None:
            for _, row in df.iterrows():
                historyid = row['HISTORYID']
                resource_info = resource_lookup.get(historyid, {})

                # Skip if no resource info found
                if not resource_info:
                    continue

                # BUGFIX: previously `float(row[...] or 0)` — NaN is truthy, so
                # `float(NaN or 0)` stayed NaN and leaked NaN hours/percentages
                # into the CSV. _safe_float coerces None/NaN to 0.0, matching
                # how the detail endpoint handles the same columns.
                prd = _safe_float(row['PRD_HOURS'])
                sby = _safe_float(row['SBY_HOURS'])
                udt = _safe_float(row['UDT_HOURS'])
                sdt = _safe_float(row['SDT_HOURS'])
                egt = _safe_float(row['EGT_HOURS'])
                nst = _safe_float(row['NST_HOURS'])
                total = _safe_float(row['TOTAL_HOURS'])

                # Get dimension data from cache
                wc_name = resource_info.get('WORKCENTERNAME', '')
                wc_info = wc_mapping.get(wc_name, {})
                wc_group = wc_info.get('group', wc_name)
                family = resource_info.get('RESOURCEFAMILYNAME', '')
                resource_name = resource_info.get('RESOURCENAME', '')

                # Calculate percentages (shared helpers keep the rounding and
                # zero-denominator behavior consistent with the detail API).
                ou_pct = _calc_ou_pct(prd, sby, udt, sdt, egt)
                availability_pct = _calc_availability_pct(prd, sby, udt, sdt, egt, nst)
                prd_pct = _calc_status_pct(prd, total)
                sby_pct = _calc_status_pct(sby, total)
                udt_pct = _calc_status_pct(udt, total)
                sdt_pct = _calc_status_pct(sdt, total)
                egt_pct = _calc_status_pct(egt, total)
                nst_pct = _calc_status_pct(nst, total)

                value_map = {
                    'workcenter': wc_group,
                    'family': family,
                    'resource': resource_name,
                    'ou_pct': f"{ou_pct}%",
                    'availability_pct': f"{availability_pct}%",
                    'prd_hours': round(prd, 1),
                    'prd_pct': f"{prd_pct}%",
                    'sby_hours': round(sby, 1),
                    'sby_pct': f"{sby_pct}%",
                    'udt_hours': round(udt, 1),
                    'udt_pct': f"{udt_pct}%",
                    'sdt_hours': round(sdt, 1),
                    'sdt_pct': f"{sdt_pct}%",
                    'egt_hours': round(egt, 1),
                    'egt_pct': f"{egt_pct}%",
                    'nst_hours': round(nst, 1),
                    'nst_pct': f"{nst_pct}%",
                }
                csv_row = [value_map.get(key, '') for key in export_keys]
                writer.writerow(csv_row)
                yield output.getvalue()
                output.truncate(0)
                output.seek(0)

    except Exception as exc:
        logger.error(f"CSV export failed: {exc}")
        yield f"Error: {exc}\n"
f"{sby_pct}%", + 'udt_hours': round(udt, 1), + 'udt_pct': f"{udt_pct}%", + 'sdt_hours': round(sdt, 1), + 'sdt_pct': f"{sdt_pct}%", + 'egt_hours': round(egt, 1), + 'egt_pct': f"{egt_pct}%", + 'nst_hours': round(nst, 1), + 'nst_pct': f"{nst_pct}%", + } + csv_row = [value_map.get(key, '') for key in export_keys] + writer.writerow(csv_row) + yield output.getvalue() + output.truncate(0) + output.seek(0) + + except Exception as exc: + logger.error(f"CSV export failed: {exc}") + yield f"Error: {exc}\n" + + +# ============================================================ +# Helper Functions +# ============================================================ + +def _validate_date_range(start_date: str, end_date: str) -> Optional[str]: + """Validate date range doesn't exceed MAX_QUERY_DAYS.""" + try: + start = datetime.strptime(start_date, '%Y-%m-%d') + end = datetime.strptime(end_date, '%Y-%m-%d') + diff = (end - start).days + + if diff > MAX_QUERY_DAYS: + return f'查詢範圍不可超過 {MAX_QUERY_DAYS} 天(兩年)' + if diff < 0: + return '結束日期必須大於起始日期' + return None + except ValueError as e: + return f'日期格式錯誤: {e}' + + +def _get_date_trunc(granularity: str) -> str: + """Get Oracle TRUNC expression for date granularity. + + Note: Uses 'ss' as alias for shift_data CTE. 
+ """ + trunc_map = { + 'day': "TRUNC(TXNDATE)", + 'week': "TRUNC(TXNDATE, 'IW')", + 'month': "TRUNC(TXNDATE, 'MM')", + 'year': "TRUNC(TXNDATE, 'YYYY')" + } + return trunc_map.get(granularity, "TRUNC(TXNDATE)") + + +def _safe_float(value, default=0.0) -> float: + """Safely convert value to float, handling NaN and None.""" + if value is None or pd.isna(value): + return default + return float(value) + + +def _calc_ou_pct(prd: float, sby: float, udt: float, sdt: float, egt: float) -> float: + """Calculate OU% = PRD / (PRD + SBY + UDT + SDT + EGT) * 100.""" + denominator = prd + sby + udt + sdt + egt + return round(prd / denominator * 100, 1) if denominator > 0 else 0 + + +def _calc_availability_pct(prd: float, sby: float, udt: float, sdt: float, egt: float, nst: float) -> float: + """Calculate Availability% = (PRD + SBY + EGT) / (PRD + SBY + EGT + SDT + UDT + NST) * 100.""" + numerator = prd + sby + egt + denominator = prd + sby + egt + sdt + udt + nst + return round(numerator / denominator * 100, 1) if denominator > 0 else 0 + + +def _calc_status_pct(value: float, total: float) -> float: + """Calculate status percentage = value / total * 100.""" + return round(value / total * 100, 1) if total > 0 else 0 + + +def _build_kpi_from_df(df: pd.DataFrame) -> Dict[str, Any]: + """Build KPI dict from query result DataFrame.""" + if df is None or len(df) == 0: + return { + 'ou_pct': 0, + 'availability_pct': 0, + 'prd_hours': 0, + 'prd_pct': 0, + 'sby_hours': 0, + 'sby_pct': 0, + 'udt_hours': 0, + 'udt_pct': 0, + 'sdt_hours': 0, + 'sdt_pct': 0, + 'egt_hours': 0, + 'egt_pct': 0, + 'nst_hours': 0, + 'nst_pct': 0, + 'machine_count': 0 + } + + row = df.iloc[0] + prd = _safe_float(row['PRD_HOURS']) + sby = _safe_float(row['SBY_HOURS']) + udt = _safe_float(row['UDT_HOURS']) + sdt = _safe_float(row['SDT_HOURS']) + egt = _safe_float(row['EGT_HOURS']) + nst = _safe_float(row['NST_HOURS']) + machine_count = int(_safe_float(row['MACHINE_COUNT'])) + + # Total hours for percentage 
calculation (includes NST) + total_hours = prd + sby + udt + sdt + egt + nst + + return { + 'ou_pct': _calc_ou_pct(prd, sby, udt, sdt, egt), + 'availability_pct': _calc_availability_pct(prd, sby, udt, sdt, egt, nst), + 'prd_hours': round(prd, 1), + 'prd_pct': _calc_status_pct(prd, total_hours), + 'sby_hours': round(sby, 1), + 'sby_pct': _calc_status_pct(sby, total_hours), + 'udt_hours': round(udt, 1), + 'udt_pct': _calc_status_pct(udt, total_hours), + 'sdt_hours': round(sdt, 1), + 'sdt_pct': _calc_status_pct(sdt, total_hours), + 'egt_hours': round(egt, 1), + 'egt_pct': _calc_status_pct(egt, total_hours), + 'nst_hours': round(nst, 1), + 'nst_pct': _calc_status_pct(nst, total_hours), + 'machine_count': machine_count + } + + +def _format_date(date_val, granularity: str) -> Optional[str]: + """Format date value based on granularity.""" + if pd.isna(date_val): + return None + + if granularity == 'year': + return date_val.strftime('%Y') + elif granularity == 'month': + return date_val.strftime('%Y-%m') + elif granularity == 'week': + return date_val.strftime('%Y-%m-%d') # Week start date + else: + return date_val.strftime('%Y-%m-%d') + + +def _build_trend_from_df(df: pd.DataFrame, granularity: str) -> List[Dict]: + """Build trend data from query result DataFrame.""" + if df is None or len(df) == 0: + return [] + + result = [] + for _, row in df.iterrows(): + prd = _safe_float(row['PRD_HOURS']) + sby = _safe_float(row['SBY_HOURS']) + udt = _safe_float(row['UDT_HOURS']) + sdt = _safe_float(row['SDT_HOURS']) + egt = _safe_float(row['EGT_HOURS']) + nst = _safe_float(row['NST_HOURS']) + + result.append({ + 'date': _format_date(row['DATA_DATE'], granularity), + 'ou_pct': _calc_ou_pct(prd, sby, udt, sdt, egt), + 'availability_pct': _calc_availability_pct(prd, sby, udt, sdt, egt, nst), + 'prd_hours': round(prd, 1), + 'sby_hours': round(sby, 1), + 'udt_hours': round(udt, 1), + 'sdt_hours': round(sdt, 1), + 'egt_hours': round(egt, 1), + 'nst_hours': round(nst, 1) + }) + + return 
result + + +def _build_heatmap_from_raw_df( + df: pd.DataFrame, + resource_lookup: Dict[str, Dict[str, Any]], + granularity: str +) -> List[Dict]: + """Build heatmap data from raw SHIFT query grouped by HISTORYID. + + Merges dimension data from resource_lookup. + + Args: + df: DataFrame with HISTORYID, DATA_DATE, and status hours. + resource_lookup: Dict mapping RESOURCEID to resource info. + granularity: Time granularity for date formatting. + + Returns: + List of heatmap data dicts. + """ + if df is None or len(df) == 0: + return [] + + # Get workcenter mapping to convert WORKCENTERNAME to WORKCENTER_GROUP + from mes_dashboard.services.filter_cache import get_workcenter_mapping + wc_mapping = get_workcenter_mapping() or {} + + # Aggregate data by WORKCENTER_GROUP and date + # Track sequence for each workcenter group + wc_seq_map = {} + aggregated = {} + for _, row in df.iterrows(): + historyid = row['HISTORYID'] + resource_info = resource_lookup.get(historyid, {}) + + # Skip if no resource info + if not resource_info: + continue + + wc_name = resource_info.get('WORKCENTERNAME', '') + if not wc_name: + continue + + wc_info = wc_mapping.get(wc_name, {}) + wc_group = wc_info.get('group', wc_name) + wc_seq = wc_info.get('sequence', 999) + wc_seq_map[wc_group] = wc_seq # Store sequence for this group + date_str = _format_date(row['DATA_DATE'], granularity) + key = (wc_group, date_str) + + if key not in aggregated: + aggregated[key] = {'prd': 0, 'sby': 0, 'udt': 0, 'sdt': 0, 'egt': 0} + + aggregated[key]['prd'] += _safe_float(row['PRD_HOURS']) + aggregated[key]['sby'] += _safe_float(row['SBY_HOURS']) + aggregated[key]['udt'] += _safe_float(row['UDT_HOURS']) + aggregated[key]['sdt'] += _safe_float(row['SDT_HOURS']) + aggregated[key]['egt'] += _safe_float(row['EGT_HOURS']) + + result = [] + for (wc_group, date_str), data in aggregated.items(): + result.append({ + 'workcenter': wc_group, + 'workcenter_seq': wc_seq_map.get(wc_group, 999), + 'date': date_str, + 'ou_pct': 
_calc_ou_pct(data['prd'], data['sby'], data['udt'], data['sdt'], data['egt']) + }) + + # Sort by workcenter sequence (ascending, smaller first) and date + result.sort(key=lambda x: (x['workcenter_seq'], x['date'] or '')) + return result + + +def _build_comparison_from_raw_df( + df: pd.DataFrame, + resource_lookup: Dict[str, Dict[str, Any]] +) -> List[Dict]: + """Build workcenter comparison data from raw SHIFT query grouped by HISTORYID. + + Merges dimension data from resource_lookup. + + Args: + df: DataFrame with HISTORYID and status hours (may have DATA_DATE if from heatmap query). + resource_lookup: Dict mapping RESOURCEID to resource info. + + Returns: + List of comparison data dicts. + """ + if df is None or len(df) == 0: + return [] + + # Get workcenter mapping to convert WORKCENTERNAME to WORKCENTER_GROUP + from mes_dashboard.services.filter_cache import get_workcenter_mapping + wc_mapping = get_workcenter_mapping() or {} + + # First aggregate by HISTORYID (in case df is by HISTORYID + date) + by_resource = {} + for _, row in df.iterrows(): + historyid = row['HISTORYID'] + if historyid not in by_resource: + by_resource[historyid] = {'prd': 0, 'sby': 0, 'udt': 0, 'sdt': 0, 'egt': 0} + + by_resource[historyid]['prd'] += _safe_float(row['PRD_HOURS']) + by_resource[historyid]['sby'] += _safe_float(row['SBY_HOURS']) + by_resource[historyid]['udt'] += _safe_float(row['UDT_HOURS']) + by_resource[historyid]['sdt'] += _safe_float(row['SDT_HOURS']) + by_resource[historyid]['egt'] += _safe_float(row['EGT_HOURS']) + + # Then aggregate by WORKCENTER_GROUP + aggregated = {} + for historyid, hours in by_resource.items(): + resource_info = resource_lookup.get(historyid, {}) + + # Skip if no resource info + if not resource_info: + continue + + wc_name = resource_info.get('WORKCENTERNAME', '') + if not wc_name: + continue + + wc_info = wc_mapping.get(wc_name, {}) + wc_group = wc_info.get('group', wc_name) + + if wc_group not in aggregated: + aggregated[wc_group] = {'prd': 0, 
def _build_detail_from_raw_df(
    df: pd.DataFrame,
    resource_lookup: Dict[str, Dict[str, Any]]
) -> List[Dict]:
    """Turn raw per-resource SHIFT rows into flat detail records.

    Dimension columns (workcenter group, family, resource name) are merged in
    from *resource_lookup*; rows without a cache entry are dropped. The
    frontend assembles the workcenter -> family -> resource hierarchy.

    Args:
        df: DataFrame with HISTORYID, status hours, and TOTAL_HOURS.
        resource_lookup: Dict mapping RESOURCEID to resource info.

    Returns:
        Detail dicts sorted by workcenter sequence, family, then resource.
    """
    if df is None or len(df) == 0:
        return []

    from mes_dashboard.services.filter_cache import get_workcenter_mapping
    wc_mapping = get_workcenter_mapping() or {}

    statuses = ('prd', 'sby', 'udt', 'sdt', 'egt', 'nst')
    details: List[Dict] = []

    for _, record in df.iterrows():
        info = resource_lookup.get(record['HISTORYID'])
        if not info:
            continue  # not in cache — excluded by global filters

        hours = {s: _safe_float(record[f'{s.upper()}_HOURS']) for s in statuses}
        total = _safe_float(record['TOTAL_HOURS'])

        wc_name = info.get('WORKCENTERNAME', '')
        wc_meta = wc_mapping.get(wc_name, {})

        entry = {
            'workcenter': wc_meta.get('group', wc_name),  # fall back to the raw name
            'workcenter_seq': wc_meta.get('sequence', 999),  # sort key
            'family': info.get('RESOURCEFAMILYNAME', '') or '',
            'resource': info.get('RESOURCENAME', '') or '',
            'ou_pct': _calc_ou_pct(hours['prd'], hours['sby'], hours['udt'], hours['sdt'], hours['egt']),
            'availability_pct': _calc_availability_pct(
                hours['prd'], hours['sby'], hours['udt'], hours['sdt'], hours['egt'], hours['nst']
            ),
        }
        for status in statuses:
            entry[f'{status}_hours'] = round(hours[status], 1)
            entry[f'{status}_pct'] = round(hours[status] / total * 100, 1) if total > 0 else 0
        entry['machine_count'] = 1
        details.append(entry)

    # Smaller workcenter sequence first, then family, then resource name.
    details.sort(key=lambda e: (e['workcenter_seq'], e['family'], e['resource']))
    return details
first), then family, resource + result.sort(key=lambda x: (x['workcenter_seq'], x['family'], x['resource'])) + return result diff --git a/src/mes_dashboard/services/resource_service.py b/src/mes_dashboard/services/resource_service.py new file mode 100644 index 0000000..51a88f6 --- /dev/null +++ b/src/mes_dashboard/services/resource_service.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +"""Resource (Equipment) query services for MES Dashboard. + +Provides functions to query equipment status from DWH.DW_MES_RESOURCE and DWH.DW_MES_RESOURCESTATUS tables. +""" + +import logging +import pandas as pd +from typing import Optional, Dict, List, Any + +logger = logging.getLogger('mes_dashboard.resource_service') + +from mes_dashboard.core.database import ( + get_db_connection, + read_sql_df, + DatabasePoolExhaustedError, + DatabaseCircuitOpenError, +) +from mes_dashboard.core.utils import get_days_back, build_equipment_filter_sql +from mes_dashboard.config.constants import ( + EXCLUDED_LOCATIONS, + EXCLUDED_ASSET_STATUSES, + DEFAULT_DAYS_BACK, + STATUS_CATEGORIES, +) +from mes_dashboard.sql import SQLLoader, QueryBuilder +from mes_dashboard.sql.filters import CommonFilters +from mes_dashboard.services.resource_cache import get_all_resources +from mes_dashboard.services.realtime_equipment_cache import ( + get_all_equipment_status, + get_equipment_status_lookup, +) +from mes_dashboard.services.filter_cache import ( + get_workcenter_group, + get_workcenter_group_sequence, + get_workcenter_short, + get_workcenter_groups, +) + + +# ============================================================ +# Helper Functions +# ============================================================ + + +def _is_valid_value(value) -> bool: + """Check if a value is valid (not None, not NaN, not empty string). + + Args: + value: The value to check. + + Returns: + True if valid, False otherwise. 
+ """ + if value is None: + return False + if isinstance(value, str) and (not value.strip() or value == 'NaT'): + return False + # Check for NaN (pandas NaN or float NaN) + try: + if value != value: # NaN != NaN is True + return False + except (TypeError, ValueError): + pass + return True + + +# ============================================================ +# Resource Base Subquery +# ============================================================ + +def get_resource_latest_status_subquery(days_back: int = 30) -> str: + """Returns subquery to get latest status per resource. + + Filter conditions: + - (OBJECTCATEGORY = 'ASSEMBLY' AND OBJECTTYPE = 'ASSEMBLY') OR + (OBJECTCATEGORY = 'WAFERSORT' AND OBJECTTYPE = 'WAFERSORT') + - Excludes specified locations and asset statuses + + Uses ROW_NUMBER() for performance. + Only scans recent status changes (default 30 days). + Includes JOBID for SDT/UDT drill-down. + Includes PJ_LOTID from RESOURCE table. + + Args: + days_back: Number of days to look back + + Returns: + SQL subquery string for latest resource status. 
+ """ + # Build exclusion filters using CommonFilters (legacy format for SQL placeholders) + location_filter = CommonFilters.build_location_filter_legacy( + excluded_locations=list(EXCLUDED_LOCATIONS) if EXCLUDED_LOCATIONS else None + ) + if location_filter: + location_filter = f"AND {location_filter.replace('LOCATIONNAME', 'r.LOCATIONNAME')}" + + asset_status_filter = CommonFilters.build_asset_status_filter_legacy( + excluded_statuses=list(EXCLUDED_ASSET_STATUSES) if EXCLUDED_ASSET_STATUSES else None + ) + if asset_status_filter: + asset_status_filter = f"AND {asset_status_filter.replace('PJ_ASSETSSTATUS', 'r.PJ_ASSETSSTATUS')}" + + return SQLLoader.load_with_params( + "resource/latest_status", + days_back=days_back, + LOCATION_FILTER=location_filter, + ASSET_STATUS_FILTER=asset_status_filter, + ) + + +# ============================================================ +# Resource Summary Queries +# ============================================================ + +def query_resource_by_status(days_back: int = 30) -> Optional[pd.DataFrame]: + """Query resource count grouped by status. + + Args: + days_back: Number of days to look back + + Returns: + DataFrame with status counts or None if query fails. + """ + try: + base_sql = get_resource_latest_status_subquery(days_back) + sql = SQLLoader.load("resource/by_status") + sql = sql.replace("{{ LATEST_STATUS_SUBQUERY }}", base_sql) + return read_sql_df(sql) + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Resource by status query failed: {exc}") + return None + + +def query_resource_by_workcenter(days_back: int = 30) -> Optional[pd.DataFrame]: + """Query resource count grouped by workcenter and status. + + Args: + days_back: Number of days to look back + + Returns: + DataFrame with workcenter/status counts or None if query fails. 
+ """ + try: + base_sql = get_resource_latest_status_subquery(days_back) + sql = SQLLoader.load("resource/by_workcenter") + sql = sql.replace("{{ LATEST_STATUS_SUBQUERY }}", base_sql) + return read_sql_df(sql) + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Resource by workcenter query failed: {exc}") + return None + + +def query_resource_detail( + filters: Optional[Dict] = None, + limit: int = 500, + offset: int = 0, + days_back: int = 30 +) -> Optional[pd.DataFrame]: + """Query resource detail with optional filters. + + Args: + filters: Optional filter values + limit: Maximum rows to return + offset: Offset for pagination + days_back: Number of days to look back + + Returns: + DataFrame with resource details or None if query fails. + """ + try: + base_sql = get_resource_latest_status_subquery(days_back) + + # Use QueryBuilder for safe parameterized conditions + builder = QueryBuilder() + if filters: + if filters.get('workcenter'): + builder.add_param_condition('WORKCENTERNAME', filters['workcenter']) + if filters.get('status'): + builder.add_param_condition('NEWSTATUSNAME', filters['status']) + if filters.get('family'): + builder.add_param_condition('RESOURCEFAMILYNAME', filters['family']) + if filters.get('department'): + builder.add_param_condition('PJ_DEPARTMENT', filters['department']) + + # Equipment flag filters (boolean to 0/1) + if filters.get('isProduction') is not None: + builder.add_condition( + f"NVL(PJ_ISPRODUCTION, 0) = {1 if filters['isProduction'] else 0}" + ) + if filters.get('isKey') is not None: + builder.add_condition( + f"NVL(PJ_ISKEY, 0) = {1 if filters['isKey'] else 0}" + ) + if filters.get('isMonitor') is not None: + builder.add_condition( + f"NVL(PJ_ISMONITOR, 0) = {1 if filters['isMonitor'] else 0}" + ) + + # Build WHERE clause and get parameters + conditions_sql = builder.get_conditions_sql() + params = builder.params.copy() + + # Add pagination parameters + start_row 
= offset + 1 + end_row = offset + limit + params['start_row'] = start_row + params['end_row'] = end_row + + where_clause = f" AND {conditions_sql}" if conditions_sql else "" + + # Load SQL from file and replace placeholders + sql = SQLLoader.load("resource/detail") + sql = sql.replace("{{ LATEST_STATUS_SUBQUERY }}", base_sql) + sql = sql.replace("{{ WHERE_CLAUSE }}", where_clause) + + df = read_sql_df(sql, params) + + # Convert datetime to string + if 'LASTSTATUSCHANGEDATE' in df.columns: + df['LASTSTATUSCHANGEDATE'] = df['LASTSTATUSCHANGEDATE'].apply( + lambda x: x.strftime('%Y-%m-%d %H:%M:%S') if pd.notna(x) else None + ) + + return df + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Resource detail query failed: {exc}") + return None + + +def query_resource_workcenter_status_matrix(days_back: int = 30) -> Optional[pd.DataFrame]: + """Query resource count matrix by workcenter and status category. + + Status values in database: + - PRD: Productive + - SBY: Standby + - UDT: Unscheduled Down Time + - SDT: Scheduled Down Time + - EGT: Engineering Time + - NST: Not Scheduled Time + + Args: + days_back: Number of days to look back + + Returns: + DataFrame with workcenter/status matrix or None if query fails. + """ + try: + base_sql = get_resource_latest_status_subquery(days_back) + sql = SQLLoader.load("resource/workcenter_status_matrix") + sql = sql.replace("{{ LATEST_STATUS_SUBQUERY }}", base_sql) + return read_sql_df(sql) + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Resource status matrix query failed: {exc}") + return None + + +def query_resource_filter_options(days_back: int = 30) -> Optional[Dict]: + """Get available filter options for resource queries. + + Uses resource_cache for static resource data (workcenters, families, departments, locations). + Only queries Oracle for dynamic status data. 

    Args:
        days_back: Number of days to look back

    Returns:
        Dict with filter options or None if query fails.
    """
    # Local import avoids a circular dependency at module load time.
    from mes_dashboard.services.resource_cache import (
        get_workcenters,
        get_resource_families,
        get_departments,
        get_locations,
        get_distinct_values,
    )

    try:
        # Get static filter options from resource cache
        workcenters = get_workcenters()
        families = get_resource_families()
        departments = get_departments()
        locations = get_locations()
        assets_statuses = get_distinct_values('PJ_ASSETSSTATUS')

        # Query only dynamic status data from Oracle using SQLLoader
        sql_statuses = SQLLoader.load("resource/distinct_statuses")
        status_df = read_sql_df(sql_statuses, {'days_back': days_back})
        statuses = status_df['NEWSTATUSNAME'].tolist() if status_df is not None else []

        return {
            'workcenters': workcenters,
            'statuses': statuses,
            'families': families,
            'departments': departments,
            'locations': locations,
            'assets_statuses': assets_statuses
        }
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"Resource filter options query failed: {exc}", exc_info=True)
        return None


# ============================================================
# Merged Resource Status Query (Three-Layer Cache)
# ============================================================

def get_merged_resource_status(
    workcenter_groups: Optional[List[str]] = None,
    is_production: Optional[bool] = None,
    is_key: Optional[bool] = None,
    is_monitor: Optional[bool] = None,
    status_categories: Optional[List[str]] = None,
) -> List[Dict[str, Any]]:
    """Get merged resource status from three cache layers.

    Combines:
    - resource-cache: Equipment master data (RESOURCENAME, WORKCENTERNAME, etc.)
    - realtime-equipment-cache: Real-time status (EQUIPMENTASSETSSTATUS, JOBORDER, etc.)
    - workcenter-mapping: WORKCENTER_GROUP, WORKCENTER_SHORT

    Args:
        workcenter_groups: Filter by WORKCENTER_GROUP (e.g., ['焊接', '成型'])
        is_production: Filter by PJ_ISPRODUCTION flag
        is_key: Filter by PJ_ISKEY flag
        is_monitor: Filter by PJ_ISMONITOR flag
        status_categories: Filter by STATUS_CATEGORY (e.g., ['PRODUCTIVE', 'DOWN'])

    Returns:
        List of merged equipment status records.
    """
    import logging
    logger = logging.getLogger('mes_dashboard.resource_service')

    # Resource master data is served from full-table cache.
    resources = get_all_resources()
    if not resources:
        logger.warning("No resources from resource-cache")
        return []

    # Get realtime status from cache; fall back to building the lookup
    # keyed by stringified RESOURCEID from the full status list.
    status_lookup = get_equipment_status_lookup()
    if not status_lookup:
        equipment_status = get_all_equipment_status()
        status_lookup = {
            str(row.get('RESOURCEID')): row
            for row in equipment_status
            if row.get('RESOURCEID') is not None
        }

    # Precompute workcenter mapping once per unique workcenter to avoid repetitive lookups.
    wc_names = {
        row.get('WORKCENTERNAME')
        for row in resources
        if row.get('WORKCENTERNAME')
    }
    wc_group_map = {name: get_workcenter_group(name) for name in wc_names}
    wc_group_seq_map = {name: get_workcenter_group_sequence(name) for name in wc_names}
    wc_short_map = {name: get_workcenter_short(name) for name in wc_names}

    # Merge data
    merged = []
    for resource in resources:
        resource_id = resource.get('RESOURCEID')
        workcenter_name = resource.get('WORKCENTERNAME')
        # Lookup key must match the stringified key used above.
        resource_key = str(resource_id) if resource_id is not None else None
        realtime = status_lookup.get(resource_key, {}) if resource_key else {}

        wc_group = wc_group_map.get(workcenter_name) if workcenter_name else None
        wc_group_seq = wc_group_seq_map.get(workcenter_name) if workcenter_name else None
        wc_short = wc_short_map.get(workcenter_name) if workcenter_name else None

        # Apply filters before creating merged payload.
        if workcenter_groups and wc_group not in workcenter_groups:
            continue
        if is_production is not None and bool(resource.get('PJ_ISPRODUCTION')) != is_production:
            continue
        if is_key is not None and bool(resource.get('PJ_ISKEY')) != is_key:
            continue
        if is_monitor is not None and bool(resource.get('PJ_ISMONITOR')) != is_monitor:
            continue
        if status_categories and realtime.get('STATUS_CATEGORY') not in status_categories:
            continue

        # Build merged record
        record = {
            # From resource-cache
            'RESOURCEID': resource_id,
            'RESOURCENAME': resource.get('RESOURCENAME'),
            'WORKCENTERNAME': workcenter_name,
            'RESOURCEFAMILYNAME': resource.get('RESOURCEFAMILYNAME'),
            'PJ_DEPARTMENT': resource.get('PJ_DEPARTMENT'),
            'PJ_ASSETSSTATUS': resource.get('PJ_ASSETSSTATUS'),
            'PJ_ISPRODUCTION': resource.get('PJ_ISPRODUCTION'),
            'PJ_ISKEY': resource.get('PJ_ISKEY'),
            'PJ_ISMONITOR': resource.get('PJ_ISMONITOR'),
            'VENDORNAME': resource.get('VENDORNAME'),
            'VENDORMODEL': resource.get('VENDORMODEL'),
            'LOCATIONNAME': resource.get('LOCATIONNAME'),
            # From workcenter-mapping
            'WORKCENTER_GROUP': wc_group,
            'WORKCENTER_GROUP_SEQ': wc_group_seq,
            'WORKCENTER_SHORT': wc_short,
            # From realtime-equipment-cache
            'EQUIPMENTASSETSSTATUS': realtime.get('EQUIPMENTASSETSSTATUS'),
            'EQUIPMENTASSETSSTATUSREASON': realtime.get('EQUIPMENTASSETSSTATUSREASON'),
            'STATUS_CATEGORY': realtime.get('STATUS_CATEGORY'),
            # JOB related fields
            'JOBORDER': realtime.get('JOBORDER'),
            'JOBMODEL': realtime.get('JOBMODEL'),
            'JOBSTAGE': realtime.get('JOBSTAGE'),
            'JOBID': realtime.get('JOBID'),
            'JOBSTATUS': realtime.get('JOBSTATUS'),
            'CREATEDATE': realtime.get('CREATEDATE'),
            'CREATEUSERNAME': realtime.get('CREATEUSERNAME'),
            'CREATEUSER': realtime.get('CREATEUSER'),
            'TECHNICIANUSERNAME': realtime.get('TECHNICIANUSERNAME'),
            'TECHNICIANUSER': realtime.get('TECHNICIANUSER'),
            'SYMPTOMCODE': realtime.get('SYMPTOMCODE'),
            'CAUSECODE': realtime.get('CAUSECODE'),
'REPAIRCODE': realtime.get('REPAIRCODE'), + # LOT related fields + 'LOT_COUNT': realtime.get('LOT_COUNT'), + 'LOT_DETAILS': realtime.get('LOT_DETAILS'), + 'TOTAL_TRACKIN_QTY': realtime.get('TOTAL_TRACKIN_QTY'), + 'LATEST_TRACKIN_TIME': realtime.get('LATEST_TRACKIN_TIME'), + } + + merged.append(record) + + logger.debug(f"Merged {len(merged)} resource status records") + return merged + + +def get_resource_status_summary( + workcenter_groups: Optional[List[str]] = None, + is_production: Optional[bool] = None, + is_key: Optional[bool] = None, + is_monitor: Optional[bool] = None, +) -> Dict[str, Any]: + """Get resource status summary statistics. + + Args: + workcenter_groups: Filter by WORKCENTER_GROUP + is_production: Filter by PJ_ISPRODUCTION flag + is_key: Filter by PJ_ISKEY flag + is_monitor: Filter by PJ_ISMONITOR flag + + Returns: + Dict with summary statistics including OU%, Availability%, and per-status counts. + """ + # Get merged data with filters (except status_categories) + data = get_merged_resource_status( + workcenter_groups=workcenter_groups, + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + + if not data: + return { + 'total_count': 0, + 'by_status_category': {}, + 'by_status': {}, + 'by_workcenter_group': {}, + 'with_active_job': 0, + 'with_wip': 0, + 'ou_pct': 0, + 'availability_pct': 0, + } + + # Count by status category (for backward compatibility) + by_status_category = {} + for record in data: + cat = record.get('STATUS_CATEGORY') or 'UNKNOWN' + by_status_category[cat] = by_status_category.get(cat, 0) + 1 + + # Count by individual E10 status (PRD, SBY, UDT, SDT, EGT, NST) + by_status = {'PRD': 0, 'SBY': 0, 'UDT': 0, 'SDT': 0, 'EGT': 0, 'NST': 0, 'OTHER': 0} + for record in data: + status = record.get('EQUIPMENTASSETSSTATUS') or 'UNKNOWN' + if status in by_status: + by_status[status] += 1 + else: + by_status['OTHER'] += 1 + + # Count by workcenter group + by_workcenter_group = {} + for record in data: + group = 
record.get('WORKCENTER_GROUP') or 'UNKNOWN' + by_workcenter_group[group] = by_workcenter_group.get(group, 0) + 1 + + # Count with active job (use _is_valid_value to exclude NaN/None/empty) + with_active_job = sum(1 for r in data if _is_valid_value(r.get('JOBORDER'))) + + # Count with WIP + with_wip = sum(1 for r in data if (r.get('LOT_COUNT') or 0) > 0) + + # Calculate OU% = PRD / (PRD + SBY + UDT + SDT + EGT) * 100 + prd = by_status['PRD'] + sby = by_status['SBY'] + udt = by_status['UDT'] + sdt = by_status['SDT'] + egt = by_status['EGT'] + nst = by_status['NST'] + + ou_denominator = prd + sby + udt + sdt + egt + ou_pct = round(prd / ou_denominator * 100, 1) if ou_denominator > 0 else 0 + + # Calculate Availability% = (PRD + SBY + EGT) / total * 100 + total_count = len(data) + availability_pct = round((prd + sby + egt) / total_count * 100, 1) if total_count > 0 else 0 + + return { + 'total_count': total_count, + 'by_status_category': by_status_category, + 'by_status': by_status, + 'by_workcenter_group': by_workcenter_group, + 'with_active_job': with_active_job, + 'with_wip': with_wip, + 'ou_pct': ou_pct, + 'availability_pct': availability_pct, + } + + +def get_workcenter_status_matrix( + is_production: Optional[bool] = None, + is_key: Optional[bool] = None, + is_monitor: Optional[bool] = None, +) -> List[Dict[str, Any]]: + """Get workcenter × status matrix. + + Returns count of equipment by workcenter group and status. + + Args: + is_production: Filter by PJ_ISPRODUCTION flag + is_key: Filter by PJ_ISKEY flag + is_monitor: Filter by PJ_ISMONITOR flag + + Returns: + List of dicts with workcenter_group and status counts. 
+ """ + # Get merged data + data = get_merged_resource_status( + is_production=is_production, + is_key=is_key, + is_monitor=is_monitor, + ) + + if not data: + return [] + + # Get all workcenter groups with sequence + all_groups = get_workcenter_groups() or [] + group_sequence = {g['name']: g['sequence'] for g in all_groups} + + # Build matrix + matrix = {} + for record in data: + group = record.get('WORKCENTER_GROUP') or 'UNKNOWN' + status = record.get('EQUIPMENTASSETSSTATUS') or 'UNKNOWN' + + if group not in matrix: + matrix[group] = { + 'workcenter_group': group, + 'workcenter_sequence': group_sequence.get(group, 999), + 'total': 0, + 'PRD': 0, + 'SBY': 0, + 'UDT': 0, + 'SDT': 0, + 'EGT': 0, + 'NST': 0, + 'OTHER': 0, + } + + matrix[group]['total'] += 1 + + # Categorize status + if status in ('PRD', 'SBY', 'UDT', 'SDT', 'EGT', 'NST'): + matrix[group][status] += 1 + else: + matrix[group]['OTHER'] += 1 + + # Convert to list and sort by sequence + result = list(matrix.values()) + result.sort(key=lambda x: x['workcenter_sequence']) + + return result diff --git a/src/mes_dashboard/services/wip_service.py b/src/mes_dashboard/services/wip_service.py new file mode 100644 index 0000000..60548ed --- /dev/null +++ b/src/mes_dashboard/services/wip_service.py @@ -0,0 +1,2535 @@ +# -*- coding: utf-8 -*- +"""WIP (Work In Progress) query services for MES Dashboard. + +Provides functions to query WIP data from DWH.DW_MES_LOT_V view. +This view provides real-time WIP information updated every 5 minutes. + +Now uses Redis cache when available, with fallback to Oracle direct query. 
+""" + +import logging +import threading +from datetime import datetime +from typing import Optional, Dict, List, Any + +import numpy as np +import pandas as pd + +from mes_dashboard.core.database import ( + read_sql_df, + DatabasePoolExhaustedError, + DatabaseCircuitOpenError, +) +from mes_dashboard.core.cache import ( + get_cached_wip_data, + get_cached_sys_date, + get_cache_updated_at, +) +from mes_dashboard.sql import SQLLoader, QueryBuilder +from mes_dashboard.sql.filters import CommonFilters, NON_QUALITY_HOLD_REASONS + +logger = logging.getLogger('mes_dashboard.wip_service') + +_wip_search_index_lock = threading.Lock() +_wip_search_index_cache: Dict[str, Dict[str, Any]] = {} + + +def _safe_value(val): + """Convert pandas NaN/NaT to None and numpy types to native Python types for JSON serialization.""" + if pd.isna(val): + return None + # Convert numpy types to native Python types for JSON serialization + if hasattr(val, 'item'): # numpy scalar types have .item() method + return val.item() + return val + + +def _build_base_conditions_builder( + include_dummy: bool = False, + workorder: Optional[str] = None, + lotid: Optional[str] = None, + builder: Optional[QueryBuilder] = None +) -> QueryBuilder: + """Build base WHERE conditions for WIP queries using QueryBuilder. 

    Args:
        include_dummy: If False (default), exclude LOTID containing 'DUMMY'
        workorder: Optional WORKORDER filter (fuzzy match)
        lotid: Optional LOTID filter (fuzzy match)
        builder: Optional existing QueryBuilder to add conditions to

    Returns:
        QueryBuilder with base conditions and parameters
    """
    if builder is None:
        builder = QueryBuilder()

    # Exclude raw materials (NULL WORKORDER)
    builder.add_is_not_null("WORKORDER")

    # DUMMY exclusion (default behavior)
    if not include_dummy:
        builder.add_condition("LOTID NOT LIKE '%DUMMY%'")

    # WORKORDER filter (fuzzy match)
    if workorder:
        builder.add_like_condition("WORKORDER", workorder, position="both")

    # LOTID filter (fuzzy match)
    if lotid:
        builder.add_like_condition("LOTID", lotid, position="both")

    return builder


# ============================================================
# Hold Type Classification
# ============================================================
# NON_QUALITY_HOLD_REASONS is imported from sql.filters


def is_quality_hold(reason: str) -> bool:
    """Check if a hold reason is quality-related.

    Wrapper for CommonFilters.is_quality_hold for backwards compatibility.
    """
    return CommonFilters.is_quality_hold(reason)


def _add_hold_type_conditions(
    builder: QueryBuilder,
    hold_type: Optional[str] = None
) -> QueryBuilder:
    """Add hold type filter conditions to QueryBuilder.

    Args:
        builder: QueryBuilder to add conditions to
        hold_type: 'quality' for quality holds, 'non-quality' for non-quality holds

    Returns:
        QueryBuilder with hold type conditions added
    """
    if hold_type == 'quality':
        # Quality hold: HOLDREASONNAME is NULL or NOT in non-quality list
        builder.add_not_in_condition(
            "HOLDREASONNAME",
            list(NON_QUALITY_HOLD_REASONS),
            allow_null=True
        )
    elif hold_type == 'non-quality':
        # Non-quality hold: HOLDREASONNAME is in non-quality list
        builder.add_in_condition("HOLDREASONNAME", list(NON_QUALITY_HOLD_REASONS))
    return builder


# ============================================================
# Data Source Configuration
# ============================================================
# WIP view for real-time lot data
WIP_VIEW = "DWH.DW_MES_LOT_V"


# ============================================================
# Cache Data Helper Functions
# ============================================================

def _get_wip_dataframe() -> Optional[pd.DataFrame]:
    """Get WIP data from cache or return None if unavailable.

    Returns:
        DataFrame with WIP data from Redis cache, or None if cache miss.
    """
    df = get_cached_wip_data()
    if df is not None and not df.empty:
        logger.debug(f"Using cached WIP data ({len(df)} rows)")
        return df
    return None


def _get_wip_cache_version() -> str:
    """Build a lightweight cache version marker for derived index refresh."""
    updated_at = get_cache_updated_at() or ""
    sys_date = get_cached_sys_date() or ""
    return f"{updated_at}|{sys_date}"


def _distinct_sorted_values(df: pd.DataFrame, column: str) -> List[str]:
    """Return sorted distinct non-empty string values of *column*, or [] if absent."""
    if column not in df.columns:
        return []
    series = df[column].dropna().astype(str)
    if series.empty:
        return []
    # Drop empty strings left after the astype(str) conversion.
    series = series[series.str.len() > 0]
    if series.empty:
        return []
    return series.drop_duplicates().sort_values().tolist()


def _build_wip_search_index(df: pd.DataFrame, include_dummy: bool) -> Dict[str, Any]:
    """Build the autocomplete search index payload from a WIP DataFrame."""
    filtered = _filter_base_conditions(df, include_dummy=include_dummy)
    return {
        "built_at": datetime.now().isoformat(),
        "row_count": len(filtered),
        "workorders": _distinct_sorted_values(filtered, "WORKORDER"),
        "lotids": _distinct_sorted_values(filtered, "LOTID"),
        "packages": _distinct_sorted_values(filtered, "PACKAGE_LEF"),
        "types": _distinct_sorted_values(filtered, "PJ_TYPE"),
    }


def _get_wip_search_index(include_dummy: bool) -> Optional[Dict[str, Any]]:
    """Return the (lazily rebuilt) search index for the given dummy-inclusion mode.

    The index is versioned by the underlying cache timestamp; a stale or missing
    entry is rebuilt from the cached WIP DataFrame. Returns None on cache miss.
    """
    cache_key = "with_dummy" if include_dummy else "without_dummy"
    version = _get_wip_cache_version()

    with _wip_search_index_lock:
        cached = _wip_search_index_cache.get(cache_key)
        if cached and cached.get("version") == version:
            return cached

    # Build outside the lock so index construction doesn't block readers.
    df = _get_wip_dataframe()
    if df is None:
        return None

    index_payload = _build_wip_search_index(df, include_dummy=include_dummy)
    index_payload["version"] = version

    with _wip_search_index_lock:
        _wip_search_index_cache[cache_key] = index_payload
    return index_payload


def _search_values_from_index(values: List[str], query: str, limit: int) -> List[str]:
    """Case-insensitive substring match over *values*, capped at *limit* results."""
    query_lower = query.lower()
    matched = [value for value in values if query_lower in value.lower()]
    return matched[:limit]


def get_wip_search_index_status() -> Dict[str, Any]:
    """Expose WIP derived search-index freshness for diagnostics."""
    with _wip_search_index_lock:
        snapshot = {}
        for key, payload in _wip_search_index_cache.items():
            snapshot[key] = {
                "version": payload.get("version"),
                "built_at": payload.get("built_at"),
                "row_count": payload.get("row_count", 0),
                "workorders": len(payload.get("workorders", [])),
                "lotids": len(payload.get("lotids", [])),
                "packages": len(payload.get("packages", [])),
                "types": len(payload.get("types", [])),
            }
        return snapshot


def _add_wip_status_columns(df: pd.DataFrame) -> pd.DataFrame:
    """Add computed WIP status columns to DataFrame.

    Adds columns:
    - WIP_STATUS: 'RUN', 'HOLD', or 'QUEUE'
    - IS_QUALITY_HOLD: True if quality hold
    - IS_NON_QUALITY_HOLD: True if non-quality hold

    Args:
        df: DataFrame with EQUIPMENTCOUNT, CURRENTHOLDCOUNT, HOLDREASONNAME columns

    Returns:
        DataFrame with additional status columns
    """
    df = df.copy()

    # Ensure numeric columns
    df['EQUIPMENTCOUNT'] = pd.to_numeric(df['EQUIPMENTCOUNT'], errors='coerce').fillna(0)
    df['CURRENTHOLDCOUNT'] = pd.to_numeric(df['CURRENTHOLDCOUNT'], errors='coerce').fillna(0)
    df['QTY'] = pd.to_numeric(df['QTY'], errors='coerce').fillna(0)

    # Compute WIP status: RUN wins over HOLD (equipment assigned), HOLD over QUEUE.
    df['WIP_STATUS'] = 'QUEUE'  # Default
    df.loc[df['EQUIPMENTCOUNT'] > 0, 'WIP_STATUS'] = 'RUN'
    df.loc[(df['EQUIPMENTCOUNT'] == 0) & (df['CURRENTHOLDCOUNT'] > 0), 'WIP_STATUS'] = 'HOLD'

    # Compute hold type. NOTE: the reassignment order matters — IS_QUALITY_HOLD
    # reads the raw membership mask before IS_NON_QUALITY_HOLD is AND-ed with HOLD.
    df['IS_NON_QUALITY_HOLD'] = df['HOLDREASONNAME'].isin(NON_QUALITY_HOLD_REASONS)
    df['IS_QUALITY_HOLD'] = (df['WIP_STATUS'] == 'HOLD') & ~df['IS_NON_QUALITY_HOLD']
    df['IS_NON_QUALITY_HOLD'] = (df['WIP_STATUS'] == 'HOLD') & df['IS_NON_QUALITY_HOLD']

    return df


def _filter_base_conditions(
    df: pd.DataFrame,
    include_dummy: bool = False,
    workorder: Optional[str] = None,
    lotid: Optional[str] = None
) -> pd.DataFrame:
    """Apply base filter conditions to DataFrame.

    Args:
        df: DataFrame to filter
        include_dummy: If False (default), exclude LOTID containing 'DUMMY'
        workorder: Optional WORKORDER filter (fuzzy match)
        lotid: Optional LOTID filter (fuzzy match)

    Returns:
        Filtered DataFrame
    """
    df = df.copy()

    # Exclude NULL WORKORDER (raw materials)
    df = df[df['WORKORDER'].notna()]

    # DUMMY exclusion
    if not include_dummy:
        df = df[~df['LOTID'].str.contains('DUMMY', case=False, na=False)]

    # WORKORDER filter (fuzzy match)
    if workorder:
        df = df[df['WORKORDER'].str.contains(workorder, case=False, na=False)]

    # LOTID filter (fuzzy match)
    if lotid:
        df = df[df['LOTID'].str.contains(lotid, case=False, na=False)]

    return df


# ============================================================
# Overview API Functions
# ============================================================

def get_wip_summary(
    include_dummy: bool = False,
    workorder: Optional[str] = None,
    lotid: Optional[str] = None,
    package: Optional[str] = None,
    pj_type: Optional[str] = None
) -> Optional[Dict[str, Any]]:
    """Get WIP KPI summary for overview dashboard.

    Uses Redis cache when available, falls back to Oracle direct query.

    Args:
        include_dummy: If True, include DUMMY lots (default: False)
        workorder: Optional WORKORDER filter (fuzzy match)
        lotid: Optional LOTID filter (fuzzy match)
        package: Optional PACKAGE_LEF filter (exact match)
        pj_type: Optional PJ_TYPE filter (exact match)

    Returns:
        Dict with summary stats (camelCase):
        - totalLots: Total number of lots
        - totalQtyPcs: Total quantity
        - byWipStatus: Grouped counts for RUN/QUEUE/HOLD
        - dataUpdateDate: Data timestamp
    """
    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            df = _filter_base_conditions(cached_df, include_dummy, workorder, lotid)
            df = _add_wip_status_columns(df)

            # Apply package filter
            if package and 'PACKAGE_LEF' in df.columns:
                df = df[df['PACKAGE_LEF'] == package]

            # Apply pj_type filter
            if pj_type and 'PJ_TYPE' in df.columns:
                df = df[df['PJ_TYPE'] == pj_type]

            if df.empty:
                # Zero-filled payload keeps the response shape stable for the UI.
                return {
                    'totalLots': 0,
                    'totalQtyPcs': 0,
                    'byWipStatus': {
                        'run': {'lots': 0, 'qtyPcs': 0},
                        'queue': {'lots': 0, 'qtyPcs': 0},
                        'hold': {'lots': 0, 'qtyPcs': 0},
                        'qualityHold': {'lots': 0, 'qtyPcs': 0},
                        'nonQualityHold': {'lots': 0, 'qtyPcs': 0}
                    },
                    'dataUpdateDate': get_cached_sys_date()
                }

            # Calculate summary from cached data
            run_df = df[df['WIP_STATUS'] == 'RUN']
            queue_df = df[df['WIP_STATUS'] == 'QUEUE']
            hold_df = df[df['WIP_STATUS'] == 'HOLD']
            quality_hold_df = df[df['IS_QUALITY_HOLD']]
            non_quality_hold_df = df[df['IS_NON_QUALITY_HOLD']]

            return {
                'totalLots': len(df),
                'totalQtyPcs': int(df['QTY'].sum()),
                'byWipStatus': {
                    'run': {
                        'lots': len(run_df),
                        'qtyPcs': int(run_df['QTY'].sum())
                    },
                    'queue': {
                        'lots': len(queue_df),
                        'qtyPcs': int(queue_df['QTY'].sum())
                    },
                    'hold': {
                        'lots': len(hold_df),
                        'qtyPcs': int(hold_df['QTY'].sum())
                    },
                    'qualityHold': {
                        'lots': len(quality_hold_df),
                        'qtyPcs': int(quality_hold_df['QTY'].sum())
                    },
                    'nonQualityHold': {
                        'lots': len(non_quality_hold_df),
                        'qtyPcs': int(non_quality_hold_df['QTY'].sum())
                    }
                },
                'dataUpdateDate': get_cached_sys_date()
            }
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            raise
        except Exception as exc:
            logger.warning(f"Cache-based summary calculation failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _get_wip_summary_from_oracle(include_dummy, workorder, lotid, package, pj_type)


def _get_wip_summary_from_oracle(
    include_dummy: bool = False,
    workorder: Optional[str] = None,
    lotid: Optional[str] = None,
    package: Optional[str] = None,
    pj_type: Optional[str] = None
) -> Optional[Dict[str, Any]]:
    """Get WIP summary directly from Oracle (fallback)."""
    try:
        # Build conditions using QueryBuilder
        builder = _build_base_conditions_builder(include_dummy, workorder, lotid)

        if package:
            builder.add_param_condition("PACKAGE_LEF", package)
        if pj_type:
            builder.add_param_condition("PJ_TYPE", pj_type)

        # Load SQL template and build query
        base_sql = SQLLoader.load("wip/summary")
        builder.base_sql = base_sql

        # Replace NON_QUALITY_REASONS placeholder (must be literal values for CASE expressions)
        non_quality_list = CommonFilters.get_non_quality_reasons_sql()
        builder.base_sql = builder.base_sql.replace("{{ NON_QUALITY_REASONS }}", non_quality_list)

        sql, params = builder.build()
        df = read_sql_df(sql, params)

        if df is None or df.empty:
            return None

        # Single aggregate row; `or 0` guards NULL aggregates from the database.
        row = df.iloc[0]
        return {
            'totalLots': int(row['TOTAL_LOTS'] or 0),
            'totalQtyPcs': int(row['TOTAL_QTY_PCS'] or 0),
            'byWipStatus': {
                'run': {
                    'lots': int(row['RUN_LOTS'] or 0),
                    'qtyPcs': int(row['RUN_QTY_PCS'] or 0)
                },
                'queue': {
                    'lots': int(row['QUEUE_LOTS'] or 0),
                    'qtyPcs': int(row['QUEUE_QTY_PCS'] or 0)
                },
                'hold': {
                    'lots': int(row['HOLD_LOTS'] or 0),
                    'qtyPcs': int(row['HOLD_QTY_PCS'] or 0)
                },
                'qualityHold': {
                    'lots': int(row['QUALITY_HOLD_LOTS'] or 0),
                    'qtyPcs': int(row['QUALITY_HOLD_QTY_PCS'] or 0)
                },
                'nonQualityHold': {
                    'lots': int(row['NON_QUALITY_HOLD_LOTS'] or 0),
                    'qtyPcs': int(row['NON_QUALITY_HOLD_QTY_PCS'] or 0)
                }
            },
            'dataUpdateDate': str(row['DATA_UPDATE_DATE']) if row['DATA_UPDATE_DATE'] else None
        }
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"WIP summary query failed: {exc}")
        return None


def get_wip_matrix(
    include_dummy: bool = False,
    workorder: Optional[str] = None,
    lotid: Optional[str] = None,
    status: Optional[str] = None,
    hold_type: Optional[str] = None,
    package: Optional[str] = None,
    pj_type: Optional[str] = None
) -> Optional[Dict[str, Any]]:
    """Get workcenter x product line matrix for overview dashboard.

    Uses Redis cache when available, falls back to Oracle direct query.

    Args:
        include_dummy: If True, include DUMMY lots (default: False)
        workorder: Optional WORKORDER filter (fuzzy match)
        lotid: Optional LOTID filter (fuzzy match)
        status: Optional WIP status filter ('RUN', 'QUEUE', 'HOLD')
        hold_type: Optional hold type filter ('quality', 'non-quality')
            Only effective when status='HOLD'
        package: Optional PACKAGE_LEF filter (exact match)
        pj_type: Optional PJ_TYPE filter (exact match)

    Returns:
        Dict with matrix data:
        - workcenters: List of workcenter groups (sorted by WORKCENTERSEQUENCE_GROUP)
        - packages: List of product lines (sorted by total QTY desc)
        - matrix: Dict of {workcenter: {package: qty}}
        - workcenter_totals: Dict of {workcenter: total_qty}
        - package_totals: Dict of {package: total_qty}
        - grand_total: Overall total
    """
    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            df = _filter_base_conditions(cached_df, include_dummy, workorder, lotid)
            df = _add_wip_status_columns(df)

            # Filter by WORKCENTER_GROUP and PACKAGE_LEF
            df = df[df['WORKCENTER_GROUP'].notna() & df['PACKAGE_LEF'].notna()]

            # Apply package filter
            if package:
                df = df[df['PACKAGE_LEF'] == package]

            # Apply pj_type filter
            if pj_type and 'PJ_TYPE' in df.columns:
                df = df[df['PJ_TYPE'] == pj_type]

            # WIP status filter
            if status:
                status_upper = status.upper()
                df = df[df['WIP_STATUS'] == status_upper]

                # Hold type sub-filter
                if status_upper == 'HOLD' and hold_type:
                    if hold_type == 'quality':
                        df = df[df['IS_QUALITY_HOLD']]
                    elif hold_type == 'non-quality':
                        df = df[df['IS_NON_QUALITY_HOLD']]

            if df.empty:
                return {
                    'workcenters': [],
                    'packages': [],
                    'matrix': {},
                    'workcenter_totals': {},
                    'package_totals': {},
                    'grand_total': 0
                }

            return _build_matrix_result(df)
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            raise
        except Exception as exc:
            logger.warning(f"Cache-based matrix calculation failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _get_wip_matrix_from_oracle(include_dummy, workorder, lotid, status, hold_type, package, pj_type)


def _build_matrix_result(df: pd.DataFrame) -> Dict[str, Any]:
    """Build matrix result from DataFrame."""
    # Group by workcenter and package
    grouped = df.groupby(['WORKCENTER_GROUP', 'WORKCENTERSEQUENCE_GROUP', 'PACKAGE_LEF'])['QTY'].sum().reset_index()

    if grouped.empty:
        return {
            'workcenters': [],
            'packages': [],
            'matrix': {},
            'workcenter_totals': {},
            'package_totals': {},
            'grand_total': 0
        }

    # Build matrix
    matrix = {}
    workcenter_totals = {}
    package_totals = {}

    # Get unique workcenters sorted by sequence
    wc_order = grouped.drop_duplicates('WORKCENTER_GROUP')[['WORKCENTER_GROUP', 'WORKCENTERSEQUENCE_GROUP']]
    wc_order = wc_order.sort_values('WORKCENTERSEQUENCE_GROUP')
    workcenters = wc_order['WORKCENTER_GROUP'].tolist()

    # Build matrix and totals
    for _, row in grouped.iterrows():
        wc = row['WORKCENTER_GROUP']
        pkg = row['PACKAGE_LEF']
        qty = int(row['QTY'] or 0)

        if wc not in matrix:
            matrix[wc] = {}
        matrix[wc][pkg] = qty

        workcenter_totals[wc] = workcenter_totals.get(wc, 0) + qty
        package_totals[pkg] = package_totals.get(pkg, 0) + qty

    # Sort packages by total qty desc
    packages = sorted(package_totals.keys(), key=lambda x: package_totals[x], reverse=True)

    grand_total = sum(workcenter_totals.values())

    return {
        'workcenters': workcenters,
        'packages': packages,
        'matrix': matrix,
        'workcenter_totals': workcenter_totals,
        'package_totals': package_totals,
        'grand_total': grand_total
    }


def _get_wip_matrix_from_oracle(
    include_dummy: bool = False,
    workorder: Optional[str] = None,
    lotid: Optional[str] = None,
    status: Optional[str] = None,
    hold_type: Optional[str] = None,
    package: Optional[str] = None,
    pj_type: Optional[str] = None
) -> Optional[Dict[str, Any]]:
    """Get WIP matrix directly from Oracle (fallback)."""
    try:
        # Build conditions using QueryBuilder
        builder = _build_base_conditions_builder(include_dummy, workorder, lotid)
        builder.add_is_not_null("WORKCENTER_GROUP")
        builder.add_is_not_null("PACKAGE_LEF")

        if package:
            builder.add_param_condition("PACKAGE_LEF", package)
        if pj_type:
            builder.add_param_condition("PJ_TYPE", pj_type)

        # WIP status filter — mirrors the WIP_STATUS derivation in
        # _add_wip_status_columns (RUN > HOLD > QUEUE precedence).
        if status:
            status_upper = status.upper()
            if status_upper == 'RUN':
                builder.add_condition("COALESCE(EQUIPMENTCOUNT, 0) > 0")
            elif status_upper == 'HOLD':
                builder.add_condition("COALESCE(EQUIPMENTCOUNT, 0) = 0 AND COALESCE(CURRENTHOLDCOUNT, 0) > 0")
                # Hold type sub-filter
                if hold_type:
                    _add_hold_type_conditions(builder, hold_type)
            elif status_upper == 'QUEUE':
                builder.add_condition("COALESCE(EQUIPMENTCOUNT, 0) = 0 AND COALESCE(CURRENTHOLDCOUNT, 0) = 0")

        # Load SQL template and build query
        base_sql = SQLLoader.load("wip/matrix")
        builder.base_sql = base_sql
        sql, params = builder.build()

        df = read_sql_df(sql, params)

        if df is None or df.empty:
            return {
                'workcenters': [],
                'packages': [],
                'matrix': {},
                'workcenter_totals': {},
                'package_totals': {},
                'grand_total': 0
            }

        return _build_matrix_result(df)
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"WIP matrix query failed: {exc}")
        # NOTE(review): print_exc writes to stderr; logger.exception(...) would be
        # more consistent with the rest of this module — confirm before changing.
        import traceback
        traceback.print_exc()
        return None


def get_wip_hold_summary(
    include_dummy: bool = False,
    workorder: Optional[str] = None,
    lotid: Optional[str] = None
) -> Optional[Dict[str, Any]]:
    """Get hold summary grouped by hold reason.

    Uses Redis cache when available, falls back to Oracle direct query.

    Args:
        include_dummy: If True, include DUMMY lots (default: False)
        workorder: Optional WORKORDER filter (fuzzy match)
        lotid: Optional LOTID filter (fuzzy match)

    Returns:
        Dict with hold items sorted by lots desc:
        - items: List of {reason, lots, qty}
    """
    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            df = _filter_base_conditions(cached_df, include_dummy, workorder, lotid)
            df = _add_wip_status_columns(df)

            # Filter for HOLD status with reason
            df = df[(df['WIP_STATUS'] == 'HOLD') & df['HOLDREASONNAME'].notna()]

            if df.empty:
                return {'items': []}

            # Group by hold reason
            grouped = df.groupby('HOLDREASONNAME').agg({
                'LOTID': 'count',
                'QTY': 'sum'
            }).reset_index()
            grouped.columns = ['REASON', 'LOTS', 'QTY']
            grouped = grouped.sort_values('LOTS', ascending=False)

            items = []
            for _, row in grouped.iterrows():
                reason = row['REASON']
                items.append({
                    'reason': reason,
                    'lots': int(row['LOTS'] or 0),
                    'qty': int(row['QTY'] or 0),
                    'holdType': 'quality' if is_quality_hold(reason) else 'non-quality'
                })

            return {'items': items}
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            raise
        except Exception as exc:
            logger.warning(f"Cache-based hold summary calculation failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _get_wip_hold_summary_from_oracle(include_dummy,
workorder, lotid) + + +def _get_wip_hold_summary_from_oracle( + include_dummy: bool = False, + workorder: Optional[str] = None, + lotid: Optional[str] = None +) -> Optional[Dict[str, Any]]: + """Get WIP hold summary directly from Oracle (fallback).""" + try: + # Build conditions using QueryBuilder + builder = _build_base_conditions_builder(include_dummy, workorder, lotid) + builder.add_param_condition("STATUS", "HOLD") + builder.add_is_not_null("HOLDREASONNAME") + + where_clause, params = builder.build_where_only() + + sql = f""" + SELECT + HOLDREASONNAME as REASON, + COUNT(*) as LOTS, + SUM(QTY) as QTY + FROM {WIP_VIEW} + {where_clause} + GROUP BY HOLDREASONNAME + ORDER BY COUNT(*) DESC + """ + df = read_sql_df(sql, params) + + if df is None or df.empty: + return {'items': []} + + items = [] + for _, row in df.iterrows(): + reason = row['REASON'] + items.append({ + 'reason': reason, + 'lots': int(row['LOTS'] or 0), + 'qty': int(row['QTY'] or 0), + 'holdType': 'quality' if is_quality_hold(reason) else 'non-quality' + }) + + return {'items': items} + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"WIP hold summary query failed: {exc}") + return None + + +# ============================================================ +# Detail API Functions +# ============================================================ + +def get_wip_detail( + workcenter: str, + package: Optional[str] = None, + status: Optional[str] = None, + hold_type: Optional[str] = None, + workorder: Optional[str] = None, + lotid: Optional[str] = None, + include_dummy: bool = False, + page: int = 1, + page_size: int = 100 +) -> Optional[Dict[str, Any]]: + """Get WIP detail for a specific workcenter group. + + Uses Redis cache when available, falls back to Oracle direct query. 
+ + Args: + workcenter: WORKCENTER_GROUP name + package: Optional PACKAGE_LEF filter + status: Optional WIP status filter ('RUN', 'QUEUE', 'HOLD') + hold_type: Optional hold type filter ('quality', 'non-quality') + Only effective when status='HOLD' + workorder: Optional WORKORDER filter (fuzzy match) + lotid: Optional LOTID filter (fuzzy match) + include_dummy: If True, include DUMMY lots (default: False) + page: Page number (1-based) + page_size: Number of records per page + + Returns: + Dict with: + - workcenter: The workcenter group name + - summary: {totalLots, runLots, queueLots, holdLots, qualityHoldLots, nonQualityHoldLots} + - specs: List of spec names (sorted by SPECSEQUENCE) + - lots: List of lot details + - pagination: {page, page_size, total_count, total_pages} + - sys_date: Data timestamp + """ + # Try cache first + cached_df = _get_wip_dataframe() + if cached_df is not None: + try: + df = _filter_base_conditions(cached_df, include_dummy, workorder, lotid) + df = _add_wip_status_columns(df) + + # Filter by workcenter + df = df[df['WORKCENTER_GROUP'] == workcenter] + + if package: + df = df[df['PACKAGE_LEF'] == package] + + # Calculate summary before status filter + summary_df = df.copy() + run_lots = len(summary_df[summary_df['WIP_STATUS'] == 'RUN']) + queue_lots = len(summary_df[summary_df['WIP_STATUS'] == 'QUEUE']) + hold_lots = len(summary_df[summary_df['WIP_STATUS'] == 'HOLD']) + quality_hold_lots = len(summary_df[summary_df['IS_QUALITY_HOLD']]) + non_quality_hold_lots = len(summary_df[summary_df['IS_NON_QUALITY_HOLD']]) + total_lots = len(summary_df) + + summary = { + 'totalLots': total_lots, + 'runLots': run_lots, + 'queueLots': queue_lots, + 'holdLots': hold_lots, + 'qualityHoldLots': quality_hold_lots, + 'nonQualityHoldLots': non_quality_hold_lots + } + + # Apply status filter for lots list + if status: + status_upper = status.upper() + df = df[df['WIP_STATUS'] == status_upper] + + if status_upper == 'HOLD' and hold_type: + if hold_type == 
'quality': + df = df[df['IS_QUALITY_HOLD']] + elif hold_type == 'non-quality': + df = df[df['IS_NON_QUALITY_HOLD']] + + # Get specs (sorted by SPECSEQUENCE if available) + specs_df = df[df['SPECNAME'].notna()][['SPECNAME', 'SPECSEQUENCE']].drop_duplicates() + if 'SPECSEQUENCE' in specs_df.columns: + specs_df = specs_df.sort_values('SPECSEQUENCE') + specs = specs_df['SPECNAME'].tolist() if not specs_df.empty else [] + + # Pagination + filtered_count = len(df) + total_pages = (filtered_count + page_size - 1) // page_size if filtered_count > 0 else 1 + offset = (page - 1) * page_size + + # Sort by LOTID and paginate + df = df.sort_values('LOTID') + page_df = df.iloc[offset:offset + page_size] + + lots = [] + for _, row in page_df.iterrows(): + lots.append({ + 'lotId': _safe_value(row.get('LOTID')), + 'equipment': _safe_value(row.get('EQUIPMENTS')), + 'wipStatus': _safe_value(row.get('WIP_STATUS')), + 'holdReason': _safe_value(row.get('HOLDREASONNAME')), + 'qty': int(row.get('QTY', 0) or 0), + 'package': _safe_value(row.get('PACKAGE_LEF')), + 'spec': _safe_value(row.get('SPECNAME')) + }) + + return { + 'workcenter': workcenter, + 'summary': summary, + 'specs': specs, + 'lots': lots, + 'pagination': { + 'page': page, + 'page_size': page_size, + 'total_count': filtered_count, + 'total_pages': total_pages + }, + 'sys_date': get_cached_sys_date() + } + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.warning(f"Cache-based detail calculation failed, falling back to Oracle: {exc}") + + # Fallback to Oracle direct query + return _get_wip_detail_from_oracle( + workcenter, package, status, hold_type, workorder, lotid, include_dummy, page, page_size + ) + + +def _get_wip_detail_from_oracle( + workcenter: str, + package: Optional[str] = None, + status: Optional[str] = None, + hold_type: Optional[str] = None, + workorder: Optional[str] = None, + lotid: Optional[str] = None, + include_dummy: bool = False, + page: int = 1, + 
page_size: int = 100 +) -> Optional[Dict[str, Any]]: + """Get WIP detail directly from Oracle (fallback).""" + try: + # Build WHERE conditions using QueryBuilder + builder = _build_base_conditions_builder(include_dummy, workorder, lotid) + builder.add_param_condition("WORKCENTER_GROUP", workcenter) + + if package: + builder.add_param_condition("PACKAGE_LEF", package) + + # WIP status filter (RUN/QUEUE/HOLD based on EQUIPMENTCOUNT and CURRENTHOLDCOUNT) + if status: + status_upper = status.upper() + if status_upper == 'RUN': + builder.add_condition("COALESCE(EQUIPMENTCOUNT, 0) > 0") + elif status_upper == 'HOLD': + builder.add_condition("COALESCE(EQUIPMENTCOUNT, 0) = 0 AND COALESCE(CURRENTHOLDCOUNT, 0) > 0") + # Hold type sub-filter + if hold_type: + _add_hold_type_conditions(builder, hold_type) + elif status_upper == 'QUEUE': + builder.add_condition("COALESCE(EQUIPMENTCOUNT, 0) = 0 AND COALESCE(CURRENTHOLDCOUNT, 0) = 0") + + where_clause, params = builder.build_where_only() + + # Build summary conditions (without status/hold_type filter for full breakdown) + summary_builder = _build_base_conditions_builder(include_dummy, workorder, lotid) + summary_builder.add_param_condition("WORKCENTER_GROUP", workcenter) + if package: + summary_builder.add_param_condition("PACKAGE_LEF", package) + + summary_where, summary_params = summary_builder.build_where_only() + non_quality_list = CommonFilters.get_non_quality_reasons_sql() + + summary_sql = f""" + SELECT + COUNT(*) as TOTAL_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) > 0 THEN 1 ELSE 0 END) as RUN_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) = 0 THEN 1 ELSE 0 END) as QUEUE_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 THEN 1 ELSE 0 END) as HOLD_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 + AND (HOLDREASONNAME IS NULL OR HOLDREASONNAME NOT IN ({non_quality_list})) + THEN 1 ELSE 0 
END) as QUALITY_HOLD_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 + AND HOLDREASONNAME IN ({non_quality_list}) + THEN 1 ELSE 0 END) as NON_QUALITY_HOLD_LOTS, + MAX(SYS_DATE) as SYS_DATE + FROM {WIP_VIEW} + {summary_where} + """ + + summary_df = read_sql_df(summary_sql, summary_params) + + if summary_df is None or summary_df.empty: + return None + + summary_row = summary_df.iloc[0] + sys_date = str(summary_row['SYS_DATE']) if summary_row['SYS_DATE'] else None + + # Calculate counts from summary + total_lots = int(summary_row['TOTAL_LOTS'] or 0) + run_lots = int(summary_row['RUN_LOTS'] or 0) + queue_lots = int(summary_row['QUEUE_LOTS'] or 0) + hold_lots = int(summary_row['HOLD_LOTS'] or 0) + quality_hold_lots = int(summary_row['QUALITY_HOLD_LOTS'] or 0) + non_quality_hold_lots = int(summary_row['NON_QUALITY_HOLD_LOTS'] or 0) + + # Determine filtered count based on status filter + if status: + status_upper = status.upper() + if status_upper == 'RUN': + filtered_count = run_lots + elif status_upper == 'QUEUE': + filtered_count = queue_lots + elif status_upper == 'HOLD': + if hold_type == 'quality': + filtered_count = quality_hold_lots + elif hold_type == 'non-quality': + filtered_count = non_quality_hold_lots + else: + filtered_count = hold_lots + else: + filtered_count = total_lots + else: + filtered_count = total_lots + + summary = { + 'totalLots': total_lots, + 'runLots': run_lots, + 'queueLots': queue_lots, + 'holdLots': hold_lots, + 'qualityHoldLots': quality_hold_lots, + 'nonQualityHoldLots': non_quality_hold_lots + } + + # Get unique specs for this workcenter (sorted by SPECSEQUENCE) + specs_sql = f""" + SELECT DISTINCT SPECNAME, SPECSEQUENCE + FROM {WIP_VIEW} + {where_clause} + AND SPECNAME IS NOT NULL + ORDER BY SPECSEQUENCE + """ + + specs_df = read_sql_df(specs_sql, params) + specs = specs_df['SPECNAME'].tolist() if specs_df is not None and not specs_df.empty else [] + + # Get paginated lot details using SQL 
file with bind variables + offset = (page - 1) * page_size + base_detail_sql = SQLLoader.load("wip/detail") + detail_sql = base_detail_sql.replace("{{ WHERE_CLAUSE }}", where_clause) + + # Add pagination params to existing params + detail_params = params.copy() + detail_params['offset'] = offset + detail_params['limit'] = page_size + + lots_df = read_sql_df(detail_sql, detail_params) + + lots = [] + if lots_df is not None and not lots_df.empty: + for _, row in lots_df.iterrows(): + lots.append({ + 'lotId': _safe_value(row['LOTID']), + 'equipment': _safe_value(row['EQUIPMENTS']), + 'wipStatus': _safe_value(row['WIP_STATUS']), + 'holdReason': _safe_value(row['HOLDREASONNAME']), + 'qty': int(row['QTY'] or 0), + 'package': _safe_value(row['PACKAGE_LEF']), + 'spec': _safe_value(row['SPECNAME']) + }) + + total_pages = (filtered_count + page_size - 1) // page_size if filtered_count > 0 else 1 + + return { + 'workcenter': workcenter, + 'summary': summary, + 'specs': specs, + 'lots': lots, + 'pagination': { + 'page': page, + 'page_size': page_size, + 'total_count': filtered_count, + 'total_pages': total_pages + }, + 'sys_date': sys_date + } + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"WIP detail query failed: {exc}") + import traceback + traceback.print_exc() + return None + + +# ============================================================ +# Meta API Functions +# ============================================================ + +def get_workcenters(include_dummy: bool = False) -> Optional[List[Dict[str, Any]]]: + """Get list of workcenter groups with lot counts. + + Uses Redis cache when available, falls back to Oracle direct query. 
+ + Args: + include_dummy: If True, include DUMMY lots (default: False) + + Returns: + List of {name, lot_count} sorted by WORKCENTERSEQUENCE_GROUP + """ + # Try cache first + cached_df = _get_wip_dataframe() + if cached_df is not None: + try: + df = _filter_base_conditions(cached_df, include_dummy) + df = df[df['WORKCENTER_GROUP'].notna()] + + if df.empty: + return [] + + # Group by workcenter with sequence + grouped = df.groupby(['WORKCENTER_GROUP', 'WORKCENTERSEQUENCE_GROUP']).size().reset_index(name='LOT_COUNT') + grouped = grouped.sort_values('WORKCENTERSEQUENCE_GROUP') + + result = [] + for _, row in grouped.iterrows(): + result.append({ + 'name': row['WORKCENTER_GROUP'], + 'lot_count': int(row['LOT_COUNT'] or 0) + }) + + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.warning(f"Cache-based workcenters calculation failed, falling back to Oracle: {exc}") + + # Fallback to Oracle direct query + return _get_workcenters_from_oracle(include_dummy) + + +def _get_workcenters_from_oracle(include_dummy: bool = False) -> Optional[List[Dict[str, Any]]]: + """Get workcenters directly from Oracle (fallback).""" + try: + builder = _build_base_conditions_builder(include_dummy) + builder.add_is_not_null("WORKCENTER_GROUP") + where_clause, params = builder.build_where_only() + + sql = f""" + SELECT + WORKCENTER_GROUP, + WORKCENTERSEQUENCE_GROUP, + COUNT(*) as LOT_COUNT + FROM {WIP_VIEW} + {where_clause} + GROUP BY WORKCENTER_GROUP, WORKCENTERSEQUENCE_GROUP + ORDER BY WORKCENTERSEQUENCE_GROUP + """ + df = read_sql_df(sql, params) + + if df is None or df.empty: + return [] + + result = [] + for _, row in df.iterrows(): + result.append({ + 'name': row['WORKCENTER_GROUP'], + 'lot_count': int(row['LOT_COUNT'] or 0) + }) + + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Workcenters query failed: {exc}") + return None + + 
+def get_packages(include_dummy: bool = False) -> Optional[List[Dict[str, Any]]]: + """Get list of packages (product lines) with lot counts. + + Uses Redis cache when available, falls back to Oracle direct query. + + Args: + include_dummy: If True, include DUMMY lots (default: False) + + Returns: + List of {name, lot_count} sorted by lot_count desc + """ + # Try cache first + cached_df = _get_wip_dataframe() + if cached_df is not None: + try: + df = _filter_base_conditions(cached_df, include_dummy) + df = df[df['PACKAGE_LEF'].notna()] + + if df.empty: + return [] + + # Group by package and count + grouped = df.groupby('PACKAGE_LEF').size().reset_index(name='LOT_COUNT') + grouped = grouped.sort_values('LOT_COUNT', ascending=False) + + result = [] + for _, row in grouped.iterrows(): + result.append({ + 'name': row['PACKAGE_LEF'], + 'lot_count': int(row['LOT_COUNT'] or 0) + }) + + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.warning(f"Cache-based packages calculation failed, falling back to Oracle: {exc}") + + # Fallback to Oracle direct query + return _get_packages_from_oracle(include_dummy) + + +def _get_packages_from_oracle(include_dummy: bool = False) -> Optional[List[Dict[str, Any]]]: + """Get packages directly from Oracle (fallback).""" + try: + builder = _build_base_conditions_builder(include_dummy) + builder.add_is_not_null("PACKAGE_LEF") + where_clause, params = builder.build_where_only() + + sql = f""" + SELECT + PACKAGE_LEF, + COUNT(*) as LOT_COUNT + FROM {WIP_VIEW} + {where_clause} + GROUP BY PACKAGE_LEF + ORDER BY COUNT(*) DESC + """ + df = read_sql_df(sql, params) + + if df is None or df.empty: + return [] + + result = [] + for _, row in df.iterrows(): + result.append({ + 'name': row['PACKAGE_LEF'], + 'lot_count': int(row['LOT_COUNT'] or 0) + }) + + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + 
logger.error(f"Packages query failed: {exc}") + return None + + +# ============================================================ +# Search API Functions +# ============================================================ + +def search_workorders( + q: str, + limit: int = 20, + include_dummy: bool = False, + lotid: Optional[str] = None, + package: Optional[str] = None, + pj_type: Optional[str] = None +) -> Optional[List[str]]: + """Search for WORKORDER values matching the query. + + Uses Redis cache when available, falls back to Oracle direct query. + + Args: + q: Search query (minimum 2 characters) + limit: Maximum number of results (default: 20, max: 50) + include_dummy: If True, include DUMMY lots (default: False) + lotid: Optional LOTID cross-filter (fuzzy match) + package: Optional PACKAGE_LEF cross-filter (exact match) + pj_type: Optional PJ_TYPE cross-filter (exact match) + + Returns: + List of matching WORKORDER values (distinct) + """ + # Validate input + if not q or len(q) < 2: + return [] + + limit = min(limit, 50) # Cap at 50 + + if not lotid and not package and not pj_type: + indexed = _get_wip_search_index(include_dummy=include_dummy) + if indexed is not None: + return _search_values_from_index(indexed.get("workorders", []), q, limit) + + # Try cache first + cached_df = _get_wip_dataframe() + if cached_df is not None: + try: + df = _filter_base_conditions(cached_df, include_dummy, lotid=lotid) + df = df[df['WORKORDER'].notna()] + + # Apply cross-filters + if package and 'PACKAGE_LEF' in df.columns: + df = df[df['PACKAGE_LEF'] == package] + if pj_type and 'PJ_TYPE' in df.columns: + df = df[df['PJ_TYPE'] == pj_type] + + # Filter by search query (case-insensitive) + df = df[df['WORKORDER'].str.contains(q, case=False, na=False)] + + if df.empty: + return [] + + # Get distinct, sorted, limited results + result = df['WORKORDER'].drop_duplicates().sort_values().head(limit).tolist() + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + 
raise + except Exception as exc: + logger.warning(f"Cache-based workorder search failed, falling back to Oracle: {exc}") + + # Fallback to Oracle direct query + return _search_workorders_from_oracle(q, limit, include_dummy, lotid, package, pj_type) + + +def _search_workorders_from_oracle( + q: str, + limit: int = 20, + include_dummy: bool = False, + lotid: Optional[str] = None, + package: Optional[str] = None, + pj_type: Optional[str] = None +) -> Optional[List[str]]: + """Search workorders directly from Oracle (fallback).""" + try: + builder = _build_base_conditions_builder(include_dummy, lotid=lotid) + builder.add_like_condition("WORKORDER", q, position="both") + builder.add_is_not_null("WORKORDER") + + # Apply cross-filters + if package: + builder.add_param_condition("PACKAGE_LEF", package) + if pj_type: + builder.add_param_condition("PJ_TYPE", pj_type) + + where_clause, params = builder.build_where_only() + params['row_limit'] = limit + + sql = f""" + SELECT DISTINCT WORKORDER + FROM {WIP_VIEW} + {where_clause} + ORDER BY WORKORDER + FETCH FIRST :row_limit ROWS ONLY + """ + df = read_sql_df(sql, params) + + if df is None or df.empty: + return [] + + return df['WORKORDER'].tolist() + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Search workorders failed: {exc}") + return None + + +def search_lot_ids( + q: str, + limit: int = 20, + include_dummy: bool = False, + workorder: Optional[str] = None, + package: Optional[str] = None, + pj_type: Optional[str] = None +) -> Optional[List[str]]: + """Search for LOTID values matching the query. + + Uses Redis cache when available, falls back to Oracle direct query. 
+ + Args: + q: Search query (minimum 2 characters) + limit: Maximum number of results (default: 20, max: 50) + include_dummy: If True, include DUMMY lots (default: False) + workorder: Optional WORKORDER cross-filter (fuzzy match) + package: Optional PACKAGE_LEF cross-filter (exact match) + pj_type: Optional PJ_TYPE cross-filter (exact match) + + Returns: + List of matching LOTID values + """ + # Validate input + if not q or len(q) < 2: + return [] + + limit = min(limit, 50) # Cap at 50 + + if not workorder and not package and not pj_type: + indexed = _get_wip_search_index(include_dummy=include_dummy) + if indexed is not None: + return _search_values_from_index(indexed.get("lotids", []), q, limit) + + # Try cache first + cached_df = _get_wip_dataframe() + if cached_df is not None: + try: + df = _filter_base_conditions(cached_df, include_dummy, workorder=workorder) + + # Apply cross-filters + if package and 'PACKAGE_LEF' in df.columns: + df = df[df['PACKAGE_LEF'] == package] + if pj_type and 'PJ_TYPE' in df.columns: + df = df[df['PJ_TYPE'] == pj_type] + + # Filter by search query (case-insensitive) + df = df[df['LOTID'].str.contains(q, case=False, na=False)] + + if df.empty: + return [] + + # Get sorted, limited results + result = df['LOTID'].sort_values().head(limit).tolist() + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.warning(f"Cache-based lot ID search failed, falling back to Oracle: {exc}") + + # Fallback to Oracle direct query + return _search_lot_ids_from_oracle(q, limit, include_dummy, workorder, package, pj_type) + + +def _search_lot_ids_from_oracle( + q: str, + limit: int = 20, + include_dummy: bool = False, + workorder: Optional[str] = None, + package: Optional[str] = None, + pj_type: Optional[str] = None +) -> Optional[List[str]]: + """Search lot IDs directly from Oracle (fallback).""" + try: + builder = _build_base_conditions_builder(include_dummy, workorder=workorder) + 
builder.add_like_condition("LOTID", q, position="both") + + # Apply cross-filters + if package: + builder.add_param_condition("PACKAGE_LEF", package) + if pj_type: + builder.add_param_condition("PJ_TYPE", pj_type) + + where_clause, params = builder.build_where_only() + params['row_limit'] = limit + + sql = f""" + SELECT LOTID + FROM {WIP_VIEW} + {where_clause} + ORDER BY LOTID + FETCH FIRST :row_limit ROWS ONLY + """ + df = read_sql_df(sql, params) + + if df is None or df.empty: + return [] + + return df['LOTID'].tolist() + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Search lot IDs failed: {exc}") + return None + + +def search_packages( + q: str, + limit: int = 20, + include_dummy: bool = False, + workorder: Optional[str] = None, + lotid: Optional[str] = None, + pj_type: Optional[str] = None +) -> Optional[List[str]]: + """Search for PACKAGE_LEF values matching the query. + + Uses Redis cache when available, falls back to Oracle direct query. 
+ + Args: + q: Search query (minimum 2 characters) + limit: Maximum number of results (default: 20, max: 50) + include_dummy: If True, include DUMMY lots (default: False) + workorder: Optional WORKORDER cross-filter (fuzzy match) + lotid: Optional LOTID cross-filter (fuzzy match) + pj_type: Optional PJ_TYPE cross-filter (exact match) + + Returns: + List of matching PACKAGE_LEF values (distinct) + """ + # Validate input + if not q or len(q) < 2: + return [] + + limit = min(limit, 50) # Cap at 50 + + if not workorder and not lotid and not pj_type: + indexed = _get_wip_search_index(include_dummy=include_dummy) + if indexed is not None: + return _search_values_from_index(indexed.get("packages", []), q, limit) + + # Try cache first + cached_df = _get_wip_dataframe() + if cached_df is not None: + try: + df = _filter_base_conditions(cached_df, include_dummy, workorder=workorder, lotid=lotid) + + # Check if PACKAGE_LEF column exists + if 'PACKAGE_LEF' not in df.columns: + logger.warning("PACKAGE_LEF column not found in cache") + return _search_packages_from_oracle(q, limit, include_dummy, workorder, lotid, pj_type) + + df = df[df['PACKAGE_LEF'].notna()] + + # Apply cross-filter + if pj_type and 'PJ_TYPE' in df.columns: + df = df[df['PJ_TYPE'] == pj_type] + + # Filter by search query (case-insensitive) + df = df[df['PACKAGE_LEF'].str.contains(q, case=False, na=False)] + + if df.empty: + return [] + + # Get distinct values sorted + result = df['PACKAGE_LEF'].drop_duplicates().sort_values().head(limit).tolist() + return result + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.warning(f"Cache-based package search failed, falling back to Oracle: {exc}") + + # Fallback to Oracle direct query + return _search_packages_from_oracle(q, limit, include_dummy, workorder, lotid, pj_type) + + +def _search_packages_from_oracle( + q: str, + limit: int = 20, + include_dummy: bool = False, + workorder: Optional[str] = None, + lotid: 
Optional[str] = None, + pj_type: Optional[str] = None +) -> Optional[List[str]]: + """Search packages directly from Oracle (fallback).""" + try: + builder = _build_base_conditions_builder(include_dummy, workorder=workorder, lotid=lotid) + builder.add_like_condition("PACKAGE_LEF", q, position="both") + builder.add_is_not_null("PACKAGE_LEF") + + # Apply cross-filter + if pj_type: + builder.add_param_condition("PJ_TYPE", pj_type) + + where_clause, params = builder.build_where_only() + params['row_limit'] = limit + + sql = f""" + SELECT DISTINCT PACKAGE_LEF + FROM {WIP_VIEW} + {where_clause} + ORDER BY PACKAGE_LEF + FETCH FIRST :row_limit ROWS ONLY + """ + df = read_sql_df(sql, params) + + if df is None or df.empty: + return [] + + return df['PACKAGE_LEF'].tolist() + except (DatabasePoolExhaustedError, DatabaseCircuitOpenError): + raise + except Exception as exc: + logger.error(f"Search packages failed: {exc}") + return None + + +def search_types( + q: str, + limit: int = 20, + include_dummy: bool = False, + workorder: Optional[str] = None, + lotid: Optional[str] = None, + package: Optional[str] = None +) -> Optional[List[str]]: + """Search for PJ_TYPE values matching the query. + + Uses Redis cache when available, falls back to Oracle direct query. 

    Args:
        q: Search query (minimum 2 characters)
        limit: Maximum number of results (default: 20, max: 50)
        include_dummy: If True, include DUMMY lots (default: False)
        workorder: Optional WORKORDER cross-filter (fuzzy match)
        lotid: Optional LOTID cross-filter (fuzzy match)
        package: Optional PACKAGE_LEF cross-filter (exact match)

    Returns:
        List of matching PJ_TYPE values (distinct)
    """
    # Validate input
    if not q or len(q) < 2:
        return []

    limit = min(limit, 50)  # Cap at 50

    # Fast path: with no cross-filters active, serve the lookup straight
    # from the prebuilt search index instead of scanning the cached frame.
    if not workorder and not lotid and not package:
        indexed = _get_wip_search_index(include_dummy=include_dummy)
        if indexed is not None:
            return _search_values_from_index(indexed.get("types", []), q, limit)

    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            df = _filter_base_conditions(cached_df, include_dummy, workorder=workorder, lotid=lotid)

            # Check if PJ_TYPE column exists
            if 'PJ_TYPE' not in df.columns:
                logger.warning("PJ_TYPE column not found in cache")
                return _search_types_from_oracle(q, limit, include_dummy, workorder, lotid, package)

            df = df[df['PJ_TYPE'].notna()]

            # Apply cross-filter
            if package and 'PACKAGE_LEF' in df.columns:
                df = df[df['PACKAGE_LEF'] == package]

            # Filter by search query (case-insensitive)
            df = df[df['PJ_TYPE'].str.contains(q, case=False, na=False)]

            if df.empty:
                return []

            # Get distinct values sorted
            result = df['PJ_TYPE'].drop_duplicates().sort_values().head(limit).tolist()
            return result
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            # Pool/circuit failures must propagate so callers can degrade explicitly.
            raise
        except Exception as exc:
            logger.warning(f"Cache-based type search failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _search_types_from_oracle(q, limit, include_dummy, workorder, lotid, package)


def _search_types_from_oracle(
    q: str,
    limit: int = 20,
    include_dummy: bool = False,
    workorder: Optional[str] = None,
    lotid: Optional[str] = None,
    package: Optional[str] = None
) -> Optional[List[str]]:
    """Search types directly from Oracle (fallback).

    Returns a (possibly empty) list of distinct PJ_TYPE values, or None
    when the query itself fails with an unexpected error.
    """
    try:
        builder = _build_base_conditions_builder(include_dummy, workorder=workorder, lotid=lotid)
        builder.add_like_condition("PJ_TYPE", q, position="both")
        builder.add_is_not_null("PJ_TYPE")

        # Apply cross-filter
        if package:
            builder.add_param_condition("PACKAGE_LEF", package)

        where_clause, params = builder.build_where_only()
        params['row_limit'] = limit

        sql = f"""
            SELECT DISTINCT PJ_TYPE
            FROM {WIP_VIEW}
            {where_clause}
            ORDER BY PJ_TYPE
            FETCH FIRST :row_limit ROWS ONLY
        """
        df = read_sql_df(sql, params)

        if df is None or df.empty:
            return []

        return df['PJ_TYPE'].tolist()
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"Search types failed: {exc}")
        return None


# ============================================================
# Hold Detail API Functions
# ============================================================

def get_hold_detail_summary(
    reason: str,
    include_dummy: bool = False
) -> Optional[Dict[str, Any]]:
    """Get summary statistics for a specific hold reason.

    Uses Redis cache when available, falls back to Oracle direct query.

    Args:
        reason: The HOLDREASONNAME to filter by
        include_dummy: If True, include DUMMY lots (default: False)

    Returns:
        Dict with totalLots, totalQty, avgAge, maxAge, workcenterCount
    """
    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            df = _filter_base_conditions(cached_df, include_dummy)
            df = _add_wip_status_columns(df)

            # Filter for HOLD status with matching reason
            df = df[(df['WIP_STATUS'] == 'HOLD') & (df['HOLDREASONNAME'] == reason)]

            if df.empty:
                return {
                    'totalLots': 0,
                    'totalQty': 0,
                    'avgAge': 0,
                    'maxAge': 0,
                    'workcenterCount': 0
                }

            # Ensure AGEBYDAYS is numeric
            # NOTE(review): assigning into a boolean-filtered frame can emit
            # pandas SettingWithCopyWarning — assumes _filter_base_conditions
            # returns an independent copy; confirm, or add df = df.copy().
            df['AGEBYDAYS'] = pd.to_numeric(df['AGEBYDAYS'], errors='coerce').fillna(0)

            return {
                'totalLots': len(df),
                'totalQty': int(df['QTY'].sum()),
                'avgAge': round(float(df['AGEBYDAYS'].mean()), 1),
                'maxAge': float(df['AGEBYDAYS'].max()),
                'workcenterCount': df['WORKCENTER_GROUP'].nunique()
            }
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            raise
        except Exception as exc:
            logger.warning(f"Cache-based hold detail summary failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _get_hold_detail_summary_from_oracle(reason, include_dummy)


def _get_hold_detail_summary_from_oracle(
    reason: str,
    include_dummy: bool = False
) -> Optional[Dict[str, Any]]:
    """Get hold detail summary directly from Oracle (fallback)."""
    try:
        builder = _build_base_conditions_builder(include_dummy)
        builder.add_param_condition("STATUS", "HOLD")
        builder.add_condition("CURRENTHOLDCOUNT > 0")
        builder.add_param_condition("HOLDREASONNAME", reason)
        where_clause, params = builder.build_where_only()

        sql = f"""
            SELECT
                COUNT(*) AS TOTAL_LOTS,
                SUM(QTY) AS TOTAL_QTY,
                ROUND(AVG(AGEBYDAYS), 1) AS AVG_AGE,
                MAX(AGEBYDAYS) AS MAX_AGE,
                COUNT(DISTINCT WORKCENTER_GROUP) AS WORKCENTER_COUNT
            FROM {WIP_VIEW}
            {where_clause}
        """
        df = read_sql_df(sql, params)

        if df
is None or df.empty:
            return None

        row = df.iloc[0]
        return {
            'totalLots': int(row['TOTAL_LOTS'] or 0),
            'totalQty': int(row['TOTAL_QTY'] or 0),
            # NOTE(review): a NULL aggregate typically surfaces as NaN, which
            # is truthy — 'avgAge'/'maxAge' could become NaN here. Confirm how
            # read_sql_df maps SQL NULL before relying on the `or 0` idiom.
            'avgAge': float(row['AVG_AGE']) if row['AVG_AGE'] else 0,
            'maxAge': float(row['MAX_AGE']) if row['MAX_AGE'] else 0,
            'workcenterCount': int(row['WORKCENTER_COUNT'] or 0)
        }
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"Hold detail summary query failed: {exc}")
        # NOTE(review): logger.exception(...) would route the stack trace
        # through the logging pipeline instead of printing to stderr.
        import traceback
        traceback.print_exc()
        return None


def get_hold_detail_distribution(
    reason: str,
    include_dummy: bool = False
) -> Optional[Dict[str, Any]]:
    """Get distribution statistics for a specific hold reason.

    Uses Redis cache when available, falls back to Oracle direct query.

    Args:
        reason: The HOLDREASONNAME to filter by
        include_dummy: If True, include DUMMY lots (default: False)

    Returns:
        Dict with byWorkcenter, byPackage, byAge distributions
    """
    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            df = _filter_base_conditions(cached_df, include_dummy)
            df = _add_wip_status_columns(df)

            # Filter for HOLD status with matching reason
            df = df[(df['WIP_STATUS'] == 'HOLD') & (df['HOLDREASONNAME'] == reason)]

            total_lots = len(df)

            if total_lots == 0:
                return {
                    'byWorkcenter': [],
                    'byPackage': [],
                    'byAge': []
                }

            # Ensure numeric columns
            df['AGEBYDAYS'] = pd.to_numeric(df['AGEBYDAYS'], errors='coerce').fillna(0)

            # By Workcenter
            wc_df = df[df['WORKCENTER_GROUP'].notna()].groupby('WORKCENTER_GROUP').agg({
                'LOTID': 'count',
                'QTY': 'sum'
            }).reset_index()
            wc_df.columns = ['NAME', 'LOTS', 'QTY']
            wc_df = wc_df.sort_values('LOTS', ascending=False)

            by_workcenter = []
            for _, row in wc_df.iterrows():
                lots = int(row['LOTS'] or 0)
                by_workcenter.append({
                    'name': row['NAME'],
                    'lots': lots,
                    'qty': int(row['QTY'] or 0),
                    'percentage': round(lots / total_lots * 100, 1) if total_lots > 0 else 0
                })

            # By Package
            pkg_df = df[df['PACKAGE_LEF'].notna()].groupby('PACKAGE_LEF').agg({
                'LOTID': 'count',
                'QTY': 'sum'
            }).reset_index()
            pkg_df.columns = ['NAME', 'LOTS', 'QTY']
            pkg_df = pkg_df.sort_values('LOTS', ascending=False)

            by_package = []
            for _, row in pkg_df.iterrows():
                lots = int(row['LOTS'] or 0)
                by_package.append({
                    'name': row['NAME'],
                    'lots': lots,
                    'qty': int(row['QTY'] or 0),
                    'percentage': round(lots / total_lots * 100, 1) if total_lots > 0 else 0
                })

            # By Age - compute age range
            # These thresholds mirror the CASE expression in the Oracle
            # fallback (_get_hold_detail_distribution_from_oracle); keep both in sync.
            def get_age_range(age):
                if age < 1:
                    return '0-1'
                elif age < 3:
                    return '1-3'
                elif age < 7:
                    return '3-7'
                else:
                    return '7+'

            df['AGE_RANGE'] = df['AGEBYDAYS'].apply(get_age_range)

            age_df = df.groupby('AGE_RANGE').agg({
                'LOTID': 'count',
                'QTY': 'sum'
            }).reset_index()
            age_df.columns = ['AGE_RANGE', 'LOTS', 'QTY']

            # Define age ranges in order
            age_labels = {
                '0-1': '0-1天',
                '1-3': '1-3天',
                '3-7': '3-7天',
                '7+': '7+天'
            }
            age_order = ['0-1', '1-3', '3-7', '7+']

            # Build age distribution with all ranges (even if 0)
            age_data = {r: {'lots': 0, 'qty': 0} for r in age_order}
            for _, row in age_df.iterrows():
                range_key = row['AGE_RANGE']
                if range_key in age_data:
                    age_data[range_key] = {
                        'lots': int(row['LOTS'] or 0),
                        'qty': int(row['QTY'] or 0)
                    }

            by_age = []
            for r in age_order:
                lots = age_data[r]['lots']
                by_age.append({
                    'range': r,
                    'label': age_labels[r],
                    'lots': lots,
                    'qty': age_data[r]['qty'],
                    'percentage': round(lots / total_lots * 100, 1) if total_lots > 0 else 0
                })

            return {
                'byWorkcenter': by_workcenter,
                'byPackage': by_package,
                'byAge': by_age
            }
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            raise
        except Exception as exc:
            logger.warning(f"Cache-based hold detail distribution failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _get_hold_detail_distribution_from_oracle(reason, include_dummy)


def _get_hold_detail_distribution_from_oracle(
    reason: str,
    include_dummy: bool = False
) -> Optional[Dict[str, Any]]:
    """Get hold detail distribution directly from Oracle (fallback)."""
    try:
        builder = _build_base_conditions_builder(include_dummy)
        builder.add_param_condition("STATUS", "HOLD")
        builder.add_condition("CURRENTHOLDCOUNT > 0")
        builder.add_param_condition("HOLDREASONNAME", reason)
        where_clause, params = builder.build_where_only()

        # Get total for percentage calculation
        total_sql = f"""
            SELECT COUNT(*) AS TOTAL_LOTS, SUM(QTY) AS TOTAL_QTY
            FROM {WIP_VIEW}
            {where_clause}
        """
        total_df = read_sql_df(total_sql, params)
        total_lots = int(total_df.iloc[0]['TOTAL_LOTS'] or 0) if total_df is not None else 0

        if total_lots == 0:
            return {
                'byWorkcenter': [],
                'byPackage': [],
                'byAge': []
            }

        # By Workcenter
        # The appended "AND ..." is safe only because the builder emitted a
        # non-empty WHERE above (STATUS/HOLDREASONNAME conditions were added).
        wc_sql = f"""
            SELECT
                WORKCENTER_GROUP AS NAME,
                COUNT(*) AS LOTS,
                SUM(QTY) AS QTY
            FROM {WIP_VIEW}
            {where_clause}
            AND WORKCENTER_GROUP IS NOT NULL
            GROUP BY WORKCENTER_GROUP
            ORDER BY COUNT(*) DESC
        """
        wc_df = read_sql_df(wc_sql, params)
        by_workcenter = []
        if wc_df is not None and not wc_df.empty:
            for _, row in wc_df.iterrows():
                lots = int(row['LOTS'] or 0)
                by_workcenter.append({
                    'name': row['NAME'],
                    'lots': lots,
                    'qty': int(row['QTY'] or 0),
                    'percentage': round(lots / total_lots * 100, 1) if total_lots > 0 else 0
                })

        # By Package
        pkg_sql = f"""
            SELECT
                PACKAGE_LEF AS NAME,
                COUNT(*) AS LOTS,
                SUM(QTY) AS QTY
            FROM {WIP_VIEW}
            {where_clause}
            AND PACKAGE_LEF IS NOT NULL
            GROUP BY PACKAGE_LEF
            ORDER BY COUNT(*) DESC
        """
        pkg_df = read_sql_df(pkg_sql, params)
        by_package = []
        if pkg_df is not None and not pkg_df.empty:
            for _, row in pkg_df.iterrows():
                lots = int(row['LOTS'] or 0)
                by_package.append({
                    'name': row['NAME'],
                    'lots': lots,
                    'qty': int(row['QTY'] or 0),
                    'percentage':
round(lots / total_lots * 100, 1) if total_lots > 0 else 0
                })

        # By Age (station dwell time)
        # CASE thresholds mirror get_age_range in the cache path; keep in sync.
        age_sql = f"""
            SELECT
                CASE
                    WHEN AGEBYDAYS < 1 THEN '0-1'
                    WHEN AGEBYDAYS < 3 THEN '1-3'
                    WHEN AGEBYDAYS < 7 THEN '3-7'
                    ELSE '7+'
                END AS AGE_RANGE,
                COUNT(*) AS LOTS,
                SUM(QTY) AS QTY
            FROM {WIP_VIEW}
            {where_clause}
            GROUP BY CASE
                WHEN AGEBYDAYS < 1 THEN '0-1'
                WHEN AGEBYDAYS < 3 THEN '1-3'
                WHEN AGEBYDAYS < 7 THEN '3-7'
                ELSE '7+'
            END
        """
        age_df = read_sql_df(age_sql, params)

        # Define age ranges in order
        age_labels = {
            '0-1': '0-1天',
            '1-3': '1-3天',
            '3-7': '3-7天',
            '7+': '7+天'
        }
        age_order = ['0-1', '1-3', '3-7', '7+']

        # Build age distribution with all ranges (even if 0)
        age_data = {r: {'lots': 0, 'qty': 0} for r in age_order}
        if age_df is not None and not age_df.empty:
            for _, row in age_df.iterrows():
                range_key = row['AGE_RANGE']
                if range_key in age_data:
                    age_data[range_key] = {
                        'lots': int(row['LOTS'] or 0),
                        'qty': int(row['QTY'] or 0)
                    }

        by_age = []
        for r in age_order:
            lots = age_data[r]['lots']
            by_age.append({
                'range': r,
                'label': age_labels[r],
                'lots': lots,
                'qty': age_data[r]['qty'],
                'percentage': round(lots / total_lots * 100, 1) if total_lots > 0 else 0
            })

        return {
            'byWorkcenter': by_workcenter,
            'byPackage': by_package,
            'byAge': by_age
        }
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"Hold detail distribution query failed: {exc}")
        # NOTE(review): prefer logger.exception over print_exc for log routing.
        import traceback
        traceback.print_exc()
        return None


def get_hold_detail_lots(
    reason: str,
    workcenter: Optional[str] = None,
    package: Optional[str] = None,
    age_range: Optional[str] = None,
    include_dummy: bool = False,
    page: int = 1,
    page_size: int = 50
) -> Optional[Dict[str, Any]]:
    """Get paginated lot details for a specific hold reason.

    Uses Redis cache when available, falls back to Oracle direct query.

    Args:
        reason: The HOLDREASONNAME to filter by
        workcenter: Optional WORKCENTER_GROUP filter
        package: Optional PACKAGE_LEF filter
        age_range: Optional age range filter ('0-1', '1-3', '3-7', '7+')
        include_dummy: If True, include DUMMY lots (default: False)
        page: Page number (1-based)
        page_size: Number of records per page

    Returns:
        Dict with lots list, pagination info, and active filters
    """
    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            df = _filter_base_conditions(cached_df, include_dummy)
            df = _add_wip_status_columns(df)

            # Filter for HOLD status with matching reason
            df = df[(df['WIP_STATUS'] == 'HOLD') & (df['HOLDREASONNAME'] == reason)]

            # Ensure numeric columns
            # NOTE(review): same SettingWithCopyWarning concern as in the
            # summary path — consider df = df.copy() before mutating.
            df['AGEBYDAYS'] = pd.to_numeric(df['AGEBYDAYS'], errors='coerce').fillna(0)

            # Optional filters
            if workcenter:
                df = df[df['WORKCENTER_GROUP'] == workcenter]
            if package:
                df = df[df['PACKAGE_LEF'] == package]
            if age_range:
                if age_range == '0-1':
                    df = df[(df['AGEBYDAYS'] >= 0) & (df['AGEBYDAYS'] < 1)]
                elif age_range == '1-3':
                    df = df[(df['AGEBYDAYS'] >= 1) & (df['AGEBYDAYS'] < 3)]
                elif age_range == '3-7':
                    df = df[(df['AGEBYDAYS'] >= 3) & (df['AGEBYDAYS'] < 7)]
                elif age_range == '7+':
                    df = df[df['AGEBYDAYS'] >= 7]

            total = len(df)

            # Sort by age descending, then LOTID
            df = df.sort_values(['AGEBYDAYS', 'LOTID'], ascending=[False, True])

            # Pagination
            offset = (page - 1) * page_size
            page_df = df.iloc[offset:offset + page_size]

            lots = []
            for _, row in page_df.iterrows():
                lots.append({
                    'lotId': _safe_value(row.get('LOTID')),
                    'workorder': _safe_value(row.get('WORKORDER')),
                    'qty': int(row.get('QTY', 0) or 0),
                    'package': _safe_value(row.get('PACKAGE_LEF')),
                    'workcenter': _safe_value(row.get('WORKCENTER_GROUP')),
                    'spec': _safe_value(row.get('SPECNAME')),
                    'age': round(float(row.get('AGEBYDAYS', 0) or 0), 1),
                    'holdBy': _safe_value(row.get('HOLDEMP')),
                    'dept': _safe_value(row.get('DEPTNAME')),
                    'holdComment': _safe_value(row.get('COMMENT_HOLD'))
                })

            total_pages = (total + page_size - 1) // page_size if total > 0 else 1

            return {
                'lots': lots,
                'pagination': {
                    'page': page,
                    'perPage': page_size,
                    'total': total,
                    'totalPages': total_pages
                },
                'filters': {
                    'workcenter': workcenter,
                    'package': package,
                    'ageRange': age_range
                }
            }
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            raise
        except Exception as exc:
            logger.warning(f"Cache-based hold detail lots failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _get_hold_detail_lots_from_oracle(
        reason, workcenter, package, age_range, include_dummy, page, page_size
    )


def _get_hold_detail_lots_from_oracle(
    reason: str,
    workcenter: Optional[str] = None,
    package: Optional[str] = None,
    age_range: Optional[str] = None,
    include_dummy: bool = False,
    page: int = 1,
    page_size: int = 50
) -> Optional[Dict[str, Any]]:
    """Get hold detail lots directly from Oracle (fallback)."""
    try:
        builder = _build_base_conditions_builder(include_dummy)
        builder.add_param_condition("STATUS", "HOLD")
        builder.add_condition("CURRENTHOLDCOUNT > 0")
        builder.add_param_condition("HOLDREASONNAME", reason)

        # Optional filters
        if workcenter:
            builder.add_param_condition("WORKCENTER_GROUP", workcenter)
        if package:
            builder.add_param_condition("PACKAGE_LEF", package)
        if age_range:
            if age_range == '0-1':
                builder.add_condition("AGEBYDAYS >= 0 AND AGEBYDAYS < 1")
            elif age_range == '1-3':
                builder.add_condition("AGEBYDAYS >= 1 AND AGEBYDAYS < 3")
            elif age_range == '3-7':
                builder.add_condition("AGEBYDAYS >= 3 AND AGEBYDAYS < 7")
            elif age_range == '7+':
                builder.add_condition("AGEBYDAYS >= 7")

        where_clause, params = builder.build_where_only()

        # Get total count
        count_sql = f"""
            SELECT COUNT(*) AS TOTAL
            FROM {WIP_VIEW}
            {where_clause}
        """
        count_df =
read_sql_df(count_sql, params)
        total = int(count_df.iloc[0]['TOTAL'] or 0) if count_df is not None else 0

        # Get paginated lots with bind variables
        offset = (page - 1) * page_size
        lots_params = params.copy()
        lots_params['offset'] = offset
        lots_params['limit'] = page_size

        # ROW_NUMBER window pagination: returns rows (offset, offset+limit]
        # in age-descending order, LOTID as tiebreaker.
        lots_sql = f"""
            SELECT * FROM (
                SELECT
                    LOTID,
                    WORKORDER,
                    QTY,
                    PACKAGE_LEF AS PACKAGE,
                    WORKCENTER_GROUP AS WORKCENTER,
                    SPECNAME AS SPEC,
                    ROUND(AGEBYDAYS, 1) AS AGE,
                    HOLDEMP AS HOLD_BY,
                    DEPTNAME AS DEPT,
                    COMMENT_HOLD AS HOLD_COMMENT,
                    ROW_NUMBER() OVER (ORDER BY AGEBYDAYS DESC, LOTID) AS RN
                FROM {WIP_VIEW}
                {where_clause}
            )
            WHERE RN > :offset AND RN <= :offset + :limit
            ORDER BY RN
        """
        lots_df = read_sql_df(lots_sql, lots_params)

        lots = []
        if lots_df is not None and not lots_df.empty:
            for _, row in lots_df.iterrows():
                lots.append({
                    'lotId': _safe_value(row['LOTID']),
                    'workorder': _safe_value(row['WORKORDER']),
                    'qty': int(row['QTY'] or 0),
                    'package': _safe_value(row['PACKAGE']),
                    'workcenter': _safe_value(row['WORKCENTER']),
                    'spec': _safe_value(row['SPEC']),
                    # NOTE(review): NaN is truthy, so a NULL AGE could yield NaN
                    # here rather than 0 — confirm read_sql_df's NULL mapping.
                    'age': float(row['AGE']) if row['AGE'] else 0,
                    'holdBy': _safe_value(row['HOLD_BY']),
                    'dept': _safe_value(row['DEPT']),
                    'holdComment': _safe_value(row['HOLD_COMMENT'])
                })

        total_pages = (total + page_size - 1) // page_size if total > 0 else 1

        return {
            'lots': lots,
            'pagination': {
                'page': page,
                'perPage': page_size,
                'total': total,
                'totalPages': total_pages
            },
            'filters': {
                'workcenter': workcenter,
                'package': package,
                'ageRange': age_range
            }
        }
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"Hold detail lots query failed: {exc}")
        import traceback
        traceback.print_exc()
        return None


# ============================================================
# Lot Detail API Functions
# ============================================================

# Field labels mapping for lot detail display (PowerBI naming convention).
# Keys are the camelCase JSON field names produced by _build_lot_detail_response;
# values are the human-readable labels shown in the UI.
LOT_DETAIL_FIELD_LABELS = {
    'lotId': 'Run Card Lot ID',
    'workorder': 'Work Order ID',
    'qty': 'Lot Qty(pcs)',
    'qty2': 'Lot Qty(Wafer pcs)',
    'status': 'Run Card Status',
    'holdReason': 'Hold Reason',
    'holdCount': 'Hold Count',
    'owner': 'Work Order Owner',
    'startDate': 'Run Card Start Date',
    'uts': 'UTS',
    'product': 'Product P/N',
    'productLine': 'Package',
    'packageLef': 'Package(LF)',
    'pjFunction': 'Product Function',
    'pjType': 'Product Type',
    'bop': 'BOP',
    'waferLotId': 'Wafer Lot ID',
    'waferPn': 'Wafer P/N',
    'waferLotPrefix': 'Wafer Lot ID(Prefix)',
    'spec': 'Spec',
    'specSequence': 'Spec Sequence',
    'workcenter': 'Work Center',
    'workcenterSequence': 'Work Center Sequence',
    'workcenterGroup': 'Work Center(Group)',
    'workcenterShort': 'Work Center(Short)',
    'ageByDays': 'Age By Days',
    'equipment': 'Equipment ID',
    'equipmentCount': 'Equipment Count',
    'workflow': 'Work Flow Name',
    'dateCode': 'Product Date Code',
    'leadframeName': 'LF Material Part',
    'leadframeOption': 'LF Option ID',
    'compoundName': 'Compound Material Part',
    'location': 'Run Card Location',
    'ncrId': 'NCR ID',
    'ncrDate': 'NCR-issued Time',
    'releaseTime': 'Release Time',
    'releaseEmp': 'Release Employee',
    'releaseComment': 'Release Comment',
    'holdComment': 'Hold Comment',
    'comment': 'Comment',
    'commentDate': 'Run Card Comment',
    'commentEmp': 'Run Card Comment Employee',
    'futureHoldComment': 'Future Hold Comment',
    'holdEmp': 'Hold Employee',
    'holdDept': 'Hold Employee Dept',
    'produceRegion': 'Produce Region',
    'priority': 'Work Order Priority',
    'tmttRemaining': 'TMTT Remaining',
    'dieConsumption': 'Die Consumption Qty',
    'wipStatus': 'WIP Status',
    'dataUpdateDate': 'Data Update Date'
}


def get_lot_detail(lotid: str) -> Optional[Dict[str, Any]]:
    """Get detailed information for a specific lot.

    Uses Redis cache when available, falls back to Oracle direct query.

    Args:
        lotid: The LOTID to retrieve

    Returns:
        Dict with lot details or None if not found
    """
    # Try cache first
    cached_df = _get_wip_dataframe()
    if cached_df is not None:
        try:
            # Exact match on LOTID (no fuzzy matching here).
            df = cached_df[cached_df['LOTID'] == lotid]

            if df.empty:
                return None

            row = df.iloc[0]
            return _build_lot_detail_response(row)
        except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
            raise
        except Exception as exc:
            logger.warning(f"Cache-based lot detail failed, falling back to Oracle: {exc}")

    # Fallback to Oracle direct query
    return _get_lot_detail_from_oracle(lotid)


def _get_lot_detail_from_oracle(lotid: str) -> Optional[Dict[str, Any]]:
    """Get lot detail directly from Oracle (fallback).

    Selects the full column set consumed by _build_lot_detail_response.
    """
    try:
        sql = f"""
            SELECT
                LOTID,
                WORKORDER,
                QTY,
                QTY2,
                STATUS,
                HOLDREASONNAME,
                CURRENTHOLDCOUNT,
                OWNER,
                STARTDATE,
                UTS,
                PRODUCT,
                PRODUCTLINENAME,
                PACKAGE_LEF,
                PJ_FUNCTION,
                PJ_TYPE,
                BOP,
                FIRSTNAME,
                WAFERNAME,
                WAFERLOT,
                SPECNAME,
                SPECSEQUENCE,
                WORKCENTERNAME,
                WORKCENTERSEQUENCE,
                WORKCENTER_GROUP,
                WORKCENTER_SHORT,
                AGEBYDAYS,
                EQUIPMENTS,
                EQUIPMENTCOUNT,
                WORKFLOWNAME,
                DATECODE,
                LEADFRAMENAME,
                LEADFRAMEOPTION,
                COMNAME,
                LOCATIONNAME,
                EVENTNAME,
                OCCURRENCEDATE,
                RELEASETIME,
                RELEASEEMP,
                RELEASEREASON,
                COMMENT_HOLD,
                CONTAINERCOMMENTS,
                COMMENT_DATE,
                COMMENT_EMP,
                COMMENT_FUTURE,
                HOLDEMP,
                DEPTNAME,
                PJ_PRODUCEREGION,
                PRIORITYCODENAME,
                TMTT_R,
                WAFER_FACTOR,
                SYS_DATE
            FROM {WIP_VIEW}
            WHERE LOTID = :lotid
        """
        df = read_sql_df(sql, {'lotid': lotid})

        if df is None or df.empty:
            return None

        row = df.iloc[0]
        return _build_lot_detail_response(row)
    except (DatabasePoolExhaustedError, DatabaseCircuitOpenError):
        raise
    except Exception as exc:
        logger.error(f"Lot detail query failed: {exc}")
        # NOTE(review): prefer logger.exception over print_exc for log routing.
        import traceback
        traceback.print_exc()
        return None


def
_build_lot_detail_response(row) -> Dict[str, Any]:
    """Build lot detail response from DataFrame row.

    Converts one WIP-view row (cache or Oracle) into the camelCase JSON
    payload keyed to match LOT_DETAIL_FIELD_LABELS.
    """
    # Helper to safely get value from row (handles NaN and missing columns)
    def safe_get(col, default=None):
        try:
            val = row.get(col)
            if pd.isna(val):
                return default
            return val
        except Exception:
            return default

    # Helper to safely get int value
    def safe_int(col, default=0):
        val = safe_get(col)
        if val is None:
            return default
        try:
            return int(val)
        except (ValueError, TypeError):
            return default

    # Helper to safely get float value
    def safe_float(col, default=0.0):
        val = safe_get(col)
        if val is None:
            return default
        try:
            return float(val)
        except (ValueError, TypeError):
            return default

    # Helper to format date value
    def format_date(col):
        val = safe_get(col)
        if val is None:
            return None
        try:
            return str(val)
        except Exception:
            return None

    # Compute WIP status.
    # Precedence: attached equipment (RUN) beats an active hold (HOLD),
    # which beats the default (QUEUE).
    equipment_count = safe_int('EQUIPMENTCOUNT')
    hold_count = safe_int('CURRENTHOLDCOUNT')

    if equipment_count > 0:
        wip_status = 'RUN'
    elif hold_count > 0:
        wip_status = 'HOLD'
    else:
        wip_status = 'QUEUE'

    return {
        'lotId': _safe_value(safe_get('LOTID')),
        'workorder': _safe_value(safe_get('WORKORDER')),
        'qty': safe_int('QTY'),
        'qty2': safe_int('QTY2') if safe_get('QTY2') is not None else None,
        'status': _safe_value(safe_get('STATUS')),
        'holdReason': _safe_value(safe_get('HOLDREASONNAME')),
        'holdCount': hold_count,
        'owner': _safe_value(safe_get('OWNER')),
        'startDate': format_date('STARTDATE'),
        'uts': _safe_value(safe_get('UTS')),
        'product': _safe_value(safe_get('PRODUCT')),
        'productLine': _safe_value(safe_get('PRODUCTLINENAME')),
        'packageLef': _safe_value(safe_get('PACKAGE_LEF')),
        'pjFunction': _safe_value(safe_get('PJ_FUNCTION')),
        'pjType': _safe_value(safe_get('PJ_TYPE')),
        'bop': _safe_value(safe_get('BOP')),
        'waferLotId': _safe_value(safe_get('FIRSTNAME')),
        'waferPn': _safe_value(safe_get('WAFERNAME')),
        'waferLotPrefix': _safe_value(safe_get('WAFERLOT')),
        'spec': _safe_value(safe_get('SPECNAME')),
        'specSequence': safe_int('SPECSEQUENCE') if safe_get('SPECSEQUENCE') is not None else None,
        'workcenter': _safe_value(safe_get('WORKCENTERNAME')),
        'workcenterSequence': safe_int('WORKCENTERSEQUENCE') if safe_get('WORKCENTERSEQUENCE') is not None else None,
        'workcenterGroup': _safe_value(safe_get('WORKCENTER_GROUP')),
        'workcenterShort': _safe_value(safe_get('WORKCENTER_SHORT')),
        'ageByDays': round(safe_float('AGEBYDAYS'), 2),
        'equipment': _safe_value(safe_get('EQUIPMENTS')),
        'equipmentCount': equipment_count,
        'workflow': _safe_value(safe_get('WORKFLOWNAME')),
        'dateCode': _safe_value(safe_get('DATECODE')),
        'leadframeName': _safe_value(safe_get('LEADFRAMENAME')),
        'leadframeOption': _safe_value(safe_get('LEADFRAMEOPTION')),
        'compoundName': _safe_value(safe_get('COMNAME')),
        'location': _safe_value(safe_get('LOCATIONNAME')),
        'ncrId': _safe_value(safe_get('EVENTNAME')),
        'ncrDate': format_date('OCCURRENCEDATE'),
        'releaseTime': format_date('RELEASETIME'),
        'releaseEmp': _safe_value(safe_get('RELEASEEMP')),
        'releaseComment': _safe_value(safe_get('RELEASEREASON')),
        'holdComment': _safe_value(safe_get('COMMENT_HOLD')),
        'comment': _safe_value(safe_get('CONTAINERCOMMENTS')),
        'commentDate': _safe_value(safe_get('COMMENT_DATE')),
        'commentEmp': _safe_value(safe_get('COMMENT_EMP')),
        'futureHoldComment': _safe_value(safe_get('COMMENT_FUTURE')),
        'holdEmp': _safe_value(safe_get('HOLDEMP')),
        'holdDept': _safe_value(safe_get('DEPTNAME')),
        'produceRegion': _safe_value(safe_get('PJ_PRODUCEREGION')),
        'priority': _safe_value(safe_get('PRIORITYCODENAME')),
        'tmttRemaining': _safe_value(safe_get('TMTT_R')),
        'dieConsumption': safe_int('WAFER_FACTOR') if safe_get('WAFER_FACTOR') is not None else None,
        'wipStatus': wip_status,
        'dataUpdateDate': format_date('SYS_DATE'),
        # Labels are shared module-level metadata, included with every response.
        'fieldLabels': LOT_DETAIL_FIELD_LABELS
    }
diff --git a/src/mes_dashboard/sql/__init__.py b/src/mes_dashboard/sql/__init__.py
new file mode 100644
index 0000000..16fa7d6
--- /dev/null
+++ b/src/mes_dashboard/sql/__init__.py
@@ -0,0 +1,90 @@
"""
SQL Query Management Module

Provides safe SQL query loading, building, and common filters.

Architecture Overview:
    This module provides three main components for SQL query management:

    1. SQLLoader - Load SQL templates from .sql files with LRU caching
    2. QueryBuilder - Build parameterized WHERE conditions safely
    3. CommonFilters - Reusable filter patterns for common queries

Directory Structure:
    src/mes_dashboard/sql/
    ├── __init__.py          # Public API exports
    ├── loader.py            # SQLLoader implementation
    ├── builder.py           # QueryBuilder implementation
    ├── filters.py           # CommonFilters implementation
    ├── dashboard/           # Dashboard-related SQL files
    │   ├── kpi.sql
    │   ├── heatmap.sql
    │   ├── workcenter_cards.sql
    │   └── resource_detail_with_job.sql
    ├── resource/            # Resource status SQL files
    │   ├── latest_status.sql
    │   ├── by_status.sql
    │   ├── by_workcenter.sql
    │   ├── detail.sql
    │   └── workcenter_status_matrix.sql
    ├── resource_history/    # Resource history SQL files
    │   ├── kpi.sql
    │   ├── trend.sql
    │   ├── heatmap.sql
    │   └── detail.sql
    └── wip/                 # WIP (Work In Progress) SQL files
        ├── summary.sql
        ├── matrix.sql
        └── detail.sql

SQL File Format:
    SQL files use placeholders for dynamic parts:

    - {{ PLACEHOLDER }} - Replaced via str.replace() before execution
    - :param_name - Oracle bind variables (filled by params dict)

    Example SQL file (resource/by_status.sql):
        -- Resource count by status
        -- Placeholders:
        --   {{ LATEST_STATUS_SUBQUERY }} - Base CTE for latest status
        -- Parameters:
        --   (from QueryBuilder)
        SELECT NEWSTATUSNAME, COUNT(*) as COUNT
        FROM ({{ LATEST_STATUS_SUBQUERY }}) rs
        WHERE 1=1 {{ WHERE_CLAUSE }}
        GROUP BY NEWSTATUSNAME

Usage Example:
    >>> from mes_dashboard.sql import
SQLLoader, QueryBuilder + >>> from mes_dashboard.core.database import read_sql_df + >>> + >>> # Load SQL template + >>> sql = SQLLoader.load("resource/by_status") + >>> + >>> # Build parameterized conditions + >>> builder = QueryBuilder() + >>> builder.add_in_condition("LOCATIONNAME", ["FAB1", "FAB2"]) + >>> builder.add_like_condition("WORKCENTERNAME", "ASSY", position="start") + >>> where_clause, params = builder.build_where_only() + >>> + >>> # Replace placeholders and execute + >>> sql = sql.replace("{{ LATEST_STATUS_SUBQUERY }}", base_cte) + >>> sql = sql.replace("{{ WHERE_CLAUSE }}", where_clause) + >>> df = read_sql_df(sql, params) + +SQL Injection Prevention: + - Always use QueryBuilder for user-provided values + - Use :param_name bind variables for all dynamic values + - Placeholders {{ }} are only for static, pre-defined SQL fragments + - Never interpolate user input directly into SQL strings +""" + +from .loader import SQLLoader +from .builder import QueryBuilder +from .filters import CommonFilters + +__all__ = [ + "SQLLoader", + "QueryBuilder", + "CommonFilters", +] diff --git a/src/mes_dashboard/sql/builder.py b/src/mes_dashboard/sql/builder.py new file mode 100644 index 0000000..6cf3893 --- /dev/null +++ b/src/mes_dashboard/sql/builder.py @@ -0,0 +1,263 @@ +""" +Query Builder + +Provides safe SQL query building with parameterized conditions. +""" + +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Tuple + + +@dataclass +class QueryBuilder: + """ + Safe SQL query builder with parameterized conditions. + + Builds WHERE clauses with Oracle bind variables (:param_name) + to prevent SQL injection. 
+ """ + + base_sql: str = "" + conditions: List[str] = field(default_factory=list) + params: Dict[str, Any] = field(default_factory=dict) + _param_counter: int = field(default=0, repr=False) + + def _next_param(self) -> str: + """Generate next parameter name.""" + name = f"p{self._param_counter}" + self._param_counter += 1 + return name + + def add_condition(self, condition: str) -> "QueryBuilder": + """ + Add a fixed condition (no parameters). + + Args: + condition: SQL condition string + + Returns: + self for method chaining + """ + self.conditions.append(condition) + return self + + def add_param_condition( + self, + column: str, + value: Any, + operator: str = "=", + ) -> "QueryBuilder": + """ + Add a parameterized condition. + + Args: + column: Column name + value: Value to compare + operator: Comparison operator (default: "=") + + Returns: + self for method chaining + """ + param_name = self._next_param() + self.conditions.append(f"{column} {operator} :{param_name}") + self.params[param_name] = value + return self + + def add_in_condition( + self, + column: str, + values: List[Any], + ) -> "QueryBuilder": + """ + Add an IN condition with parameterized values. + + Args: + column: Column name + values: List of values for IN clause + + Returns: + self for method chaining + """ + if not values: + return self + + param_names = [] + for val in values: + param_name = self._next_param() + param_names.append(f":{param_name}") + self.params[param_name] = val + + self.conditions.append(f"{column} IN ({', '.join(param_names)})") + return self + + def add_not_in_condition( + self, + column: str, + values: List[Any], + allow_null: bool = False, + ) -> "QueryBuilder": + """ + Add a NOT IN condition with parameterized values. 
+ + Args: + column: Column name + values: List of values to exclude + allow_null: If True, also allows NULL values + + Returns: + self for method chaining + """ + if not values: + return self + + param_names = [] + for val in values: + param_name = self._next_param() + param_names.append(f":{param_name}") + self.params[param_name] = val + + not_in_clause = f"{column} NOT IN ({', '.join(param_names)})" + + if allow_null: + self.conditions.append(f"({column} IS NULL OR {not_in_clause})") + else: + self.conditions.append(not_in_clause) + + return self + + def add_like_condition( + self, + column: str, + value: str, + position: str = "both", + ) -> "QueryBuilder": + """ + Add a LIKE condition with escaped wildcards. + + Args: + column: Column name + value: Search value (wildcards will be escaped) + position: Where to add wildcards: + - "both": %value% + - "start": value% + - "end": %value + + Returns: + self for method chaining + """ + # Escape SQL LIKE wildcards + escaped = value.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_") + + if position == "both": + pattern = f"%{escaped}%" + elif position == "start": + pattern = f"{escaped}%" + elif position == "end": + pattern = f"%{escaped}" + else: + pattern = escaped + + param_name = self._next_param() + self.conditions.append(f"{column} LIKE :{param_name} ESCAPE '\\'") + self.params[param_name] = pattern + + return self + + def add_or_like_conditions( + self, + column: str, + values: List[str], + position: str = "both", + case_insensitive: bool = False, + ) -> "QueryBuilder": + """ + Add multiple LIKE conditions combined with OR. 
+ + Args: + column: Column name + values: List of search values (wildcards will be escaped) + position: Where to add wildcards (both/start/end) + case_insensitive: If True, use UPPER() for case-insensitive matching + + Returns: + self for method chaining + """ + if not values: + return self + + like_conditions = [] + col_expr = f"UPPER({column})" if case_insensitive else column + + for val in values: + # Escape SQL LIKE wildcards + escaped = val.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_") + if case_insensitive: + escaped = escaped.upper() + + if position == "both": + pattern = f"%{escaped}%" + elif position == "start": + pattern = f"{escaped}%" + elif position == "end": + pattern = f"%{escaped}" + else: + pattern = escaped + + param_name = self._next_param() + like_conditions.append(f"{col_expr} LIKE :{param_name} ESCAPE '\\'") + self.params[param_name] = pattern + + self.conditions.append(f"({' OR '.join(like_conditions)})") + return self + + def add_is_null(self, column: str) -> "QueryBuilder": + """Add IS NULL condition.""" + self.conditions.append(f"{column} IS NULL") + return self + + def add_is_not_null(self, column: str) -> "QueryBuilder": + """Add IS NOT NULL condition.""" + self.conditions.append(f"{column} IS NOT NULL") + return self + + def build(self) -> Tuple[str, Dict[str, Any]]: + """ + Build the final SQL with WHERE clause. + + Replaces {{ WHERE_CLAUSE }} placeholder in base_sql. + If no conditions, placeholder is replaced with empty string. + + Returns: + Tuple of (sql_string, params_dict) + """ + if self.conditions: + where_clause = f"WHERE {' AND '.join(self.conditions)}" + else: + where_clause = "" + + sql = self.base_sql.replace("{{ WHERE_CLAUSE }}", where_clause) + return sql, self.params.copy() + + def build_where_only(self) -> Tuple[str, Dict[str, Any]]: + """ + Build only the WHERE clause (without base SQL). 
+ + Returns: + Tuple of (where_clause, params_dict) + """ + if self.conditions: + where_clause = f"WHERE {' AND '.join(self.conditions)}" + else: + where_clause = "" + return where_clause, self.params.copy() + + def get_conditions_sql(self) -> str: + """Get conditions as AND-joined string (without WHERE keyword).""" + return " AND ".join(self.conditions) if self.conditions else "" + + def reset(self) -> "QueryBuilder": + """Reset conditions and params, keep base_sql.""" + self.conditions = [] + self.params = {} + self._param_counter = 0 + return self diff --git a/src/mes_dashboard/sql/dashboard/heatmap.sql b/src/mes_dashboard/sql/dashboard/heatmap.sql new file mode 100644 index 0000000..3483e1c --- /dev/null +++ b/src/mes_dashboard/sql/dashboard/heatmap.sql @@ -0,0 +1,31 @@ +-- Utilization Heatmap Query +-- Returns equipment utilization data by workcenter and date +-- +-- Calculates PRD% = PRD_HOURS / AVAIL_HOURS * 100 +-- where AVAIL_HOURS = PRD + SBY + UDT + SDT + EGT (excludes NST) +-- +-- Parameters: +-- :days - Number of days to look back +-- +-- Dynamic placeholders: +-- LOCATION_FILTER - Location exclusion filter +-- ASSET_STATUS_FILTER - Asset status exclusion filter +-- FLAG_FILTER - Equipment flag filters (isProduction, isKey, isMonitor) + +SELECT + ss.WORKCENTERNAME, + TRUNC(ss.TXNDATE) as DATA_DATE, + SUM(CASE WHEN ss.OLDSTATUSNAME = 'PRD' THEN ss.HOURS ELSE 0 END) as PRD_HOURS, + SUM(CASE WHEN ss.OLDSTATUSNAME IN ('PRD', 'SBY', 'UDT', 'SDT', 'EGT') THEN ss.HOURS ELSE 0 END) as AVAIL_HOURS +FROM DWH.DW_MES_RESOURCESTATUS_SHIFT ss +JOIN DWH.DW_MES_RESOURCE r ON ss.HISTORYID = r.RESOURCEID +WHERE ss.TXNDATE >= TRUNC(SYSDATE) - :days + AND ss.TXNDATE < TRUNC(SYSDATE) + AND ss.WORKCENTERNAME IS NOT NULL + AND ((r.OBJECTCATEGORY = 'ASSEMBLY' AND r.OBJECTTYPE = 'ASSEMBLY') + OR (r.OBJECTCATEGORY = 'WAFERSORT' AND r.OBJECTTYPE = 'WAFERSORT')) + {{ LOCATION_FILTER }} + {{ ASSET_STATUS_FILTER }} + {{ FLAG_FILTER }} +GROUP BY ss.WORKCENTERNAME, TRUNC(ss.TXNDATE) 
+ORDER BY ss.WORKCENTERNAME, DATA_DATE diff --git a/src/mes_dashboard/sql/dashboard/kpi.sql b/src/mes_dashboard/sql/dashboard/kpi.sql new file mode 100644 index 0000000..da9f96d --- /dev/null +++ b/src/mes_dashboard/sql/dashboard/kpi.sql @@ -0,0 +1,26 @@ +-- Dashboard KPI Query +-- Returns overall KPI statistics for dashboard header +-- +-- Status categories: +-- RUN: PRD (Production) +-- DOWN: UDT + SDT (Down Time) +-- IDLE: SBY + NST (Idle) +-- ENG: EGT (Engineering Time) +-- +-- OU% = PRD / (PRD + SBY + EGT + SDT + UDT) * 100 +-- +-- Placeholders: +-- LATEST_STATUS_SUBQUERY - Base subquery for latest resource status +-- WHERE_CLAUSE - Additional filter conditions + +SELECT + COUNT(*) as TOTAL, + SUM(CASE WHEN NEWSTATUSNAME = 'PRD' THEN 1 ELSE 0 END) as PRD_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'SBY' THEN 1 ELSE 0 END) as SBY_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'UDT' THEN 1 ELSE 0 END) as UDT_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'SDT' THEN 1 ELSE 0 END) as SDT_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'EGT' THEN 1 ELSE 0 END) as EGT_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'NST' THEN 1 ELSE 0 END) as NST_COUNT, + SUM(CASE WHEN NEWSTATUSNAME NOT IN ('PRD','SBY','UDT','SDT','EGT','NST') THEN 1 ELSE 0 END) as OTHER_COUNT +FROM ({{ LATEST_STATUS_SUBQUERY }}) rs +{{ WHERE_CLAUSE }} diff --git a/src/mes_dashboard/sql/dashboard/kpi_standalone.sql b/src/mes_dashboard/sql/dashboard/kpi_standalone.sql new file mode 100644 index 0000000..30aa414 --- /dev/null +++ b/src/mes_dashboard/sql/dashboard/kpi_standalone.sql @@ -0,0 +1,64 @@ +-- Dashboard KPI Standalone Query +-- Returns overall KPI statistics for dashboard header +-- This is a self-contained query with CTE for optimal performance +-- +-- Placeholders: +-- DAYS_BACK - Number of days to look back +-- LOCATION_FILTER - Location exclusion filter (AND ...) +-- ASSET_STATUS_FILTER - Asset status exclusion filter (AND ...) 
+-- WHERE_CLAUSE - Additional filter conditions + +WITH resource_latest_status AS ( + SELECT * + FROM ( + SELECT + r.RESOURCEID, + r.RESOURCENAME, + r.OBJECTCATEGORY, + r.OBJECTTYPE, + r.RESOURCEFAMILYNAME, + r.WORKCENTERNAME, + r.LOCATIONNAME, + r.VENDORNAME, + r.VENDORMODEL, + r.PJ_DEPARTMENT, + r.PJ_ASSETSSTATUS, + r.PJ_ISPRODUCTION, + r.PJ_ISKEY, + r.PJ_ISMONITOR, + r.PJ_LOTID, + r.DESCRIPTION, + s.NEWSTATUSNAME, + s.NEWREASONNAME, + s.LASTSTATUSCHANGEDATE, + s.OLDSTATUSNAME, + s.OLDREASONNAME, + s.AVAILABILITY, + s.JOBID, + s.TXNDATE, + ROW_NUMBER() OVER ( + PARTITION BY r.RESOURCEID + ORDER BY s.LASTSTATUSCHANGEDATE DESC NULLS LAST, + COALESCE(s.TXNDATE, s.LASTSTATUSCHANGEDATE) DESC + ) AS rn + FROM DWH.DW_MES_RESOURCE r + JOIN DWH.DW_MES_RESOURCESTATUS s ON r.RESOURCEID = s.HISTORYID + WHERE ((r.OBJECTCATEGORY = 'ASSEMBLY' AND r.OBJECTTYPE = 'ASSEMBLY') + OR (r.OBJECTCATEGORY = 'WAFERSORT' AND r.OBJECTTYPE = 'WAFERSORT')) + AND COALESCE(s.TXNDATE, s.LASTSTATUSCHANGEDATE) >= SYSDATE - {{ DAYS_BACK }} + {{ LOCATION_FILTER }} + {{ ASSET_STATUS_FILTER }} + ) + WHERE rn = 1 +) +SELECT + COUNT(*) as TOTAL, + SUM(CASE WHEN NEWSTATUSNAME = 'PRD' THEN 1 ELSE 0 END) as PRD_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'SBY' THEN 1 ELSE 0 END) as SBY_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'UDT' THEN 1 ELSE 0 END) as UDT_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'SDT' THEN 1 ELSE 0 END) as SDT_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'EGT' THEN 1 ELSE 0 END) as EGT_COUNT, + SUM(CASE WHEN NEWSTATUSNAME = 'NST' THEN 1 ELSE 0 END) as NST_COUNT, + SUM(CASE WHEN NEWSTATUSNAME NOT IN ('PRD','SBY','UDT','SDT','EGT','NST') THEN 1 ELSE 0 END) as OTHER_COUNT +FROM resource_latest_status +{{ WHERE_CLAUSE }} diff --git a/src/mes_dashboard/sql/dashboard/ou_trend.sql b/src/mes_dashboard/sql/dashboard/ou_trend.sql new file mode 100644 index 0000000..1b17df7 --- /dev/null +++ b/src/mes_dashboard/sql/dashboard/ou_trend.sql @@ -0,0 +1,29 @@ +-- OU (Operating Utilization) Trend Query +-- Returns 
daily OU% for the past N days +-- +-- Placeholders: +-- LOCATION_FILTER - Location exclusion filter +-- ASSET_STATUS_FILTER - Asset status exclusion filter +-- FLAG_FILTER - Equipment flag filter (isProduction, isKey, isMonitor) +-- Parameters: +-- :days - Number of days to look back + +SELECT + TRUNC(ss.TXNDATE) as DATA_DATE, + SUM(CASE WHEN ss.OLDSTATUSNAME = 'PRD' THEN ss.HOURS ELSE 0 END) as PRD_HOURS, + SUM(CASE WHEN ss.OLDSTATUSNAME = 'SBY' THEN ss.HOURS ELSE 0 END) as SBY_HOURS, + SUM(CASE WHEN ss.OLDSTATUSNAME = 'UDT' THEN ss.HOURS ELSE 0 END) as UDT_HOURS, + SUM(CASE WHEN ss.OLDSTATUSNAME = 'SDT' THEN ss.HOURS ELSE 0 END) as SDT_HOURS, + SUM(CASE WHEN ss.OLDSTATUSNAME = 'EGT' THEN ss.HOURS ELSE 0 END) as EGT_HOURS, + SUM(ss.HOURS) as TOTAL_HOURS +FROM DWH.DW_MES_RESOURCESTATUS_SHIFT ss +JOIN DWH.DW_MES_RESOURCE r ON ss.HISTORYID = r.RESOURCEID +WHERE ss.TXNDATE >= TRUNC(SYSDATE) - :days + AND ss.TXNDATE < TRUNC(SYSDATE) + AND ((r.OBJECTCATEGORY = 'ASSEMBLY' AND r.OBJECTTYPE = 'ASSEMBLY') + OR (r.OBJECTCATEGORY = 'WAFERSORT' AND r.OBJECTTYPE = 'WAFERSORT')) + {{ LOCATION_FILTER }} + {{ ASSET_STATUS_FILTER }} + {{ FLAG_FILTER }} +GROUP BY TRUNC(ss.TXNDATE) +ORDER BY DATA_DATE diff --git a/src/mes_dashboard/sql/dashboard/resource_detail_with_job.sql b/src/mes_dashboard/sql/dashboard/resource_detail_with_job.sql new file mode 100644 index 0000000..3bae2df --- /dev/null +++ b/src/mes_dashboard/sql/dashboard/resource_detail_with_job.sql @@ -0,0 +1,101 @@ +-- Resource detail with JOB info for SDT/UDT drill-down +-- Placeholders: +-- DAYS_BACK - Number of days to look back +-- LOCATION_FILTER - Location exclusion filter (e.g., "AND r.LOCATIONNAME NOT IN (...)") +-- ASSET_STATUS_FILTER - Asset status exclusion filter +-- WHERE_CLAUSE - Dynamic WHERE conditions for final SELECT +-- Parameters: +-- :start_row - Pagination start row +-- :end_row - Pagination end row + +WITH latest_txn AS ( + SELECT MAX(COALESCE(TXNDATE, LASTSTATUSCHANGEDATE)) AS MAX_TXNDATE + FROM 
DWH.DW_MES_RESOURCESTATUS +), +base_data AS ( + SELECT * + FROM ( + SELECT + r.RESOURCEID, + r.RESOURCENAME, + r.OBJECTCATEGORY, + r.OBJECTTYPE, + r.RESOURCEFAMILYNAME, + r.WORKCENTERNAME, + r.LOCATIONNAME, + r.VENDORNAME, + r.VENDORMODEL, + r.PJ_DEPARTMENT, + r.PJ_ASSETSSTATUS, + r.PJ_ISPRODUCTION, + r.PJ_ISKEY, + r.PJ_ISMONITOR, + r.PJ_LOTID, + r.DESCRIPTION, + s.NEWSTATUSNAME, + s.NEWREASONNAME, + s.LASTSTATUSCHANGEDATE, + s.OLDSTATUSNAME, + s.OLDREASONNAME, + s.AVAILABILITY, + s.JOBID, + s.TXNDATE, + ROW_NUMBER() OVER ( + PARTITION BY r.RESOURCEID + ORDER BY s.LASTSTATUSCHANGEDATE DESC NULLS LAST, + COALESCE(s.TXNDATE, s.LASTSTATUSCHANGEDATE) DESC + ) AS rn + FROM DWH.DW_MES_RESOURCE r + JOIN DWH.DW_MES_RESOURCESTATUS s ON r.RESOURCEID = s.HISTORYID + CROSS JOIN latest_txn lt + WHERE ((r.OBJECTCATEGORY = 'ASSEMBLY' AND r.OBJECTTYPE = 'ASSEMBLY') + OR (r.OBJECTCATEGORY = 'WAFERSORT' AND r.OBJECTTYPE = 'WAFERSORT')) + AND COALESCE(s.TXNDATE, s.LASTSTATUSCHANGEDATE) >= lt.MAX_TXNDATE - {{ DAYS_BACK }} + {{ LOCATION_FILTER }} + {{ ASSET_STATUS_FILTER }} + ) + WHERE rn = 1 +), +max_time AS ( + SELECT MAX(LASTSTATUSCHANGEDATE) AS MAX_STATUS_TIME FROM base_data +) +SELECT * FROM ( + SELECT + rs.RESOURCENAME, + rs.WORKCENTERNAME, + rs.RESOURCEFAMILYNAME, + rs.NEWSTATUSNAME, + rs.NEWREASONNAME, + rs.LASTSTATUSCHANGEDATE, + rs.PJ_DEPARTMENT, + rs.VENDORNAME, + rs.VENDORMODEL, + rs.PJ_ISPRODUCTION, + rs.PJ_ISKEY, + rs.PJ_ISMONITOR, + j.JOBID, + rs.PJ_LOTID, + j.JOBORDERNAME, + j.JOBSTATUS, + j.SYMPTOMCODENAME, + j.CAUSECODENAME, + j.REPAIRCODENAME, + j.CREATEDATE as JOB_CREATEDATE, + j.FIRSTCLOCKONDATE, + mt.MAX_STATUS_TIME, + ROUND((mt.MAX_STATUS_TIME - rs.LASTSTATUSCHANGEDATE) * 24 * 60, 0) as DOWN_MINUTES, + ROW_NUMBER() OVER ( + ORDER BY + CASE rs.NEWSTATUSNAME + WHEN 'UDT' THEN 1 + WHEN 'SDT' THEN 2 + ELSE 3 + END, + rs.LASTSTATUSCHANGEDATE DESC NULLS LAST + ) AS rn + FROM base_data rs + CROSS JOIN max_time mt + LEFT JOIN DWH.DW_MES_JOB j ON j.RESOURCEID = 
rs.RESOURCEID + AND j.CREATEDATE = rs.LASTSTATUSCHANGEDATE + WHERE {{ WHERE_CLAUSE }} +) WHERE rn BETWEEN :start_row AND :end_row diff --git a/src/mes_dashboard/sql/dashboard/workcenter_cards.sql b/src/mes_dashboard/sql/dashboard/workcenter_cards.sql new file mode 100644 index 0000000..e703379 --- /dev/null +++ b/src/mes_dashboard/sql/dashboard/workcenter_cards.sql @@ -0,0 +1,17 @@ +-- Workcenter status cards aggregation +-- Placeholders: +-- LATEST_STATUS_SUBQUERY - Base subquery for latest resource status +-- WHERE_CLAUSE - Dynamic WHERE conditions + +SELECT + WORKCENTERNAME, + COUNT(*) as TOTAL, + SUM(CASE WHEN NEWSTATUSNAME = 'PRD' THEN 1 ELSE 0 END) as PRD, + SUM(CASE WHEN NEWSTATUSNAME = 'SBY' THEN 1 ELSE 0 END) as SBY, + SUM(CASE WHEN NEWSTATUSNAME = 'UDT' THEN 1 ELSE 0 END) as UDT, + SUM(CASE WHEN NEWSTATUSNAME = 'SDT' THEN 1 ELSE 0 END) as SDT, + SUM(CASE WHEN NEWSTATUSNAME = 'EGT' THEN 1 ELSE 0 END) as EGT, + SUM(CASE WHEN NEWSTATUSNAME = 'NST' THEN 1 ELSE 0 END) as NST +FROM ({{ LATEST_STATUS_SUBQUERY }}) rs +WHERE {{ WHERE_CLAUSE }} +GROUP BY WORKCENTERNAME diff --git a/src/mes_dashboard/sql/filters.py b/src/mes_dashboard/sql/filters.py new file mode 100644 index 0000000..526062e --- /dev/null +++ b/src/mes_dashboard/sql/filters.py @@ -0,0 +1,287 @@ +""" +Common SQL Filters + +Provides reusable filter building methods for common query patterns. 
+""" + +from typing import Any, Dict, List, Optional, Union + +from mes_dashboard.config import EXCLUDED_ASSET_STATUSES, EXCLUDED_LOCATIONS +from mes_dashboard.config.constants import EQUIPMENT_FLAG_FILTERS + +from .builder import QueryBuilder + + +# Non-quality hold reasons (canonical source, used by wip_service.py) +# All other hold reasons are considered quality holds +NON_QUALITY_HOLD_REASONS = { + "IQC檢驗(久存品驗證)(QC)", + "大中/安波幅50pcs樣品留樣(PD)", + "工程驗證(PE)", + "工程驗證(RD)", + "指定機台生產", + "特殊需求(X-Ray全檢)", + "特殊需求管控", + "第一次量產QC品質確認(QC)", + "需綁尾數(PD)", + "樣品需求留存打樣(樣品)", + "盤點(收線)需求", +} + + +class CommonFilters: + """Common SQL filter builders.""" + + # ========================================================= + # Location & Asset Status Filters + # ========================================================= + + @staticmethod + def add_location_exclusion( + builder: QueryBuilder, + column: str = "LOCATIONNAME", + ) -> QueryBuilder: + """ + Add location exclusion filter. + + Excludes locations defined in EXCLUDED_LOCATIONS config. + Allows NULL values. + + Args: + builder: QueryBuilder instance + column: Column name (default: LOCATIONNAME) + + Returns: + QueryBuilder for method chaining + """ + if EXCLUDED_LOCATIONS: + builder.add_not_in_condition(column, EXCLUDED_LOCATIONS, allow_null=True) + return builder + + @staticmethod + def add_asset_status_exclusion( + builder: QueryBuilder, + column: str = "PJ_ASSETSSTATUS", + ) -> QueryBuilder: + """ + Add asset status exclusion filter. + + Excludes statuses defined in EXCLUDED_ASSET_STATUSES config. + Allows NULL values. 
+ + Args: + builder: QueryBuilder instance + column: Column name (default: PJ_ASSETSSTATUS) + + Returns: + QueryBuilder for method chaining + """ + if EXCLUDED_ASSET_STATUSES: + builder.add_not_in_condition( + column, EXCLUDED_ASSET_STATUSES, allow_null=True + ) + return builder + + # ========================================================= + # WIP Base Filters + # ========================================================= + + @staticmethod + def add_wip_base_filters( + builder: QueryBuilder, + workorder: Optional[str] = None, + lotid: Optional[str] = None, + package: Optional[str] = None, + pj_type: Optional[str] = None, + ) -> QueryBuilder: + """ + Add WIP base filters (fuzzy search). + + Args: + builder: QueryBuilder instance + workorder: Workorder filter (LIKE %value%) + lotid: Lot ID filter (LIKE %value%) + package: Package filter (LIKE %value%) + pj_type: PJ type filter (LIKE %value%) + + Returns: + QueryBuilder for method chaining + """ + if workorder: + builder.add_like_condition("WORKORDER", workorder) + if lotid: + builder.add_like_condition("LOTID", lotid) + if package: + builder.add_like_condition("PACKAGE_LEF", package) + if pj_type: + builder.add_like_condition("PJ_TYPE", pj_type) + return builder + + # ========================================================= + # Status Filters + # ========================================================= + + @staticmethod + def add_status_filter( + builder: QueryBuilder, + status: Optional[str] = None, + statuses: Optional[List[str]] = None, + column: str = "STATUS", + ) -> QueryBuilder: + """ + Add status filter. 
+ + Args: + builder: QueryBuilder instance + status: Single status value + statuses: List of status values + column: Column name (default: STATUS) + + Returns: + QueryBuilder for method chaining + """ + if status: + builder.add_param_condition(column, status) + elif statuses: + builder.add_in_condition(column, statuses) + return builder + + # ========================================================= + # Hold Type Filters + # ========================================================= + + @staticmethod + def add_hold_type_filter( + builder: QueryBuilder, + hold_type: Optional[str] = None, + column: str = "HOLDREASONNAME", + ) -> QueryBuilder: + """ + Add hold type filter (quality vs non-quality). + + Args: + builder: QueryBuilder instance + hold_type: "quality" or "non_quality" + column: Column name (default: HOLDREASONNAME) + + Returns: + QueryBuilder for method chaining + """ + if hold_type == "quality": + # Quality holds: exclude non-quality reasons + builder.add_not_in_condition(column, list(NON_QUALITY_HOLD_REASONS)) + elif hold_type == "non_quality": + # Non-quality holds: only non-quality reasons + builder.add_in_condition(column, list(NON_QUALITY_HOLD_REASONS)) + return builder + + @staticmethod + def is_quality_hold(reason: Optional[str]) -> bool: + """Check if a hold reason is quality-related.""" + return reason not in NON_QUALITY_HOLD_REASONS + + @staticmethod + def get_non_quality_reasons_sql() -> str: + """Get non-quality hold reasons as SQL-safe literal list. + + Used for embedding in SQL CASE expressions where bind parameters + cannot be used. Values are from a constant set (not user input). + + Returns: + SQL-safe string for IN clause, e.g., "'reason1', 'reason2', ..." 
+ """ + # Escape single quotes in values (replace ' with '') + escaped = [f"'{r.replace(chr(39), chr(39)+chr(39))}'" for r in NON_QUALITY_HOLD_REASONS] + return ", ".join(escaped) + + # ========================================================= + # Equipment/Resource Filters + # ========================================================= + + @staticmethod + def add_equipment_filter( + builder: QueryBuilder, + resource_ids: Optional[List[str]] = None, + workcenters: Optional[List[str]] = None, + ) -> QueryBuilder: + """ + Add equipment/resource filters. + + Args: + builder: QueryBuilder instance + resource_ids: List of resource IDs + workcenters: List of workcenter names + + Returns: + QueryBuilder for method chaining + """ + if resource_ids: + builder.add_in_condition("RESOURCEID", resource_ids) + if workcenters: + builder.add_in_condition("WORKCENTERNAME", workcenters) + return builder + + @staticmethod + def add_equipment_flag_filters( + builder: QueryBuilder, + filters: Optional[Dict] = None, + ) -> QueryBuilder: + """ + Add equipment flag filters (isProduction, isKey, isMonitor). + + These are safe boolean conditions from EQUIPMENT_FLAG_FILTERS config. + + Args: + builder: QueryBuilder instance + filters: Dict with flag keys (isProduction, isKey, isMonitor) + + Returns: + QueryBuilder for method chaining + """ + if not filters: + return builder + + for flag_key, sql_condition in EQUIPMENT_FLAG_FILTERS.items(): + if filters.get(flag_key): + builder.add_condition(sql_condition) + + return builder + + # ========================================================= + # Legacy Compatibility (for core/utils.py wrapper) + # ========================================================= + + @staticmethod + def build_location_filter_legacy( + locations: Optional[List[str]] = None, + excluded_locations: Optional[List[str]] = None, + ) -> str: + """ + Build location filter SQL string (legacy format). + + Deprecated: Use add_location_exclusion() with QueryBuilder instead. 
+ """ + conditions = [] + if locations: + loc_list = ", ".join(f"'{loc}'" for loc in locations) + conditions.append(f"LOCATIONNAME IN ({loc_list})") + if excluded_locations: + exc_list = ", ".join(f"'{loc}'" for loc in excluded_locations) + conditions.append( + f"(LOCATIONNAME IS NULL OR LOCATIONNAME NOT IN ({exc_list}))" + ) + return " AND ".join(conditions) if conditions else "" + + @staticmethod + def build_asset_status_filter_legacy( + excluded_statuses: Optional[List[str]] = None, + ) -> str: + """ + Build asset status filter SQL string (legacy format). + + Deprecated: Use add_asset_status_exclusion() with QueryBuilder instead. + """ + if not excluded_statuses: + return "" + exc_list = ", ".join(f"'{s}'" for s in excluded_statuses) + return f"(PJ_ASSETSSTATUS IS NULL OR PJ_ASSETSSTATUS NOT IN ({exc_list}))" diff --git a/src/mes_dashboard/sql/job_query/job_list.sql b/src/mes_dashboard/sql/job_query/job_list.sql new file mode 100644 index 0000000..96302de --- /dev/null +++ b/src/mes_dashboard/sql/job_query/job_list.sql @@ -0,0 +1,33 @@ +-- Job List Query +-- Retrieves maintenance jobs for selected resources within date range +-- Placeholders: +-- RESOURCE_FILTER - Resource ID filter condition (e.g., RESOURCEID IN (...)) +-- Parameters: +-- :start_date - Start date (YYYY-MM-DD) +-- :end_date - End date (YYYY-MM-DD) + +SELECT + j.JOBID, + j.RESOURCEID, + j.RESOURCENAME, + j.JOBSTATUS, + j.JOBMODELNAME, + j.JOBORDERNAME, + j.CREATEDATE, + j.COMPLETEDATE, + j.CANCELDATE, + j.FIRSTCLOCKONDATE, + j.LASTCLOCKOFFDATE, + j.CAUSECODENAME, + j.REPAIRCODENAME, + j.SYMPTOMCODENAME, + j.PJ_CAUSECODE2NAME, + j.PJ_REPAIRCODE2NAME, + j.PJ_SYMPTOMCODE2NAME, + j.CREATE_EMPNAME, + j.COMPLETE_EMPNAME +FROM DWH.DW_MES_JOB j +WHERE {{ RESOURCE_FILTER }} + AND j.CREATEDATE >= TO_DATE(:start_date, 'YYYY-MM-DD') + AND j.CREATEDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1 +ORDER BY j.RESOURCENAME, j.CREATEDATE DESC diff --git a/src/mes_dashboard/sql/job_query/job_txn_detail.sql 
b/src/mes_dashboard/sql/job_query/job_txn_detail.sql new file mode 100644 index 0000000..4f10e8c --- /dev/null +++ b/src/mes_dashboard/sql/job_query/job_txn_detail.sql @@ -0,0 +1,27 @@ +-- Job Transaction History Detail +-- Retrieves all transaction history for a single job +-- Parameters: +-- :job_id - The JOBID to query + +SELECT + h.JOBTXNHISTORYID, + h.JOBID, + h.TXNDATE, + h.FROMJOBSTATUS, + h.JOBSTATUS, + h.STAGENAME, + h.TOSTAGENAME, + h.CAUSECODENAME, + h.REPAIRCODENAME, + h.SYMPTOMCODENAME, + h.USER_EMPNO, + h.USER_NAME, + h.EMP_EMPNO, + h.EMP_NAME, + h.COMMENTS, + h.CDONAME, + h.JOBMODELNAME, + h.JOBORDERNAME +FROM DWH.DW_MES_JOBTXNHISTORY h +WHERE h.JOBID = :job_id +ORDER BY h.TXNDATE ASC diff --git a/src/mes_dashboard/sql/job_query/job_txn_export.sql b/src/mes_dashboard/sql/job_query/job_txn_export.sql new file mode 100644 index 0000000..c66af1e --- /dev/null +++ b/src/mes_dashboard/sql/job_query/job_txn_export.sql @@ -0,0 +1,35 @@ +-- Job Transaction Export (Full History) +-- Joins JOB and JOBTXNHISTORY for complete CSV export +-- Placeholders: +-- RESOURCE_FILTER - Resource ID filter condition (e.g., j.RESOURCEID IN (...)) +-- Parameters: +-- :start_date - Start date (YYYY-MM-DD) +-- :end_date - End date (YYYY-MM-DD) + +SELECT + j.RESOURCENAME, + j.JOBID, + j.JOBSTATUS as JOB_FINAL_STATUS, + j.JOBMODELNAME, + j.JOBORDERNAME, + j.CREATEDATE as JOB_CREATEDATE, + j.COMPLETEDATE as JOB_COMPLETEDATE, + j.CAUSECODENAME as JOB_CAUSECODENAME, + j.REPAIRCODENAME as JOB_REPAIRCODENAME, + j.SYMPTOMCODENAME as JOB_SYMPTOMCODENAME, + h.TXNDATE, + h.FROMJOBSTATUS, + h.JOBSTATUS as TXN_JOBSTATUS, + h.STAGENAME, + h.CAUSECODENAME as TXN_CAUSECODENAME, + h.REPAIRCODENAME as TXN_REPAIRCODENAME, + h.SYMPTOMCODENAME as TXN_SYMPTOMCODENAME, + h.USER_NAME, + h.EMP_NAME, + h.COMMENTS +FROM DWH.DW_MES_JOB j +JOIN DWH.DW_MES_JOBTXNHISTORY h ON j.JOBID = h.JOBID +WHERE {{ RESOURCE_FILTER }} + AND h.TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD') + AND h.TXNDATE < 
TO_DATE(:end_date, 'YYYY-MM-DD') + 1 +ORDER BY j.RESOURCENAME, j.JOBID, h.TXNDATE diff --git a/src/mes_dashboard/sql/loader.py b/src/mes_dashboard/sql/loader.py new file mode 100644 index 0000000..25db078 --- /dev/null +++ b/src/mes_dashboard/sql/loader.py @@ -0,0 +1,66 @@ +""" +SQL File Loader + +Provides SQL file loading with LRU caching and structural parameter substitution. +""" + +from functools import lru_cache +from pathlib import Path +from typing import Optional + + +class SQLLoader: + """SQL file loader with LRU caching.""" + + _sql_dir: Path = Path(__file__).parent + + @classmethod + @lru_cache(maxsize=100) + def load(cls, name: str) -> str: + """ + Load SQL file content. + + Args: + name: SQL file path without extension, e.g., "wip/summary" + + Returns: + SQL file content as string + + Raises: + FileNotFoundError: If SQL file does not exist + """ + path = cls._sql_dir / f"{name}.sql" + if not path.exists(): + raise FileNotFoundError(f"SQL file not found: {path}") + return path.read_text(encoding="utf-8") + + @classmethod + def load_with_params(cls, name: str, **kwargs) -> str: + """ + Load SQL file and substitute structural parameters. + + Uses Jinja2-style placeholders: {{ param_name }} + Only use for structural parameters (table names, column lists), + NOT for user input values. 
+ + Args: + name: SQL file path without extension + **kwargs: Parameters to substitute + + Returns: + SQL content with substituted parameters + """ + sql = cls.load(name) + for key, value in kwargs.items(): + sql = sql.replace(f"{{{{ {key} }}}}", str(value)) + return sql + + @classmethod + def clear_cache(cls) -> None: + """Clear the LRU cache.""" + cls.load.cache_clear() + + @classmethod + def cache_info(cls): + """Get cache statistics.""" + return cls.load.cache_info() diff --git a/src/mes_dashboard/sql/resource/by_status.sql b/src/mes_dashboard/sql/resource/by_status.sql new file mode 100644 index 0000000..e4af22f --- /dev/null +++ b/src/mes_dashboard/sql/resource/by_status.sql @@ -0,0 +1,11 @@ +-- Resource count by status +-- Placeholders: +-- LATEST_STATUS_SUBQUERY - Base subquery for latest resource status + +SELECT + NEWSTATUSNAME, + COUNT(*) as COUNT +FROM ({{ LATEST_STATUS_SUBQUERY }}) rs +WHERE NEWSTATUSNAME IS NOT NULL +GROUP BY NEWSTATUSNAME +ORDER BY COUNT DESC diff --git a/src/mes_dashboard/sql/resource/by_workcenter.sql b/src/mes_dashboard/sql/resource/by_workcenter.sql new file mode 100644 index 0000000..23db367 --- /dev/null +++ b/src/mes_dashboard/sql/resource/by_workcenter.sql @@ -0,0 +1,12 @@ +-- Resource count by workcenter and status +-- Placeholders: +-- LATEST_STATUS_SUBQUERY - Base subquery for latest resource status + +SELECT + WORKCENTERNAME, + NEWSTATUSNAME, + COUNT(*) as COUNT +FROM ({{ LATEST_STATUS_SUBQUERY }}) rs +WHERE WORKCENTERNAME IS NOT NULL +GROUP BY WORKCENTERNAME, NEWSTATUSNAME +ORDER BY WORKCENTERNAME, COUNT DESC diff --git a/src/mes_dashboard/sql/resource/detail.sql b/src/mes_dashboard/sql/resource/detail.sql new file mode 100644 index 0000000..d664659 --- /dev/null +++ b/src/mes_dashboard/sql/resource/detail.sql @@ -0,0 +1,30 @@ +-- Resource detail with pagination +-- Placeholders: +-- LATEST_STATUS_SUBQUERY - Base subquery for latest resource status +-- WHERE_CLAUSE - Dynamic WHERE conditions (e.g., AND ...) 
+-- Parameters: +-- :start_row - Pagination start row +-- :end_row - Pagination end row + +SELECT * FROM ( + SELECT + RESOURCENAME, + WORKCENTERNAME, + RESOURCEFAMILYNAME, + NEWSTATUSNAME, + NEWREASONNAME, + LASTSTATUSCHANGEDATE, + PJ_DEPARTMENT, + VENDORNAME, + VENDORMODEL, + PJ_ASSETSSTATUS, + AVAILABILITY, + PJ_ISPRODUCTION, + PJ_ISKEY, + PJ_ISMONITOR, + ROW_NUMBER() OVER ( + ORDER BY LASTSTATUSCHANGEDATE DESC NULLS LAST + ) AS rn + FROM ({{ LATEST_STATUS_SUBQUERY }}) rs + WHERE 1=1 {{ WHERE_CLAUSE }} +) WHERE rn BETWEEN :start_row AND :end_row diff --git a/src/mes_dashboard/sql/resource/distinct_statuses.sql b/src/mes_dashboard/sql/resource/distinct_statuses.sql new file mode 100644 index 0000000..fd95ef1 --- /dev/null +++ b/src/mes_dashboard/sql/resource/distinct_statuses.sql @@ -0,0 +1,19 @@ +-- Resource Distinct Statuses Query +-- Returns distinct status names from recent resource status changes +-- +-- Parameters: +-- :days_back - Number of days to look back for status changes + +WITH latest_txn AS ( + SELECT MAX(COALESCE(TXNDATE, LASTSTATUSCHANGEDATE)) AS MAX_TXNDATE + FROM DWH.DW_MES_RESOURCESTATUS +) +SELECT DISTINCT s.NEWSTATUSNAME +FROM DWH.DW_MES_RESOURCE r +JOIN DWH.DW_MES_RESOURCESTATUS s ON r.RESOURCEID = s.HISTORYID +CROSS JOIN latest_txn lt +WHERE ((r.OBJECTCATEGORY = 'ASSEMBLY' AND r.OBJECTTYPE = 'ASSEMBLY') + OR (r.OBJECTCATEGORY = 'WAFERSORT' AND r.OBJECTTYPE = 'WAFERSORT')) + AND COALESCE(s.TXNDATE, s.LASTSTATUSCHANGEDATE) >= lt.MAX_TXNDATE - :days_back + AND s.NEWSTATUSNAME IS NOT NULL +ORDER BY s.NEWSTATUSNAME diff --git a/src/mes_dashboard/sql/resource/latest_status.sql b/src/mes_dashboard/sql/resource/latest_status.sql new file mode 100644 index 0000000..7ac0956 --- /dev/null +++ b/src/mes_dashboard/sql/resource/latest_status.sql @@ -0,0 +1,52 @@ +-- Resource Latest Status Query +-- Returns the latest status for each resource using ROW_NUMBER() +-- +-- Dynamic placeholders: +-- days_back - Number of days to look back for status changes 
+-- LOCATION_FILTER - Location exclusion filter (AND ...) +-- ASSET_STATUS_FILTER - Asset status exclusion filter (AND ...) +-- +-- Note: This query is designed to be embedded as a subquery (no CTE/WITH clause) +-- The look-back window is applied inline as SYSDATE - {{ days_back }} (no MAX_TXNDATE subquery is used here) + +SELECT * +FROM ( + SELECT + r.RESOURCEID, + r.RESOURCENAME, + r.OBJECTCATEGORY, + r.OBJECTTYPE, + r.RESOURCEFAMILYNAME, + r.WORKCENTERNAME, + r.LOCATIONNAME, + r.VENDORNAME, + r.VENDORMODEL, + r.PJ_DEPARTMENT, + r.PJ_ASSETSSTATUS, + r.PJ_ISPRODUCTION, + r.PJ_ISKEY, + r.PJ_ISMONITOR, + r.PJ_LOTID, + r.DESCRIPTION, + s.NEWSTATUSNAME, + s.NEWREASONNAME, + s.LASTSTATUSCHANGEDATE, + s.OLDSTATUSNAME, + s.OLDREASONNAME, + s.AVAILABILITY, + s.JOBID, + s.TXNDATE, + ROW_NUMBER() OVER ( + PARTITION BY r.RESOURCEID + ORDER BY s.LASTSTATUSCHANGEDATE DESC NULLS LAST, + COALESCE(s.TXNDATE, s.LASTSTATUSCHANGEDATE) DESC + ) AS rn + FROM DWH.DW_MES_RESOURCE r + JOIN DWH.DW_MES_RESOURCESTATUS s ON r.RESOURCEID = s.HISTORYID + WHERE ((r.OBJECTCATEGORY = 'ASSEMBLY' AND r.OBJECTTYPE = 'ASSEMBLY') + OR (r.OBJECTCATEGORY = 'WAFERSORT' AND r.OBJECTTYPE = 'WAFERSORT')) + AND COALESCE(s.TXNDATE, s.LASTSTATUSCHANGEDATE) >= SYSDATE - {{ days_back }} + {{ LOCATION_FILTER }} + {{ ASSET_STATUS_FILTER }} +) +WHERE rn = 1 diff --git a/src/mes_dashboard/sql/resource/workcenter_status_matrix.sql b/src/mes_dashboard/sql/resource/workcenter_status_matrix.sql new file mode 100644 index 0000000..b1e2131 --- /dev/null +++ b/src/mes_dashboard/sql/resource/workcenter_status_matrix.sql @@ -0,0 +1,33 @@ +-- Resource workcenter × status matrix +-- Placeholders: +-- LATEST_STATUS_SUBQUERY - Base subquery for latest resource status + +SELECT + WORKCENTERNAME, + CASE NEWSTATUSNAME + WHEN 'PRD' THEN 'PRD' + WHEN 'SBY' THEN 'SBY' + WHEN 'UDT' THEN 'UDT' + WHEN 'SDT' THEN 'SDT' + WHEN 'EGT' THEN 'EGT' + WHEN 'NST' THEN 'NST' + WHEN 'SCRAP' THEN 'SCRAP' + ELSE 'OTHER' + END as STATUS_CATEGORY, + NEWSTATUSNAME, + COUNT(*) 
as COUNT +FROM ({{ LATEST_STATUS_SUBQUERY }}) rs +WHERE WORKCENTERNAME IS NOT NULL +GROUP BY WORKCENTERNAME, + CASE NEWSTATUSNAME + WHEN 'PRD' THEN 'PRD' + WHEN 'SBY' THEN 'SBY' + WHEN 'UDT' THEN 'UDT' + WHEN 'SDT' THEN 'SDT' + WHEN 'EGT' THEN 'EGT' + WHEN 'NST' THEN 'NST' + WHEN 'SCRAP' THEN 'SCRAP' + ELSE 'OTHER' + END, + NEWSTATUSNAME +ORDER BY WORKCENTERNAME, STATUS_CATEGORY diff --git a/src/mes_dashboard/sql/resource_history/detail.sql b/src/mes_dashboard/sql/resource_history/detail.sql new file mode 100644 index 0000000..d99554e --- /dev/null +++ b/src/mes_dashboard/sql/resource_history/detail.sql @@ -0,0 +1,27 @@ +-- Detail Query for Resource History +-- Aggregates status hours by resource for detail table and CSV export +-- Placeholders: +-- HISTORYID_FILTER - Resource ID filter condition (e.g., HISTORYID IN (...)) +-- Parameters: +-- :start_date - Start date (YYYY-MM-DD) +-- :end_date - End date (YYYY-MM-DD) + +WITH shift_data AS ( + SELECT /*+ MATERIALIZE */ HISTORYID, OLDSTATUSNAME, HOURS + FROM DWH.DW_MES_RESOURCESTATUS_SHIFT + WHERE TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD') + AND TXNDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1 + AND {{ HISTORYID_FILTER }} +) +SELECT + HISTORYID, + SUM(CASE WHEN OLDSTATUSNAME = 'PRD' THEN HOURS ELSE 0 END) as PRD_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SBY' THEN HOURS ELSE 0 END) as SBY_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'UDT' THEN HOURS ELSE 0 END) as UDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SDT' THEN HOURS ELSE 0 END) as SDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'EGT' THEN HOURS ELSE 0 END) as EGT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'NST' THEN HOURS ELSE 0 END) as NST_HOURS, + SUM(HOURS) as TOTAL_HOURS +FROM shift_data +GROUP BY HISTORYID +ORDER BY HISTORYID diff --git a/src/mes_dashboard/sql/resource_history/heatmap.sql b/src/mes_dashboard/sql/resource_history/heatmap.sql new file mode 100644 index 0000000..c1b2938 --- /dev/null +++ b/src/mes_dashboard/sql/resource_history/heatmap.sql @@ -0,0 +1,27 @@ 
+-- Heatmap Query for Resource History +-- Aggregates status hours by resource and date for heatmap visualization +-- Placeholders: +-- HISTORYID_FILTER - Resource ID filter condition (e.g., HISTORYID IN (...)) +-- DATE_TRUNC - Date truncation expression (e.g., TRUNC(TXNDATE, 'MM')) +-- Parameters: +-- :start_date - Start date (YYYY-MM-DD) +-- :end_date - End date (YYYY-MM-DD) + +WITH shift_data AS ( + SELECT /*+ MATERIALIZE */ HISTORYID, TXNDATE, OLDSTATUSNAME, HOURS + FROM DWH.DW_MES_RESOURCESTATUS_SHIFT + WHERE TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD') + AND TXNDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1 + AND {{ HISTORYID_FILTER }} +) +SELECT + HISTORYID, + {{ DATE_TRUNC }} as DATA_DATE, + SUM(CASE WHEN OLDSTATUSNAME = 'PRD' THEN HOURS ELSE 0 END) as PRD_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SBY' THEN HOURS ELSE 0 END) as SBY_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'UDT' THEN HOURS ELSE 0 END) as UDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SDT' THEN HOURS ELSE 0 END) as SDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'EGT' THEN HOURS ELSE 0 END) as EGT_HOURS +FROM shift_data +GROUP BY HISTORYID, {{ DATE_TRUNC }} +ORDER BY HISTORYID, DATA_DATE diff --git a/src/mes_dashboard/sql/resource_history/kpi.sql b/src/mes_dashboard/sql/resource_history/kpi.sql new file mode 100644 index 0000000..9eb65db --- /dev/null +++ b/src/mes_dashboard/sql/resource_history/kpi.sql @@ -0,0 +1,24 @@ +-- KPI Query for Resource History +-- Aggregates status hours across all filtered resources +-- Placeholders: +-- HISTORYID_FILTER - Resource ID filter condition (e.g., HISTORYID IN (...)) +-- Parameters: +-- :start_date - Start date (YYYY-MM-DD) +-- :end_date - End date (YYYY-MM-DD) + +WITH shift_data AS ( + SELECT /*+ MATERIALIZE */ HISTORYID, TXNDATE, OLDSTATUSNAME, HOURS + FROM DWH.DW_MES_RESOURCESTATUS_SHIFT + WHERE TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD') + AND TXNDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1 + AND {{ HISTORYID_FILTER }} +) +SELECT + SUM(CASE WHEN OLDSTATUSNAME = 
'PRD' THEN HOURS ELSE 0 END) as PRD_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SBY' THEN HOURS ELSE 0 END) as SBY_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'UDT' THEN HOURS ELSE 0 END) as UDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SDT' THEN HOURS ELSE 0 END) as SDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'EGT' THEN HOURS ELSE 0 END) as EGT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'NST' THEN HOURS ELSE 0 END) as NST_HOURS, + COUNT(DISTINCT HISTORYID) as MACHINE_COUNT +FROM shift_data diff --git a/src/mes_dashboard/sql/resource_history/trend.sql b/src/mes_dashboard/sql/resource_history/trend.sql new file mode 100644 index 0000000..6e4f0e3 --- /dev/null +++ b/src/mes_dashboard/sql/resource_history/trend.sql @@ -0,0 +1,28 @@ +-- Trend Query for Resource History +-- Aggregates status hours by date for trend visualization +-- Placeholders: +-- HISTORYID_FILTER - Resource ID filter condition (e.g., HISTORYID IN (...)) +-- DATE_TRUNC - Date truncation expression (e.g., TRUNC(TXNDATE, 'MM')) +-- Parameters: +-- :start_date - Start date (YYYY-MM-DD) +-- :end_date - End date (YYYY-MM-DD) + +WITH shift_data AS ( + SELECT /*+ MATERIALIZE */ HISTORYID, TXNDATE, OLDSTATUSNAME, HOURS + FROM DWH.DW_MES_RESOURCESTATUS_SHIFT + WHERE TXNDATE >= TO_DATE(:start_date, 'YYYY-MM-DD') + AND TXNDATE < TO_DATE(:end_date, 'YYYY-MM-DD') + 1 + AND {{ HISTORYID_FILTER }} +) +SELECT + {{ DATE_TRUNC }} as DATA_DATE, + SUM(CASE WHEN OLDSTATUSNAME = 'PRD' THEN HOURS ELSE 0 END) as PRD_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SBY' THEN HOURS ELSE 0 END) as SBY_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'UDT' THEN HOURS ELSE 0 END) as UDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'SDT' THEN HOURS ELSE 0 END) as SDT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'EGT' THEN HOURS ELSE 0 END) as EGT_HOURS, + SUM(CASE WHEN OLDSTATUSNAME = 'NST' THEN HOURS ELSE 0 END) as NST_HOURS, + COUNT(DISTINCT HISTORYID) as MACHINE_COUNT +FROM shift_data +GROUP BY {{ DATE_TRUNC }} +ORDER BY DATA_DATE diff --git 
a/src/mes_dashboard/sql/wip/detail.sql b/src/mes_dashboard/sql/wip/detail.sql new file mode 100644 index 0000000..dbe7968 --- /dev/null +++ b/src/mes_dashboard/sql/wip/detail.sql @@ -0,0 +1,30 @@ +-- WIP Detail Query +-- Returns paginated lot details for a specific workcenter group +-- +-- Uses ROW_NUMBER() for efficient pagination +-- +-- Parameters: +-- :offset - Starting row offset (0-based) +-- :limit - Number of rows to return +-- +-- Dynamic placeholders: +-- WHERE_CLAUSE - Filter conditions + +SELECT * FROM ( + SELECT + LOTID, + EQUIPMENTS, + STATUS, + HOLDREASONNAME, + QTY, + PACKAGE_LEF, + SPECNAME, + CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) > 0 THEN 'RUN' + WHEN COALESCE(CURRENTHOLDCOUNT, 0) > 0 THEN 'HOLD' + ELSE 'QUEUE' END AS WIP_STATUS, + ROW_NUMBER() OVER (ORDER BY LOTID) as RN + FROM DWH.DW_MES_LOT_V + {{ WHERE_CLAUSE }} +) +WHERE RN > :offset AND RN <= :offset + :limit +ORDER BY RN diff --git a/src/mes_dashboard/sql/wip/matrix.sql b/src/mes_dashboard/sql/wip/matrix.sql new file mode 100644 index 0000000..d4e1fd7 --- /dev/null +++ b/src/mes_dashboard/sql/wip/matrix.sql @@ -0,0 +1,18 @@ +-- WIP Matrix Query +-- Returns workcenter x product line (package) matrix +-- +-- Aggregates QTY by WORKCENTER_GROUP and PACKAGE_LEF +-- Used for the overview dashboard matrix visualization +-- +-- Dynamic placeholders: +-- WHERE_CLAUSE - Filter conditions including status and hold type + +SELECT + WORKCENTER_GROUP, + WORKCENTERSEQUENCE_GROUP, + PACKAGE_LEF, + SUM(QTY) as QTY +FROM DWH.DW_MES_LOT_V +{{ WHERE_CLAUSE }} +GROUP BY WORKCENTER_GROUP, WORKCENTERSEQUENCE_GROUP, PACKAGE_LEF +ORDER BY WORKCENTERSEQUENCE_GROUP, PACKAGE_LEF diff --git a/src/mes_dashboard/sql/wip/summary.sql b/src/mes_dashboard/sql/wip/summary.sql new file mode 100644 index 0000000..d4a3e58 --- /dev/null +++ b/src/mes_dashboard/sql/wip/summary.sql @@ -0,0 +1,48 @@ +-- WIP Summary Query +-- Returns overall WIP KPI statistics +-- +-- WIP Status Logic: +-- RUN: EQUIPMENTCOUNT > 0 +-- HOLD: 
EQUIPMENTCOUNT = 0 AND CURRENTHOLDCOUNT > 0 +-- QUEUE: EQUIPMENTCOUNT = 0 AND CURRENTHOLDCOUNT = 0 +-- +-- Hold Type Logic: +-- Quality Hold: Not in NON_QUALITY_HOLD_REASONS +-- Non-Quality Hold: In NON_QUALITY_HOLD_REASONS +-- +-- Dynamic placeholders: +-- WHERE_CLAUSE - Filter conditions +-- NON_QUALITY_REASONS - List of non-quality hold reason values + +SELECT + COUNT(*) as TOTAL_LOTS, + SUM(QTY) as TOTAL_QTY_PCS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) > 0 THEN 1 ELSE 0 END) as RUN_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) > 0 THEN QTY ELSE 0 END) as RUN_QTY_PCS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 THEN 1 ELSE 0 END) as HOLD_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 THEN QTY ELSE 0 END) as HOLD_QTY_PCS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 + AND (HOLDREASONNAME IS NULL OR HOLDREASONNAME NOT IN ({{ NON_QUALITY_REASONS }})) + THEN 1 ELSE 0 END) as QUALITY_HOLD_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 + AND (HOLDREASONNAME IS NULL OR HOLDREASONNAME NOT IN ({{ NON_QUALITY_REASONS }})) + THEN QTY ELSE 0 END) as QUALITY_HOLD_QTY_PCS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 + AND HOLDREASONNAME IN ({{ NON_QUALITY_REASONS }}) + THEN 1 ELSE 0 END) as NON_QUALITY_HOLD_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) > 0 + AND HOLDREASONNAME IN ({{ NON_QUALITY_REASONS }}) + THEN QTY ELSE 0 END) as NON_QUALITY_HOLD_QTY_PCS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) = 0 THEN 1 ELSE 0 END) as QUEUE_LOTS, + SUM(CASE WHEN COALESCE(EQUIPMENTCOUNT, 0) = 0 + AND COALESCE(CURRENTHOLDCOUNT, 0) = 0 THEN QTY ELSE 0 END) as QUEUE_QTY_PCS, + MAX(SYS_DATE) as DATA_UPDATE_DATE +FROM DWH.DW_MES_LOT_V +{{ WHERE_CLAUSE }} diff --git 
a/src/mes_dashboard/static/js/echarts.min.js b/src/mes_dashboard/static/js/echarts.min.js new file mode 100644 index 0000000..c007c24 --- /dev/null +++ b/src/mes_dashboard/static/js/echarts.min.js @@ -0,0 +1,45 @@ + +/* +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, +* software distributed under the License is distributed on an +* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +* KIND, either express or implied. See the License for the +* specific language governing permissions and limitations +* under the License. +*/ + +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).echarts={})}(this,(function(t){"use strict"; +/*! ***************************************************************************** + Copyright (c) Microsoft Corporation. + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. + ***************************************************************************** */var e=function(t,n){return e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var n in e)Object.prototype.hasOwnProperty.call(e,n)&&(t[n]=e[n])},e(t,n)};function n(t,n){if("function"!=typeof n&&null!==n)throw new TypeError("Class extends value "+String(n)+" is not a constructor or null");function i(){this.constructor=t}e(t,n),t.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}var i=function(){this.firefox=!1,this.ie=!1,this.edge=!1,this.newEdge=!1,this.weChat=!1},r=new function(){this.browser=new i,this.node=!1,this.wxa=!1,this.worker=!1,this.svgSupported=!1,this.touchEventsSupported=!1,this.pointerEventsSupported=!1,this.domSupported=!1,this.transformSupported=!1,this.transform3dSupported=!1,this.hasGlobalWindow="undefined"!=typeof window};"object"==typeof wx&&"function"==typeof wx.getSystemInfoSync?(r.wxa=!0,r.touchEventsSupported=!0):"undefined"==typeof document&&"undefined"!=typeof self?r.worker=!0:"undefined"==typeof navigator?(r.node=!0,r.svgSupported=!0):function(t,e){var n=e.browser,i=t.match(/Firefox\/([\d.]+)/),r=t.match(/MSIE\s([\d.]+)/)||t.match(/Trident\/.+?rv:(([\d.]+))/),o=t.match(/Edge?\/([\d.]+)/),a=/micromessenger/i.test(t);i&&(n.firefox=!0,n.version=i[1]);r&&(n.ie=!0,n.version=r[1]);o&&(n.edge=!0,n.version=o[1],n.newEdge=+o[1].split(".")[0]>18);a&&(n.weChat=!0);e.svgSupported="undefined"!=typeof SVGRect,e.touchEventsSupported="ontouchstart"in window&&!n.ie&&!n.edge,e.pointerEventsSupported="onpointerdown"in 
window&&(n.edge||n.ie&&+n.version>=11),e.domSupported="undefined"!=typeof document;var s=document.documentElement.style;e.transform3dSupported=(n.ie&&"transition"in s||n.edge||"WebKitCSSMatrix"in window&&"m11"in new WebKitCSSMatrix||"MozPerspective"in s)&&!("OTransition"in s),e.transformSupported=e.transform3dSupported||n.ie&&+n.version>=9}(navigator.userAgent,r);var o="sans-serif",a="12px "+o;var s,l,u=function(t){var e={};if("undefined"==typeof JSON)return e;for(var n=0;n=0)o=r*t.length;else for(var c=0;c>1)%2;a.style.cssText=["position: absolute","visibility: hidden","padding: 0","margin: 0","border-width: 0","user-select: none","width:0","height:0",i[s]+":0",r[l]+":0",i[1-s]+":auto",r[1-l]+":auto",""].join("!important;"),t.appendChild(a),n.push(a)}return n}(e,a),l=function(t,e,n){for(var i=n?"invTrans":"trans",r=e[i],o=e.srcCoords,a=[],s=[],l=!0,u=0;u<4;u++){var h=t[u].getBoundingClientRect(),c=2*u,p=h.left,d=h.top;a.push(p,d),l=l&&o&&p===o[c]&&d===o[c+1],s.push(t[u].offsetLeft,t[u].offsetTop)}return l&&r?r:(e.srcCoords=a,e[i]=n?$t(s,a):$t(a,s))}(s,a,o);if(l)return l(t,n,i),!0}return!1}function ee(t){return"CANVAS"===t.nodeName.toUpperCase()}var ne=/([&<>"'])/g,ie={"&":"&","<":"<",">":">",'"':""","'":"'"};function re(t){return null==t?"":(t+"").replace(ne,(function(t,e){return ie[e]}))}var oe=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,ae=[],se=r.browser.firefox&&+r.browser.version.split(".")[0]<39;function le(t,e,n,i){return n=n||{},i?ue(t,e,n):se&&null!=e.layerX&&e.layerX!==e.offsetX?(n.zrX=e.layerX,n.zrY=e.layerY):null!=e.offsetX?(n.zrX=e.offsetX,n.zrY=e.offsetY):ue(t,e,n),n}function ue(t,e,n){if(r.domSupported&&t.getBoundingClientRect){var i=e.clientX,o=e.clientY;if(ee(t)){var a=t.getBoundingClientRect();return n.zrX=i-a.left,void(n.zrY=o-a.top)}if(te(ae,t,i,o))return n.zrX=ae[0],void(n.zrY=ae[1])}n.zrX=n.zrY=0}function he(t){return t||window.event}function ce(t,e,n){if(null!=(e=he(e)).zrX)return e;var i=e.type;if(i&&i.indexOf("touch")>=0){var 
r="touchend"!==i?e.targetTouches[0]:e.changedTouches[0];r&&le(t,r,e,n)}else{le(t,e,e,n);var o=function(t){var e=t.wheelDelta;if(e)return e;var n=t.deltaX,i=t.deltaY;if(null==n||null==i)return e;return 3*(0!==i?Math.abs(i):Math.abs(n))*(i>0?-1:i<0?1:n>0?-1:1)}(e);e.zrDelta=o?o/120:-(e.detail||0)/3}var a=e.button;return null==e.which&&void 0!==a&&oe.test(e.type)&&(e.which=1&a?1:2&a?3:4&a?2:0),e}function pe(t,e,n,i){t.addEventListener(e,n,i)}var de=function(t){t.preventDefault(),t.stopPropagation(),t.cancelBubble=!0};function fe(t){return 2===t.which||3===t.which}var ge=function(){function t(){this._track=[]}return t.prototype.recognize=function(t,e,n){return this._doTrack(t,e,n),this._recognize(t)},t.prototype.clear=function(){return this._track.length=0,this},t.prototype._doTrack=function(t,e,n){var i=t.touches;if(i){for(var r={points:[],touches:[],target:e,event:t},o=0,a=i.length;o1&&r&&r.length>1){var a=ye(r)/ye(o);!isFinite(a)&&(a=1),e.pinchScale=a;var s=[((i=r)[0][0]+i[1][0])/2,(i[0][1]+i[1][1])/2];return e.pinchX=s[0],e.pinchY=s[1],{type:"pinch",target:t[0].target,event:e}}}}};function me(){return[1,0,0,1,0,0]}function xe(t){return t[0]=1,t[1]=0,t[2]=0,t[3]=1,t[4]=0,t[5]=0,t}function _e(t,e){return t[0]=e[0],t[1]=e[1],t[2]=e[2],t[3]=e[3],t[4]=e[4],t[5]=e[5],t}function be(t,e,n){var i=e[0]*n[0]+e[2]*n[1],r=e[1]*n[0]+e[3]*n[1],o=e[0]*n[2]+e[2]*n[3],a=e[1]*n[2]+e[3]*n[3],s=e[0]*n[4]+e[2]*n[5]+e[4],l=e[1]*n[4]+e[3]*n[5]+e[5];return t[0]=i,t[1]=r,t[2]=o,t[3]=a,t[4]=s,t[5]=l,t}function we(t,e,n){return t[0]=e[0],t[1]=e[1],t[2]=e[2],t[3]=e[3],t[4]=e[4]+n[0],t[5]=e[5]+n[1],t}function Se(t,e,n){var i=e[0],r=e[2],o=e[4],a=e[1],s=e[3],l=e[5],u=Math.sin(n),h=Math.cos(n);return t[0]=i*h+a*u,t[1]=-i*u+a*h,t[2]=r*h+s*u,t[3]=-r*u+h*s,t[4]=h*o+u*l,t[5]=h*l-u*o,t}function Me(t,e,n){var i=n[0],r=n[1];return t[0]=e[0]*i,t[1]=e[1]*r,t[2]=e[2]*i,t[3]=e[3]*r,t[4]=e[4]*i,t[5]=e[5]*r,t}function Ie(t,e){var n=e[0],i=e[2],r=e[4],o=e[1],a=e[3],s=e[5],l=n*a-o*i;return 
l?(l=1/l,t[0]=a*l,t[1]=-o*l,t[2]=-i*l,t[3]=n*l,t[4]=(i*s-a*r)*l,t[5]=(o*r-n*s)*l,t):null}function Te(t){var e=[1,0,0,1,0,0];return _e(e,t),e}var Ce=Object.freeze({__proto__:null,create:me,identity:xe,copy:_e,mul:be,translate:we,rotate:Se,scale:Me,invert:Ie,clone:Te}),De=function(){function t(t,e){this.x=t||0,this.y=e||0}return t.prototype.copy=function(t){return this.x=t.x,this.y=t.y,this},t.prototype.clone=function(){return new t(this.x,this.y)},t.prototype.set=function(t,e){return this.x=t,this.y=e,this},t.prototype.equal=function(t){return t.x===this.x&&t.y===this.y},t.prototype.add=function(t){return this.x+=t.x,this.y+=t.y,this},t.prototype.scale=function(t){this.x*=t,this.y*=t},t.prototype.scaleAndAdd=function(t,e){this.x+=t.x*e,this.y+=t.y*e},t.prototype.sub=function(t){return this.x-=t.x,this.y-=t.y,this},t.prototype.dot=function(t){return this.x*t.x+this.y*t.y},t.prototype.len=function(){return Math.sqrt(this.x*this.x+this.y*this.y)},t.prototype.lenSquare=function(){return this.x*this.x+this.y*this.y},t.prototype.normalize=function(){var t=this.len();return this.x/=t,this.y/=t,this},t.prototype.distance=function(t){var e=this.x-t.x,n=this.y-t.y;return Math.sqrt(e*e+n*n)},t.prototype.distanceSquare=function(t){var e=this.x-t.x,n=this.y-t.y;return e*e+n*n},t.prototype.negate=function(){return this.x=-this.x,this.y=-this.y,this},t.prototype.transform=function(t){if(t){var e=this.x,n=this.y;return this.x=t[0]*e+t[2]*n+t[4],this.y=t[1]*e+t[3]*n+t[5],this}},t.prototype.toArray=function(t){return t[0]=this.x,t[1]=this.y,t},t.prototype.fromArray=function(t){this.x=t[0],this.y=t[1]},t.set=function(t,e,n){t.x=e,t.y=n},t.copy=function(t,e){t.x=e.x,t.y=e.y},t.len=function(t){return Math.sqrt(t.x*t.x+t.y*t.y)},t.lenSquare=function(t){return t.x*t.x+t.y*t.y},t.dot=function(t,e){return 
t.x*e.x+t.y*e.y},t.add=function(t,e,n){t.x=e.x+n.x,t.y=e.y+n.y},t.sub=function(t,e,n){t.x=e.x-n.x,t.y=e.y-n.y},t.scale=function(t,e,n){t.x=e.x*n,t.y=e.y*n},t.scaleAndAdd=function(t,e,n,i){t.x=e.x+n.x*i,t.y=e.y+n.y*i},t.lerp=function(t,e,n,i){var r=1-i;t.x=r*e.x+i*n.x,t.y=r*e.y+i*n.y},t}(),Ae=Math.min,ke=Math.max,Le=new De,Pe=new De,Oe=new De,Re=new De,Ne=new De,Ee=new De,ze=function(){function t(t,e,n,i){n<0&&(t+=n,n=-n),i<0&&(e+=i,i=-i),this.x=t,this.y=e,this.width=n,this.height=i}return t.prototype.union=function(t){var e=Ae(t.x,this.x),n=Ae(t.y,this.y);isFinite(this.x)&&isFinite(this.width)?this.width=ke(t.x+t.width,this.x+this.width)-e:this.width=t.width,isFinite(this.y)&&isFinite(this.height)?this.height=ke(t.y+t.height,this.y+this.height)-n:this.height=t.height,this.x=e,this.y=n},t.prototype.applyTransform=function(e){t.applyTransform(this,this,e)},t.prototype.calculateTransform=function(t){var e=this,n=t.width/e.width,i=t.height/e.height,r=[1,0,0,1,0,0];return we(r,r,[-e.x,-e.y]),Me(r,r,[n,i]),we(r,r,[t.x,t.y]),r},t.prototype.intersect=function(e,n){if(!e)return!1;e instanceof t||(e=t.create(e));var i=this,r=i.x,o=i.x+i.width,a=i.y,s=i.y+i.height,l=e.x,u=e.x+e.width,h=e.y,c=e.y+e.height,p=!(of&&(f=x,gf&&(f=_,v=n.x&&t<=n.x+n.width&&e>=n.y&&e<=n.y+n.height},t.prototype.clone=function(){return new t(this.x,this.y,this.width,this.height)},t.prototype.copy=function(e){t.copy(this,e)},t.prototype.plain=function(){return{x:this.x,y:this.y,width:this.width,height:this.height}},t.prototype.isFinite=function(){return isFinite(this.x)&&isFinite(this.y)&&isFinite(this.width)&&isFinite(this.height)},t.prototype.isZero=function(){return 0===this.width||0===this.height},t.create=function(e){return new t(e.x,e.y,e.width,e.height)},t.copy=function(t,e){t.x=e.x,t.y=e.y,t.width=e.width,t.height=e.height},t.applyTransform=function(e,n,i){if(i){if(i[1]<1e-5&&i[1]>-1e-5&&i[2]<1e-5&&i[2]>-1e-5){var r=i[0],o=i[3],a=i[4],s=i[5];return 
e.x=n.x*r+a,e.y=n.y*o+s,e.width=n.width*r,e.height=n.height*o,e.width<0&&(e.x+=e.width,e.width=-e.width),void(e.height<0&&(e.y+=e.height,e.height=-e.height))}Le.x=Oe.x=n.x,Le.y=Re.y=n.y,Pe.x=Re.x=n.x+n.width,Pe.y=Oe.y=n.y+n.height,Le.transform(i),Re.transform(i),Pe.transform(i),Oe.transform(i),e.x=Ae(Le.x,Pe.x,Oe.x,Re.x),e.y=Ae(Le.y,Pe.y,Oe.y,Re.y);var l=ke(Le.x,Pe.x,Oe.x,Re.x),u=ke(Le.y,Pe.y,Oe.y,Re.y);e.width=l-e.x,e.height=u-e.y}else e!==n&&t.copy(e,n)},t}(),Ve="silent";function Be(){de(this.event)}var Fe=function(t){function e(){var e=null!==t&&t.apply(this,arguments)||this;return e.handler=null,e}return n(e,t),e.prototype.dispose=function(){},e.prototype.setCursor=function(){},e}(jt),Ge=function(t,e){this.x=t,this.y=e},We=["click","dblclick","mousewheel","mouseout","mouseup","mousedown","mousemove","contextmenu"],He=new ze(0,0,0,0),Ye=function(t){function e(e,n,i,r,o){var a=t.call(this)||this;return a._hovered=new Ge(0,0),a.storage=e,a.painter=n,a.painterRoot=r,a._pointerSize=o,i=i||new Fe,a.proxy=null,a.setHandlerProxy(i),a._draggingMgr=new Zt(a),a}return n(e,t),e.prototype.setHandlerProxy=function(t){this.proxy&&this.proxy.dispose(),t&&(E(We,(function(e){t.on&&t.on(e,this[e],this)}),this),t.handler=this),this.proxy=t},e.prototype.mousemove=function(t){var e=t.zrX,n=t.zrY,i=Ze(this,e,n),r=this._hovered,o=r.target;o&&!o.__zr&&(o=(r=this.findHover(r.x,r.y)).target);var a=this._hovered=i?new Ge(e,n):this.findHover(e,n),s=a.target,l=this.proxy;l.setCursor&&l.setCursor(s?s.cursor:"default"),o&&s!==o&&this.dispatchToElement(r,"mouseout",t),this.dispatchToElement(a,"mousemove",t),s&&s!==o&&this.dispatchToElement(a,"mouseover",t)},e.prototype.mouseout=function(t){var e=t.zrEventControl;"only_globalout"!==e&&this.dispatchToElement(this._hovered,"mouseout",t),"no_globalout"!==e&&this.trigger("globalout",{type:"globalout",event:t})},e.prototype.resize=function(){this._hovered=new Ge(0,0)},e.prototype.dispatch=function(t,e){var 
n=this[t];n&&n.call(this,e)},e.prototype.dispose=function(){this.proxy.dispose(),this.storage=null,this.proxy=null,this.painter=null},e.prototype.setCursorStyle=function(t){var e=this.proxy;e.setCursor&&e.setCursor(t)},e.prototype.dispatchToElement=function(t,e,n){var i=(t=t||{}).target;if(!i||!i.silent){for(var r="on"+e,o=function(t,e,n){return{type:t,event:n,target:e.target,topTarget:e.topTarget,cancelBubble:!1,offsetX:n.zrX,offsetY:n.zrY,gestureEvent:n.gestureEvent,pinchX:n.pinchX,pinchY:n.pinchY,pinchScale:n.pinchScale,wheelDelta:n.zrDelta,zrByTouch:n.zrByTouch,which:n.which,stop:Be}}(e,t,n);i&&(i[r]&&(o.cancelBubble=!!i[r].call(i,o)),i.trigger(e,o),i=i.__hostTarget?i.__hostTarget:i.parent,!o.cancelBubble););o.cancelBubble||(this.trigger(e,o),this.painter&&this.painter.eachOtherLayer&&this.painter.eachOtherLayer((function(t){"function"==typeof t[r]&&t[r].call(t,o),t.trigger&&t.trigger(e,o)})))}},e.prototype.findHover=function(t,e,n){var i=this.storage.getDisplayList(),r=new Ge(t,e);if(Ue(i,r,t,e,n),this._pointerSize&&!r.target){for(var o=[],a=this._pointerSize,s=a/2,l=new ze(t-s,e-s,a,a),u=i.length-1;u>=0;u--){var h=i[u];h===n||h.ignore||h.ignoreCoarsePointer||h.parent&&h.parent.ignoreCoarsePointer||(He.copy(h.getBoundingRect()),h.transform&&He.applyTransform(h.transform),He.intersect(l)&&o.push(h))}if(o.length)for(var c=Math.PI/12,p=2*Math.PI,d=0;d=0;o--){var a=t[o],s=void 0;if(a!==r&&!a.ignore&&(s=Xe(a,n,i))&&(!e.topTarget&&(e.topTarget=a),s!==Ve)){e.target=a;break}}}function Ze(t,e,n){var i=t.painter;return e<0||e>i.getWidth()||n<0||n>i.getHeight()}E(["click","mousedown","mouseup","mousewheel","dblclick","contextmenu"],(function(t){Ye.prototype[t]=function(e){var n,i,r=e.zrX,o=e.zrY,a=Ze(this,r,o);if("mouseup"===t&&a||(i=(n=this.findHover(r,o)).target),"mousedown"===t)this._downEl=i,this._downPoint=[e.zrX,e.zrY],this._upEl=i;else if("mouseup"===t)this._upEl=i;else 
if("click"===t){if(this._downEl!==this._upEl||!this._downPoint||Vt(this._downPoint,[e.zrX,e.zrY])>4)return;this._downPoint=null}this.dispatchToElement(n,t,e)}}));function je(t,e,n,i){var r=e+1;if(r===n)return 1;if(i(t[r++],t[e])<0){for(;r=0;)r++;return r-e}function qe(t,e,n,i,r){for(i===e&&i++;i>>1])<0?l=o:s=o+1;var u=i-s;switch(u){case 3:t[s+3]=t[s+2];case 2:t[s+2]=t[s+1];case 1:t[s+1]=t[s];break;default:for(;u>0;)t[s+u]=t[s+u-1],u--}t[s]=a}}function Ke(t,e,n,i,r,o){var a=0,s=0,l=1;if(o(t,e[n+r])>0){for(s=i-r;l0;)a=l,(l=1+(l<<1))<=0&&(l=s);l>s&&(l=s),a+=r,l+=r}else{for(s=r+1;ls&&(l=s);var u=a;a=r-l,l=r-u}for(a++;a>>1);o(t,e[n+h])>0?a=h+1:l=h}return l}function $e(t,e,n,i,r,o){var a=0,s=0,l=1;if(o(t,e[n+r])<0){for(s=r+1;ls&&(l=s);var u=a;a=r-l,l=r-u}else{for(s=i-r;l=0;)a=l,(l=1+(l<<1))<=0&&(l=s);l>s&&(l=s),a+=r,l+=r}for(a++;a>>1);o(t,e[n+h])<0?l=h:a=h+1}return l}function Je(t,e){var n,i,r=7,o=0;t.length;var a=[];function s(s){var l=n[s],u=i[s],h=n[s+1],c=i[s+1];i[s]=u+c,s===o-3&&(n[s+1]=n[s+2],i[s+1]=i[s+2]),o--;var p=$e(t[h],t,l,u,0,e);l+=p,0!==(u-=p)&&0!==(c=Ke(t[l+u-1],t,h,c,c-1,e))&&(u<=c?function(n,i,o,s){var l=0;for(l=0;l=7||d>=7);if(f)break;g<0&&(g=0),g+=2}if((r=g)<1&&(r=1),1===i){for(l=0;l=0;l--)t[d+l]=t[p+l];return void(t[c]=a[h])}var f=r;for(;;){var g=0,y=0,v=!1;do{if(e(a[h],t[u])<0){if(t[c--]=t[u--],g++,y=0,0==--i){v=!0;break}}else if(t[c--]=a[h--],y++,g=0,1==--s){v=!0;break}}while((g|y)=0;l--)t[d+l]=t[p+l];if(0===i){v=!0;break}}if(t[c--]=a[h--],1==--s){v=!0;break}if(0!==(y=s-Ke(t[u],a,0,s,s-1,e))){for(s-=y,d=(c-=y)+1,p=(h-=y)+1,l=0;l=7||y>=7);if(v)break;f<0&&(f=0),f+=2}(r=f)<1&&(r=1);if(1===s){for(d=(c-=i)+1,p=(u-=i)+1,l=i-1;l>=0;l--)t[d+l]=t[p+l];t[c]=a[h]}else{if(0===s)throw new Error;for(p=c-(s-1),l=0;l1;){var t=o-2;if(t>=1&&i[t-1]<=i[t]+i[t+1]||t>=2&&i[t-2]<=i[t]+i[t-1])i[t-1]i[t+1])break;s(t)}},forceMergeRuns:function(){for(;o>1;){var t=o-2;t>0&&i[t-1]=32;)e|=1&t,t>>=1;return 
t+e}(r);do{if((o=je(t,n,i,e))s&&(l=s),qe(t,n,n+l,n+o,e),o=l}a.pushRun(n,o),a.mergeRuns(),r-=o,n+=o}while(0!==r);a.forceMergeRuns()}}}var tn=!1;function en(){tn||(tn=!0,console.warn("z / z2 / zlevel of displayable is invalid, which may cause unexpected errors"))}function nn(t,e){return t.zlevel===e.zlevel?t.z===e.z?t.z2-e.z2:t.z-e.z:t.zlevel-e.zlevel}var rn=function(){function t(){this._roots=[],this._displayList=[],this._displayListLen=0,this.displayableSortFunc=nn}return t.prototype.traverse=function(t,e){for(var n=0;n0&&(u.__clipPaths=[]),isNaN(u.z)&&(en(),u.z=0),isNaN(u.z2)&&(en(),u.z2=0),isNaN(u.zlevel)&&(en(),u.zlevel=0),this._displayList[this._displayListLen++]=u}var h=t.getDecalElement&&t.getDecalElement();h&&this._updateAndAddDisplayable(h,e,n);var c=t.getTextGuideLine();c&&this._updateAndAddDisplayable(c,e,n);var p=t.getTextContent();p&&this._updateAndAddDisplayable(p,e,n)}},t.prototype.addRoot=function(t){t.__zr&&t.__zr.storage===this||this._roots.push(t)},t.prototype.delRoot=function(t){if(t instanceof Array)for(var e=0,n=t.length;e=0&&this._roots.splice(i,1)}},t.prototype.delAllRoots=function(){this._roots=[],this._displayList=[],this._displayListLen=0},t.prototype.getRoots=function(){return this._roots},t.prototype.dispose=function(){this._displayList=null,this._roots=null},t}(),on=r.hasGlobalWindow&&(window.requestAnimationFrame&&window.requestAnimationFrame.bind(window)||window.msRequestAnimationFrame&&window.msRequestAnimationFrame.bind(window)||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame)||function(t){return setTimeout(t,16)},an={linear:function(t){return t},quadraticIn:function(t){return t*t},quadraticOut:function(t){return t*(2-t)},quadraticInOut:function(t){return(t*=2)<1?.5*t*t:-.5*(--t*(t-2)-1)},cubicIn:function(t){return t*t*t},cubicOut:function(t){return--t*t*t+1},cubicInOut:function(t){return(t*=2)<1?.5*t*t*t:.5*((t-=2)*t*t+2)},quarticIn:function(t){return t*t*t*t},quarticOut:function(t){return 1- 
--t*t*t*t},quarticInOut:function(t){return(t*=2)<1?.5*t*t*t*t:-.5*((t-=2)*t*t*t-2)},quinticIn:function(t){return t*t*t*t*t},quinticOut:function(t){return--t*t*t*t*t+1},quinticInOut:function(t){return(t*=2)<1?.5*t*t*t*t*t:.5*((t-=2)*t*t*t*t+2)},sinusoidalIn:function(t){return 1-Math.cos(t*Math.PI/2)},sinusoidalOut:function(t){return Math.sin(t*Math.PI/2)},sinusoidalInOut:function(t){return.5*(1-Math.cos(Math.PI*t))},exponentialIn:function(t){return 0===t?0:Math.pow(1024,t-1)},exponentialOut:function(t){return 1===t?1:1-Math.pow(2,-10*t)},exponentialInOut:function(t){return 0===t?0:1===t?1:(t*=2)<1?.5*Math.pow(1024,t-1):.5*(2-Math.pow(2,-10*(t-1)))},circularIn:function(t){return 1-Math.sqrt(1-t*t)},circularOut:function(t){return Math.sqrt(1- --t*t)},circularInOut:function(t){return(t*=2)<1?-.5*(Math.sqrt(1-t*t)-1):.5*(Math.sqrt(1-(t-=2)*t)+1)},elasticIn:function(t){var e,n=.1;return 0===t?0:1===t?1:(!n||n<1?(n=1,e=.1):e=.4*Math.asin(1/n)/(2*Math.PI),-n*Math.pow(2,10*(t-=1))*Math.sin((t-e)*(2*Math.PI)/.4))},elasticOut:function(t){var e,n=.1;return 0===t?0:1===t?1:(!n||n<1?(n=1,e=.1):e=.4*Math.asin(1/n)/(2*Math.PI),n*Math.pow(2,-10*t)*Math.sin((t-e)*(2*Math.PI)/.4)+1)},elasticInOut:function(t){var e,n=.1,i=.4;return 0===t?0:1===t?1:(!n||n<1?(n=1,e=.1):e=i*Math.asin(1/n)/(2*Math.PI),(t*=2)<1?n*Math.pow(2,10*(t-=1))*Math.sin((t-e)*(2*Math.PI)/i)*-.5:n*Math.pow(2,-10*(t-=1))*Math.sin((t-e)*(2*Math.PI)/i)*.5+1)},backIn:function(t){var e=1.70158;return t*t*((e+1)*t-e)},backOut:function(t){var e=1.70158;return--t*t*((e+1)*t+e)+1},backInOut:function(t){var e=2.5949095;return(t*=2)<1?t*t*((e+1)*t-e)*.5:.5*((t-=2)*t*((e+1)*t+e)+2)},bounceIn:function(t){return 1-an.bounceOut(1-t)},bounceOut:function(t){return t<1/2.75?7.5625*t*t:t<2/2.75?7.5625*(t-=1.5/2.75)*t+.75:t<2.5/2.75?7.5625*(t-=2.25/2.75)*t+.9375:7.5625*(t-=2.625/2.75)*t+.984375},bounceInOut:function(t){return 
t<.5?.5*an.bounceIn(2*t):.5*an.bounceOut(2*t-1)+.5}},sn=Math.pow,ln=Math.sqrt,un=1e-8,hn=1e-4,cn=ln(3),pn=1/3,dn=Mt(),fn=Mt(),gn=Mt();function yn(t){return t>-1e-8&&tun||t<-1e-8}function mn(t,e,n,i,r){var o=1-r;return o*o*(o*t+3*r*e)+r*r*(r*i+3*o*n)}function xn(t,e,n,i,r){var o=1-r;return 3*(((e-t)*o+2*(n-e)*r)*o+(i-n)*r*r)}function _n(t,e,n,i,r,o){var a=i+3*(e-n)-t,s=3*(n-2*e+t),l=3*(e-t),u=t-r,h=s*s-3*a*l,c=s*l-9*a*u,p=l*l-3*s*u,d=0;if(yn(h)&&yn(c)){if(yn(s))o[0]=0;else(M=-l/s)>=0&&M<=1&&(o[d++]=M)}else{var f=c*c-4*h*p;if(yn(f)){var g=c/h,y=-g/2;(M=-s/a+g)>=0&&M<=1&&(o[d++]=M),y>=0&&y<=1&&(o[d++]=y)}else if(f>0){var v=ln(f),m=h*s+1.5*a*(-c+v),x=h*s+1.5*a*(-c-v);(M=(-s-((m=m<0?-sn(-m,pn):sn(m,pn))+(x=x<0?-sn(-x,pn):sn(x,pn))))/(3*a))>=0&&M<=1&&(o[d++]=M)}else{var _=(2*h*s-3*a*c)/(2*ln(h*h*h)),b=Math.acos(_)/3,w=ln(h),S=Math.cos(b),M=(-s-2*w*S)/(3*a),I=(y=(-s+w*(S+cn*Math.sin(b)))/(3*a),(-s+w*(S-cn*Math.sin(b)))/(3*a));M>=0&&M<=1&&(o[d++]=M),y>=0&&y<=1&&(o[d++]=y),I>=0&&I<=1&&(o[d++]=I)}}return d}function bn(t,e,n,i,r){var o=6*n-12*e+6*t,a=9*e+3*i-3*t-9*n,s=3*e-3*t,l=0;if(yn(a)){if(vn(o))(h=-s/o)>=0&&h<=1&&(r[l++]=h)}else{var u=o*o-4*a*s;if(yn(u))r[0]=-o/(2*a);else if(u>0){var h,c=ln(u),p=(-o-c)/(2*a);(h=(-o+c)/(2*a))>=0&&h<=1&&(r[l++]=h),p>=0&&p<=1&&(r[l++]=p)}}return l}function wn(t,e,n,i,r,o){var a=(e-t)*r+t,s=(n-e)*r+e,l=(i-n)*r+n,u=(s-a)*r+a,h=(l-s)*r+s,c=(h-u)*r+u;o[0]=t,o[1]=a,o[2]=u,o[3]=c,o[4]=c,o[5]=h,o[6]=l,o[7]=i}function Sn(t,e,n,i,r,o,a,s,l,u,h){var c,p,d,f,g,y=.005,v=1/0;dn[0]=l,dn[1]=u;for(var m=0;m<1;m+=.05)fn[0]=mn(t,n,r,a,m),fn[1]=mn(e,i,o,s,m),(f=Ft(dn,fn))=0&&f=0&&y=1?1:_n(0,i,o,1,t,s)&&mn(0,r,a,1,s[0])}}}var On=function(){function t(t){this._inited=!1,this._startTime=0,this._pausedTime=0,this._paused=!1,this._life=t.life||1e3,this._delay=t.delay||0,this.loop=t.loop||!1,this.onframe=t.onframe||bt,this.ondestroy=t.ondestroy||bt,this.onrestart=t.onrestart||bt,t.easing&&this.setEasing(t.easing)}return 
t.prototype.step=function(t,e){if(this._inited||(this._startTime=t+this._delay,this._inited=!0),!this._paused){var n=this._life,i=t-this._startTime-this._pausedTime,r=i/n;r<0&&(r=0),r=Math.min(r,1);var o=this.easingFunc,a=o?o(r):r;if(this.onframe(a),1===r){if(!this.loop)return!0;var s=i%n;this._startTime=t-s,this._pausedTime=0,this.onrestart()}return!1}this._pausedTime+=e},t.prototype.pause=function(){this._paused=!0},t.prototype.resume=function(){this._paused=!1},t.prototype.setEasing=function(t){this.easing=t,this.easingFunc=X(t)?t:an[t]||Pn(t)},t}(),Rn=function(t){this.value=t},Nn=function(){function t(){this._len=0}return t.prototype.insert=function(t){var e=new Rn(t);return this.insertEntry(e),e},t.prototype.insertEntry=function(t){this.head?(this.tail.next=t,t.prev=this.tail,t.next=null,this.tail=t):this.head=this.tail=t,this._len++},t.prototype.remove=function(t){var e=t.prev,n=t.next;e?e.next=n:this.head=n,n?n.prev=e:this.tail=e,t.next=t.prev=null,this._len--},t.prototype.len=function(){return this._len},t.prototype.clear=function(){this.head=this.tail=null,this._len=0},t}(),En=function(){function t(t){this._list=new Nn,this._maxSize=10,this._map={},this._maxSize=t}return t.prototype.put=function(t,e){var n=this._list,i=this._map,r=null;if(null==i[t]){var o=n.len(),a=this._lastRemovedEntry;if(o>=this._maxSize&&o>0){var s=n.head;n.remove(s),delete i[s.key],r=s.value,this._lastRemovedEntry=s}a?a.value=e:a=new Rn(e),a.key=t,n.insertEntry(a),i[t]=a}return r},t.prototype.get=function(t){var e=this._map[t],n=this._list;if(null!=e)return e!==n.tail&&(n.remove(e),n.insertEntry(e)),e.value},t.prototype.clear=function(){this._list.clear(),this._map={}},t.prototype.len=function(){return 
this._list.len()},t}(),zn={transparent:[0,0,0,0],aliceblue:[240,248,255,1],antiquewhite:[250,235,215,1],aqua:[0,255,255,1],aquamarine:[127,255,212,1],azure:[240,255,255,1],beige:[245,245,220,1],bisque:[255,228,196,1],black:[0,0,0,1],blanchedalmond:[255,235,205,1],blue:[0,0,255,1],blueviolet:[138,43,226,1],brown:[165,42,42,1],burlywood:[222,184,135,1],cadetblue:[95,158,160,1],chartreuse:[127,255,0,1],chocolate:[210,105,30,1],coral:[255,127,80,1],cornflowerblue:[100,149,237,1],cornsilk:[255,248,220,1],crimson:[220,20,60,1],cyan:[0,255,255,1],darkblue:[0,0,139,1],darkcyan:[0,139,139,1],darkgoldenrod:[184,134,11,1],darkgray:[169,169,169,1],darkgreen:[0,100,0,1],darkgrey:[169,169,169,1],darkkhaki:[189,183,107,1],darkmagenta:[139,0,139,1],darkolivegreen:[85,107,47,1],darkorange:[255,140,0,1],darkorchid:[153,50,204,1],darkred:[139,0,0,1],darksalmon:[233,150,122,1],darkseagreen:[143,188,143,1],darkslateblue:[72,61,139,1],darkslategray:[47,79,79,1],darkslategrey:[47,79,79,1],darkturquoise:[0,206,209,1],darkviolet:[148,0,211,1],deeppink:[255,20,147,1],deepskyblue:[0,191,255,1],dimgray:[105,105,105,1],dimgrey:[105,105,105,1],dodgerblue:[30,144,255,1],firebrick:[178,34,34,1],floralwhite:[255,250,240,1],forestgreen:[34,139,34,1],fuchsia:[255,0,255,1],gainsboro:[220,220,220,1],ghostwhite:[248,248,255,1],gold:[255,215,0,1],goldenrod:[218,165,32,1],gray:[128,128,128,1],green:[0,128,0,1],greenyellow:[173,255,47,1],grey:[128,128,128,1],honeydew:[240,255,240,1],hotpink:[255,105,180,1],indianred:[205,92,92,1],indigo:[75,0,130,1],ivory:[255,255,240,1],khaki:[240,230,140,1],lavender:[230,230,250,1],lavenderblush:[255,240,245,1],lawngreen:[124,252,0,1],lemonchiffon:[255,250,205,1],lightblue:[173,216,230,1],lightcoral:[240,128,128,1],lightcyan:[224,255,255,1],lightgoldenrodyellow:[250,250,210,1],lightgray:[211,211,211,1],lightgreen:[144,238,144,1],lightgrey:[211,211,211,1],lightpink:[255,182,193,1],lightsalmon:[255,160,122,1],lightseagreen:[32,178,170,1],lightskyblue:[135,206,250,1],lights
lategray:[119,136,153,1],lightslategrey:[119,136,153,1],lightsteelblue:[176,196,222,1],lightyellow:[255,255,224,1],lime:[0,255,0,1],limegreen:[50,205,50,1],linen:[250,240,230,1],magenta:[255,0,255,1],maroon:[128,0,0,1],mediumaquamarine:[102,205,170,1],mediumblue:[0,0,205,1],mediumorchid:[186,85,211,1],mediumpurple:[147,112,219,1],mediumseagreen:[60,179,113,1],mediumslateblue:[123,104,238,1],mediumspringgreen:[0,250,154,1],mediumturquoise:[72,209,204,1],mediumvioletred:[199,21,133,1],midnightblue:[25,25,112,1],mintcream:[245,255,250,1],mistyrose:[255,228,225,1],moccasin:[255,228,181,1],navajowhite:[255,222,173,1],navy:[0,0,128,1],oldlace:[253,245,230,1],olive:[128,128,0,1],olivedrab:[107,142,35,1],orange:[255,165,0,1],orangered:[255,69,0,1],orchid:[218,112,214,1],palegoldenrod:[238,232,170,1],palegreen:[152,251,152,1],paleturquoise:[175,238,238,1],palevioletred:[219,112,147,1],papayawhip:[255,239,213,1],peachpuff:[255,218,185,1],peru:[205,133,63,1],pink:[255,192,203,1],plum:[221,160,221,1],powderblue:[176,224,230,1],purple:[128,0,128,1],red:[255,0,0,1],rosybrown:[188,143,143,1],royalblue:[65,105,225,1],saddlebrown:[139,69,19,1],salmon:[250,128,114,1],sandybrown:[244,164,96,1],seagreen:[46,139,87,1],seashell:[255,245,238,1],sienna:[160,82,45,1],silver:[192,192,192,1],skyblue:[135,206,235,1],slateblue:[106,90,205,1],slategray:[112,128,144,1],slategrey:[112,128,144,1],snow:[255,250,250,1],springgreen:[0,255,127,1],steelblue:[70,130,180,1],tan:[210,180,140,1],teal:[0,128,128,1],thistle:[216,191,216,1],tomato:[255,99,71,1],turquoise:[64,224,208,1],violet:[238,130,238,1],wheat:[245,222,179,1],white:[255,255,255,1],whitesmoke:[245,245,245,1],yellow:[255,255,0,1],yellowgreen:[154,205,50,1]};function Vn(t){return(t=Math.round(t))<0?0:t>255?255:t}function Bn(t){return t<0?0:t>1?1:t}function Fn(t){var e=t;return e.length&&"%"===e.charAt(e.length-1)?Vn(parseFloat(e)/100*255):Vn(parseInt(e,10))}function Gn(t){var e=t;return 
e.length&&"%"===e.charAt(e.length-1)?Bn(parseFloat(e)/100):Bn(parseFloat(e))}function Wn(t,e,n){return n<0?n+=1:n>1&&(n-=1),6*n<1?t+(e-t)*n*6:2*n<1?e:3*n<2?t+(e-t)*(2/3-n)*6:t}function Hn(t,e,n){return t+(e-t)*n}function Yn(t,e,n,i,r){return t[0]=e,t[1]=n,t[2]=i,t[3]=r,t}function Xn(t,e){return t[0]=e[0],t[1]=e[1],t[2]=e[2],t[3]=e[3],t}var Un=new En(20),Zn=null;function jn(t,e){Zn&&Xn(Zn,e),Zn=Un.put(t,Zn||e.slice())}function qn(t,e){if(t){e=e||[];var n=Un.get(t);if(n)return Xn(e,n);var i=(t+="").replace(/ /g,"").toLowerCase();if(i in zn)return Xn(e,zn[i]),jn(t,e),e;var r,o=i.length;if("#"===i.charAt(0))return 4===o||5===o?(r=parseInt(i.slice(1,4),16))>=0&&r<=4095?(Yn(e,(3840&r)>>4|(3840&r)>>8,240&r|(240&r)>>4,15&r|(15&r)<<4,5===o?parseInt(i.slice(4),16)/15:1),jn(t,e),e):void Yn(e,0,0,0,1):7===o||9===o?(r=parseInt(i.slice(1,7),16))>=0&&r<=16777215?(Yn(e,(16711680&r)>>16,(65280&r)>>8,255&r,9===o?parseInt(i.slice(7),16)/255:1),jn(t,e),e):void Yn(e,0,0,0,1):void 0;var a=i.indexOf("("),s=i.indexOf(")");if(-1!==a&&s+1===o){var l=i.substr(0,a),u=i.substr(a+1,s-(a+1)).split(","),h=1;switch(l){case"rgba":if(4!==u.length)return 3===u.length?Yn(e,+u[0],+u[1],+u[2],1):Yn(e,0,0,0,1);h=Gn(u.pop());case"rgb":return u.length>=3?(Yn(e,Fn(u[0]),Fn(u[1]),Fn(u[2]),3===u.length?h:Gn(u[3])),jn(t,e),e):void Yn(e,0,0,0,1);case"hsla":return 4!==u.length?void Yn(e,0,0,0,1):(u[3]=Gn(u[3]),Kn(u,e),jn(t,e),e);case"hsl":return 3!==u.length?void Yn(e,0,0,0,1):(Kn(u,e),jn(t,e),e);default:return}}Yn(e,0,0,0,1)}}function Kn(t,e){var n=(parseFloat(t[0])%360+360)%360/360,i=Gn(t[1]),r=Gn(t[2]),o=r<=.5?r*(i+1):r+i-r*i,a=2*r-o;return Yn(e=e||[],Vn(255*Wn(a,o,n+1/3)),Vn(255*Wn(a,o,n)),Vn(255*Wn(a,o,n-1/3)),1),4===t.length&&(e[3]=t[3]),e}function $n(t,e){var n=qn(t);if(n){for(var i=0;i<3;i++)n[i]=e<0?n[i]*(1-e)|0:(255-n[i])*e+n[i]|0,n[i]>255?n[i]=255:n[i]<0&&(n[i]=0);return ri(n,4===n.length?"rgba":"rgb")}}function Jn(t,e,n){if(e&&e.length&&t>=0&&t<=1){n=n||[];var 
i=t*(e.length-1),r=Math.floor(i),o=Math.ceil(i),a=e[r],s=e[o],l=i-r;return n[0]=Vn(Hn(a[0],s[0],l)),n[1]=Vn(Hn(a[1],s[1],l)),n[2]=Vn(Hn(a[2],s[2],l)),n[3]=Bn(Hn(a[3],s[3],l)),n}}var Qn=Jn;function ti(t,e,n){if(e&&e.length&&t>=0&&t<=1){var i=t*(e.length-1),r=Math.floor(i),o=Math.ceil(i),a=qn(e[r]),s=qn(e[o]),l=i-r,u=ri([Vn(Hn(a[0],s[0],l)),Vn(Hn(a[1],s[1],l)),Vn(Hn(a[2],s[2],l)),Bn(Hn(a[3],s[3],l))],"rgba");return n?{color:u,leftIndex:r,rightIndex:o,value:i}:u}}var ei=ti;function ni(t,e,n,i){var r=qn(t);if(t)return r=function(t){if(t){var e,n,i=t[0]/255,r=t[1]/255,o=t[2]/255,a=Math.min(i,r,o),s=Math.max(i,r,o),l=s-a,u=(s+a)/2;if(0===l)e=0,n=0;else{n=u<.5?l/(s+a):l/(2-s-a);var h=((s-i)/6+l/2)/l,c=((s-r)/6+l/2)/l,p=((s-o)/6+l/2)/l;i===s?e=p-c:r===s?e=1/3+h-p:o===s&&(e=2/3+c-h),e<0&&(e+=1),e>1&&(e-=1)}var d=[360*e,n,u];return null!=t[3]&&d.push(t[3]),d}}(r),null!=e&&(r[0]=function(t){return(t=Math.round(t))<0?0:t>360?360:t}(e)),null!=n&&(r[1]=Gn(n)),null!=i&&(r[2]=Gn(i)),ri(Kn(r),"rgba")}function ii(t,e){var n=qn(t);if(n&&null!=e)return n[3]=Bn(e),ri(n,"rgba")}function ri(t,e){if(t&&t.length){var n=t[0]+","+t[1]+","+t[2];return"rgba"!==e&&"hsva"!==e&&"hsla"!==e||(n+=","+t[3]),e+"("+n+")"}}function oi(t,e){var n=qn(t);return n?(.299*n[0]+.587*n[1]+.114*n[2])*n[3]/255+(1-n[3])*e:0}var ai=Object.freeze({__proto__:null,parse:qn,lift:$n,toHex:function(t){var e=qn(t);if(e)return((1<<24)+(e[0]<<16)+(e[1]<<8)+ +e[2]).toString(16).slice(1)},fastLerp:Jn,fastMapToColor:Qn,lerp:ti,mapToColor:ei,modifyHSL:ni,modifyAlpha:ii,stringify:ri,lum:oi,random:function(){return ri([Math.round(255*Math.random()),Math.round(255*Math.random()),Math.round(255*Math.random())],"rgb")}}),si=Math.round;function li(t){var e;if(t&&"transparent"!==t){if("string"==typeof t&&t.indexOf("rgba")>-1){var n=qn(t);n&&(t="rgb("+n[0]+","+n[1]+","+n[2]+")",e=n[3])}}else t="none";return{color:t,opacity:null==e?1:e}}var ui=1e-4;function hi(t){return t-1e-4}function ci(t){return si(1e3*t)/1e3}function pi(t){return 
si(1e4*t)/1e4}var di={left:"start",right:"end",center:"middle",middle:"middle"};function fi(t){return t&&!!t.image}function gi(t){return fi(t)||function(t){return t&&!!t.svgElement}(t)}function yi(t){return"linear"===t.type}function vi(t){return"radial"===t.type}function mi(t){return t&&("linear"===t.type||"radial"===t.type)}function xi(t){return"url(#"+t+")"}function _i(t){var e=t.getGlobalScale(),n=Math.max(e[0],e[1]);return Math.max(Math.ceil(Math.log(n)/Math.log(10)),1)}function bi(t){var e=t.x||0,n=t.y||0,i=(t.rotation||0)*wt,r=rt(t.scaleX,1),o=rt(t.scaleY,1),a=t.skewX||0,s=t.skewY||0,l=[];return(e||n)&&l.push("translate("+e+"px,"+n+"px)"),i&&l.push("rotate("+i+")"),1===r&&1===o||l.push("scale("+r+","+o+")"),(a||s)&&l.push("skew("+si(a*wt)+"deg, "+si(s*wt)+"deg)"),l.join(" ")}var wi=r.hasGlobalWindow&&X(window.btoa)?function(t){return window.btoa(unescape(encodeURIComponent(t)))}:"undefined"!=typeof Buffer?function(t){return Buffer.from(t).toString("base64")}:function(t){return null},Si=Array.prototype.slice;function Mi(t,e,n){return(e-t)*n+t}function Ii(t,e,n,i){for(var r=e.length,o=0;oi?e:t,o=Math.min(n,i),a=r[o-1]||{color:[0,0,0,0],offset:0},s=o;sa)i.length=a;else for(var s=o;s=1},t.prototype.getAdditiveTrack=function(){return this._additiveTrack},t.prototype.addKeyframe=function(t,e,n){this._needsSort=!0;var i=this.keyframes,r=i.length,o=!1,a=6,s=e;if(N(e)){var l=function(t){return N(t&&t[0])?2:1}(e);a=l,(1===l&&!j(e[0])||2===l&&!j(e[0][0]))&&(o=!0)}else if(j(e)&&!nt(e))a=0;else if(U(e))if(isNaN(+e)){var u=qn(e);u&&(s=u,a=3)}else a=0;else if(Q(e)){var h=A({},s);h.colorStops=z(e.colorStops,(function(t){return{offset:t.offset,color:qn(t.color)}})),yi(e)?a=4:vi(e)&&(a=5),s=h}0===r?this.valType=a:a===this.valType&&6!==a||(o=!0),this.discrete=this.discrete||o;var c={time:t,value:s,rawValue:e,percent:0};return n&&(c.easing=n,c.easingFunc=X(n)?n:an[n]||Pn(n)),i.push(c),c},t.prototype.prepare=function(t,e){var 
n=this.keyframes;this._needsSort&&n.sort((function(t,e){return t.time-e.time}));for(var i=this.valType,r=n.length,o=n[r-1],a=this.discrete,s=Oi(i),l=Pi(i),u=0;u=0&&!(l[n].percent<=e);n--);n=d(n,u-2)}else{for(n=p;ne);n++);n=d(n-1,u-2)}r=l[n+1],i=l[n]}if(i&&r){this._lastFr=n,this._lastFrP=e;var f=r.percent-i.percent,g=0===f?1:d((e-i.percent)/f,1);r.easingFunc&&(g=r.easingFunc(g));var y=o?this._additiveValue:c?Ri:t[h];if(!Oi(s)&&!c||y||(y=this._additiveValue=[]),this.discrete)t[h]=g<1?i.rawValue:r.rawValue;else if(Oi(s))1===s?Ii(y,i[a],r[a],g):function(t,e,n,i){for(var r=e.length,o=r&&e[0].length,a=0;a0&&s.addKeyframe(0,ki(l),i),this._trackKeys.push(a)}s.addKeyframe(t,ki(e[a]),i)}return this._maxTime=Math.max(this._maxTime,t),this},t.prototype.pause=function(){this._clip.pause(),this._paused=!0},t.prototype.resume=function(){this._clip.resume(),this._paused=!1},t.prototype.isPaused=function(){return!!this._paused},t.prototype.duration=function(t){return this._maxTime=t,this._force=!0,this},t.prototype._doneCallback=function(){this._setTracksFinished(),this._clip=null;var t=this._doneCbs;if(t)for(var e=t.length,n=0;n0)){this._started=1;for(var e=this,n=[],i=this._maxTime||0,r=0;r1){var a=o.pop();r.addKeyframe(a.time,t[i]),r.prepare(this._maxTime,r.getAdditiveTrack())}}}},t}();function zi(){return(new Date).getTime()}var Vi,Bi,Fi=function(t){function e(e){var n=t.call(this)||this;return n._running=!1,n._time=0,n._pausedTime=0,n._pauseStart=0,n._paused=!1,e=e||{},n.stage=e.stage||{},n}return n(e,t),e.prototype.addClip=function(t){t.animation&&this.removeClip(t),this._head?(this._tail.next=t,t.prev=this._tail,t.next=null,this._tail=t):this._head=this._tail=t,t.animation=this},e.prototype.addAnimator=function(t){t.animation=this;var e=t.getClip();e&&this.addClip(e)},e.prototype.removeClip=function(t){if(t.animation){var e=t.prev,n=t.next;e?e.next=n:this._head=n,n?n.prev=e:this._tail=e,t.next=t.prev=t.animation=null}},e.prototype.removeAnimator=function(t){var 
e=t.getClip();e&&this.removeClip(e),t.animation=null},e.prototype.update=function(t){for(var e=zi()-this._pausedTime,n=e-this._time,i=this._head;i;){var r=i.next;i.step(e,n)?(i.ondestroy(),this.removeClip(i),i=r):i=r}this._time=e,t||(this.trigger("frame",n),this.stage.update&&this.stage.update())},e.prototype._startLoop=function(){var t=this;this._running=!0,on((function e(){t._running&&(on(e),!t._paused&&t.update())}))},e.prototype.start=function(){this._running||(this._time=zi(),this._pausedTime=0,this._startLoop())},e.prototype.stop=function(){this._running=!1},e.prototype.pause=function(){this._paused||(this._pauseStart=zi(),this._paused=!0)},e.prototype.resume=function(){this._paused&&(this._pausedTime+=zi()-this._pauseStart,this._paused=!1)},e.prototype.clear=function(){for(var t=this._head;t;){var e=t.next;t.prev=t.next=t.animation=null,t=e}this._head=this._tail=null},e.prototype.isFinished=function(){return null==this._head},e.prototype.animate=function(t,e){e=e||{},this.start();var n=new Ei(t,e.loop);return this.addAnimator(n),n},e}(jt),Gi=r.domSupported,Wi=(Bi={pointerdown:1,pointerup:1,pointermove:1,pointerout:1},{mouse:Vi=["click","dblclick","mousewheel","wheel","mouseout","mouseup","mousedown","mousemove","contextmenu"],touch:["touchstart","touchend","touchmove"],pointer:z(Vi,(function(t){var e=t.replace("mouse","pointer");return Bi.hasOwnProperty(e)?e:t}))}),Hi=["mousemove","mouseup"],Yi=["pointermove","pointerup"],Xi=!1;function Ui(t){var e=t.pointerType;return"pen"===e||"touch"===e}function Zi(t){t&&(t.zrByTouch=!0)}function ji(t,e){for(var n=e,i=!1;n&&9!==n.nodeType&&!(i=n.domBelongToZr||n!==e&&n===t.painterRoot);)n=n.parentNode;return i}var 
qi=function(t,e){this.stopPropagation=bt,this.stopImmediatePropagation=bt,this.preventDefault=bt,this.type=e.type,this.target=this.currentTarget=t.dom,this.pointerType=e.pointerType,this.clientX=e.clientX,this.clientY=e.clientY},Ki={mousedown:function(t){t=ce(this.dom,t),this.__mayPointerCapture=[t.zrX,t.zrY],this.trigger("mousedown",t)},mousemove:function(t){t=ce(this.dom,t);var e=this.__mayPointerCapture;!e||t.zrX===e[0]&&t.zrY===e[1]||this.__togglePointerCapture(!0),this.trigger("mousemove",t)},mouseup:function(t){t=ce(this.dom,t),this.__togglePointerCapture(!1),this.trigger("mouseup",t)},mouseout:function(t){ji(this,(t=ce(this.dom,t)).toElement||t.relatedTarget)||(this.__pointerCapturing&&(t.zrEventControl="no_globalout"),this.trigger("mouseout",t))},wheel:function(t){Xi=!0,t=ce(this.dom,t),this.trigger("mousewheel",t)},mousewheel:function(t){Xi||(t=ce(this.dom,t),this.trigger("mousewheel",t))},touchstart:function(t){Zi(t=ce(this.dom,t)),this.__lastTouchMoment=new Date,this.handler.processGesture(t,"start"),Ki.mousemove.call(this,t),Ki.mousedown.call(this,t)},touchmove:function(t){Zi(t=ce(this.dom,t)),this.handler.processGesture(t,"change"),Ki.mousemove.call(this,t)},touchend:function(t){Zi(t=ce(this.dom,t)),this.handler.processGesture(t,"end"),Ki.mouseup.call(this,t),+new Date-+this.__lastTouchMoment<300&&Ki.click.call(this,t)},pointerdown:function(t){Ki.mousedown.call(this,t)},pointermove:function(t){Ui(t)||Ki.mousemove.call(this,t)},pointerup:function(t){Ki.mouseup.call(this,t)},pointerout:function(t){Ui(t)||Ki.mouseout.call(this,t)}};E(["click","dblclick","contextmenu"],(function(t){Ki[t]=function(e){e=ce(this.dom,e),this.trigger(t,e)}}));var $i={pointermove:function(t){Ui(t)||$i.mousemove.call(this,t)},pointerup:function(t){$i.mouseup.call(this,t)},mousemove:function(t){this.trigger("mousemove",t)},mouseup:function(t){var 
e=this.__pointerCapturing;this.__togglePointerCapture(!1),this.trigger("mouseup",t),e&&(t.zrEventControl="only_globalout",this.trigger("mouseout",t))}};function Ji(t,e){var n=e.domHandlers;r.pointerEventsSupported?E(Wi.pointer,(function(i){tr(e,i,(function(e){n[i].call(t,e)}))})):(r.touchEventsSupported&&E(Wi.touch,(function(i){tr(e,i,(function(r){n[i].call(t,r),function(t){t.touching=!0,null!=t.touchTimer&&(clearTimeout(t.touchTimer),t.touchTimer=null),t.touchTimer=setTimeout((function(){t.touching=!1,t.touchTimer=null}),700)}(e)}))})),E(Wi.mouse,(function(i){tr(e,i,(function(r){r=he(r),e.touching||n[i].call(t,r)}))})))}function Qi(t,e){function n(n){tr(e,n,(function(i){i=he(i),ji(t,i.target)||(i=function(t,e){return ce(t.dom,new qi(t,e),!0)}(t,i),e.domHandlers[n].call(t,i))}),{capture:!0})}r.pointerEventsSupported?E(Yi,n):r.touchEventsSupported||E(Hi,n)}function tr(t,e,n,i){t.mounted[e]=n,t.listenerOpts[e]=i,pe(t.domTarget,e,n,i)}function er(t){var e,n,i,r,o=t.mounted;for(var a in o)o.hasOwnProperty(a)&&(e=t.domTarget,n=a,i=o[a],r=t.listenerOpts[a],e.removeEventListener(n,i,r));t.mounted={}}var nr=function(t,e){this.mounted={},this.listenerOpts={},this.touching=!1,this.domTarget=t,this.domHandlers=e},ir=function(t){function e(e,n){var i=t.call(this)||this;return i.__pointerCapturing=!1,i.dom=e,i.painterRoot=n,i._localHandlerScope=new nr(e,Ki),Gi&&(i._globalHandlerScope=new nr(document,$i)),Ji(i,i._localHandlerScope),i}return n(e,t),e.prototype.dispose=function(){er(this._localHandlerScope),Gi&&er(this._globalHandlerScope)},e.prototype.setCursor=function(t){this.dom.style&&(this.dom.style.cursor=t||"default")},e.prototype.__togglePointerCapture=function(t){if(this.__mayPointerCapture=null,Gi&&+this.__pointerCapturing^+t){this.__pointerCapturing=t;var e=this._globalHandlerScope;t?Qi(this,e):er(e)}},e}(jt),rr=1;r.hasGlobalWindow&&(rr=Math.max(window.devicePixelRatio||window.screen&&window.screen.deviceXDPI/window.screen.logicalXDPI||1,1));var 
or=rr,ar="#333",sr="#ccc",lr=xe,ur=5e-5;function hr(t){return t>ur||t<-5e-5}var cr=[],pr=[],dr=[1,0,0,1,0,0],fr=Math.abs,gr=function(){function t(){}return t.prototype.getLocalTransform=function(e){return t.getLocalTransform(this,e)},t.prototype.setPosition=function(t){this.x=t[0],this.y=t[1]},t.prototype.setScale=function(t){this.scaleX=t[0],this.scaleY=t[1]},t.prototype.setSkew=function(t){this.skewX=t[0],this.skewY=t[1]},t.prototype.setOrigin=function(t){this.originX=t[0],this.originY=t[1]},t.prototype.needLocalTransform=function(){return hr(this.rotation)||hr(this.x)||hr(this.y)||hr(this.scaleX-1)||hr(this.scaleY-1)||hr(this.skewX)||hr(this.skewY)},t.prototype.updateTransform=function(){var t=this.parent&&this.parent.transform,e=this.needLocalTransform(),n=this.transform;e||t?(n=n||[1,0,0,1,0,0],e?this.getLocalTransform(n):lr(n),t&&(e?be(n,t,n):_e(n,t)),this.transform=n,this._resolveGlobalScaleRatio(n)):n&&(lr(n),this.invTransform=null)},t.prototype._resolveGlobalScaleRatio=function(t){var e=this.globalScaleRatio;if(null!=e&&1!==e){this.getGlobalScale(cr);var n=cr[0]<0?-1:1,i=cr[1]<0?-1:1,r=((cr[0]-n)*e+n)/cr[0]||0,o=((cr[1]-i)*e+i)/cr[1]||0;t[0]*=r,t[1]*=r,t[2]*=o,t[3]*=o}this.invTransform=this.invTransform||[1,0,0,1,0,0],Ie(this.invTransform,t)},t.prototype.getComputedTransform=function(){for(var t=this,e=[];t;)e.push(t),t=t.parent;for(;t=e.pop();)t.updateTransform();return this.transform},t.prototype.setLocalTransform=function(t){if(t){var e=t[0]*t[0]+t[1]*t[1],n=t[2]*t[2]+t[3]*t[3],i=Math.atan2(t[1],t[0]),r=Math.PI/2+i-Math.atan2(t[3],t[2]);n=Math.sqrt(n)*Math.cos(r),e=Math.sqrt(e),this.skewX=r,this.skewY=0,this.rotation=-i,this.x=+t[4],this.y=+t[5],this.scaleX=e,this.scaleY=n,this.originX=0,this.originY=0}},t.prototype.decomposeTransform=function(){if(this.transform){var t=this.parent,e=this.transform;t&&t.transform&&(be(pr,t.invTransform,e),e=pr);var 
n=this.originX,i=this.originY;(n||i)&&(dr[4]=n,dr[5]=i,be(pr,e,dr),pr[4]-=n,pr[5]-=i,e=pr),this.setLocalTransform(e)}},t.prototype.getGlobalScale=function(t){var e=this.transform;return t=t||[],e?(t[0]=Math.sqrt(e[0]*e[0]+e[1]*e[1]),t[1]=Math.sqrt(e[2]*e[2]+e[3]*e[3]),e[0]<0&&(t[0]=-t[0]),e[3]<0&&(t[1]=-t[1]),t):(t[0]=1,t[1]=1,t)},t.prototype.transformCoordToLocal=function(t,e){var n=[t,e],i=this.invTransform;return i&&Wt(n,n,i),n},t.prototype.transformCoordToGlobal=function(t,e){var n=[t,e],i=this.transform;return i&&Wt(n,n,i),n},t.prototype.getLineScale=function(){var t=this.transform;return t&&fr(t[0]-1)>1e-10&&fr(t[3]-1)>1e-10?Math.sqrt(fr(t[0]*t[3]-t[2]*t[1])):1},t.prototype.copyTransform=function(t){vr(this,t)},t.getLocalTransform=function(t,e){e=e||[];var n=t.originX||0,i=t.originY||0,r=t.scaleX,o=t.scaleY,a=t.anchorX,s=t.anchorY,l=t.rotation||0,u=t.x,h=t.y,c=t.skewX?Math.tan(t.skewX):0,p=t.skewY?Math.tan(-t.skewY):0;if(n||i||a||s){var d=n+a,f=i+s;e[4]=-d*r-c*f*o,e[5]=-f*o-p*d*r}else e[4]=e[5]=0;return e[0]=r,e[3]=o,e[1]=p*r,e[2]=c*o,l&&Se(e,e,l),e[4]+=n+u,e[5]+=i+h,e},t.initDefaultProps=function(){var e=t.prototype;e.scaleX=e.scaleY=e.globalScaleRatio=1,e.x=e.y=e.originX=e.originY=e.skewX=e.skewY=e.rotation=e.anchorX=e.anchorY=0}(),t}(),yr=["x","y","originX","originY","anchorX","anchorY","rotation","scaleX","scaleY","skewX","skewY"];function vr(t,e){for(var n=0;n=0?parseFloat(t)/100*e:parseFloat(t):t}function Tr(t,e,n){var i=e.position||"inside",r=null!=e.distance?e.distance:5,o=n.height,a=n.width,s=o/2,l=n.x,u=n.y,h="left",c="top";if(i instanceof Array)l+=Ir(i[0],n.width),u+=Ir(i[1],n.height),h=null,c=null;else 
switch(i){case"left":l-=r,u+=s,h="right",c="middle";break;case"right":l+=r+a,u+=s,c="middle";break;case"top":l+=a/2,u-=r,h="center",c="bottom";break;case"bottom":l+=a/2,u+=o+r,h="center";break;case"inside":l+=a/2,u+=s,h="center",c="middle";break;case"insideLeft":l+=r,u+=s,c="middle";break;case"insideRight":l+=a-r,u+=s,h="right",c="middle";break;case"insideTop":l+=a/2,u+=r,h="center";break;case"insideBottom":l+=a/2,u+=o-r,h="center",c="bottom";break;case"insideTopLeft":l+=r,u+=r;break;case"insideTopRight":l+=a-r,u+=r,h="right";break;case"insideBottomLeft":l+=r,u+=o-r,c="bottom";break;case"insideBottomRight":l+=a-r,u+=o-r,h="right",c="bottom"}return(t=t||{}).x=l,t.y=u,t.align=h,t.verticalAlign=c,t}var Cr="__zr_normal__",Dr=yr.concat(["ignore"]),Ar=V(yr,(function(t,e){return t[e]=!0,t}),{ignore:!1}),kr={},Lr=new ze(0,0,0,0),Pr=function(){function t(t){this.id=M(),this.animators=[],this.currentStates=[],this.states={},this._init(t)}return t.prototype._init=function(t){this.attr(t)},t.prototype.drift=function(t,e,n){switch(this.draggable){case"horizontal":e=0;break;case"vertical":t=0}var i=this.transform;i||(i=this.transform=[1,0,0,1,0,0]),i[4]+=t,i[5]+=e,this.decomposeTransform(),this.markRedraw()},t.prototype.beforeUpdate=function(){},t.prototype.afterUpdate=function(){},t.prototype.update=function(){this.updateTransform(),this.__dirty&&this.updateInnerText()},t.prototype.updateInnerText=function(t){var e=this._textContent;if(e&&(!e.ignore||t)){this.textConfig||(this.textConfig={});var n=this.textConfig,i=n.local,r=e.innerTransformable,o=void 0,a=void 0,s=!1;r.parent=i?this:null;var l=!1;if(r.copyTransform(e),null!=n.position){var u=Lr;n.layoutRect?u.copy(n.layoutRect):u.copy(this.getBoundingRect()),i||u.applyTransform(this.transform),this.calculateTextPosition?this.calculateTextPosition(kr,n,u):Tr(kr,n,u),r.x=kr.x,r.y=kr.y,o=kr.align,a=kr.verticalAlign;var h=n.origin;if(h&&null!=n.rotation){var c=void 0,p=void 
0;"center"===h?(c=.5*u.width,p=.5*u.height):(c=Ir(h[0],u.width),p=Ir(h[1],u.height)),l=!0,r.originX=-r.x+c+(i?0:u.x),r.originY=-r.y+p+(i?0:u.y)}}null!=n.rotation&&(r.rotation=n.rotation);var d=n.offset;d&&(r.x+=d[0],r.y+=d[1],l||(r.originX=-d[0],r.originY=-d[1]));var f=null==n.inside?"string"==typeof n.position&&n.position.indexOf("inside")>=0:n.inside,g=this._innerTextDefaultStyle||(this._innerTextDefaultStyle={}),y=void 0,v=void 0,m=void 0;f&&this.canBeInsideText()?(y=n.insideFill,v=n.insideStroke,null!=y&&"auto"!==y||(y=this.getInsideTextFill()),null!=v&&"auto"!==v||(v=this.getInsideTextStroke(y),m=!0)):(y=n.outsideFill,v=n.outsideStroke,null!=y&&"auto"!==y||(y=this.getOutsideFill()),null!=v&&"auto"!==v||(v=this.getOutsideStroke(y),m=!0)),(y=y||"#000")===g.fill&&v===g.stroke&&m===g.autoStroke&&o===g.align&&a===g.verticalAlign||(s=!0,g.fill=y,g.stroke=v,g.autoStroke=m,g.align=o,g.verticalAlign=a,e.setDefaultTextStyle(g)),e.__dirty|=1,s&&e.dirtyStyle(!0)}},t.prototype.canBeInsideText=function(){return!0},t.prototype.getInsideTextFill=function(){return"#fff"},t.prototype.getInsideTextStroke=function(t){return"#000"},t.prototype.getOutsideFill=function(){return this.__zr&&this.__zr.isDarkMode()?sr:ar},t.prototype.getOutsideStroke=function(t){var e=this.__zr&&this.__zr.getBackgroundColor(),n="string"==typeof e&&qn(e);n||(n=[255,255,255,1]);for(var i=n[3],r=this.__zr.isDarkMode(),o=0;o<3;o++)n[o]=n[o]*i+(r?0:255)*(1-i);return n[3]=1,ri(n,"rgba")},t.prototype.traverse=function(t,e){},t.prototype.attrKV=function(t,e){"textConfig"===t?this.setTextConfig(e):"textContent"===t?this.setTextContent(e):"clipPath"===t?this.setClipPath(e):"extra"===t?(this.extra=this.extra||{},A(this.extra,e)):this[t]=e},t.prototype.hide=function(){this.ignore=!0,this.markRedraw()},t.prototype.show=function(){this.ignore=!1,this.markRedraw()},t.prototype.attr=function(t,e){if("string"==typeof t)this.attrKV(t,e);else if(q(t))for(var n=G(t),i=0;i0},t.prototype.getState=function(t){return 
this.states[t]},t.prototype.ensureState=function(t){var e=this.states;return e[t]||(e[t]={}),e[t]},t.prototype.clearStates=function(t){this.useState(Cr,!1,t)},t.prototype.useState=function(t,e,n,i){var r=t===Cr;if(this.hasState()||!r){var o=this.currentStates,a=this.stateTransition;if(!(P(o,t)>=0)||!e&&1!==o.length){var s;if(this.stateProxy&&!r&&(s=this.stateProxy(t)),s||(s=this.states&&this.states[t]),s||r){r||this.saveCurrentToNormalState(s);var l=!!(s&&s.hoverLayer||i);l&&this._toggleHoverLayerFlag(!0),this._applyStateObj(t,s,this._normalState,e,!n&&!this.__inHover&&a&&a.duration>0,a);var u=this._textContent,h=this._textGuide;return u&&u.useState(t,e,n,l),h&&h.useState(t,e,n,l),r?(this.currentStates=[],this._normalState={}):e?this.currentStates.push(t):this.currentStates=[t],this._updateAnimationTargets(),this.markRedraw(),!l&&this.__inHover&&(this._toggleHoverLayerFlag(!1),this.__dirty&=-2),s}I("State "+t+" not exists.")}}},t.prototype.useStates=function(t,e,n){if(t.length){var i=[],r=this.currentStates,o=t.length,a=o===r.length;if(a)for(var s=0;s0,d);var f=this._textContent,g=this._textGuide;f&&f.useStates(t,e,c),g&&g.useStates(t,e,c),this._updateAnimationTargets(),this.currentStates=t.slice(),this.markRedraw(),!c&&this.__inHover&&(this._toggleHoverLayerFlag(!1),this.__dirty&=-2)}else this.clearStates()},t.prototype._updateAnimationTargets=function(){for(var t=0;t=0){var n=this.currentStates.slice();n.splice(e,1),this.useStates(n)}},t.prototype.replaceState=function(t,e,n){var i=this.currentStates.slice(),r=P(i,t),o=P(i,e)>=0;r>=0?o?i.splice(r,1):i[r]=e:n&&!o&&i.push(e),this.useStates(i)},t.prototype.toggleState=function(t,e){e?this.useState(t,!0):this.removeState(t)},t.prototype._mergeStates=function(t){for(var e,n={},i=0;i=0&&e.splice(n,1)})),this.animators.push(t),n&&n.animation.addAnimator(t),n&&n.wakeUp()},t.prototype.updateDuringAnimation=function(t){this.markRedraw()},t.prototype.stopAnimation=function(t,e){for(var 
n=this.animators,i=n.length,r=[],o=0;o0&&n.during&&o[0].during((function(t,e){n.during(e)}));for(var p=0;p0||r.force&&!a.length){var w,S=void 0,M=void 0,I=void 0;if(s){M={},p&&(S={});for(_=0;_=0&&(n.splice(i,0,t),this._doAdd(t))}return this},e.prototype.replace=function(t,e){var n=P(this._children,t);return n>=0&&this.replaceAt(e,n),this},e.prototype.replaceAt=function(t,e){var n=this._children,i=n[e];if(t&&t!==this&&t.parent!==this&&t!==i){n[e]=t,i.parent=null;var r=this.__zr;r&&i.removeSelfFromZr(r),this._doAdd(t)}return this},e.prototype._doAdd=function(t){t.parent&&t.parent.remove(t),t.parent=this;var e=this.__zr;e&&e!==t.__zr&&t.addSelfToZr(e),e&&e.refresh()},e.prototype.remove=function(t){var e=this.__zr,n=this._children,i=P(n,t);return i<0||(n.splice(i,1),t.parent=null,e&&t.removeSelfFromZr(e),e&&e.refresh()),this},e.prototype.removeAll=function(){for(var t=this._children,e=this.__zr,n=0;n0&&(this._stillFrameAccum++,this._stillFrameAccum>this._sleepAfterStill&&this.animation.stop())},t.prototype.setSleepAfterStill=function(t){this._sleepAfterStill=t},t.prototype.wakeUp=function(){this.animation.start(),this._stillFrameAccum=0},t.prototype.refreshHover=function(){this._needsRefreshHover=!0},t.prototype.refreshHoverImmediately=function(){this._needsRefreshHover=!1,this.painter.refreshHover&&"canvas"===this.painter.getType()&&this.painter.refreshHover()},t.prototype.resize=function(t){t=t||{},this.painter.resize(t.width,t.height),this.handler.resize()},t.prototype.clearAnimation=function(){this.animation.clear()},t.prototype.getWidth=function(){return this.painter.getWidth()},t.prototype.getHeight=function(){return this.painter.getHeight()},t.prototype.setCursorStyle=function(t){this.handler.setCursorStyle(t)},t.prototype.findHover=function(t,e){return this.handler.findHover(t,e)},t.prototype.on=function(t,e,n){return 
this.handler.on(t,e,n),this},t.prototype.off=function(t,e){this.handler.off(t,e)},t.prototype.trigger=function(t,e){this.handler.trigger(t,e)},t.prototype.clear=function(){for(var t=this.storage.getRoots(),e=0;e0){if(t<=r)return a;if(t>=o)return s}else{if(t>=r)return a;if(t<=o)return s}else{if(t===r)return a;if(t===o)return s}return(t-r)/l*u+a}function Ur(t,e){switch(t){case"center":case"middle":t="50%";break;case"left":case"top":t="0%";break;case"right":case"bottom":t="100%"}return U(t)?(n=t,n.replace(/^\s+|\s+$/g,"")).match(/%$/)?parseFloat(t)/100*e:parseFloat(t):null==t?NaN:+t;var n}function Zr(t,e,n){return null==e&&(e=10),e=Math.min(Math.max(0,e),20),t=(+t).toFixed(e),n?t:+t}function jr(t){return t.sort((function(t,e){return t-e})),t}function qr(t){if(t=+t,isNaN(t))return 0;if(t>1e-14)for(var e=1,n=0;n<15;n++,e*=10)if(Math.round(t*e)/e===t)return n;return Kr(t)}function Kr(t){var e=t.toString().toLowerCase(),n=e.indexOf("e"),i=n>0?+e.slice(n+1):0,r=n>0?n:e.length,o=e.indexOf("."),a=o<0?0:r-1-o;return Math.max(0,a-i)}function $r(t,e){var n=Math.log,i=Math.LN10,r=Math.floor(n(t[1]-t[0])/i),o=Math.round(n(Math.abs(e[1]-e[0]))/i),a=Math.min(Math.max(-r+o,0),20);return isFinite(a)?a:20}function Jr(t,e){var n=V(t,(function(t,e){return t+(isNaN(e)?0:e)}),0);if(0===n)return[];for(var i=Math.pow(10,e),r=z(t,(function(t){return(isNaN(t)?0:t)/n*i*100})),o=100*i,a=z(r,(function(t){return Math.floor(t)})),s=V(a,(function(t,e){return t+e}),0),l=z(r,(function(t,e){return t-a[e]}));su&&(u=l[c],h=c);++a[h],l[h]=0,++s}return z(a,(function(t){return t/i}))}function Qr(t,e){var n=Math.max(qr(t),qr(e)),i=t+e;return n>20?i:Zr(i,n)}var to=9007199254740991;function eo(t){var e=2*Math.PI;return(t%e+e)%e}function no(t){return t>-1e-4&&t=10&&e++,e}function so(t,e){var n=ao(t),i=Math.pow(10,n),r=t/i;return t=(e?r<1.5?1:r<2.5?2:r<4?3:r<7?5:10:r<1?1:r<2?2:r<3?3:r<5?5:10)*i,n>=-20?+t.toFixed(n<0?-n:0):t}function lo(t,e){var n=(t.length-1)*e+1,i=Math.floor(n),r=+t[i-1],o=n-i;return 
o?r+o*(t[i]-r):r}function uo(t){t.sort((function(t,e){return s(t,e,0)?-1:1}));for(var e=-1/0,n=1,i=0;i=0||r&&P(r,s)<0)){var l=n.getShallow(s,e);null!=l&&(o[t[a][0]]=l)}}return o}}var Qo=Jo([["fill","color"],["shadowBlur"],["shadowOffsetX"],["shadowOffsetY"],["opacity"],["shadowColor"]]),ta=function(){function t(){}return t.prototype.getAreaStyle=function(t,e){return Qo(this,t,e)},t}(),ea=new En(50);function na(t){if("string"==typeof t){var e=ea.get(t);return e&&e.image}return t}function ia(t,e,n,i,r){if(t){if("string"==typeof t){if(e&&e.__zrImageSrc===t||!n)return e;var o=ea.get(t),a={hostEl:n,cb:i,cbPayload:r};return o?!oa(e=o.image)&&o.pending.push(a):((e=h.loadImage(t,ra,ra)).__zrImageSrc=t,ea.put(t,e.__cachedImgObj={image:e,pending:[a]})),e}return t}return e}function ra(){var t=this.__cachedImgObj;this.onload=this.onerror=this.__cachedImgObj=null;for(var e=0;e=a;l++)s-=a;var u=xr(n,e);return u>s&&(n="",u=0),s=t-u,r.ellipsis=n,r.ellipsisWidth=u,r.contentWidth=s,r.containerWidth=t,r}function ua(t,e){var n=e.containerWidth,i=e.font,r=e.contentWidth;if(!n)return"";var o=xr(t,i);if(o<=n)return t;for(var a=0;;a++){if(o<=r||a>=e.maxIterations){t+=e.ellipsis;break}var s=0===a?ha(t,r,e.ascCharWidth,e.cnCharWidth):o>0?Math.floor(t.length*r/o):0;o=xr(t=t.substr(0,s),i)}return""===t&&(t=e.placeholder),t}function ha(t,e,n,i){for(var r=0,o=0,a=t.length;o0&&f+i.accumWidth>i.width&&(o=e.split("\n"),c=!0),i.accumWidth=f}else{var g=va(e,h,i.width,i.breakAll,i.accumWidth);i.accumWidth=g.accumWidth+d,a=g.linesWidths,o=g.lines}}else o=e.split("\n");for(var y=0;y=32&&e<=591||e>=880&&e<=4351||e>=4608&&e<=5119||e>=7680&&e<=8303}(t)||!!ga[t]}function va(t,e,n,i,r){for(var o=[],a=[],s="",l="",u=0,h=0,c=0;cn:r+h+d>n)?h?(s||l)&&(f?(s||(s=l,l="",h=u=0),o.push(s),a.push(h-u),l+=p,s="",h=u+=d):(l&&(s+=l,l="",u=0),o.push(s),a.push(h),s=p,h=d)):f?(o.push(l),a.push(u),l=p,u=d):(o.push(p),a.push(d)):(h+=d,f?(l+=p,u+=d):(l&&(s+=l,l="",u=0),s+=p))}else 
l&&(s+=l,h+=u),o.push(s),a.push(h),s="",l="",u=0,h=0}return o.length||s||(s=t,l="",u=0),l&&(s+=l),s&&(o.push(s),a.push(h)),1===o.length&&(h+=r),{accumWidth:h,lines:o,linesWidths:a}}var ma="__zr_style_"+Math.round(10*Math.random()),xa={shadowBlur:0,shadowOffsetX:0,shadowOffsetY:0,shadowColor:"#000",opacity:1,blend:"source-over"},_a={style:{shadowBlur:!0,shadowOffsetX:!0,shadowOffsetY:!0,shadowColor:!0,opacity:!0}};xa[ma]=!0;var ba=["z","z2","invisible"],wa=["invisible"],Sa=function(t){function e(e){return t.call(this,e)||this}var i;return n(e,t),e.prototype._init=function(e){for(var n=G(e),i=0;i1e-4)return s[0]=t-n,s[1]=e-i,l[0]=t+n,void(l[1]=e+i);if(La[0]=Aa(r)*n+t,La[1]=Da(r)*i+e,Pa[0]=Aa(o)*n+t,Pa[1]=Da(o)*i+e,u(s,La,Pa),h(l,La,Pa),(r%=ka)<0&&(r+=ka),(o%=ka)<0&&(o+=ka),r>o&&!a?o+=ka:rr&&(Oa[0]=Aa(d)*n+t,Oa[1]=Da(d)*i+e,u(s,Oa,s),h(l,Oa,l))}var Ga={M:1,L:2,C:3,Q:4,A:5,Z:6,R:7},Wa=[],Ha=[],Ya=[],Xa=[],Ua=[],Za=[],ja=Math.min,qa=Math.max,Ka=Math.cos,$a=Math.sin,Ja=Math.abs,Qa=Math.PI,ts=2*Qa,es="undefined"!=typeof Float32Array,ns=[];function is(t){return Math.round(t/Qa*1e8)/1e8%2*Qa}function rs(t,e){var n=is(t[0]);n<0&&(n+=ts);var i=n-t[0],r=t[1];r+=i,!e&&r-n>=ts?r=n+ts:e&&n-r>=ts?r=n-ts:!e&&n>r?r=n+(ts-is(n-r)):e&&n0&&(this._ux=Ja(n/or/t)||0,this._uy=Ja(n/or/e)||0)},t.prototype.setDPR=function(t){this.dpr=t},t.prototype.setContext=function(t){this._ctx=t},t.prototype.getContext=function(){return this._ctx},t.prototype.beginPath=function(){return this._ctx&&this._ctx.beginPath(),this.reset(),this},t.prototype.reset=function(){this._saveData&&(this._len=0),this._pathSegLen&&(this._pathSegLen=null,this._pathLen=0),this._version++},t.prototype.moveTo=function(t,e){return this._drawPendingPt(),this.addData(Ga.M,t,e),this._ctx&&this._ctx.moveTo(t,e),this._x0=t,this._y0=e,this._xi=t,this._yi=e,this},t.prototype.lineTo=function(t,e){var 
n=Ja(t-this._xi),i=Ja(e-this._yi),r=n>this._ux||i>this._uy;if(this.addData(Ga.L,t,e),this._ctx&&r&&this._ctx.lineTo(t,e),r)this._xi=t,this._yi=e,this._pendingPtDist=0;else{var o=n*n+i*i;o>this._pendingPtDist&&(this._pendingPtX=t,this._pendingPtY=e,this._pendingPtDist=o)}return this},t.prototype.bezierCurveTo=function(t,e,n,i,r,o){return this._drawPendingPt(),this.addData(Ga.C,t,e,n,i,r,o),this._ctx&&this._ctx.bezierCurveTo(t,e,n,i,r,o),this._xi=r,this._yi=o,this},t.prototype.quadraticCurveTo=function(t,e,n,i){return this._drawPendingPt(),this.addData(Ga.Q,t,e,n,i),this._ctx&&this._ctx.quadraticCurveTo(t,e,n,i),this._xi=n,this._yi=i,this},t.prototype.arc=function(t,e,n,i,r,o){this._drawPendingPt(),ns[0]=i,ns[1]=r,rs(ns,o),i=ns[0];var a=(r=ns[1])-i;return this.addData(Ga.A,t,e,n,n,i,a,0,o?0:1),this._ctx&&this._ctx.arc(t,e,n,i,r,o),this._xi=Ka(r)*n+t,this._yi=$a(r)*n+e,this},t.prototype.arcTo=function(t,e,n,i,r){return this._drawPendingPt(),this._ctx&&this._ctx.arcTo(t,e,n,i,r),this},t.prototype.rect=function(t,e,n,i){return this._drawPendingPt(),this._ctx&&this._ctx.rect(t,e,n,i),this.addData(Ga.R,t,e,n,i),this},t.prototype.closePath=function(){this._drawPendingPt(),this.addData(Ga.Z);var t=this._ctx,e=this._x0,n=this._y0;return t&&t.closePath(),this._xi=e,this._yi=n,this},t.prototype.fill=function(t){t&&t.fill(),this.toStatic()},t.prototype.stroke=function(t){t&&t.stroke(),this.toStatic()},t.prototype.len=function(){return this._len},t.prototype.setData=function(t){var e=t.length;this.data&&this.data.length===e||!es||(this.data=new Float32Array(e));for(var n=0;nu.length&&(this._expandData(),u=this.data);for(var h=0;h0&&(this._ctx&&this._ctx.lineTo(this._pendingPtX,this._pendingPtY),this._pendingPtDist=0)},t.prototype._expandData=function(){if(!(this.data instanceof Array)){for(var t=[],e=0;e11&&(this.data=new Float32Array(t)))}},t.prototype.getBoundingRect=function(){Ya[0]=Ya[1]=Ua[0]=Ua[1]=Number.MAX_VALUE,Xa[0]=Xa[1]=Za[0]=Za[1]=-Number.MAX_VALUE;var 
t,e=this.data,n=0,i=0,r=0,o=0;for(t=0;tn||Ja(y)>i||c===e-1)&&(f=Math.sqrt(A*A+y*y),r=g,o=x);break;case Ga.C:var v=t[c++],m=t[c++],x=(g=t[c++],t[c++]),_=t[c++],b=t[c++];f=Mn(r,o,v,m,g,x,_,b,10),r=_,o=b;break;case Ga.Q:f=kn(r,o,v=t[c++],m=t[c++],g=t[c++],x=t[c++],10),r=g,o=x;break;case Ga.A:var w=t[c++],S=t[c++],M=t[c++],I=t[c++],T=t[c++],C=t[c++],D=C+T;c+=1;t[c++];d&&(a=Ka(T)*M+w,s=$a(T)*I+S),f=qa(M,I)*ja(ts,Math.abs(C)),r=Ka(D)*M+w,o=$a(D)*I+S;break;case Ga.R:a=r=t[c++],s=o=t[c++],f=2*t[c++]+2*t[c++];break;case Ga.Z:var A=a-r;y=s-o;f=Math.sqrt(A*A+y*y),r=a,o=s}f>=0&&(l[h++]=f,u+=f)}return this._pathLen=u,u},t.prototype.rebuildPath=function(t,e){var n,i,r,o,a,s,l,u,h,c,p=this.data,d=this._ux,f=this._uy,g=this._len,y=e<1,v=0,m=0,x=0;if(!y||(this._pathSegLen||this._calculateLength(),l=this._pathSegLen,u=e*this._pathLen))t:for(var _=0;_0&&(t.lineTo(h,c),x=0),b){case Ga.M:n=r=p[_++],i=o=p[_++],t.moveTo(r,o);break;case Ga.L:a=p[_++],s=p[_++];var S=Ja(a-r),M=Ja(s-o);if(S>d||M>f){if(y){if(v+(j=l[m++])>u){var I=(u-v)/j;t.lineTo(r*(1-I)+a*I,o*(1-I)+s*I);break t}v+=j}t.lineTo(a,s),r=a,o=s,x=0}else{var T=S*S+M*M;T>x&&(h=a,c=s,x=T)}break;case Ga.C:var C=p[_++],D=p[_++],A=p[_++],k=p[_++],L=p[_++],P=p[_++];if(y){if(v+(j=l[m++])>u){wn(r,C,A,L,I=(u-v)/j,Wa),wn(o,D,k,P,I,Ha),t.bezierCurveTo(Wa[1],Ha[1],Wa[2],Ha[2],Wa[3],Ha[3]);break t}v+=j}t.bezierCurveTo(C,D,A,k,L,P),r=L,o=P;break;case Ga.Q:C=p[_++],D=p[_++],A=p[_++],k=p[_++];if(y){if(v+(j=l[m++])>u){Dn(r,C,A,I=(u-v)/j,Wa),Dn(o,D,k,I,Ha),t.quadraticCurveTo(Wa[1],Ha[1],Wa[2],Ha[2]);break t}v+=j}t.quadraticCurveTo(C,D,A,k),r=A,o=k;break;case Ga.A:var O=p[_++],R=p[_++],N=p[_++],E=p[_++],z=p[_++],V=p[_++],B=p[_++],F=!p[_++],G=N>E?N:E,W=Ja(N-E)>.001,H=z+V,Y=!1;if(y)v+(j=l[m++])>u&&(H=z+V*(u-v)/j,Y=!0),v+=j;if(W&&t.ellipse?t.ellipse(O,R,N,E,B,z,H,F):t.arc(O,R,G,z,H,F),Y)break t;w&&(n=Ka(z)*N+O,i=$a(z)*E+R),r=Ka(H)*N+O,o=$a(H)*E+R;break;case Ga.R:n=r=p[_],i=o=p[_+1],a=p[_++],s=p[_++];var X=p[_++],U=p[_++];if(y){if(v+(j=l[m++])>u){var 
Z=u-v;t.moveTo(a,s),t.lineTo(a+ja(Z,X),s),(Z-=X)>0&&t.lineTo(a+X,s+ja(Z,U)),(Z-=U)>0&&t.lineTo(a+qa(X-Z,0),s+U),(Z-=X)>0&&t.lineTo(a,s+qa(U-Z,0));break t}v+=j}t.rect(a,s,X,U);break;case Ga.Z:if(y){var j;if(v+(j=l[m++])>u){I=(u-v)/j;t.lineTo(r*(1-I)+n*I,o*(1-I)+i*I);break t}v+=j}t.closePath(),r=n,o=i}}},t.prototype.clone=function(){var e=new t,n=this.data;return e.data=n.slice?n.slice():Array.prototype.slice.call(n),e._len=this._len,e},t.CMD=Ga,t.initDefaultProps=function(){var e=t.prototype;e._saveData=!0,e._ux=0,e._uy=0,e._pendingPtDist=0,e._version=0}(),t}();function as(t,e,n,i,r,o,a){if(0===r)return!1;var s=r,l=0;if(a>e+s&&a>i+s||at+s&&o>n+s||oe+c&&h>i+c&&h>o+c&&h>s+c||ht+c&&u>n+c&&u>r+c&&u>a+c||ue+u&&l>i+u&&l>o+u||lt+u&&s>n+u&&s>r+u||sn||h+ur&&(r+=cs);var p=Math.atan2(l,s);return p<0&&(p+=cs),p>=i&&p<=r||p+cs>=i&&p+cs<=r}function ds(t,e,n,i,r,o){if(o>e&&o>i||or?s:0}var fs=os.CMD,gs=2*Math.PI;var ys=[-1,-1,-1],vs=[-1,-1];function ms(t,e,n,i,r,o,a,s,l,u){if(u>e&&u>i&&u>o&&u>s||u1&&(h=void 0,h=vs[0],vs[0]=vs[1],vs[1]=h),f=mn(e,i,o,s,vs[0]),d>1&&(g=mn(e,i,o,s,vs[1]))),2===d?ve&&s>i&&s>o||s=0&&h<=1&&(r[l++]=h);else{var u=a*a-4*o*s;if(yn(u))(h=-a/(2*o))>=0&&h<=1&&(r[l++]=h);else if(u>0){var h,c=ln(u),p=(-a-c)/(2*o);(h=(-a+c)/(2*o))>=0&&h<=1&&(r[l++]=h),p>=0&&p<=1&&(r[l++]=p)}}return l}(e,i,o,s,ys);if(0===l)return 0;var u=Cn(e,i,o);if(u>=0&&u<=1){for(var h=0,c=In(e,i,o,u),p=0;pn||s<-n)return 0;var l=Math.sqrt(n*n-s*s);ys[0]=-l,ys[1]=l;var u=Math.abs(i-r);if(u<1e-4)return 0;if(u>=gs-1e-4){i=0,r=gs;var h=o?1:-1;return a>=ys[0]+t&&a<=ys[1]+t?h:0}if(i>r){var c=i;i=r,r=c}i<0&&(i+=gs,r+=gs);for(var p=0,d=0;d<2;d++){var f=ys[d];if(f+t>a){var g=Math.atan2(s,f);h=o?1:-1;g<0&&(g=gs+g),(g>=i&&g<=r||g+gs>=i&&g+gs<=r)&&(g>Math.PI/2&&g<1.5*Math.PI&&(h=-h),p+=h)}}return p}function bs(t,e,n,i,r){for(var o,a,s,l,u=t.data,h=t.len(),c=0,p=0,d=0,f=0,g=0,y=0;y1&&(n||(c+=ds(p,d,f,g,i,r))),m&&(f=p=u[y],g=d=u[y+1]),v){case fs.M:p=f=u[y++],d=g=u[y++];break;case 
fs.L:if(n){if(as(p,d,u[y],u[y+1],e,i,r))return!0}else c+=ds(p,d,u[y],u[y+1],i,r)||0;p=u[y++],d=u[y++];break;case fs.C:if(n){if(ss(p,d,u[y++],u[y++],u[y++],u[y++],u[y],u[y+1],e,i,r))return!0}else c+=ms(p,d,u[y++],u[y++],u[y++],u[y++],u[y],u[y+1],i,r)||0;p=u[y++],d=u[y++];break;case fs.Q:if(n){if(ls(p,d,u[y++],u[y++],u[y],u[y+1],e,i,r))return!0}else c+=xs(p,d,u[y++],u[y++],u[y],u[y+1],i,r)||0;p=u[y++],d=u[y++];break;case fs.A:var x=u[y++],_=u[y++],b=u[y++],w=u[y++],S=u[y++],M=u[y++];y+=1;var I=!!(1-u[y++]);o=Math.cos(S)*b+x,a=Math.sin(S)*w+_,m?(f=o,g=a):c+=ds(p,d,o,a,i,r);var T=(i-x)*w/b+x;if(n){if(ps(x,_,w,S,S+M,I,e,T,r))return!0}else c+=_s(x,_,w,S,S+M,I,T,r);p=Math.cos(S+M)*b+x,d=Math.sin(S+M)*w+_;break;case fs.R:if(f=p=u[y++],g=d=u[y++],o=f+u[y++],a=g+u[y++],n){if(as(f,g,o,g,e,i,r)||as(o,g,o,a,e,i,r)||as(o,a,f,a,e,i,r)||as(f,a,f,g,e,i,r))return!0}else c+=ds(o,g,o,a,i,r),c+=ds(f,a,f,g,i,r);break;case fs.Z:if(n){if(as(p,d,f,g,e,i,r))return!0}else c+=ds(p,d,f,g,i,r);p=f,d=g}}return n||(s=d,l=g,Math.abs(s-l)<1e-4)||(c+=ds(p,d,f,g,i,r)||0),0!==c}var ws=k({fill:"#000",stroke:null,strokePercent:1,fillOpacity:1,strokeOpacity:1,lineDashOffset:0,lineWidth:1,lineCap:"butt",miterLimit:10,strokeNoScale:!1,strokeFirst:!1},xa),Ss={style:k({fill:!0,stroke:!0,strokePercent:!0,fillOpacity:!0,strokeOpacity:!0,lineDashOffset:!0,lineWidth:!0,miterLimit:!0},_a.style)},Ms=yr.concat(["invisible","culling","z","z2","zlevel","parent"]),Is=function(t){function e(e){return t.call(this,e)||this}var i;return n(e,t),e.prototype.update=function(){var n=this;t.prototype.update.call(this);var i=this.style;if(i.decal){var r=this._decalEl=this._decalEl||new e;r.buildPath===e.prototype.buildPath&&(r.buildPath=function(t){n.buildPath(t,n.shape)}),r.silent=!0;var o=r.style;for(var a in i)o[a]!==i[a]&&(o[a]=i[a]);o.fill=i.fill?i.decal:null,o.decal=null,o.shadowColor=null,i.strokeFirst&&(o.stroke=null);for(var s=0;s.5?ar:e>.2?"#eee":sr}if(t)return sr}return 
ar},e.prototype.getInsideTextStroke=function(t){var e=this.style.fill;if(U(e)){var n=this.__zr;if(!(!n||!n.isDarkMode())===oi(t,0)<.4)return e}},e.prototype.buildPath=function(t,e,n){},e.prototype.pathUpdated=function(){this.__dirty&=-5},e.prototype.getUpdatedPathProxy=function(t){return!this.path&&this.createPathProxy(),this.path.beginPath(),this.buildPath(this.path,this.shape,t),this.path},e.prototype.createPathProxy=function(){this.path=new os(!1)},e.prototype.hasStroke=function(){var t=this.style,e=t.stroke;return!(null==e||"none"===e||!(t.lineWidth>0))},e.prototype.hasFill=function(){var t=this.style.fill;return null!=t&&"none"!==t},e.prototype.getBoundingRect=function(){var t=this._rect,e=this.style,n=!t;if(n){var i=!1;this.path||(i=!0,this.createPathProxy());var r=this.path;(i||4&this.__dirty)&&(r.beginPath(),this.buildPath(r,this.shape,!1),this.pathUpdated()),t=r.getBoundingRect()}if(this._rect=t,this.hasStroke()&&this.path&&this.path.len()>0){var o=this._rectStroke||(this._rectStroke=t.clone());if(this.__dirty||n){o.copy(t);var a=e.strokeNoScale?this.getLineScale():1,s=e.lineWidth;if(!this.hasFill()){var l=this.strokeContainThreshold;s=Math.max(s,null==l?4:l)}a>1e-10&&(o.width+=s/a,o.height+=s/a,o.x-=s/a/2,o.y-=s/a/2)}return o}return t},e.prototype.contain=function(t,e){var n=this.transformCoordToLocal(t,e),i=this.getBoundingRect(),r=this.style;if(t=n[0],e=n[1],i.contain(t,e)){var o=this.path;if(this.hasStroke()){var a=r.lineWidth,s=r.strokeNoScale?this.getLineScale():1;if(s>1e-10&&(this.hasFill()||(a=Math.max(a,this.strokeContainThreshold)),function(t,e,n,i){return bs(t,e,!0,n,i)}(o,a/s,t,e)))return!0}if(this.hasFill())return function(t,e,n){return bs(t,0,!1,e,n)}(o,t,e)}return!1},e.prototype.dirtyShape=function(){this.__dirty|=4,this._rect&&(this._rect=null),this._decalEl&&this._decalEl.dirtyShape(),this.markRedraw()},e.prototype.dirty=function(){this.dirtyStyle(),this.dirtyShape()},e.prototype.animateShape=function(t){return 
this.animate("shape",t)},e.prototype.updateDuringAnimation=function(t){"style"===t?this.dirtyStyle():"shape"===t?this.dirtyShape():this.markRedraw()},e.prototype.attrKV=function(e,n){"shape"===e?this.setShape(n):t.prototype.attrKV.call(this,e,n)},e.prototype.setShape=function(t,e){var n=this.shape;return n||(n=this.shape={}),"string"==typeof t?n[t]=e:A(n,t),this.dirtyShape(),this},e.prototype.shapeChanged=function(){return!!(4&this.__dirty)},e.prototype.createStyle=function(t){return mt(ws,t)},e.prototype._innerSaveToNormal=function(e){t.prototype._innerSaveToNormal.call(this,e);var n=this._normalState;e.shape&&!n.shape&&(n.shape=A({},this.shape))},e.prototype._applyStateObj=function(e,n,i,r,o,a){t.prototype._applyStateObj.call(this,e,n,i,r,o,a);var s,l=!(n&&r);if(n&&n.shape?o?r?s=n.shape:(s=A({},i.shape),A(s,n.shape)):(s=A({},r?this.shape:i.shape),A(s,n.shape)):l&&(s=i.shape),s)if(o){this.shape=A({},this.shape);for(var u={},h=G(s),c=0;c0},e.prototype.hasFill=function(){var t=this.style.fill;return null!=t&&"none"!==t},e.prototype.createStyle=function(t){return mt(Ts,t)},e.prototype.setBoundingRect=function(t){this._rect=t},e.prototype.getBoundingRect=function(){var t=this.style;if(!this._rect){var e=t.text;null!=e?e+="":e="";var n=br(e,t.font,t.textAlign,t.textBaseline);if(n.x+=t.x||0,n.y+=t.y||0,this.hasStroke()){var i=t.lineWidth;n.x-=i/2,n.y-=i/2,n.width+=i,n.height+=i}this._rect=n}return this._rect},e.initDefaultProps=void(e.prototype.dirtyRectTolerance=10),e}(Sa);Cs.prototype.type="tspan";var Ds=k({x:0,y:0},xa),As={style:k({x:!0,y:!0,width:!0,height:!0,sx:!0,sy:!0,sWidth:!0,sHeight:!0},_a.style)};var ks=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.createStyle=function(t){return mt(Ds,t)},e.prototype._getSize=function(t){var e=this.style,n=e[t];if(null!=n)return n;var i,r=(i=e.image)&&"string"!=typeof i&&i.width&&i.height?e.image:this.__image;if(!r)return 0;var o="width"===t?"height":"width",a=e[o];return 
null==a?r[t]:r[t]/r[o]*a},e.prototype.getWidth=function(){return this._getSize("width")},e.prototype.getHeight=function(){return this._getSize("height")},e.prototype.getAnimationStyleProps=function(){return As},e.prototype.getBoundingRect=function(){var t=this.style;return this._rect||(this._rect=new ze(t.x||0,t.y||0,this.getWidth(),this.getHeight())),this._rect},e}(Sa);ks.prototype.type="image";var Ls=Math.round;function Ps(t,e,n){if(e){var i=e.x1,r=e.x2,o=e.y1,a=e.y2;t.x1=i,t.x2=r,t.y1=o,t.y2=a;var s=n&&n.lineWidth;return s?(Ls(2*i)===Ls(2*r)&&(t.x1=t.x2=Rs(i,s,!0)),Ls(2*o)===Ls(2*a)&&(t.y1=t.y2=Rs(o,s,!0)),t):t}}function Os(t,e,n){if(e){var i=e.x,r=e.y,o=e.width,a=e.height;t.x=i,t.y=r,t.width=o,t.height=a;var s=n&&n.lineWidth;return s?(t.x=Rs(i,s,!0),t.y=Rs(r,s,!0),t.width=Math.max(Rs(i+o,s,!1)-t.x,0===o?0:1),t.height=Math.max(Rs(r+a,s,!1)-t.y,0===a?0:1),t):t}}function Rs(t,e,n){if(!e)return t;var i=Ls(2*t);return(i+Ls(e))%2==0?i/2:(i+(n?1:-1))/2}var Ns=function(){this.x=0,this.y=0,this.width=0,this.height=0},Es={},zs=function(t){function e(e){return t.call(this,e)||this}return n(e,t),e.prototype.getDefaultShape=function(){return new Ns},e.prototype.buildPath=function(t,e){var n,i,r,o;if(this.subPixelOptimize){var a=Os(Es,e,this.style);n=a.x,i=a.y,r=a.width,o=a.height,a.r=e.r,e=a}else n=e.x,i=e.y,r=e.width,o=e.height;e.r?function(t,e){var n,i,r,o,a,s=e.x,l=e.y,u=e.width,h=e.height,c=e.r;u<0&&(s+=u,u=-u),h<0&&(l+=h,h=-h),"number"==typeof c?n=i=r=o=c:c instanceof 
Array?1===c.length?n=i=r=o=c[0]:2===c.length?(n=r=c[0],i=o=c[1]):3===c.length?(n=c[0],i=o=c[1],r=c[2]):(n=c[0],i=c[1],r=c[2],o=c[3]):n=i=r=o=0,n+i>u&&(n*=u/(a=n+i),i*=u/a),r+o>u&&(r*=u/(a=r+o),o*=u/a),i+r>h&&(i*=h/(a=i+r),r*=h/a),n+o>h&&(n*=h/(a=n+o),o*=h/a),t.moveTo(s+n,l),t.lineTo(s+u-i,l),0!==i&&t.arc(s+u-i,l+i,i,-Math.PI/2,0),t.lineTo(s+u,l+h-r),0!==r&&t.arc(s+u-r,l+h-r,r,0,Math.PI/2),t.lineTo(s+o,l+h),0!==o&&t.arc(s+o,l+h-o,o,Math.PI/2,Math.PI),t.lineTo(s,l+n),0!==n&&t.arc(s+n,l+n,n,Math.PI,1.5*Math.PI)}(t,e):t.rect(n,i,r,o)},e.prototype.isZeroArea=function(){return!this.shape.width||!this.shape.height},e}(Is);zs.prototype.type="rect";var Vs={fill:"#000"},Bs={style:k({fill:!0,stroke:!0,fillOpacity:!0,strokeOpacity:!0,lineWidth:!0,fontSize:!0,lineHeight:!0,width:!0,height:!0,textShadowColor:!0,textShadowBlur:!0,textShadowOffsetX:!0,textShadowOffsetY:!0,backgroundColor:!0,padding:!0,borderColor:!0,borderWidth:!0,borderRadius:!0},_a.style)},Fs=function(t){function e(e){var n=t.call(this)||this;return n.type="text",n._children=[],n._defaultStyle=Vs,n.attr(e),n}return n(e,t),e.prototype.childrenRef=function(){return this._children},e.prototype.update=function(){t.prototype.update.call(this),this.styleChanged()&&this._updateSubTexts();for(var e=0;ed&&h){var f=Math.floor(d/l);n=n.slice(0,f)}if(t&&a&&null!=c)for(var g=la(c,o,e.ellipsis,{minChar:e.truncateMinChar,placeholder:e.placeholder}),y=0;y0,T=null!=t.width&&("truncate"===t.overflow||"break"===t.overflow||"breakAll"===t.overflow),C=i.calculatedLineHeight,D=0;Dl&&fa(n,t.substring(l,u),e,s),fa(n,i[2],e,s,i[1]),l=aa.lastIndex}lo){b>0?(m.tokens=m.tokens.slice(0,b),y(m,_,x),n.lines=n.lines.slice(0,v+1)):n.lines=n.lines.slice(0,v);break t}var C=w.width,D=null==C||"auto"===C;if("string"==typeof C&&"%"===C.charAt(C.length-1))P.percentWidth=C,h.push(P),P.contentWidth=xr(P.text,I);else{if(D){var A=w.backgroundColor,k=A&&A.image;k&&oa(k=na(k))&&(P.width=Math.max(P.width,k.width*T/k.height))}var 
L=f&&null!=r?r-_:null;null!=L&&L=0&&"right"===(C=x[T]).align;)this._placeToken(C,t,b,f,I,"right",y),w-=C.width,I-=C.width,T--;for(M+=(n-(M-d)-(g-I)-w)/2;S<=T;)C=x[S],this._placeToken(C,t,b,f,M+C.width/2,"center",y),M+=C.width,S++;f+=b}},e.prototype._placeToken=function(t,e,n,i,r,o,s){var l=e.rich[t.styleName]||{};l.text=t.text;var u=t.verticalAlign,h=i+n/2;"top"===u?h=i+t.height/2:"bottom"===u&&(h=i+n-t.height/2),!t.isLineHolder&&Js(l)&&this._renderBackground(l,e,"right"===o?r-t.width:"center"===o?r-t.width/2:r,h-t.height/2,t.width,t.height);var c=!!l.backgroundColor,p=t.textPadding;p&&(r=Ks(r,o,p),h-=t.height/2-p[0]-t.innerHeight/2);var d=this._getOrCreateChild(Cs),f=d.createStyle();d.useStyle(f);var g=this._defaultStyle,y=!1,v=0,m=qs("fill"in l?l.fill:"fill"in e?e.fill:(y=!0,g.fill)),x=js("stroke"in l?l.stroke:"stroke"in e?e.stroke:c||s||g.autoStroke&&!y?null:(v=2,g.stroke)),_=l.textShadowBlur>0||e.textShadowBlur>0;f.text=t.text,f.x=r,f.y=h,_&&(f.shadowBlur=l.textShadowBlur||e.textShadowBlur||0,f.shadowColor=l.textShadowColor||e.textShadowColor||"transparent",f.shadowOffsetX=l.textShadowOffsetX||e.textShadowOffsetX||0,f.shadowOffsetY=l.textShadowOffsetY||e.textShadowOffsetY||0),f.textAlign=o,f.textBaseline="middle",f.font=t.font||a,f.opacity=ot(l.opacity,e.opacity,1),Xs(f,l),x&&(f.lineWidth=ot(l.lineWidth,e.lineWidth,v),f.lineDash=rt(l.lineDash,e.lineDash),f.lineDashOffset=e.lineDashOffset||0,f.stroke=x),m&&(f.fill=m);var b=t.contentWidth,w=t.contentHeight;d.setBoundingRect(new ze(wr(f.x,b,f.textAlign),Sr(f.y,w,f.textBaseline),b,w))},e.prototype._renderBackground=function(t,e,n,i,r,o){var a,s,l,u=t.backgroundColor,h=t.borderWidth,c=t.borderColor,p=u&&u.image,d=u&&!p,f=t.borderRadius,g=this;if(d||t.lineHeight||h&&c){(a=this._getOrCreateChild(zs)).useStyle(a.createStyle()),a.style.fill=null;var y=a.shape;y.x=n,y.y=i,y.width=r,y.height=o,y.r=f,a.dirtyShape()}if(d)(l=a.style).fill=u||null,l.fillOpacity=rt(t.fillOpacity,1);else 
if(p){(s=this._getOrCreateChild(ks)).onload=function(){g.dirtyStyle()};var v=s.style;v.image=u.image,v.x=n,v.y=i,v.width=r,v.height=o}h&&c&&((l=a.style).lineWidth=h,l.stroke=c,l.strokeOpacity=rt(t.strokeOpacity,1),l.lineDash=t.borderDash,l.lineDashOffset=t.borderDashOffset||0,a.strokeContainThreshold=0,a.hasFill()&&a.hasStroke()&&(l.strokeFirst=!0,l.lineWidth*=2));var m=(a||s).style;m.shadowBlur=t.shadowBlur||0,m.shadowColor=t.shadowColor||"transparent",m.shadowOffsetX=t.shadowOffsetX||0,m.shadowOffsetY=t.shadowOffsetY||0,m.opacity=ot(t.opacity,e.opacity,1)},e.makeFont=function(t){var e="";return Us(t)&&(e=[t.fontStyle,t.fontWeight,Ys(t.fontSize),t.fontFamily||"sans-serif"].join(" ")),e&&ut(e)||t.textFont||t.font},e}(Sa),Gs={left:!0,right:1,center:1},Ws={top:1,bottom:1,middle:1},Hs=["fontStyle","fontWeight","fontSize","fontFamily"];function Ys(t){return"string"!=typeof t||-1===t.indexOf("px")&&-1===t.indexOf("rem")&&-1===t.indexOf("em")?isNaN(+t)?"12px":t+"px":t}function Xs(t,e){for(var n=0;n=0,o=!1;if(t instanceof Is){var a=il(t),s=r&&a.selectFill||a.normalFill,l=r&&a.selectStroke||a.normalStroke;if(dl(s)||dl(l)){var u=(i=i||{}).style||{};"inherit"===u.fill?(o=!0,i=A({},i),(u=A({},u)).fill=s):!dl(u.fill)&&dl(s)?(o=!0,i=A({},i),(u=A({},u)).fill=gl(s)):!dl(u.stroke)&&dl(l)&&(o||(i=A({},i),u=A({},u)),u.stroke=gl(l)),i.style=u}}if(i&&null==i.z2){o||(i=A({},i));var h=t.z2EmphasisLift;i.z2=t.z2+(null!=h?h:sl)}return i}(this,0,e,n);if("blur"===t)return function(t,e,n){var i=P(t.currentStates,e)>=0,r=t.style.opacity,o=i?null:function(t,e,n,i){for(var r=t.style,o={},a=0;a0){var o={dataIndex:r,seriesIndex:t.seriesIndex};null!=i&&(o.dataType=i),e.push(o)}}))})),e}function Hl(t,e,n){ql(t,!0),Ml(t,Cl),Xl(t,e,n)}function Yl(t,e,n,i){i?function(t){ql(t,!1)}(t):Hl(t,e,n)}function Xl(t,e,n){var i=Qs(t);null!=e?(i.focus=e,i.blurScope=n):i.focus&&(i.focus=null)}var 
Ul=["emphasis","blur","select"],Zl={itemStyle:"getItemStyle",lineStyle:"getLineStyle",areaStyle:"getAreaStyle"};function jl(t,e,n,i){n=n||"itemStyle";for(var r=0;r1&&(a*=ru(f),s*=ru(f));var g=(r===o?-1:1)*ru((a*a*(s*s)-a*a*(d*d)-s*s*(p*p))/(a*a*(d*d)+s*s*(p*p)))||0,y=g*a*d/s,v=g*-s*p/a,m=(t+n)/2+au(c)*y-ou(c)*v,x=(e+i)/2+ou(c)*y+au(c)*v,_=hu([1,0],[(p-y)/a,(d-v)/s]),b=[(p-y)/a,(d-v)/s],w=[(-1*p-y)/a,(-1*d-v)/s],S=hu(b,w);if(uu(b,w)<=-1&&(S=su),uu(b,w)>=1&&(S=0),S<0){var M=Math.round(S/su*1e6)/1e6;S=2*su+M%2*su}h.addData(u,m,x,a,s,_,S,c,o)}var pu=/([mlvhzcqtsa])([^mlvhzcqtsa]*)/gi,du=/-?([0-9]*\.)?[0-9]+([eE]-?[0-9]+)?/g;var fu=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.applyTransform=function(t){},e}(Is);function gu(t){return null!=t.setData}function yu(t,e){var n=function(t){var e=new os;if(!t)return e;var n,i=0,r=0,o=i,a=r,s=os.CMD,l=t.match(pu);if(!l)return e;for(var u=0;uk*k+L*L&&(M=T,I=C),{cx:M,cy:I,x0:-h,y0:-c,x1:M*(r/b-1),y1:I*(r/b-1)}}function Nu(t,e){var n,i=Lu(e.r,0),r=Lu(e.r0||0,0),o=i>0;if(o||r>0){if(o||(i=r,r=0),r>i){var a=i;i=r,r=a}var s=e.startAngle,l=e.endAngle;if(!isNaN(s)&&!isNaN(l)){var u=e.cx,h=e.cy,c=!!e.clockwise,p=Au(l-s),d=p>Mu&&p%Mu;if(d>Ou&&(p=d),i>Ou)if(p>Mu-Ou)t.moveTo(u+i*Tu(s),h+i*Iu(s)),t.arc(u,h,i,s,l,!c),r>Ou&&(t.moveTo(u+r*Tu(l),h+r*Iu(l)),t.arc(u,h,r,l,s,c));else{var f=void 0,g=void 0,y=void 0,v=void 0,m=void 0,x=void 0,_=void 0,b=void 0,w=void 0,S=void 0,M=void 0,I=void 0,T=void 0,C=void 0,D=void 0,A=void 0,k=i*Tu(s),L=i*Iu(s),P=r*Tu(l),O=r*Iu(l),R=p>Ou;if(R){var N=e.cornerRadius;N&&(n=function(t){var e;if(Y(t)){var n=t.length;if(!n)return t;e=1===n?[t[0],t[0],0,0]:2===n?[t[0],t[0],t[1],t[1]]:3===n?t.concat(t[2]):t}else e=[t,t,t,t];return e}(N),f=n[0],g=n[1],y=n[2],v=n[3]);var E=Au(i-r)/2;if(m=Pu(E,y),x=Pu(E,v),_=Pu(E,f),b=Pu(E,g),M=w=Lu(m,x),I=S=Lu(_,b),(w>Ou||S>Ou)&&(T=i*Tu(l),C=i*Iu(l),D=r*Tu(s),A=r*Iu(s),pOu){var 
X=Pu(y,M),U=Pu(v,M),Z=Ru(D,A,k,L,i,X,c),j=Ru(T,C,P,O,i,U,c);t.moveTo(u+Z.cx+Z.x0,h+Z.cy+Z.y0),M0&&t.arc(u+Z.cx,h+Z.cy,X,Du(Z.y0,Z.x0),Du(Z.y1,Z.x1),!c),t.arc(u,h,i,Du(Z.cy+Z.y1,Z.cx+Z.x1),Du(j.cy+j.y1,j.cx+j.x1),!c),U>0&&t.arc(u+j.cx,h+j.cy,U,Du(j.y1,j.x1),Du(j.y0,j.x0),!c))}else t.moveTo(u+k,h+L),t.arc(u,h,i,s,l,!c);else t.moveTo(u+k,h+L);if(r>Ou&&R)if(I>Ou){X=Pu(f,I),Z=Ru(P,O,T,C,r,-(U=Pu(g,I)),c),j=Ru(k,L,D,A,r,-X,c);t.lineTo(u+Z.cx+Z.x0,h+Z.cy+Z.y0),I0&&t.arc(u+Z.cx,h+Z.cy,U,Du(Z.y0,Z.x0),Du(Z.y1,Z.x1),!c),t.arc(u,h,r,Du(Z.cy+Z.y1,Z.cx+Z.x1),Du(j.cy+j.y1,j.cx+j.x1),c),X>0&&t.arc(u+j.cx,h+j.cy,X,Du(j.y1,j.x1),Du(j.y0,j.x0),!c))}else t.lineTo(u+P,h+O),t.arc(u,h,r,l,s,c);else t.lineTo(u+P,h+O)}else t.moveTo(u,h);t.closePath()}}}var Eu=function(){this.cx=0,this.cy=0,this.r0=0,this.r=0,this.startAngle=0,this.endAngle=2*Math.PI,this.clockwise=!0,this.cornerRadius=0},zu=function(t){function e(e){return t.call(this,e)||this}return n(e,t),e.prototype.getDefaultShape=function(){return new Eu},e.prototype.buildPath=function(t,e){Nu(t,e)},e.prototype.isZeroArea=function(){return this.shape.startAngle===this.shape.endAngle||this.shape.r===this.shape.r0},e}(Is);zu.prototype.type="sector";var Vu=function(){this.cx=0,this.cy=0,this.r=0,this.r0=0},Bu=function(t){function e(e){return t.call(this,e)||this}return n(e,t),e.prototype.getDefaultShape=function(){return new Vu},e.prototype.buildPath=function(t,e){var n=e.cx,i=e.cy,r=2*Math.PI;t.moveTo(n+e.r,i),t.arc(n,i,e.r,0,r,!1),t.moveTo(n+e.r0,i),t.arc(n,i,e.r0,0,r,!0)},e}(Is);function Fu(t,e,n){var i=e.smooth,r=e.points;if(r&&r.length>=2){if(i){var o=function(t,e,n,i){var r,o,a,s,l=[],u=[],h=[],c=[];if(i){a=[1/0,1/0],s=[-1/0,-1/0];for(var p=0,d=t.length;poh[1]){if(a=!1,r)return a;var u=Math.abs(oh[0]-rh[1]),h=Math.abs(rh[0]-oh[1]);Math.min(u,h)>i.len()&&(u0){var c={duration:h.duration,delay:h.delay||0,easing:h.easing,done:o,force:!!o||!!a,setToFinal:!u,scope:t,during:a};l?e.animateFrom(n,c):e.animateTo(n,c)}else 
e.stopAnimation(),!l&&e.attr(n),a&&a(1),o&&o()}function fh(t,e,n,i,r,o){dh("update",t,e,n,i,r,o)}function gh(t,e,n,i,r,o){dh("enter",t,e,n,i,r,o)}function yh(t){if(!t.__zr)return!0;for(var e=0;eMath.abs(o[1])?o[0]>0?"right":"left":o[1]>0?"bottom":"top"}function Bh(t){return!t.isGroup}function Fh(t,e,n){if(t&&e){var i,r=(i={},t.traverse((function(t){Bh(t)&&t.anid&&(i[t.anid]=t)})),i);e.traverse((function(t){if(Bh(t)&&t.anid){var e=r[t.anid];if(e){var i=o(t);t.attr(o(e)),fh(t,i,n,Qs(t).dataIndex)}}}))}function o(t){var e={x:t.x,y:t.y,rotation:t.rotation};return function(t){return null!=t.shape}(t)&&(e.shape=A({},t.shape)),e}}function Gh(t,e){return z(t,(function(t){var n=t[0];n=bh(n,e.x),n=wh(n,e.x+e.width);var i=t[1];return i=bh(i,e.y),[n,i=wh(i,e.y+e.height)]}))}function Wh(t,e){var n=bh(t.x,e.x),i=wh(t.x+t.width,e.x+e.width),r=bh(t.y,e.y),o=wh(t.y+t.height,e.y+e.height);if(i>=n&&o>=r)return{x:n,y:r,width:i-n,height:o-r}}function Hh(t,e,n){var i=A({rectHover:!0},e),r=i.style={strokeNoScale:!0};if(n=n||{x:-1,y:-1,width:2,height:2},t)return 0===t.indexOf("image://")?(r.image=t.slice(8),k(r,n),new ks(i)):Ah(t.replace("path://",""),i,n,"center")}function Yh(t,e,n,i,r){for(var o=0,a=r[r.length-1];o=-1e-6)return!1;var f=t-r,g=e-o,y=Uh(f,g,u,h)/d;if(y<0||y>1)return!1;var v=Uh(f,g,c,p)/d;return!(v<0||v>1)}function Uh(t,e,n,i){return t*i-n*e}function Zh(t){var e=t.itemTooltipOption,n=t.componentModel,i=t.itemName,r=U(e)?{formatter:e}:e,o=n.mainType,a=n.componentIndex,s={componentType:o,name:i,$vars:["name"]};s[o+"Index"]=a;var l=t.formatterParamsExtra;l&&E(G(l),(function(t){_t(s,t)||(s[t]=l[t],s.$vars.push(t))}));var u=Qs(t.el);u.componentMainType=o,u.componentIndex=a,u.tooltipConfig={name:i,option:k({content:i,formatterParams:s},r)}}function jh(t,e){var n;t.isGroup&&(n=e(t)),n||t.traverse(e)}function qh(t,e){if(t)if(Y(t))for(var n=0;n-1?Dc:kc;function Rc(t,e){t=t.toUpperCase(),Pc[t]=new Mc(e),Lc[t]=e}function Nc(t){return 
Pc[t]}Rc(Ac,{time:{month:["January","February","March","April","May","June","July","August","September","October","November","December"],monthAbbr:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayOfWeek:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayOfWeekAbbr:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"]},legend:{selector:{all:"All",inverse:"Inv"}},toolbox:{brush:{title:{rect:"Box Select",polygon:"Lasso Select",lineX:"Horizontally Select",lineY:"Vertically Select",keep:"Keep Selections",clear:"Clear Selections"}},dataView:{title:"Data View",lang:["Data View","Close","Refresh"]},dataZoom:{title:{zoom:"Zoom",back:"Zoom Reset"}},magicType:{title:{line:"Switch to Line Chart",bar:"Switch to Bar Chart",stack:"Stack",tiled:"Tile"}},restore:{title:"Restore"},saveAsImage:{title:"Save as Image",lang:["Right Click to Save Image"]}},series:{typeNames:{pie:"Pie chart",bar:"Bar chart",line:"Line chart",scatter:"Scatter plot",effectScatter:"Ripple scatter plot",radar:"Radar chart",tree:"Tree",treemap:"Treemap",boxplot:"Boxplot",candlestick:"Candlestick",k:"K line chart",heatmap:"Heat map",map:"Map",parallel:"Parallel coordinate map",lines:"Line graph",graph:"Relationship graph",sankey:"Sankey diagram",funnel:"Funnel chart",gauge:"Gauge",pictorialBar:"Pictorial bar",themeRiver:"Theme River Map",sunburst:"Sunburst"}},aria:{general:{withTitle:'This is a chart about "{title}"',withoutTitle:"This is a chart"},series:{single:{prefix:"",withName:" with type {seriesType} named {seriesName}.",withoutName:" with type {seriesType}."},multiple:{prefix:". 
It consists of {seriesCount} series count.",withName:" The {seriesId} series is a {seriesType} representing {seriesName}.",withoutName:" The {seriesId} series is a {seriesType}.",separator:{middle:"",end:""}}},data:{allData:"The data is as follows: ",partialData:"The first {displayCnt} items are: ",withName:"the data for {name} is {value}",withoutName:"{value}",separator:{middle:", ",end:". "}}}}),Rc(Dc,{time:{month:["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月"],monthAbbr:["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月"],dayOfWeek:["星期日","星期一","星期二","星期三","星期四","星期五","星期六"],dayOfWeekAbbr:["日","一","二","三","四","五","六"]},legend:{selector:{all:"全选",inverse:"反选"}},toolbox:{brush:{title:{rect:"矩形选择",polygon:"圈选",lineX:"横向选择",lineY:"纵向选择",keep:"保持选择",clear:"清除选择"}},dataView:{title:"数据视图",lang:["数据视图","关闭","刷新"]},dataZoom:{title:{zoom:"区域缩放",back:"区域缩放还原"}},magicType:{title:{line:"切换为折线图",bar:"切换为柱状图",stack:"切换为堆叠",tiled:"切换为平铺"}},restore:{title:"还原"},saveAsImage:{title:"保存为图片",lang:["右键另存为图片"]}},series:{typeNames:{pie:"饼图",bar:"柱状图",line:"折线图",scatter:"散点图",effectScatter:"涟漪散点图",radar:"雷达图",tree:"树图",treemap:"矩形树图",boxplot:"箱型图",candlestick:"K线图",k:"K线图",heatmap:"热力图",map:"地图",parallel:"平行坐标图",lines:"线图",graph:"关系图",sankey:"桑基图",funnel:"漏斗图",gauge:"仪表盘图",pictorialBar:"象形柱图",themeRiver:"主题河流图",sunburst:"旭日图"}},aria:{general:{withTitle:"这是一个关于“{title}”的图表。",withoutTitle:"这是一个图表,"},series:{single:{prefix:"",withName:"图表类型是{seriesType},表示{seriesName}。",withoutName:"图表类型是{seriesType}。"},multiple:{prefix:"它由{seriesCount}个图表系列组成。",withName:"第{seriesId}个系列是一个表示{seriesName}的{seriesType},",withoutName:"第{seriesId}个系列是一个{seriesType},",separator:{middle:";",end:"。"}}},data:{allData:"其数据是——",partialData:"其中,前{displayCnt}项是——",withName:"{name}的数据是{value}",withoutName:"{value}",separator:{middle:",",end:""}}}});var 
Ec=1e3,zc=6e4,Vc=36e5,Bc=864e5,Fc=31536e6,Gc={year:"{yyyy}",month:"{MMM}",day:"{d}",hour:"{HH}:{mm}",minute:"{HH}:{mm}",second:"{HH}:{mm}:{ss}",millisecond:"{HH}:{mm}:{ss} {SSS}",none:"{yyyy}-{MM}-{dd} {HH}:{mm}:{ss} {SSS}"},Wc="{yyyy}-{MM}-{dd}",Hc={year:"{yyyy}",month:"{yyyy}-{MM}",day:Wc,hour:Wc+" "+Gc.hour,minute:Wc+" "+Gc.minute,second:Wc+" "+Gc.second,millisecond:Gc.none},Yc=["year","month","day","hour","minute","second","millisecond"],Xc=["year","half-year","quarter","month","week","half-week","day","half-day","quarter-day","hour","minute","second","millisecond"];function Uc(t,e){return"0000".substr(0,e-(t+="").length)+t}function Zc(t){switch(t){case"half-year":case"quarter":return"month";case"week":case"half-week":return"day";case"half-day":case"quarter-day":return"hour";default:return t}}function jc(t){return t===Zc(t)}function qc(t,e,n,i){var r=ro(t),o=r[Jc(n)](),a=r[Qc(n)]()+1,s=Math.floor((a-1)/3)+1,l=r[tp(n)](),u=r["get"+(n?"UTC":"")+"Day"](),h=r[ep(n)](),c=(h-1)%12+1,p=r[np(n)](),d=r[ip(n)](),f=r[rp(n)](),g=(i instanceof Mc?i:Nc(i||Oc)||Pc[kc]).getModel("time"),y=g.get("month"),v=g.get("monthAbbr"),m=g.get("dayOfWeek"),x=g.get("dayOfWeekAbbr");return(e||"").replace(/{yyyy}/g,o+"").replace(/{yy}/g,Uc(o%100+"",2)).replace(/{Q}/g,s+"").replace(/{MMMM}/g,y[a-1]).replace(/{MMM}/g,v[a-1]).replace(/{MM}/g,Uc(a,2)).replace(/{M}/g,a+"").replace(/{dd}/g,Uc(l,2)).replace(/{d}/g,l+"").replace(/{eeee}/g,m[u]).replace(/{ee}/g,x[u]).replace(/{e}/g,u+"").replace(/{HH}/g,Uc(h,2)).replace(/{H}/g,h+"").replace(/{hh}/g,Uc(c+"",2)).replace(/{h}/g,c+"").replace(/{mm}/g,Uc(p,2)).replace(/{m}/g,p+"").replace(/{ss}/g,Uc(d,2)).replace(/{s}/g,d+"").replace(/{SSS}/g,Uc(f,3)).replace(/{S}/g,f+"")}function Kc(t,e){var n=ro(t),i=n[Qc(e)]()+1,r=n[tp(e)](),o=n[ep(e)](),a=n[np(e)](),s=n[ip(e)](),l=0===n[rp(e)](),u=l&&0===s,h=u&&0===a,c=h&&0===o,p=c&&1===r;return p&&1===i?"year":p?"month":c?"day":h?"hour":u?"minute":l?"second":"millisecond"}function $c(t,e,n){var 
i=j(t)?ro(t):t;switch(e=e||Kc(t,n)){case"year":return i[Jc(n)]();case"half-year":return i[Qc(n)]()>=6?1:0;case"quarter":return Math.floor((i[Qc(n)]()+1)/4);case"month":return i[Qc(n)]();case"day":return i[tp(n)]();case"half-day":return i[ep(n)]()/24;case"hour":return i[ep(n)]();case"minute":return i[np(n)]();case"second":return i[ip(n)]();case"millisecond":return i[rp(n)]()}}function Jc(t){return t?"getUTCFullYear":"getFullYear"}function Qc(t){return t?"getUTCMonth":"getMonth"}function tp(t){return t?"getUTCDate":"getDate"}function ep(t){return t?"getUTCHours":"getHours"}function np(t){return t?"getUTCMinutes":"getMinutes"}function ip(t){return t?"getUTCSeconds":"getSeconds"}function rp(t){return t?"getUTCMilliseconds":"getMilliseconds"}function op(t){return t?"setUTCFullYear":"setFullYear"}function ap(t){return t?"setUTCMonth":"setMonth"}function sp(t){return t?"setUTCDate":"setDate"}function lp(t){return t?"setUTCHours":"setHours"}function up(t){return t?"setUTCMinutes":"setMinutes"}function hp(t){return t?"setUTCSeconds":"setSeconds"}function cp(t){return t?"setUTCMilliseconds":"setMilliseconds"}function pp(t){if(!co(t))return U(t)?t:"-";var e=(t+"").split(".");return e[0].replace(/(\d{1,3})(?=(?:\d{3})+(?!\d))/g,"$1,")+(e.length>1?"."+e[1]:"")}function dp(t,e){return t=(t||"").toLowerCase().replace(/-(.)/g,(function(t,e){return e.toUpperCase()})),e&&t&&(t=t.charAt(0).toUpperCase()+t.slice(1)),t}var fp=st;function gp(t,e,n){function i(t){return t&&ut(t)?t:"-"}function r(t){return!(null==t||isNaN(t)||!isFinite(t))}var o="time"===e,a=t instanceof Date;if(o||a){var s=o?ro(t):t;if(!isNaN(+s))return qc(s,"{yyyy}-{MM}-{dd} {HH}:{mm}:{ss}",n);if(a)return"-"}if("ordinal"===e)return Z(t)?i(t):j(t)&&r(t)?t+"":"-";var l=ho(t);return r(l)?pp(l):Z(t)?i(t):"boolean"==typeof t?t+"":"-"}var yp=["a","b","c","d","e","f","g"],vp=function(t,e){return"{"+t+(null==e?"":e)+"}"};function mp(t,e,n){Y(e)||(e=[e]);var i=e.length;if(!i)return"";for(var 
r=e[0].$vars||[],o=0;o':'':{renderMode:o,content:"{"+(n.markerId||"markerX")+"|} ",style:"subItem"===r?{width:4,height:4,borderRadius:2,backgroundColor:i}:{width:10,height:10,borderRadius:5,backgroundColor:i}}:""}function _p(t,e){return e=e||"transparent",U(t)?t:q(t)&&t.colorStops&&(t.colorStops[0]||{}).color||e}function bp(t,e){if("_blank"===e||"blank"===e){var n=window.open();n.opener=null,n.location.href=t}else window.open(t,e)}var wp=E,Sp=["left","right","top","bottom","width","height"],Mp=[["width","left","right"],["height","top","bottom"]];function Ip(t,e,n,i,r){var o=0,a=0;null==i&&(i=1/0),null==r&&(r=1/0);var s=0;e.eachChild((function(l,u){var h,c,p=l.getBoundingRect(),d=e.childAt(u+1),f=d&&d.getBoundingRect();if("horizontal"===t){var g=p.width+(f?-f.x+p.x:0);(h=o+g)>i||l.newline?(o=0,h=g,a+=s+n,s=p.height):s=Math.max(s,p.height)}else{var y=p.height+(f?-f.y+p.y:0);(c=a+y)>r||l.newline?(o+=s+n,a=0,c=y,s=p.width):s=Math.max(s,p.width)}l.newline||(l.x=o,l.y=a,l.markRedraw(),"horizontal"===t?o=h+n:a=c+n)}))}var Tp=Ip;H(Ip,"vertical"),H(Ip,"horizontal");function Cp(t,e,n){n=fp(n||0);var i=e.width,r=e.height,o=Ur(t.left,i),a=Ur(t.top,r),s=Ur(t.right,i),l=Ur(t.bottom,r),u=Ur(t.width,i),h=Ur(t.height,r),c=n[2]+n[0],p=n[1]+n[3],d=t.aspect;switch(isNaN(u)&&(u=i-s-p-o),isNaN(h)&&(h=r-l-c-a),null!=d&&(isNaN(u)&&isNaN(h)&&(d>i/r?u=.8*i:h=.8*r),isNaN(u)&&(u=d*h),isNaN(h)&&(h=u/d)),isNaN(o)&&(o=i-s-u-p),isNaN(a)&&(a=r-l-h-c),t.left||t.right){case"center":o=i/2-u/2-n[3];break;case"right":o=i-u-p}switch(t.top||t.bottom){case"middle":case"center":a=r/2-h/2-n[0];break;case"bottom":a=r-h-c}o=o||0,a=a||0,isNaN(u)&&(u=i-p-o-(s||0)),isNaN(h)&&(h=r-c-a-(l||0));var f=new ze(o+n[3],a+n[0],u,h);return f.margin=n,f}function Dp(t,e,n,i,r,o){var a,s=!r||!r.hv||r.hv[0],l=!r||!r.hv||r.hv[1],u=r&&r.boundingMode||"all";if((o=o||t).x=t.x,o.y=t.y,!s&&!l)return!1;if("raw"===u)a="group"===t.type?new ze(0,0,+e.width||0,+e.height||0):t.getBoundingRect();else 
if(a=t.getBoundingRect(),t.needLocalTransform()){var h=t.getLocalTransform();(a=a.clone()).applyTransform(h)}var c=Cp(k({width:a.width,height:a.height},e),n,i),p=s?c.x-a.x:0,d=l?c.y-a.y:0;return"raw"===u?(o.x=p,o.y=d):(o.x+=p,o.y+=d),o===t&&t.markRedraw(),!0}function Ap(t){var e=t.layoutMode||t.constructor.layoutMode;return q(e)?e:e?{type:e}:null}function kp(t,e,n){var i=n&&n.ignoreSize;!Y(i)&&(i=[i,i]);var r=a(Mp[0],0),o=a(Mp[1],1);function a(n,r){var o={},a=0,u={},h=0;if(wp(n,(function(e){u[e]=t[e]})),wp(n,(function(t){s(e,t)&&(o[t]=u[t]=e[t]),l(o,t)&&a++,l(u,t)&&h++})),i[r])return l(e,n[1])?u[n[2]]=null:l(e,n[2])&&(u[n[1]]=null),u;if(2!==h&&a){if(a>=2)return o;for(var c=0;c=0;a--)o=C(o,n[a],!0);e.defaultOption=o}return e.defaultOption},e.prototype.getReferringComponents=function(t,e){var n=t+"Index",i=t+"Id";return Bo(this.ecModel,t,{index:this.get(n,!0),id:this.get(i,!0)},e)},e.prototype.getBoxLayoutParams=function(){var t=this;return{left:t.get("left"),top:t.get("top"),right:t.get("right"),bottom:t.get("bottom"),width:t.get("width"),height:t.get("height")}},e.prototype.getZLevelKey=function(){return""},e.prototype.setZLevel=function(t){this.option.zlevel=t},e.protoInitialize=function(){var t=e.prototype;t.type="component",t.id="",t.name="",t.mainType="",t.subType="",t.componentIndex=0}(),e}(Mc);Zo(Rp,Mc),$o(Rp),function(t){var e={};t.registerSubTypeDefaulter=function(t,n){var i=Xo(t);e[i.main]=n},t.determineSubType=function(n,i){var r=i.type;if(!r){var o=Xo(n).main;t.hasSubTypes(n)&&e[o]&&(r=e[o](i))}return r}}(Rp),function(t,e){function n(t,e){return t[e]||(t[e]={predecessor:[],successor:[]}),t[e]}t.topologicalTravel=function(t,i,r,o){if(t.length){var a=function(t){var i={},r=[];return E(t,(function(o){var a=n(i,o),s=function(t,e){var n=[];return E(t,(function(t){P(e,t)>=0&&n.push(t)})),n}(a.originalDeps=e(o),t);a.entryCount=s.length,0===a.entryCount&&r.push(o),E(s,(function(t){P(a.predecessor,t)<0&&a.predecessor.push(t);var 
e=n(i,t);P(e.successor,t)<0&&e.successor.push(o)}))})),{graph:i,noEntryList:r}}(i),s=a.graph,l=a.noEntryList,u={};for(E(t,(function(t){u[t]=!0}));l.length;){var h=l.pop(),c=s[h],p=!!u[h];p&&(r.call(o,h,c.originalDeps.slice()),delete u[h]),E(c.successor,p?f:d)}E(u,(function(){var t="";throw new Error(t)}))}function d(t){s[t].entryCount--,0===s[t].entryCount&&l.push(t)}function f(t){u[t]=!0,d(t)}}}(Rp,(function(t){var e=[];E(Rp.getClassesByMainType(t),(function(t){e=e.concat(t.dependencies||t.prototype.dependencies||[])})),e=z(e,(function(t){return Xo(t).main})),"dataset"!==t&&P(e,"dataset")<=0&&e.unshift("dataset");return e}));var Np="";"undefined"!=typeof navigator&&(Np=navigator.platform||"");var Ep="rgba(0, 0, 0, 0.2)",zp={darkMode:"auto",colorBy:"series",color:["#5470c6","#91cc75","#fac858","#ee6666","#73c0de","#3ba272","#fc8452","#9a60b4","#ea7ccc"],gradientColor:["#f6efa6","#d88273","#bf444c"],aria:{decal:{decals:[{color:Ep,dashArrayX:[1,0],dashArrayY:[2,5],symbolSize:1,rotation:Math.PI/6},{color:Ep,symbol:"circle",dashArrayX:[[8,8],[0,8,8,0]],dashArrayY:[6,0],symbolSize:.8},{color:Ep,dashArrayX:[1,0],dashArrayY:[4,3],rotation:-Math.PI/4},{color:Ep,dashArrayX:[[6,6],[0,6,6,0]],dashArrayY:[6,0]},{color:Ep,dashArrayX:[[1,0],[1,6]],dashArrayY:[1,0,6,0],rotation:Math.PI/4},{color:Ep,symbol:"triangle",dashArrayX:[[9,9],[0,9,9,0]],dashArrayY:[7,2],symbolSize:.75}]}},textStyle:{fontFamily:Np.match(/^Win/)?"Microsoft 
YaHei":"sans-serif",fontSize:12,fontStyle:"normal",fontWeight:"normal"},blendMode:null,stateAnimation:{duration:300,easing:"cubicOut"},animation:"auto",animationDuration:1e3,animationDurationUpdate:500,animationEasing:"cubicInOut",animationEasingUpdate:"cubicInOut",animationThreshold:2e3,progressiveThreshold:3e3,progressive:400,hoverLayerThreshold:3e3,useUTC:!1},Vp=yt(["tooltip","label","itemName","itemId","itemGroupId","seriesName"]),Bp="original",Fp="arrayRows",Gp="objectRows",Wp="keyedColumns",Hp="typedArray",Yp="unknown",Xp="column",Up="row",Zp=1,jp=2,qp=3,Kp=Oo();function $p(t,e,n){var i={},r=Qp(e);if(!r||!t)return i;var o,a,s=[],l=[],u=e.ecModel,h=Kp(u).datasetMap,c=r.uid+"_"+n.seriesLayoutBy;E(t=t.slice(),(function(e,n){var r=q(e)?e:t[n]={name:e};"ordinal"===r.type&&null==o&&(o=n,a=f(r)),i[r.name]=[]}));var p=h.get(c)||h.set(c,{categoryWayDim:a,valueWayDim:0});function d(t,e,n){for(var i=0;ie)return t[i];return t[n-1]}(i,a):n;if((h=h||n)&&h.length){var c=h[l];return r&&(u[r]=c),s.paletteIdx=(l+1)%h.length,c}}var cd="\0_ec_inner";var pd=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.init=function(t,e,n,i,r,o){i=i||{},this.option=null,this._theme=new Mc(i),this._locale=new Mc(r),this._optionManager=o},e.prototype.setOption=function(t,e,n){var i=gd(e);this._optionManager.setOption(t,n,i),this._resetOption(null,i)},e.prototype.resetOption=function(t,e){return this._resetOption(t,gd(e))},e.prototype._resetOption=function(t,e){var n=!1,i=this._optionManager;if(!t||"recreate"===t){var r=i.mountOption("recreate"===t);0,this.option&&"recreate"!==t?(this.restoreData(),this._mergeOption(r,e)):od(this,r),n=!0}if("timeline"!==t&&"media"!==t||this.restoreData(),!t||"recreate"===t||"timeline"===t){var o=i.getTimelineOption(this);o&&(n=!0,this._mergeOption(o,e))}if(!t||"recreate"===t||"media"===t){var a=i.getMediaOption(this);a.length&&E(a,(function(t){n=!0,this._mergeOption(t,e)}),this)}return 
n},e.prototype.mergeOption=function(t){this._mergeOption(t,null)},e.prototype._mergeOption=function(t,e){var n=this.option,i=this._componentsMap,r=this._componentsCount,o=[],a=yt(),s=e&&e.replaceMergeMainTypeMap;Kp(this).datasetMap=yt(),E(t,(function(t,e){null!=t&&(Rp.hasClass(e)?e&&(o.push(e),a.set(e,!0)):n[e]=null==n[e]?T(t):C(n[e],t,!0))})),s&&s.each((function(t,e){Rp.hasClass(e)&&!a.get(e)&&(o.push(e),a.set(e,!0))})),Rp.topologicalTravel(o,Rp.getAllClassMainTypes(),(function(e){var o=function(t,e,n){var i=nd.get(e);if(!i)return n;var r=i(t);return r?n.concat(r):n}(this,e,bo(t[e])),a=i.get(e),l=a?s&&s.get(e)?"replaceMerge":"normalMerge":"replaceAll",u=To(a,o,l);(function(t,e,n){E(t,(function(t){var i=t.newOption;q(i)&&(t.keyInfo.mainType=e,t.keyInfo.subType=function(t,e,n,i){return e.type?e.type:n?n.subType:i.determineSubType(t,e)}(e,i,t.existing,n))}))})(u,e,Rp),n[e]=null,i.set(e,null),r.set(e,0);var h,c=[],p=[],d=0;E(u,(function(t,n){var i=t.existing,r=t.newOption;if(r){var o="series"===e,a=Rp.getClass(e,t.keyInfo.subType,!o);if(!a)return;if("tooltip"===e){if(h)return void 0;h=!0}if(i&&i.constructor===a)i.name=t.keyInfo.name,i.mergeOption(r,this),i.optionUpdated(r,!1);else{var s=A({componentIndex:n},t.keyInfo);A(i=new a(r,this,this,s),s),t.brandNew&&(i.__requireNewView=!0),i.init(r,this,this),i.optionUpdated(null,!0)}}else i&&(i.mergeOption({},this),i.optionUpdated({},!1));i?(c.push(i.option),p.push(i),d++):(c.push(void 0),p.push(void 0))}),this),n[e]=c,i.set(e,p),r.set(e,d),"series"===e&&id(this)}),this),this._seriesIndices||id(this)},e.prototype.getOption=function(){var t=T(this.option);return E(t,(function(e,n){if(Rp.hasClass(n)){for(var i=bo(e),r=i.length,o=!1,a=r-1;a>=0;a--)i[a]&&!Lo(i[a])?o=!0:(i[a]=null,!o&&r--);i.length=r,t[n]=i}})),delete t[cd],t},e.prototype.getTheme=function(){return this._theme},e.prototype.getLocaleModel=function(){return 
this._locale},e.prototype.setUpdatePayload=function(t){this._payload=t},e.prototype.getUpdatePayload=function(){return this._payload},e.prototype.getComponent=function(t,e){var n=this._componentsMap.get(t);if(n){var i=n[e||0];if(i)return i;if(null==e)for(var r=0;r=e:"max"===n?t<=e:t===e})(i[a],t,o)||(r=!1)}})),r}var Sd=E,Md=q,Id=["areaStyle","lineStyle","nodeStyle","linkStyle","chordStyle","label","labelLine"];function Td(t){var e=t&&t.itemStyle;if(e)for(var n=0,i=Id.length;n=0;g--){var y=t[g];if(s||(p=y.data.rawIndexOf(y.stackedByDimension,c)),p>=0){var v=y.data.getByRawIndex(y.stackResultDimension,p);if("all"===l||"positive"===l&&v>0||"negative"===l&&v<0||"samesign"===l&&d>=0&&v>0||"samesign"===l&&d<=0&&v<0){d=Qr(d,v),f=v;break}}}return i[0]=d,i[1]=f,i}))}))}var Yd,Xd,Ud,Zd,jd,qd=function(t){this.data=t.data||(t.sourceFormat===Wp?{}:[]),this.sourceFormat=t.sourceFormat||Yp,this.seriesLayoutBy=t.seriesLayoutBy||Xp,this.startIndex=t.startIndex||0,this.dimensionsDetectedCount=t.dimensionsDetectedCount,this.metaRawOption=t.metaRawOption;var e=this.dimensionsDefine=t.dimensionsDefine;if(e)for(var n=0;nu&&(u=d)}s[0]=l,s[1]=u}},i=function(){return this._data?this._data.length/this._dimSize:0};function r(t){for(var e=0;e=0&&(s=o.interpolatedValue[l])}return null!=s?s+"":""})):void 0},t.prototype.getRawValue=function(t,e){return gf(this.getData(e),t)},t.prototype.formatTooltip=function(t,e,n){},t}();function mf(t){var e,n;return q(t)?t.type&&(n=t):e=t,{text:e,frag:n}}function xf(t){return new _f(t)}var _f=function(){function t(t){t=t||{},this._reset=t.reset,this._plan=t.plan,this._count=t.count,this._onDirty=t.onDirty,this._dirty=!0}return t.prototype.perform=function(t){var e,n=this._upstream,i=t&&t.skip;if(this._dirty&&n){var r=this.context;r.data=r.outputData=n.context.outputData}this.__pipeline&&(this.__pipeline.currentTask=this),this._plan&&!i&&(e=this._plan(this.context));var o,a=h(this._modBy),s=this._modDataCount||0,l=h(t&&t.modBy),u=t&&t.modDataCount||0;function 
h(t){return!(t>=1)&&(t=1),t}a===l&&s===u||(e="reset"),(this._dirty||"reset"===e)&&(this._dirty=!1,o=this._doReset(i)),this._modBy=l,this._modDataCount=u;var c=t&&t.step;if(this._dueEnd=n?n._outputDueEnd:this._count?this._count(this.context):1/0,this._progress){var p=this._dueIndex,d=Math.min(null!=c?this._dueIndex+c:1/0,this._dueEnd);if(!i&&(o||p1&&i>0?s:a}};return o;function a(){return e=t?null:oe},gte:function(t,e){return t>=e}},Tf=function(){function t(t,e){if(!j(e)){var n="";0,vo(n)}this._opFn=If[t],this._rvalFloat=ho(e)}return t.prototype.evaluate=function(t){return j(t)?this._opFn(t,this._rvalFloat):this._opFn(ho(t),this._rvalFloat)},t}(),Cf=function(){function t(t,e){var n="desc"===t;this._resultLT=n?1:-1,null==e&&(e=n?"min":"max"),this._incomparable="min"===e?-1/0:1/0}return t.prototype.evaluate=function(t,e){var n=j(t)?t:ho(t),i=j(e)?e:ho(e),r=isNaN(n),o=isNaN(i);if(r&&(n=this._incomparable),o&&(i=this._incomparable),r&&o){var a=U(t),s=U(e);a&&(n=s?t:0),s&&(i=a?e:0)}return ni?-this._resultLT:0},t}(),Df=function(){function t(t,e){this._rval=e,this._isEQ=t,this._rvalTypeof=typeof e,this._rvalFloat=ho(e)}return t.prototype.evaluate=function(t){var e=t===this._rval;if(!e){var n=typeof t;n===this._rvalTypeof||"number"!==n&&"number"!==this._rvalTypeof||(e=ho(t)===this._rvalFloat)}return this._isEQ?e:!e},t}();function Af(t,e){return"eq"===t||"ne"===t?new Df("eq"===t,e):_t(If,t)?new Tf(t,e):null}var kf=function(){function t(){}return t.prototype.getRawData=function(){throw new Error("not supported")},t.prototype.getRawDataItem=function(t){throw new Error("not supported")},t.prototype.cloneRawData=function(){},t.prototype.getDimensionInfo=function(t){},t.prototype.cloneAllDimensionInfo=function(){},t.prototype.count=function(){},t.prototype.retrieveValue=function(t,e){},t.prototype.retrieveValueFromItem=function(t,e){},t.prototype.convertValue=function(t,e){return wf(t,e)},t}();function Lf(t){var e=t.sourceFormat;if(!zf(e)){var n="";0,vo(n)}return t.data}function 
Pf(t){var e=t.sourceFormat,n=t.data;if(!zf(e)){var i="";0,vo(i)}if(e===Fp){for(var r=[],o=0,a=n.length;o65535?Ff:Gf}function Uf(t,e,n,i,r){var o=Yf[n||"float"];if(r){var a=t[e],s=a&&a.length;if(s!==i){for(var l=new o(i),u=0;ug[1]&&(g[1]=f)}return this._rawCount=this._count=s,{start:a,end:s}},t.prototype._initDataFromProvider=function(t,e,n){for(var i=this._provider,r=this._chunks,o=this._dimensions,a=o.length,s=this._rawExtent,l=z(o,(function(t){return t.property})),u=0;uy[1]&&(y[1]=g)}}!i.persistent&&i.clean&&i.clean(),this._rawCount=this._count=e,this._extent=[]},t.prototype.count=function(){return this._count},t.prototype.get=function(t,e){if(!(e>=0&&e=0&&e=this._rawCount||t<0)return-1;if(!this._indices)return t;var e=this._indices,n=e[t];if(null!=n&&nt))return o;r=o-1}}return-1},t.prototype.indicesOfNearest=function(t,e,n){var i=this._chunks[t],r=[];if(!i)return r;null==n&&(n=1/0);for(var o=1/0,a=-1,s=0,l=0,u=this.count();l=0&&a<0)&&(o=c,a=h,s=0),h===a&&(r[s++]=l))}return r.length=s,r},t.prototype.getIndices=function(){var t,e=this._indices;if(e){var n=e.constructor,i=this._count;if(n===Array){t=new n(i);for(var r=0;r=u&&x<=h||isNaN(x))&&(a[s++]=d),d++}p=!0}else if(2===r){f=c[i[0]];var y=c[i[1]],v=t[i[1]][0],m=t[i[1]][1];for(g=0;g=u&&x<=h||isNaN(x))&&(_>=v&&_<=m||isNaN(_))&&(a[s++]=d),d++}p=!0}}if(!p)if(1===r)for(g=0;g=u&&x<=h||isNaN(x))&&(a[s++]=b)}else for(g=0;gt[M][1])&&(w=!1)}w&&(a[s++]=e.getRawIndex(g))}return sy[1]&&(y[1]=g)}}}},t.prototype.lttbDownSample=function(t,e){var n,i,r,o=this.clone([t],!0),a=o._chunks[t],s=this.count(),l=0,u=Math.floor(1/e),h=this.getRawIndex(0),c=new(Xf(this._rawCount))(Math.min(2*(Math.ceil(s/u)+2),s));c[l++]=h;for(var p=1;pn&&(n=i,r=I)}M>0&&M<_-x&&(c[l++]=Math.min(S,r),r=Math.max(S,r)),c[l++]=r,h=r}return c[l++]=this.getRawIndex(s-1),o._count=l,o._indices=c,o.getRawIndex=this._getRawIdx,o},t.prototype.downSample=function(t,e,n,i){for(var 
r=this.clone([t],!0),o=r._chunks,a=[],s=Math.floor(1/e),l=o[t],u=this.count(),h=r._rawExtent[t]=[1/0,-1/0],c=new(Xf(this._rawCount))(Math.ceil(u/s)),p=0,d=0;du-d&&(s=u-d,a.length=s);for(var f=0;fh[1]&&(h[1]=y),c[p++]=v}return r._count=p,r._indices=c,r._updateGetRawIdx(),r},t.prototype.each=function(t,e){if(this._count)for(var n=t.length,i=this._chunks,r=0,o=this.count();ra&&(a=l)}return i=[o,a],this._extent[t]=i,i},t.prototype.getRawDataItem=function(t){var e=this.getRawIndex(t);if(this._provider.persistent)return this._provider.getItem(e);for(var n=[],i=this._chunks,r=0;r=0?this._indices[t]:-1},t.prototype._updateGetRawIdx=function(){this.getRawIndex=this._indices?this._getRawIdx:this._getRawIdxIdentity},t.internalField=function(){function t(t,e,n,i){return wf(t[i],this._dimensions[i])}Vf={arrayRows:t,objectRows:function(t,e,n,i){return wf(t[e],this._dimensions[i])},keyedColumns:t,original:function(t,e,n,i){var r=t&&(null==t.value?t:t.value);return wf(r instanceof Array?r[i]:r,this._dimensions[i])},typedArray:function(t,e,n,i){return t[i]}}}(),t}(),jf=function(){function t(t){this._sourceList=[],this._storeList=[],this._upstreamSignList=[],this._versionSignBase=0,this._dirty=!0,this._sourceHost=t}return t.prototype.dirty=function(){this._setLocalSource([],[]),this._storeList=[],this._dirty=!0},t.prototype._setLocalSource=function(t,e){this._sourceList=t,this._upstreamSignList=e,this._versionSignBase++,this._versionSignBase>9e10&&(this._versionSignBase=0)},t.prototype._getVersionSign=function(){return this._sourceHost.uid+"_"+this._versionSignBase},t.prototype.prepareSource=function(){this._isDirty()&&(this._createSource(),this._dirty=!1)},t.prototype._createSource=function(){this._setLocalSource([],[]);var t,e,n=this._sourceHost,i=this._getUpstreamSourceManagers(),r=!!i.length;if(Kf(n)){var o=n,a=void 0,s=void 0,l=void 0;if(r){var u=i[0];u.prepareSource(),a=(l=u.getSource()).data,s=l.sourceFormat,e=[u._getVersionSign()]}else s=$(a=o.get("data",!0))?Hp:Bp,e=[];var 
h=this._getSourceMetaRawOption()||{},c=l&&l.metaRawOption||{},p=rt(h.seriesLayoutBy,c.seriesLayoutBy)||null,d=rt(h.sourceHeader,c.sourceHeader),f=rt(h.dimensions,c.dimensions);t=p!==c.seriesLayoutBy||!!d!=!!c.sourceHeader||f?[$d(a,{seriesLayoutBy:p,sourceHeader:d,dimensions:f},s)]:[]}else{var g=n;if(r){var y=this._applyTransform(i);t=y.sourceList,e=y.upstreamSignList}else{t=[$d(g.get("source",!0),this._getSourceMetaRawOption(),null)],e=[]}}this._setLocalSource(t,e)},t.prototype._applyTransform=function(t){var e,n=this._sourceHost,i=n.get("transform",!0),r=n.get("fromTransformResult",!0);if(null!=r){var o="";1!==t.length&&$f(o)}var a,s=[],l=[];return E(t,(function(t){t.prepareSource();var e=t.getSource(r||0),n="";null==r||e||$f(n),s.push(e),l.push(t._getVersionSign())})),i?e=function(t,e,n){var i=bo(t),r=i.length,o="";r||vo(o);for(var a=0,s=r;a1||n>0&&!t.noHeader;return E(t.blocks,(function(t){var n=og(t);n>=e&&(e=n+ +(i&&(!n||ig(t)&&!t.noHeader)))})),e}return 0}function ag(t,e,n,i){var r,o=e.noHeader,a=(r=og(e),{html:tg[r],richText:eg[r]}),s=[],l=e.blocks||[];lt(!l||Y(l)),l=l||[];var u=t.orderMode;if(e.sortBlocks&&u){l=l.slice();var h={valueAsc:"asc",valueDesc:"desc"};if(_t(h,u)){var c=new Cf(h[u],null);l.sort((function(t,e){return c.evaluate(t.sortParam,e.sortParam)}))}else"seriesDesc"===u&&l.reverse()}E(l,(function(n,r){var o=e.valueFormatter,l=rg(n)(o?A(A({},t),{valueFormatter:o}):t,n,r>0?a.html:0,i);null!=l&&s.push(l)}));var p="richText"===t.renderMode?s.join(a.richText):ug(s.join(""),o?n:a.html);if(o)return p;var d=gp(e.header,"ordinal",t.useUTC),f=Qf(i,t.renderMode).nameStyle;return"richText"===t.renderMode?hg(t,d,f)+a.richText+p:ug('
'+re(d)+"
"+p,n)}function sg(t,e,n,i){var r=t.renderMode,o=e.noName,a=e.noValue,s=!e.markerType,l=e.name,u=t.useUTC,h=e.valueFormatter||t.valueFormatter||function(t){return z(t=Y(t)?t:[t],(function(t,e){return gp(t,Y(d)?d[e]:d,u)}))};if(!o||!a){var c=s?"":t.markupStyleCreator.makeTooltipMarker(e.markerType,e.markerColor||"#333",r),p=o?"":gp(l,"ordinal",u),d=e.valueType,f=a?[]:h(e.value),g=!s||!o,y=!s&&o,v=Qf(i,r),m=v.nameStyle,x=v.valueStyle;return"richText"===r?(s?"":c)+(o?"":hg(t,p,m))+(a?"":function(t,e,n,i,r){var o=[r],a=i?10:20;return n&&o.push({padding:[0,0,0,a],align:"right"}),t.markupStyleCreator.wrapRichTextStyle(Y(e)?e.join(" "):e,o)}(t,f,g,y,x)):ug((s?"":c)+(o?"":function(t,e,n){return''+re(t)+""}(p,!s,m))+(a?"":function(t,e,n,i){var r=n?"10px":"20px",o=e?"float:right;margin-left:"+r:"";return t=Y(t)?t:[t],''+z(t,(function(t){return re(t)})).join("  ")+""}(f,g,y,x)),n)}}function lg(t,e,n,i,r,o){if(t)return rg(t)({useUTC:r,renderMode:n,orderMode:i,markupStyleCreator:e,valueFormatter:t.valueFormatter},t,0,o)}function ug(t,e){return'
'+t+'
'}function hg(t,e,n){return t.markupStyleCreator.wrapRichTextStyle(e,n)}function cg(t,e){return _p(t.getData().getItemVisual(e,"style")[t.visualDrawType])}function pg(t,e){var n=t.get("padding");return null!=n?n:"richText"===e?[8,10]:10}var dg=function(){function t(){this.richTextStyles={},this._nextStyleNameId=po()}return t.prototype._generateStyleName=function(){return"__EC_aUTo_"+this._nextStyleNameId++},t.prototype.makeTooltipMarker=function(t,e,n){var i="richText"===n?this._generateStyleName():null,r=xp({color:e,type:t,renderMode:n,markerId:i});return U(r)?r:(this.richTextStyles[i]=r.style,r.content)},t.prototype.wrapRichTextStyle=function(t,e){var n={};Y(e)?E(e,(function(t){return A(n,t)})):A(n,e);var i=this._generateStyleName();return this.richTextStyles[i]=n,"{"+i+"|"+t+"}"},t}();function fg(t){var e,n,i,r,o=t.series,a=t.dataIndex,s=t.multipleSeries,l=o.getData(),u=l.mapDimensionsAll("defaultedTooltip"),h=u.length,c=o.getRawValue(a),p=Y(c),d=cg(o,a);if(h>1||p&&!h){var f=function(t,e,n,i,r){var o=e.getData(),a=V(t,(function(t,e,n){var i=o.getDimensionInfo(n);return t||i&&!1!==i.tooltip&&null!=i.displayName}),!1),s=[],l=[],u=[];function h(t,e){var n=o.getDimensionInfo(e);n&&!1!==n.otherDims.tooltip&&(a?u.push(ng("nameValue",{markerType:"subItem",markerColor:r,name:n.displayName,value:t,valueType:n.type})):(s.push(t),l.push(n.type)))}return i.length?E(i,(function(t){h(gf(o,n,t),t)})):E(t,h),{inlineValues:s,inlineValueTypes:l,blocks:u}}(c,o,a,u,d);e=f.inlineValues,n=f.inlineValueTypes,i=f.blocks,r=f.inlineValues[0]}else if(h){var g=l.getDimensionInfo(u[0]);r=e=gf(l,a,u[0]),n=g.type}else r=e=p?c[0]:c;var y=ko(o),v=y&&o.name||"",m=l.getName(a),x=s?v:m;return ng("section",{header:v,noHeader:s||!y,sortParam:r,blocks:[ng("nameValue",{markerType:"item",markerColor:d,name:x,noName:!ut(x),value:e,valueType:n})].concat(i||[])})}var gg=Oo();function yg(t,e){return t.getName(e)||t.getId(e)}var vg="__universalTransitionEnabled",mg=function(t){function e(){var 
e=null!==t&&t.apply(this,arguments)||this;return e._selectedDataIndicesMap={},e}return n(e,t),e.prototype.init=function(t,e,n){this.seriesIndex=this.componentIndex,this.dataTask=xf({count:_g,reset:bg}),this.dataTask.context={model:this},this.mergeDefaultAndTheme(t,n),(gg(this).sourceManager=new jf(this)).prepareSource();var i=this.getInitialData(t,n);Sg(i,this),this.dataTask.context.data=i,gg(this).dataBeforeProcessed=i,xg(this),this._initSelectedMapFromData(i)},e.prototype.mergeDefaultAndTheme=function(t,e){var n=Ap(this),i=n?Lp(t):{},r=this.subType;Rp.hasClass(r)&&(r+="Series"),C(t,e.getTheme().get(this.subType)),C(t,this.getDefaultOption()),wo(t,"label",["show"]),this.fillDataTextStyle(t.data),n&&kp(t,i,n)},e.prototype.mergeOption=function(t,e){t=C(this.option,t,!0),this.fillDataTextStyle(t.data);var n=Ap(this);n&&kp(this.option,t,n);var i=gg(this).sourceManager;i.dirty(),i.prepareSource();var r=this.getInitialData(t,e);Sg(r,this),this.dataTask.dirty(),this.dataTask.context.data=r,gg(this).dataBeforeProcessed=r,xg(this),this._initSelectedMapFromData(r)},e.prototype.fillDataTextStyle=function(t){if(t&&!$(t))for(var e=["show"],n=0;nthis.getShallow("animationThreshold")&&(e=!1),!!e},e.prototype.restoreData=function(){this.dataTask.dirty()},e.prototype.getColorFromPalette=function(t,e,n){var i=this.ecModel,r=ld.prototype.getColorFromPalette.call(this,t,e,n);return r||(r=i.getColorFromPalette(t,e,n)),r},e.prototype.coordDimToDataDim=function(t){return this.getRawData().mapDimensionsAll(t)},e.prototype.getProgressive=function(){return this.get("progressive")},e.prototype.getProgressiveThreshold=function(){return this.get("progressiveThreshold")},e.prototype.select=function(t,e){this._innerSelect(this.getData(e),t)},e.prototype.unselect=function(t,e){var n=this.option.selectedMap;if(n){var i=this.option.selectedMode,r=this.getData(e);if("series"===i||"all"===n)return this.option.selectedMap={},void(this._selectedDataIndicesMap={});for(var o=0;o=0&&n.push(r)}return 
n},e.prototype.isSelected=function(t,e){var n=this.option.selectedMap;if(!n)return!1;var i=this.getData(e);return("all"===n||n[yg(i,t)])&&!i.getItemModel(t).get(["select","disabled"])},e.prototype.isUniversalTransitionEnabled=function(){if(this[vg])return!0;var t=this.option.universalTransition;return!!t&&(!0===t||t&&t.enabled)},e.prototype._innerSelect=function(t,e){var n,i,r=this.option,o=r.selectedMode,a=e.length;if(o&&a)if("series"===o)r.selectedMap="all";else if("multiple"===o){q(r.selectedMap)||(r.selectedMap={});for(var s=r.selectedMap,l=0;l0&&this._innerSelect(t,e)}},e.registerClass=function(t){return Rp.registerClass(t)},e.protoInitialize=function(){var t=e.prototype;t.type="series.__base__",t.seriesIndex=0,t.ignoreStyleOnData=!1,t.hasSymbolVisual=!1,t.defaultSymbol="circle",t.visualStyleAccessPath="itemStyle",t.visualDrawType="fill"}(),e}(Rp);function xg(t){var e=t.name;ko(t)||(t.name=function(t){var e=t.getRawData(),n=e.mapDimensionsAll("seriesName"),i=[];return E(n,(function(t){var n=e.getDimensionInfo(t);n.displayName&&i.push(n.displayName)})),i.join(" ")}(t)||e)}function _g(t){return t.model.getRawData().count()}function bg(t){var e=t.model;return e.setData(e.getRawData().cloneShallow()),wg}function wg(t,e){e.outputData&&t.end>e.outputData.count()&&e.model.getRawData().cloneShallow(e.outputData)}function Sg(t,e){E(vt(t.CHANGABLE_METHODS,t.DOWNSAMPLE_METHODS),(function(n){t.wrapMethod(n,H(Mg,e))}))}function Mg(t,e){var n=Ig(t);return n&&n.setOutputEnd((e||this).count()),e}function Ig(t){var e=(t.ecModel||{}).scheduler,n=e&&e.getPipeline(t.uid);if(n){var i=n.currentTask;if(i){var r=i.agentStubMap;r&&(i=r.get(t.uid))}return i}}R(mg,vf),R(mg,ld),Zo(mg,Rp);var Tg=function(){function t(){this.group=new zr,this.uid=Tc("viewComponent")}return 
t.prototype.init=function(t,e){},t.prototype.render=function(t,e,n,i){},t.prototype.dispose=function(t,e){},t.prototype.updateView=function(t,e,n,i){},t.prototype.updateLayout=function(t,e,n,i){},t.prototype.updateVisual=function(t,e,n,i){},t.prototype.toggleBlurSeries=function(t,e,n){},t.prototype.eachRendered=function(t){var e=this.group;e&&e.traverse(t)},t}();function Cg(){var t=Oo();return function(e){var n=t(e),i=e.pipelineContext,r=!!n.large,o=!!n.progressiveRender,a=n.large=!(!i||!i.large),s=n.progressiveRender=!(!i||!i.progressiveRender);return!(r===a&&o===s)&&"reset"}}Uo(Tg),$o(Tg);var Dg=Oo(),Ag=Cg(),kg=function(){function t(){this.group=new zr,this.uid=Tc("viewChart"),this.renderTask=xf({plan:Og,reset:Rg}),this.renderTask.context={view:this}}return t.prototype.init=function(t,e){},t.prototype.render=function(t,e,n,i){0},t.prototype.highlight=function(t,e,n,i){var r=t.getData(i&&i.dataType);r&&Pg(r,i,"emphasis")},t.prototype.downplay=function(t,e,n,i){var r=t.getData(i&&i.dataType);r&&Pg(r,i,"normal")},t.prototype.remove=function(t,e){this.group.removeAll()},t.prototype.dispose=function(t,e){},t.prototype.updateView=function(t,e,n,i){this.render(t,e,n,i)},t.prototype.updateLayout=function(t,e,n,i){this.render(t,e,n,i)},t.prototype.updateVisual=function(t,e,n,i){this.render(t,e,n,i)},t.prototype.eachRendered=function(t){qh(this.group,t)},t.markUpdateMethod=function(t,e){Dg(t).updateMethod=e},t.protoInitialize=void(t.prototype.type="chart"),t}();function Lg(t,e,n){t&&Kl(t)&&("emphasis"===e?kl:Ll)(t,n)}function Pg(t,e,n){var i=Po(t,e),r=e&&null!=e.highlightKey?function(t){var e=nl[t];return null==e&&el<=32&&(e=nl[t]=el++),e}(e.highlightKey):null;null!=i?E(bo(i),(function(e){Lg(t.getItemGraphicEl(e),n,r)})):t.eachItemGraphicEl((function(t){Lg(t,n,r)}))}function Og(t){return Ag(t.model)}function Rg(t){var 
e=t.model,n=t.ecModel,i=t.api,r=t.payload,o=e.pipelineContext.progressiveRender,a=t.view,s=r&&Dg(r).updateMethod,l=o?"incrementalPrepareRender":s&&a[s]?s:"render";return"render"!==l&&a[l](e,n,i,r),Ng[l]}Uo(kg),$o(kg);var Ng={incrementalPrepareRender:{progress:function(t,e){e.view.incrementalRender(t,e.model,e.ecModel,e.api,e.payload)}},render:{forceFirstProgress:!0,progress:function(t,e){e.view.render(e.model,e.ecModel,e.api,e.payload)}}},Eg="\0__throttleOriginMethod",zg="\0__throttleRate",Vg="\0__throttleType";function Bg(t,e,n){var i,r,o,a,s,l=0,u=0,h=null;function c(){u=(new Date).getTime(),h=null,t.apply(o,a||[])}e=e||0;var p=function(){for(var t=[],p=0;p=0?c():h=setTimeout(c,-r),l=i};return p.clear=function(){h&&(clearTimeout(h),h=null)},p.debounceNextCall=function(t){s=t},p}function Fg(t,e,n,i){var r=t[e];if(r){var o=r[Eg]||r,a=r[Vg];if(r[zg]!==n||a!==i){if(null==n||!i)return t[e]=o;(r=t[e]=Bg(o,n,"debounce"===i))[Eg]=o,r[Vg]=i,r[zg]=n}return r}}function Gg(t,e){var n=t[e];n&&n[Eg]&&(n.clear&&n.clear(),t[e]=n[Eg])}var Wg=Oo(),Hg={itemStyle:Jo(bc,!0),lineStyle:Jo(mc,!0)},Yg={lineStyle:"stroke",itemStyle:"fill"};function Xg(t,e){var n=t.visualStyleMapper||Hg[e];return n||(console.warn("Unknown style type '"+e+"'."),Hg.itemStyle)}function Ug(t,e){var n=t.visualDrawType||Yg[e];return n||(console.warn("Unknown style type '"+e+"'."),"fill")}var Zg={createOnAllSeries:!0,performRawSeries:!0,reset:function(t,e){var n=t.getData(),i=t.visualStyleAccessPath||"itemStyle",r=t.getModel(i),o=Xg(t,i)(r),a=r.getShallow("decal");a&&(n.setVisual("decal",a),a.dirty=!0);var s=Ug(t,i),l=o[s],u=X(l)?l:null,h="auto"===o.fill||"auto"===o.stroke;if(!o[s]||u||h){var c=t.getColorFromPalette(t.name,null,e.getSeriesCount());o[s]||(o[s]=c,n.setVisual("colorFromPalette",!0)),o.fill="auto"===o.fill||X(o.fill)?c:o.fill,o.stroke="auto"===o.stroke||X(o.stroke)?c:o.stroke}if(n.setVisual("style",o),n.setVisual("drawType",s),!e.isSeriesFiltered(t)&&u)return 
n.setVisual("colorFromPalette",!1),{dataEach:function(e,n){var i=t.getDataParams(n),r=A({},o);r[s]=u(i),e.setItemVisual(n,"style",r)}}}},jg=new Mc,qg={createOnAllSeries:!0,performRawSeries:!0,reset:function(t,e){if(!t.ignoreStyleOnData&&!e.isSeriesFiltered(t)){var n=t.getData(),i=t.visualStyleAccessPath||"itemStyle",r=Xg(t,i),o=n.getVisual("drawType");return{dataEach:n.hasItemOption?function(t,e){var n=t.getRawDataItem(e);if(n&&n[i]){jg.option=n[i];var a=r(jg);A(t.ensureUniqueItemVisual(e,"style"),a),jg.option.decal&&(t.setItemVisual(e,"decal",jg.option.decal),jg.option.decal.dirty=!0),o in a&&t.setItemVisual(e,"colorFromPalette",!1)}}:null}}}},Kg={performRawSeries:!0,overallReset:function(t){var e=yt();t.eachSeries((function(t){var n=t.getColorBy();if(!t.isColorBySeries()){var i=t.type+"-"+n,r=e.get(i);r||(r={},e.set(i,r)),Wg(t).scope=r}})),t.eachSeries((function(e){if(!e.isColorBySeries()&&!t.isSeriesFiltered(e)){var n=e.getRawData(),i={},r=e.getData(),o=Wg(e).scope,a=e.visualStyleAccessPath||"itemStyle",s=Ug(e,a);r.each((function(t){var e=r.getRawIndex(t);i[e]=t})),n.each((function(t){var a=i[t];if(r.getItemVisual(a,"colorFromPalette")){var l=r.ensureUniqueItemVisual(a,"style"),u=n.getName(t)||t+"",h=n.count();l[s]=e.getColorFromPalette(u,o,h)}}))}}))}},$g=Math.PI;var Jg=function(){function t(t,e,n,i){this._stageTaskMap=yt(),this.ecInstance=t,this.api=e,n=this._dataProcessorHandlers=n.slice(),i=this._visualHandlers=i.slice(),this._allHandlers=n.concat(i)}return t.prototype.restoreData=function(t,e){t.restoreData(e),this._stageTaskMap.each((function(t){var e=t.overallTask;e&&e.dirty()}))},t.prototype.getPerformArgs=function(t,e){if(t.__pipeline){var n=this._pipelineMap.get(t.__pipeline.id),i=n.context,r=!e&&n.progressiveEnabled&&(!i||i.progressiveRender)&&t.__idxInPipeline>n.blockIndex?n.step:null,o=i&&i.modDataCount;return{step:r,modBy:null!=o?Math.ceil(o/r):null,modDataCount:o}}},t.prototype.getPipeline=function(t){return 
this._pipelineMap.get(t)},t.prototype.updateStreamModes=function(t,e){var n=this._pipelineMap.get(t.uid),i=t.getData().count(),r=n.progressiveEnabled&&e.incrementalPrepareRender&&i>=n.threshold,o=t.get("large")&&i>=t.get("largeThreshold"),a="mod"===t.get("progressiveChunkMode")?i:null;t.pipelineContext=n.context={progressiveRender:r,modDataCount:a,large:o}},t.prototype.restorePipelines=function(t){var e=this,n=e._pipelineMap=yt();t.eachSeries((function(t){var i=t.getProgressive(),r=t.uid;n.set(r,{id:r,head:null,tail:null,threshold:t.getProgressiveThreshold(),progressiveEnabled:i&&!(t.preventIncremental&&t.preventIncremental()),blockIndex:-1,step:Math.round(i||700),count:0}),e._pipe(t,t.dataTask)}))},t.prototype.prepareStageTasks=function(){var t=this._stageTaskMap,e=this.api.getModel(),n=this.api;E(this._allHandlers,(function(i){var r=t.get(i.uid)||t.set(i.uid,{}),o="";lt(!(i.reset&&i.overallReset),o),i.reset&&this._createSeriesStageTask(i,r,e,n),i.overallReset&&this._createOverallStageTask(i,r,e,n)}),this)},t.prototype.prepareView=function(t,e,n,i){var r=t.renderTask,o=r.context;o.model=e,o.ecModel=n,o.api=i,r.__block=!t.incrementalPrepareRender,this._pipe(e,r)},t.prototype.performDataProcessorTasks=function(t,e){this._performStageTasks(this._dataProcessorHandlers,t,e,{block:!0})},t.prototype.performVisualTasks=function(t,e,n){this._performStageTasks(this._visualHandlers,t,e,n)},t.prototype._performStageTasks=function(t,e,n,i){i=i||{};var r=!1,o=this;function a(t,e){return t.setDirty&&(!t.dirtyMap||t.dirtyMap.get(e.__pipeline.id))}E(t,(function(t,s){if(!i.visualType||i.visualType===t.visualType){var l=o._stageTaskMap.get(t.uid),u=l.seriesTaskMap,h=l.overallTask;if(h){var c,p=h.agentStubMap;p.each((function(t){a(i,t)&&(t.dirty(),c=!0)})),c&&h.dirty(),o.updatePayload(h,n);var d=o.getPerformArgs(h,i.block);p.each((function(t){t.perform(d)})),h.perform(d)&&(r=!0)}else u&&u.each((function(s,l){a(i,s)&&s.dirty();var 
u=o.getPerformArgs(s,i.block);u.skip=!t.performRawSeries&&e.isSeriesFiltered(s.context.model),o.updatePayload(s,n),s.perform(u)&&(r=!0)}))}})),this.unfinished=r||this.unfinished},t.prototype.performSeriesTasks=function(t){var e;t.eachSeries((function(t){e=t.dataTask.perform()||e})),this.unfinished=e||this.unfinished},t.prototype.plan=function(){this._pipelineMap.each((function(t){var e=t.tail;do{if(e.__block){t.blockIndex=e.__idxInPipeline;break}e=e.getUpstream()}while(e)}))},t.prototype.updatePayload=function(t,e){"remain"!==e&&(t.context.payload=e)},t.prototype._createSeriesStageTask=function(t,e,n,i){var r=this,o=e.seriesTaskMap,a=e.seriesTaskMap=yt(),s=t.seriesType,l=t.getTargetSeries;function u(e){var s=e.uid,l=a.set(s,o&&o.get(s)||xf({plan:iy,reset:ry,count:sy}));l.context={model:e,ecModel:n,api:i,useClearVisual:t.isVisual&&!t.isLayout,plan:t.plan,reset:t.reset,scheduler:r},r._pipe(e,l)}t.createOnAllSeries?n.eachRawSeries(u):s?n.eachRawSeriesByType(s,u):l&&l(n,i).each(u)},t.prototype._createOverallStageTask=function(t,e,n,i){var r=this,o=e.overallTask=e.overallTask||xf({reset:Qg});o.context={ecModel:n,api:i,overallReset:t.overallReset,scheduler:r};var a=o.agentStubMap,s=o.agentStubMap=yt(),l=t.seriesType,u=t.getTargetSeries,h=!0,c=!1,p="";function d(t){var e=t.uid,n=s.set(e,a&&a.get(e)||(c=!0,xf({reset:ty,onDirty:ny})));n.context={model:t,overallProgress:h},n.agent=o,n.__block=h,r._pipe(t,n)}lt(!t.createOnAllSeries,p),l?n.eachRawSeriesByType(l,d):u?u(n,i).each(d):(h=!1,E(n.getSeries(),d)),c&&o.dirty()},t.prototype._pipe=function(t,e){var n=t.uid,i=this._pipelineMap.get(n);!i.head&&(i.head=e),i.tail&&i.tail.pipe(e),i.tail=e,e.__idxInPipeline=i.count++,e.__pipeline=i},t.wrapStageHandler=function(t,e){return X(t)&&(t={overallReset:t,seriesType:ly(t)}),t.uid=Tc("stageHandler"),e&&(t.visualType=e),t},t}();function Qg(t){t.overallReset(t.ecModel,t.api,t.payload)}function ty(t){return t.overallProgress&&ey}function 
ey(){this.agent.dirty(),this.getDownstream().dirty()}function ny(){this.agent&&this.agent.dirty()}function iy(t){return t.plan?t.plan(t.model,t.ecModel,t.api,t.payload):null}function ry(t){t.useClearVisual&&t.data.clearAllVisual();var e=t.resetDefines=bo(t.reset(t.model,t.ecModel,t.api,t.payload));return e.length>1?z(e,(function(t,e){return ay(e)})):oy}var oy=ay(0);function ay(t){return function(e,n){var i=n.data,r=n.resetDefines[t];if(r&&r.dataEach)for(var o=e.start;o0&&h===r.length-u.length){var c=r.slice(0,h);"data"!==c&&(e.mainType=c,e[u.toLowerCase()]=t,s=!0)}}a.hasOwnProperty(r)&&(n[r]=t,s=!0),s||(i[r]=t)}))}return{cptQuery:e,dataQuery:n,otherQuery:i}},t.prototype.filter=function(t,e){var n=this.eventInfo;if(!n)return!0;var i=n.targetEl,r=n.packedEvent,o=n.model,a=n.view;if(!o||!a)return!0;var s=e.cptQuery,l=e.dataQuery;return u(s,o,"mainType")&&u(s,o,"subType")&&u(s,o,"index","componentIndex")&&u(s,o,"name")&&u(s,o,"id")&&u(l,r,"name")&&u(l,r,"dataIndex")&&u(l,r,"dataType")&&(!a.filterForExposedEvent||a.filterForExposedEvent(t,e.otherQuery,i,r));function u(t,e,n,i){return null==t[n]||e[i||n]===t[n]}},t.prototype.afterTrigger=function(){this.eventInfo=null},t}(),by=["symbol","symbolSize","symbolRotate","symbolOffset"],wy=by.concat(["symbolKeepAspect"]),Sy={createOnAllSeries:!0,performRawSeries:!0,reset:function(t,e){var n=t.getData();if(t.legendIcon&&n.setVisual("legendIcon",t.legendIcon),t.hasSymbolVisual){for(var i={},r={},o=!1,a=0;a=0&&Xy(l)?l:.5,t.createRadialGradient(a,s,0,a,s,l)}(t,e,n):function(t,e,n){var i=null==e.x?0:e.x,r=null==e.x2?1:e.x2,o=null==e.y?0:e.y,a=null==e.y2?0:e.y2;return e.global||(i=i*n.width+n.x,r=r*n.width+n.x,o=o*n.height+n.y,a=a*n.height+n.y),i=Xy(i)?i:0,r=Xy(r)?r:1,o=Xy(o)?o:0,a=Xy(a)?a:0,t.createLinearGradient(i,o,r,a)}(t,e,n),r=e.colorStops,o=0;o0&&(e=i.lineDash,n=i.lineWidth,e&&"solid"!==e&&n>0?"dashed"===e?[4*n,2*n]:"dotted"===e?[n]:j(e)?[e]:Y(e)?e:null:null),o=i.lineDashOffset;if(r){var 
a=i.strokeNoScale&&t.getLineScale?t.getLineScale():1;a&&1!==a&&(r=z(r,(function(t){return t/a})),o/=a)}return[r,o]}var Ky=new os(!0);function $y(t){var e=t.stroke;return!(null==e||"none"===e||!(t.lineWidth>0))}function Jy(t){return"string"==typeof t&&"none"!==t}function Qy(t){var e=t.fill;return null!=e&&"none"!==e}function tv(t,e){if(null!=e.fillOpacity&&1!==e.fillOpacity){var n=t.globalAlpha;t.globalAlpha=e.fillOpacity*e.opacity,t.fill(),t.globalAlpha=n}else t.fill()}function ev(t,e){if(null!=e.strokeOpacity&&1!==e.strokeOpacity){var n=t.globalAlpha;t.globalAlpha=e.strokeOpacity*e.opacity,t.stroke(),t.globalAlpha=n}else t.stroke()}function nv(t,e,n){var i=ia(e.image,e.__image,n);if(oa(i)){var r=t.createPattern(i,e.repeat||"repeat");if("function"==typeof DOMMatrix&&r&&r.setTransform){var o=new DOMMatrix;o.translateSelf(e.x||0,e.y||0),o.rotateSelf(0,0,(e.rotation||0)*wt),o.scaleSelf(e.scaleX||1,e.scaleY||1),r.setTransform(o)}return r}}var iv=["shadowBlur","shadowOffsetX","shadowOffsetY"],rv=[["lineCap","butt"],["lineJoin","miter"],["miterLimit",10]];function ov(t,e,n,i,r){var o=!1;if(!i&&e===(n=n||{}))return!1;if(i||e.opacity!==n.opacity){lv(t,r),o=!0;var a=Math.max(Math.min(e.opacity,1),0);t.globalAlpha=isNaN(a)?xa.opacity:a}(i||e.blend!==n.blend)&&(o||(lv(t,r),o=!0),t.globalCompositeOperation=e.blend||xa.blend);for(var s=0;s0&&t.unfinished);t.unfinished||this._zr.flush()}}},e.prototype.getDom=function(){return this._dom},e.prototype.getId=function(){return this.id},e.prototype.getZr=function(){return this._zr},e.prototype.isSSR=function(){return this._ssr},e.prototype.setOption=function(t,e,n){if(!this[Iv])if(this._disposed)nm(this.id);else{var i,r,o;if(q(e)&&(n=e.lazyUpdate,i=e.silent,r=e.replaceMerge,o=e.transition,e=e.notMerge),this[Iv]=!0,!this._model||e){var a=new bd(this._api),s=this._theme,l=this._model=new pd;l.scheduler=this._scheduler,l.ssr=this._ssr,l.init(null,null,null,s,this._locale,a)}this._model.setOption(t,{replaceMerge:r},am);var 
u={seriesTransition:o,optionChanged:!0};if(n)this[Tv]={silent:i,updateParams:u},this[Iv]=!1,this.getZr().wakeUp();else{try{Ov(this),Ev.update.call(this,null,u)}catch(t){throw this[Tv]=null,this[Iv]=!1,t}this._ssr||this._zr.flush(),this[Tv]=null,this[Iv]=!1,Fv.call(this,i),Gv.call(this,i)}}},e.prototype.setTheme=function(){yo()},e.prototype.getModel=function(){return this._model},e.prototype.getOption=function(){return this._model&&this._model.getOption()},e.prototype.getWidth=function(){return this._zr.getWidth()},e.prototype.getHeight=function(){return this._zr.getHeight()},e.prototype.getDevicePixelRatio=function(){return this._zr.painter.dpr||r.hasGlobalWindow&&window.devicePixelRatio||1},e.prototype.getRenderedCanvas=function(t){return this.renderToCanvas(t)},e.prototype.renderToCanvas=function(t){t=t||{};var e=this._zr.painter;return e.getRenderedCanvas({backgroundColor:t.backgroundColor||this._model.get("backgroundColor"),pixelRatio:t.pixelRatio||this.getDevicePixelRatio()})},e.prototype.renderToSVGString=function(t){t=t||{};var e=this._zr.painter;return e.renderToString({useViewBox:t.useViewBox})},e.prototype.getSvgDataURL=function(){if(r.svgSupported){var t=this._zr;return E(t.storage.getDisplayList(),(function(t){t.stopAnimation(null,!0)})),t.painter.toDataURL()}},e.prototype.getDataURL=function(t){if(!this._disposed){var e=(t=t||{}).excludeComponents,n=this._model,i=[],r=this;E(e,(function(t){n.eachComponent({mainType:t},(function(t){var e=r._componentsMap[t.__viewId];e.group.ignore||(i.push(e),e.group.ignore=!0)}))}));var o="svg"===this._zr.painter.getType()?this.getSvgDataURL():this.renderToCanvas(t).toDataURL("image/"+(t&&t.type||"png"));return E(i,(function(t){t.group.ignore=!1})),o}nm(this.id)},e.prototype.getConnectedDataURL=function(t){if(!this._disposed){var e="svg"===t.type,n=this.group,i=Math.min,r=Math.max,o=1/0;if(cm[n]){var a=o,s=o,l=-1/0,u=-1/0,c=[],p=t&&t.pixelRatio||this.getDevicePixelRatio();E(hm,(function(o,h){if(o.group===n){var 
p=e?o.getZr().painter.getSvgDom().innerHTML:o.renderToCanvas(T(t)),d=o.getDom().getBoundingClientRect();a=i(d.left,a),s=i(d.top,s),l=r(d.right,l),u=r(d.bottom,u),c.push({dom:p,left:d.left,top:d.top})}}));var d=(l*=p)-(a*=p),f=(u*=p)-(s*=p),g=h.createCanvas(),y=Gr(g,{renderer:e?"svg":"canvas"});if(y.resize({width:d,height:f}),e){var v="";return E(c,(function(t){var e=t.left-a,n=t.top-s;v+=''+t.dom+""})),y.painter.getSvgRoot().innerHTML=v,t.connectedBackgroundColor&&y.painter.setBackgroundColor(t.connectedBackgroundColor),y.refreshImmediately(),y.painter.toDataURL()}return t.connectedBackgroundColor&&y.add(new zs({shape:{x:0,y:0,width:d,height:f},style:{fill:t.connectedBackgroundColor}})),E(c,(function(t){var e=new ks({style:{x:t.left*p-a,y:t.top*p-s,image:t.dom}});y.add(e)})),y.refreshImmediately(),g.toDataURL("image/"+(t&&t.type||"png"))}return this.getDataURL(t)}nm(this.id)},e.prototype.convertToPixel=function(t,e){return zv(this,"convertToPixel",t,e)},e.prototype.convertFromPixel=function(t,e){return zv(this,"convertFromPixel",t,e)},e.prototype.containPixel=function(t,e){var n;if(!this._disposed)return E(No(this._model,t),(function(t,i){i.indexOf("Models")>=0&&E(t,(function(t){var r=t.coordinateSystem;if(r&&r.containPoint)n=n||!!r.containPoint(e);else if("seriesModels"===i){var o=this._chartsMap[t.__viewId];o&&o.containPoint&&(n=n||o.containPoint(e,t))}else 0}),this)}),this),!!n;nm(this.id)},e.prototype.getVisual=function(t,e){var n=No(this._model,t,{defaultMainType:"series"}),i=n.seriesModel;var r=i.getData(),o=n.hasOwnProperty("dataIndexInside")?n.dataIndexInside:n.hasOwnProperty("dataIndex")?r.indexOfRawIndex(n.dataIndex):null;return null!=o?Iy(r,o,e):Ty(r,e)},e.prototype.getViewOfComponentModel=function(t){return this._componentsMap[t.__viewId]},e.prototype.getViewOfSeriesModel=function(t){return this._chartsMap[t.__viewId]},e.prototype._initEvents=function(){var t,e,n,i=this;E(em,(function(t){var e=function(e){var 
n,r=i.getModel(),o=e.target,a="globalout"===t;if(a?n={}:o&&ky(o,(function(t){var e=Qs(t);if(e&&null!=e.dataIndex){var i=e.dataModel||r.getSeriesByIndex(e.seriesIndex);return n=i&&i.getDataParams(e.dataIndex,e.dataType,o)||{},!0}if(e.eventData)return n=A({},e.eventData),!0}),!0),n){var s=n.componentType,l=n.componentIndex;"markLine"!==s&&"markPoint"!==s&&"markArea"!==s||(s="series",l=n.seriesIndex);var u=s&&null!=l&&r.getComponent(s,l),h=u&&i["series"===u.mainType?"_chartsMap":"_componentsMap"][u.__viewId];0,n.event=e,n.type=t,i._$eventProcessor.eventInfo={targetEl:o,packedEvent:n,model:u,view:h},i.trigger(t,n)}};e.zrEventfulCallAtLast=!0,i._zr.on(t,e,i)})),E(rm,(function(t,e){i._messageCenter.on(e,(function(t){this.trigger(e,t)}),i)})),E(["selectchanged"],(function(t){i._messageCenter.on(t,(function(e){this.trigger(t,e)}),i)})),t=this._messageCenter,e=this,n=this._api,t.on("selectchanged",(function(t){var i=n.getModel();t.isFromClick?(Ay("map","selectchanged",e,i,t),Ay("pie","selectchanged",e,i,t)):"select"===t.fromAction?(Ay("map","selected",e,i,t),Ay("pie","selected",e,i,t)):"unselect"===t.fromAction&&(Ay("map","unselected",e,i,t),Ay("pie","unselected",e,i,t))}))},e.prototype.isDisposed=function(){return this._disposed},e.prototype.clear=function(){this._disposed?nm(this.id):this.setOption({series:[]},!0)},e.prototype.dispose=function(){if(this._disposed)nm(this.id);else{this._disposed=!0,this.getDom()&&Fo(this.getDom(),fm,"");var t=this,e=t._api,n=t._model;E(t._componentsViews,(function(t){t.dispose(n,e)})),E(t._chartsViews,(function(t){t.dispose(n,e)})),t._zr.dispose(),t._dom=t._model=t._chartsMap=t._componentsMap=t._chartsViews=t._componentsViews=t._scheduler=t._api=t._zr=t._throttledZrFlush=t._theme=t._coordSysMgr=t._messageCenter=null,delete hm[t.id]}},e.prototype.resize=function(t){if(!this[Iv])if(this._disposed)nm(this.id);else{this._zr.resize(t);var e=this._model;if(this._loadingFX&&this._loadingFX.resize(),e){var 
n=e.resetOption("media"),i=t&&t.silent;this[Tv]&&(null==i&&(i=this[Tv].silent),n=!0,this[Tv]=null),this[Iv]=!0;try{n&&Ov(this),Ev.update.call(this,{type:"resize",animation:A({duration:0},t&&t.animation)})}catch(t){throw this[Iv]=!1,t}this[Iv]=!1,Fv.call(this,i),Gv.call(this,i)}}},e.prototype.showLoading=function(t,e){if(this._disposed)nm(this.id);else if(q(t)&&(e=t,t=""),t=t||"default",this.hideLoading(),um[t]){var n=um[t](this._api,e),i=this._zr;this._loadingFX=n,i.add(n)}},e.prototype.hideLoading=function(){this._disposed?nm(this.id):(this._loadingFX&&this._zr.remove(this._loadingFX),this._loadingFX=null)},e.prototype.makeActionFromEvent=function(t){var e=A({},t);return e.type=rm[t.type],e},e.prototype.dispatchAction=function(t,e){if(this._disposed)nm(this.id);else if(q(e)||(e={silent:!!e}),im[t.type]&&this._model)if(this[Iv])this._pendingActions.push(t);else{var n=e.silent;Bv.call(this,t,n);var i=e.flush;i?this._zr.flush():!1!==i&&r.browser.weChat&&this._throttledZrFlush(),Fv.call(this,n),Gv.call(this,n)}},e.prototype.updateLabelLayout=function(){xv.trigger("series:layoutlabels",this._model,this._api,{updatedSeries:[]})},e.prototype.appendData=function(t){if(this._disposed)nm(this.id);else{var e=t.seriesIndex,n=this.getModel().getSeriesByIndex(e);0,n.appendData(t),this._scheduler.unfinished=!0,this.getZr().wakeUp()}},e.internalField=function(){function t(t){t.clearColorPalette(),t.eachSeries((function(t){t.clearColorPalette()}))}function e(t){for(var e=[],n=t.currentStates,i=0;i0?{duration:o,delay:i.get("delay"),easing:i.get("easing")}:null;n.eachRendered((function(t){if(t.states&&t.states.emphasis){if(yh(t))return;if(t instanceof Is&&function(t){var e=il(t);e.normalFill=t.style.fill,e.normalStroke=t.style.stroke;var n=t.states.select||{};e.selectFill=n.style&&n.style.fill||null,e.selectStroke=n.style&&n.style.stroke||null}(t),t.__dirty){var n=t.prevStates;n&&t.useStates(n)}if(r){t.stateTransition=a;var 
i=t.getTextContent(),o=t.getTextGuideLine();i&&(i.stateTransition=a),o&&(o.stateTransition=a)}t.__dirty&&e(t)}}))}Ov=function(t){var e=t._scheduler;e.restorePipelines(t._model),e.prepareStageTasks(),Rv(t,!0),Rv(t,!1),e.plan()},Rv=function(t,e){for(var n=t._model,i=t._scheduler,r=e?t._componentsViews:t._chartsViews,o=e?t._componentsMap:t._chartsMap,a=t._zr,s=t._api,l=0;le.get("hoverLayerThreshold")&&!r.node&&!r.worker&&e.eachSeries((function(e){if(!e.preventUsingHoverLayer){var n=t._chartsMap[e.__viewId];n.__alive&&n.eachRendered((function(t){t.states.emphasis&&(t.states.emphasis.hoverLayer=!0)}))}}))}(t,e),xv.trigger("series:afterupdate",e,n,l)},qv=function(t){t[Cv]=!0,t.getZr().wakeUp()},Kv=function(t){t[Cv]&&(t.getZr().storage.traverse((function(t){yh(t)||e(t)})),t[Cv]=!1)},Zv=function(t){return new(function(e){function i(){return null!==e&&e.apply(this,arguments)||this}return n(i,e),i.prototype.getCoordinateSystems=function(){return t._coordSysMgr.getCoordinateSystems()},i.prototype.getComponentByElement=function(e){for(;e;){var n=e.__ecComponentInfo;if(null!=n)return t._model.getComponent(n.mainType,n.index);e=e.parent}},i.prototype.enterEmphasis=function(e,n){kl(e,n),qv(t)},i.prototype.leaveEmphasis=function(e,n){Ll(e,n),qv(t)},i.prototype.enterBlur=function(e){Pl(e),qv(t)},i.prototype.leaveBlur=function(e){Ol(e),qv(t)},i.prototype.enterSelect=function(e){Rl(e),qv(t)},i.prototype.leaveSelect=function(e){Nl(e),qv(t)},i.prototype.getModel=function(){return t.getModel()},i.prototype.getViewOfComponentModel=function(e){return t.getViewOfComponentModel(e)},i.prototype.getViewOfSeriesModel=function(e){return t.getViewOfSeriesModel(e)},i}(vd))(t)},jv=function(t){function e(t,e){for(var n=0;n=0)){Dm.push(n);var o=Jg.wrapStageHandler(n,r);o.__prio=e,o.__raw=n,t.push(o)}}function km(t,e){um[t]=e}function Lm(t,e,n){var i=bv("registerMap");i&&i(t,e,n)}var Pm=function(t){var e=(t=T(t)).type,n="";e||vo(n);var i=e.split(":");2!==i.length&&vo(n);var 
r=!1;"echarts"===i[0]&&(e=i[1],r=!0),t.__isBuiltIn=r,Nf.set(e,t)};Cm(wv,Zg),Cm(Sv,qg),Cm(Sv,Kg),Cm(wv,Sy),Cm(Sv,My),Cm(7e3,(function(t,e){t.eachRawSeries((function(n){if(!t.isSeriesFiltered(n)){var i=n.getData();i.hasItemVisual()&&i.each((function(t){var n=i.getItemVisual(t,"decal");n&&(i.ensureUniqueItemVisual(t,"style").decal=gv(n,e))}));var r=i.getVisual("decal");if(r)i.getVisual("style").decal=gv(r,e)}}))})),xm(Wd),_m(900,(function(t){var e=yt();t.eachSeries((function(t){var n=t.get("stack");if(n){var i=e.get(n)||e.set(n,[]),r=t.getData(),o={stackResultDimension:r.getCalculationInfo("stackResultDimension"),stackedOverDimension:r.getCalculationInfo("stackedOverDimension"),stackedDimension:r.getCalculationInfo("stackedDimension"),stackedByDimension:r.getCalculationInfo("stackedByDimension"),isStackedByIndex:r.getCalculationInfo("isStackedByIndex"),data:r,seriesModel:t};if(!o.stackedDimension||!o.isStackedByIndex&&!o.stackedByDimension)return;i.length&&r.setCalculationInfo("stackedOnSeries",i[i.length-1].seriesModel),i.push(o)}})),e.each(Hd)})),km("default",(function(t,e){k(e=e||{},{text:"loading",textColor:"#000",fontSize:12,fontWeight:"normal",fontStyle:"normal",fontFamily:"sans-serif",maskColor:"rgba(255, 255, 255, 0.8)",showSpinner:!0,color:"#5470c6",spinnerRadius:10,lineWidth:5,zlevel:0});var n=new zr,i=new zs({style:{fill:e.maskColor},zlevel:e.zlevel,z:1e4});n.add(i);var r,o=new Fs({style:{text:e.text,fill:e.textColor,fontSize:e.fontSize,fontWeight:e.fontWeight,fontStyle:e.fontStyle,fontFamily:e.fontFamily},zlevel:e.zlevel,z:10001}),a=new zs({style:{fill:"none"},textContent:o,textConfig:{position:"right",distance:10},zlevel:e.zlevel,z:10001});return n.add(a),e.showSpinner&&((r=new 
Qu({shape:{startAngle:-$g/2,endAngle:-$g/2+.1,r:e.spinnerRadius},style:{stroke:e.color,lineCap:"round",lineWidth:e.lineWidth},zlevel:e.zlevel,z:10001})).animateShape(!0).when(1e3,{endAngle:3*$g/2}).start("circularInOut"),r.animateShape(!0).when(1e3,{startAngle:3*$g/2}).delay(300).start("circularInOut"),n.add(r)),n.resize=function(){var n=o.getBoundingRect().width,s=e.showSpinner?e.spinnerRadius:0,l=(t.getWidth()-2*s-(e.showSpinner&&n?10:0)-n)/2-(e.showSpinner&&n?0:5+n/2)+(e.showSpinner?0:n/2)+(n?0:s),u=t.getHeight()/2;e.showSpinner&&r.setShape({cx:l,cy:u}),a.setShape({x:l-s,y:u-s,width:2*s,height:2*s}),i.setShape({x:0,y:0,width:t.getWidth(),height:t.getHeight()})},n.resize(),n})),Mm({type:ll,event:ll,update:ll},bt),Mm({type:ul,event:ul,update:ul},bt),Mm({type:hl,event:hl,update:hl},bt),Mm({type:cl,event:cl,update:cl},bt),Mm({type:pl,event:pl,update:pl},bt),mm("light",fy),mm("dark",xy);var Om=[],Rm={registerPreprocessor:xm,registerProcessor:_m,registerPostInit:bm,registerPostUpdate:wm,registerUpdateLifecycle:Sm,registerAction:Mm,registerCoordinateSystem:Im,registerLayout:Tm,registerVisual:Cm,registerTransform:Pm,registerLoading:km,registerMap:Lm,registerImpl:function(t,e){_v[t]=e},PRIORITY:Mv,ComponentModel:Rp,ComponentView:Tg,SeriesModel:mg,ChartView:kg,registerComponentModel:function(t){Rp.registerClass(t)},registerComponentView:function(t){Tg.registerClass(t)},registerSeriesModel:function(t){mg.registerClass(t)},registerChartView:function(t){kg.registerClass(t)},registerSubTypeDefaulter:function(t,e){Rp.registerSubTypeDefaulter(t,e)},registerPainter:function(t,e){Wr(t,e)}};function Nm(t){Y(t)?E(t,(function(t){Nm(t)})):P(Om,t)>=0||(Om.push(t),X(t)&&(t={install:t}),t.install(Rm))}function Em(t){return null==t?0:t.length||1}function zm(t){return t}var Vm=function(){function t(t,e,n,i,r,o){this._old=t,this._new=e,this._oldKeyGetter=n||zm,this._newKeyGetter=i||zm,this.context=r,this._diffModeMultiple="multiple"===o}return t.prototype.add=function(t){return 
this._add=t,this},t.prototype.update=function(t){return this._update=t,this},t.prototype.updateManyToOne=function(t){return this._updateManyToOne=t,this},t.prototype.updateOneToMany=function(t){return this._updateOneToMany=t,this},t.prototype.updateManyToMany=function(t){return this._updateManyToMany=t,this},t.prototype.remove=function(t){return this._remove=t,this},t.prototype.execute=function(){this[this._diffModeMultiple?"_executeMultiple":"_executeOneToOne"]()},t.prototype._executeOneToOne=function(){var t=this._old,e=this._new,n={},i=new Array(t.length),r=new Array(e.length);this._initIndexMap(t,null,i,"_oldKeyGetter"),this._initIndexMap(e,n,r,"_newKeyGetter");for(var o=0;o1){var u=s.shift();1===s.length&&(n[a]=s[0]),this._update&&this._update(u,o)}else 1===l?(n[a]=null,this._update&&this._update(s,o)):this._remove&&this._remove(o)}this._performRestAdd(r,n)},t.prototype._executeMultiple=function(){var t=this._old,e=this._new,n={},i={},r=[],o=[];this._initIndexMap(t,n,r,"_oldKeyGetter"),this._initIndexMap(e,i,o,"_newKeyGetter");for(var a=0;a1&&1===c)this._updateManyToOne&&this._updateManyToOne(u,l),i[s]=null;else if(1===h&&c>1)this._updateOneToMany&&this._updateOneToMany(u,l),i[s]=null;else if(1===h&&1===c)this._update&&this._update(u,l),i[s]=null;else if(h>1&&c>1)this._updateManyToMany&&this._updateManyToMany(u,l),i[s]=null;else if(h>1)for(var p=0;p1)for(var a=0;a30}var Km,$m,Jm,Qm,tx,ex,nx,ix=q,rx=z,ox="undefined"==typeof Int32Array?Array:Int32Array,ax=["hasItemOption","_nameList","_idList","_invertedIndicesMap","_dimSummary","userOutput","_rawData","_dimValueGetter","_nameDimIdx","_idDimIdx","_nameRepeatCount"],sx=["_approximateExtent"],lx=function(){function t(t,e){var 
n;this.type="list",this._dimOmitted=!1,this._nameList=[],this._idList=[],this._visual={},this._layout={},this._itemVisuals=[],this._itemLayouts=[],this._graphicEls=[],this._approximateExtent={},this._calculationInfo={},this.hasItemOption=!1,this.TRANSFERABLE_METHODS=["cloneShallow","downSample","lttbDownSample","map"],this.CHANGABLE_METHODS=["filterSelf","selectRange"],this.DOWNSAMPLE_METHODS=["downSample","lttbDownSample"];var i=!1;Um(t)?(n=t.dimensions,this._dimOmitted=t.isDimensionOmitted(),this._schema=t):(i=!0,n=t),n=n||["x","y"];for(var r={},o=[],a={},s=!1,l={},u=0;u=e)){var n=this._store.getProvider();this._updateOrdinalMeta();var i=this._nameList,r=this._idList;if(n.getSource().sourceFormat===Bp&&!n.pure)for(var o=[],a=t;a0},t.prototype.ensureUniqueItemVisual=function(t,e){var n=this._itemVisuals,i=n[t];i||(i=n[t]={});var r=i[e];return null==r&&(Y(r=this.getVisual(e))?r=r.slice():ix(r)&&(r=A({},r)),i[e]=r),r},t.prototype.setItemVisual=function(t,e,n){var i=this._itemVisuals[t]||{};this._itemVisuals[t]=i,ix(e)?A(i,e):i[e]=n},t.prototype.clearAllVisual=function(){this._visual={},this._itemVisuals=[]},t.prototype.setLayout=function(t,e){ix(t)?A(this._layout,t):this._layout[t]=e},t.prototype.getLayout=function(t){return this._layout[t]},t.prototype.getItemLayout=function(t){return this._itemLayouts[t]},t.prototype.setItemLayout=function(t,e,n){this._itemLayouts[t]=n?A(this._itemLayouts[t]||{},e):e},t.prototype.clearItemLayouts=function(){this._itemLayouts.length=0},t.prototype.setItemGraphicEl=function(t,e){var n=this.hostModel&&this.hostModel.seriesIndex;tl(n,this.dataType,t,e),this._graphicEls[t]=e},t.prototype.getItemGraphicEl=function(t){return this._graphicEls[t]},t.prototype.eachItemGraphicEl=function(t,e){E(this._graphicEls,(function(n,i){n&&t&&t.call(e,n,i)}))},t.prototype.cloneShallow=function(e){return e||(e=new 
t(this._schema?this._schema:rx(this.dimensions,this._getDimInfo,this),this.hostModel)),tx(e,this),e._store=this._store,e},t.prototype.wrapMethod=function(t,e){var n=this[t];X(n)&&(this.__wrappedMethods=this.__wrappedMethods||[],this.__wrappedMethods.push(t),this[t]=function(){var t=n.apply(this,arguments);return e.apply(this,[t].concat(at(arguments)))})},t.internalField=(Km=function(t){var e=t._invertedIndicesMap;E(e,(function(n,i){var r=t._dimInfos[i],o=r.ordinalMeta,a=t._store;if(o){n=e[i]=new ox(o.categories.length);for(var s=0;s1&&(s+="__ec__"+u),i[e]=s}})),t}();function ux(t,e){Kd(t)||(t=Jd(t));var n=(e=e||{}).coordDimensions||[],i=e.dimensionsDefine||t.dimensionsDefine||[],r=yt(),o=[],a=function(t,e,n,i){var r=Math.max(t.dimensionsDetectedCount||1,e.length,n.length,i||0);return E(e,(function(t){var e;q(t)&&(e=t.dimsDef)&&(r=Math.max(r,e.length))})),r}(t,n,i,e.dimensionsCount),s=e.canOmitUnusedDimensions&&qm(a),l=i===t.dimensionsDefine,u=l?jm(t):Zm(i),h=e.encodeDefine;!h&&e.encodeDefaulter&&(h=e.encodeDefaulter(t,a));for(var c=yt(h),p=new Wf(a),d=0;d0&&(i.name=r+(o-1)),o++,e.set(r,o)}}(o),new Xm({source:t,dimensions:o,fullDimensionCount:a,dimensionOmitted:s})}function hx(t,e,n){if(n||e.hasKey(t)){for(var i=0;e.hasKey(t+i);)i++;t+=i}return e.set(t,!0),t}var cx=function(t){this.coordSysDims=[],this.axisMap=yt(),this.categoryAxisMap=yt(),this.coordSysName=t};var px={cartesian2d:function(t,e,n,i){var r=t.getReferringComponents("xAxis",zo).models[0],o=t.getReferringComponents("yAxis",zo).models[0];e.coordSysDims=["x","y"],n.set("x",r),n.set("y",o),dx(r)&&(i.set("x",r),e.firstCategoryDimIndex=0),dx(o)&&(i.set("y",o),null==e.firstCategoryDimIndex&&(e.firstCategoryDimIndex=1))},singleAxis:function(t,e,n,i){var r=t.getReferringComponents("singleAxis",zo).models[0];e.coordSysDims=["single"],n.set("single",r),dx(r)&&(i.set("single",r),e.firstCategoryDimIndex=0)},polar:function(t,e,n,i){var 
r=t.getReferringComponents("polar",zo).models[0],o=r.findAxisModel("radiusAxis"),a=r.findAxisModel("angleAxis");e.coordSysDims=["radius","angle"],n.set("radius",o),n.set("angle",a),dx(o)&&(i.set("radius",o),e.firstCategoryDimIndex=0),dx(a)&&(i.set("angle",a),null==e.firstCategoryDimIndex&&(e.firstCategoryDimIndex=1))},geo:function(t,e,n,i){e.coordSysDims=["lng","lat"]},parallel:function(t,e,n,i){var r=t.ecModel,o=r.getComponent("parallel",t.get("parallelIndex")),a=e.coordSysDims=o.dimensions.slice();E(o.parallelAxisIndex,(function(t,o){var s=r.getComponent("parallelAxis",t),l=a[o];n.set(l,s),dx(s)&&(i.set(l,s),null==e.firstCategoryDimIndex&&(e.firstCategoryDimIndex=o))}))}};function dx(t){return"category"===t.get("type")}function fx(t,e,n){var i,r,o,a=(n=n||{}).byIndex,s=n.stackedCoordDimension;!function(t){return!Um(t.schema)}(e)?(r=e.schema,i=r.dimensions,o=e.store):i=e;var l,u,h,c,p=!(!t||!t.get("stack"));if(E(i,(function(t,e){U(t)&&(i[e]=t={name:t}),p&&!t.isExtraCoord&&(a||l||!t.ordinalMeta||(l=t),u||"ordinal"===t.type||"time"===t.type||s&&s!==t.coordDim||(u=t))})),!u||a||l||(a=!0),u){h="__\0ecstackresult_"+t.id,c="__\0ecstackedover_"+t.id,l&&(l.createInvertedIndices=!0);var d=u.coordDim,f=u.type,g=0;E(i,(function(t){t.coordDim===d&&g++}));var y={name:h,coordDim:d,coordDimIndex:g,type:f,isExtraCoord:!0,isCalculationCoord:!0,storeDimIndex:i.length},v={name:c,coordDim:c,coordDimIndex:g+1,type:f,isExtraCoord:!0,isCalculationCoord:!0,storeDimIndex:i.length+1};r?(o&&(y.storeDimIndex=o.ensureCalculationDimension(c,f),v.storeDimIndex=o.ensureCalculationDimension(h,f)),r.appendCalculationDimension(y),r.appendCalculationDimension(v)):(i.push(y),i.push(v))}return{stackedDimension:u&&u.name,stackedByDimension:l&&l.name,isStackedByIndex:a,stackedOverDimension:c,stackResultDimension:h}}function gx(t,e){return!!e&&e===t.getCalculationInfo("stackedDimension")}function yx(t,e){return gx(t,e)?t.getCalculationInfo("stackResultDimension"):e}function vx(t,e,n){n=n||{};var 
i,r=e.getSourceManager(),o=!1;t?(o=!0,i=Jd(t)):o=(i=r.getSource()).sourceFormat===Bp;var a=function(t){var e=t.get("coordinateSystem"),n=new cx(e),i=px[e];if(i)return i(t,n,n.axisMap,n.categoryAxisMap),n}(e),s=function(t,e){var n,i=t.get("coordinateSystem"),r=xd.get(i);return e&&e.coordSysDims&&(n=z(e.coordSysDims,(function(t){var n={name:t},i=e.axisMap.get(t);if(i){var r=i.get("type");n.type=Gm(r)}return n}))),n||(n=r&&(r.getDimensionsInfo?r.getDimensionsInfo():r.dimensions.slice())||["x","y"]),n}(e,a),l=n.useEncodeDefaulter,u=X(l)?l:l?H($p,s,e):null,h=ux(i,{coordDimensions:s,generateCoord:n.generateCoord,encodeDefine:e.getEncode(),encodeDefaulter:u,canOmitUnusedDimensions:!o}),c=function(t,e,n){var i,r;return n&&E(t,(function(t,o){var a=t.coordDim,s=n.categoryAxisMap.get(a);s&&(null==i&&(i=o),t.ordinalMeta=s.getOrdinalMeta(),e&&(t.createInvertedIndices=!0)),null!=t.otherDims.itemName&&(r=!0)})),r||null==i||(t[i].otherDims.itemName=0),i}(h.dimensions,n.createInvertedIndices,a),p=o?null:r.getSharedDataStore(h),d=fx(e,{schema:h,store:p}),f=new lx(h,e);f.setCalculationInfo(d);var g=null!=c&&function(t){if(t.sourceFormat===Bp){var e=function(t){var e=0;for(;ee[1]&&(e[1]=t[1])},t.prototype.unionExtentFromData=function(t,e){this.unionExtent(t.getApproximateExtent(e))},t.prototype.getExtent=function(){return this._extent.slice()},t.prototype.setExtent=function(t,e){var n=this._extent;isNaN(t)||(n[0]=t),isNaN(e)||(n[1]=e)},t.prototype.isInExtentRange=function(t){return this._extent[0]<=t&&this._extent[1]>=t},t.prototype.isBlank=function(){return this._isBlank},t.prototype.setBlank=function(t){this._isBlank=t},t}();$o(mx);var xx=0,_x=function(){function t(t){this.categories=t.categories||[],this._needCollect=t.needCollect,this._deduplication=t.deduplication,this.uid=++xx}return t.createByAxisModel=function(e){var n=e.option,i=n.data,r=i&&z(i,bx);return new t({categories:r,needCollect:!r,deduplication:!1!==n.dedplication})},t.prototype.getOrdinal=function(t){return 
this._getOrCreateMap().get(t)},t.prototype.parseAndCollect=function(t){var e,n=this._needCollect;if(!U(t)&&!n)return t;if(n&&!this._deduplication)return e=this.categories.length,this.categories[e]=t,e;var i=this._getOrCreateMap();return null==(e=i.get(t))&&(n?(e=this.categories.length,this.categories[e]=t,i.set(t,e)):e=NaN),e},t.prototype._getOrCreateMap=function(){return this._map||(this._map=yt(this.categories))},t}();function bx(t){return q(t)&&null!=t.value?t.value:t+""}function Sx(t){return"interval"===t.type||"log"===t.type}function Mx(t,e,n,i){var r={},o=t[1]-t[0],a=r.interval=so(o/e,!0);null!=n&&ai&&(a=r.interval=i);var s=r.intervalPrecision=Tx(a);return function(t,e){!isFinite(t[0])&&(t[0]=e[0]),!isFinite(t[1])&&(t[1]=e[1]),Cx(t,0,e),Cx(t,1,e),t[0]>t[1]&&(t[0]=t[1])}(r.niceTickExtent=[Zr(Math.ceil(t[0]/a)*a,s),Zr(Math.floor(t[1]/a)*a,s)],t),r}function Ix(t){var e=Math.pow(10,ao(t)),n=t/e;return n?2===n?n=3:3===n?n=5:n*=2:n=1,Zr(n*e)}function Tx(t){return qr(t)+2}function Cx(t,e,n){t[e]=Math.max(Math.min(t[e],n[1]),n[0])}function Dx(t,e){return t>=e[0]&&t<=e[1]}function Ax(t,e){return e[1]===e[0]?.5:(t-e[0])/(e[1]-e[0])}function kx(t,e){return t*(e[1]-e[0])+e[0]}var Lx=function(t){function e(e){var n=t.call(this,e)||this;n.type="ordinal";var i=n.getSetting("ordinalMeta");return i||(i=new _x({})),Y(i)&&(i=new _x({categories:z(i,(function(t){return q(t)?t.value:t}))})),n._ordinalMeta=i,n._extent=n.getSetting("extent")||[0,i.categories.length-1],n}return n(e,t),e.prototype.parse=function(t){return null==t?NaN:U(t)?this._ordinalMeta.getOrdinal(t):Math.round(t)},e.prototype.contain=function(t){return Dx(t=this.parse(t),this._extent)&&null!=this._ordinalMeta.categories[t]},e.prototype.normalize=function(t){return Ax(t=this._getTickNumber(this.parse(t)),this._extent)},e.prototype.scale=function(t){return t=Math.round(kx(t,this._extent)),this.getRawOrdinalNumber(t)},e.prototype.getTicks=function(){for(var 
t=[],e=this._extent,n=e[0];n<=e[1];)t.push({value:n}),n++;return t},e.prototype.getMinorTicks=function(t){},e.prototype.setSortInfo=function(t){if(null!=t){for(var e=t.ordinalNumbers,n=this._ordinalNumbersByTick=[],i=this._ticksByOrdinalNumber=[],r=0,o=this._ordinalMeta.categories.length,a=Math.min(o,e.length);r=0&&t=0&&t=t},e.prototype.getOrdinalMeta=function(){return this._ordinalMeta},e.prototype.calcNiceTicks=function(){},e.prototype.calcNiceExtent=function(){},e.type="ordinal",e}(mx);mx.registerClass(Lx);var Px=Zr,Ox=function(t){function e(){var e=null!==t&&t.apply(this,arguments)||this;return e.type="interval",e._interval=0,e._intervalPrecision=2,e}return n(e,t),e.prototype.parse=function(t){return t},e.prototype.contain=function(t){return Dx(t,this._extent)},e.prototype.normalize=function(t){return Ax(t,this._extent)},e.prototype.scale=function(t){return kx(t,this._extent)},e.prototype.setExtent=function(t,e){var n=this._extent;isNaN(t)||(n[0]=parseFloat(t)),isNaN(e)||(n[1]=parseFloat(e))},e.prototype.unionExtent=function(t){var e=this._extent;t[0]e[1]&&(e[1]=t[1]),this.setExtent(e[0],e[1])},e.prototype.getInterval=function(){return this._interval},e.prototype.setInterval=function(t){this._interval=t,this._niceExtent=this._extent.slice(),this._intervalPrecision=Tx(t)},e.prototype.getTicks=function(t){var e=this._interval,n=this._extent,i=this._niceExtent,r=this._intervalPrecision,o=[];if(!e)return o;n[0]1e4)return[];var s=o.length?o[o.length-1].value:i[1];return n[1]>s&&(t?o.push({value:Px(s+e,r)}):o.push({value:n[1]})),o},e.prototype.getMinorTicks=function(t){for(var e=this.getTicks(!0),n=[],i=this.getExtent(),r=1;ri[0]&&h0&&(o=null===o?s:Math.min(o,s))}n[i]=o}}return n}(t),n=[];return E(t,(function(t){var i,r=t.coordinateSystem.getBaseAxis(),o=r.getExtent();if("category"===r.type)i=r.getBandWidth();else if("value"===r.type||"time"===r.type){var a=r.dim+"_"+r.index,s=e[a],l=Math.abs(o[1]-o[0]),u=r.scale.getExtent(),h=Math.abs(u[1]-u[0]);i=s?l/h*s:l}else{var 
c=t.getData();i=Math.abs(o[1]-o[0])/c.count()}var p=Ur(t.get("barWidth"),i),d=Ur(t.get("barMaxWidth"),i),f=Ur(t.get("barMinWidth")||(Ux(t)?.5:1),i),g=t.get("barGap"),y=t.get("barCategoryGap");n.push({bandWidth:i,barWidth:p,barMaxWidth:d,barMinWidth:f,barGap:g,barCategoryGap:y,axisKey:Bx(r),stackId:Vx(t)})})),Wx(n)}function Wx(t){var e={};E(t,(function(t,n){var i=t.axisKey,r=t.bandWidth,o=e[i]||{bandWidth:r,remainedWidth:r,autoWidthCount:0,categoryGap:null,gap:"20%",stacks:{}},a=o.stacks;e[i]=o;var s=t.stackId;a[s]||o.autoWidthCount++,a[s]=a[s]||{width:0,maxWidth:0};var l=t.barWidth;l&&!a[s].width&&(a[s].width=l,l=Math.min(o.remainedWidth,l),o.remainedWidth-=l);var u=t.barMaxWidth;u&&(a[s].maxWidth=u);var h=t.barMinWidth;h&&(a[s].minWidth=h);var c=t.barGap;null!=c&&(o.gap=c);var p=t.barCategoryGap;null!=p&&(o.categoryGap=p)}));var n={};return E(e,(function(t,e){n[e]={};var i=t.stacks,r=t.bandWidth,o=t.categoryGap;if(null==o){var a=G(i).length;o=Math.max(35-4*a,15)+"%"}var s=Ur(o,r),l=Ur(t.gap,1),u=t.remainedWidth,h=t.autoWidthCount,c=(u-s)/(h+(h-1)*l);c=Math.max(c,0),E(i,(function(t){var e=t.maxWidth,n=t.minWidth;if(t.width){i=t.width;e&&(i=Math.min(i,e)),n&&(i=Math.max(i,n)),t.width=i,u-=i+l*i,h--}else{var i=c;e&&ei&&(i=n),i!==c&&(t.width=i,u-=i+l*i,h--)}})),c=(u-s)/(h+(h-1)*l),c=Math.max(c,0);var p,d=0;E(i,(function(t,e){t.width||(t.width=c),p=t,d+=t.width*(1+l)})),p&&(d-=p.width*l);var f=-d/2;E(i,(function(t,i){n[e][i]=n[e][i]||{bandWidth:r,offset:f,width:t.width},f+=t.width*(1+l)}))})),n}function Hx(t,e){var n=Fx(t,e),i=Gx(n);E(n,(function(t){var e=t.getData(),n=t.coordinateSystem.getBaseAxis(),r=Vx(t),o=i[Bx(n)][r],a=o.offset,s=o.width;e.setLayout({bandWidth:o.bandWidth,offset:a,size:s})}))}function Yx(t){return{seriesType:t,plan:Cg(),reset:function(t){if(Xx(t)){var 
e=t.getData(),n=t.coordinateSystem,i=n.getBaseAxis(),r=n.getOtherAxis(i),o=e.getDimensionIndex(e.mapDimension(r.dim)),a=e.getDimensionIndex(e.mapDimension(i.dim)),s=t.get("showBackground",!0),l=e.mapDimension(r.dim),u=e.getCalculationInfo("stackResultDimension"),h=gx(e,l)&&!!e.getCalculationInfo("stackedOnSeries"),c=r.isHorizontal(),p=function(t,e){return e.toGlobalCoord(e.dataToCoord("log"===e.type?1:0))}(0,r),d=Ux(t),f=t.get("barMinHeight")||0,g=u&&e.getDimensionIndex(u),y=e.getLayout("size"),v=e.getLayout("offset");return{progress:function(t,e){for(var i,r=t.count,l=d&&Ex(3*r),u=d&&s&&Ex(3*r),m=d&&Ex(r),x=n.master.getRect(),_=c?x.width:x.height,b=e.getStore(),w=0;null!=(i=t.next());){var S=b.get(h?g:o,i),M=b.get(a,i),I=p,T=void 0;h&&(T=+S-b.get(o,i));var C=void 0,D=void 0,A=void 0,k=void 0;if(c){var L=n.dataToPoint([S,M]);if(h)I=n.dataToPoint([T,M])[0];C=I,D=L[1]+v,A=L[0]-I,k=y,Math.abs(A)0)for(var s=0;s=0;--s)if(l[u]){o=l[u];break}o=o||a.none}if(Y(o)){var h=null==t.level?0:t.level>=0?t.level:o.length+t.level;o=o[h=Math.min(h,o.length-1)]}}return qc(new Date(t.value),o,r,i)}(t,e,n,this.getSetting("locale"),i)},e.prototype.getTicks=function(){var t=this._interval,e=this._extent,n=[];if(!t)return n;n.push({value:e[0],level:0});var i=this.getSetting("useUTC"),r=function(t,e,n,i){var r=1e4,o=Xc,a=0;function s(t,e,n,r,o,a,s){for(var l=new Date(e),u=e,h=l[r]();u1&&0===u&&o.unshift({value:o[0].value-p})}}for(u=0;u=i[0]&&v<=i[1]&&c++)}var m=(i[1]-i[0])/e;if(c>1.5*m&&p>m/1.5)break;if(u.push(g),c>m||t===o[d])break}h=[]}}0;var x=B(z(u,(function(t){return B(t,(function(t){return t.value>=i[0]&&t.value<=i[1]&&!t.notAdd}))})),(function(t){return t.length>0})),_=[],b=x.length-1;for(d=0;dn&&(this._approxInterval=n);var o=jx.length,a=Math.min(function(t,e,n,i){for(;n>>1;t[r][1]16?16:t>7.5?7:t>3.5?4:t>1.5?2:1}function Kx(t){return(t/=2592e6)>6?6:t>3?3:t>2?2:1}function $x(t){return(t/=Vc)>12?12:t>6?6:t>3.5?4:t>2?2:1}function 
Jx(t,e){return(t/=e?zc:Ec)>30?30:t>20?20:t>15?15:t>10?10:t>5?5:t>2?2:1}function Qx(t){return so(t,!0)}function t_(t,e,n){var i=new Date(t);switch(Zc(e)){case"year":case"month":i[ap(n)](0);case"day":i[sp(n)](1);case"hour":i[lp(n)](0);case"minute":i[up(n)](0);case"second":i[hp(n)](0),i[cp(n)](0)}return i.getTime()}mx.registerClass(Zx);var e_=mx.prototype,n_=Ox.prototype,i_=Zr,r_=Math.floor,o_=Math.ceil,a_=Math.pow,s_=Math.log,l_=function(t){function e(){var e=null!==t&&t.apply(this,arguments)||this;return e.type="log",e.base=10,e._originalScale=new Ox,e._interval=0,e}return n(e,t),e.prototype.getTicks=function(t){var e=this._originalScale,n=this._extent,i=e.getExtent();return z(n_.getTicks.call(this,t),(function(t){var e=t.value,r=Zr(a_(this.base,e));return r=e===n[0]&&this._fixMin?h_(r,i[0]):r,{value:r=e===n[1]&&this._fixMax?h_(r,i[1]):r}}),this)},e.prototype.setExtent=function(t,e){var n=s_(this.base);t=s_(Math.max(0,t))/n,e=s_(Math.max(0,e))/n,n_.setExtent.call(this,t,e)},e.prototype.getExtent=function(){var t=this.base,e=e_.getExtent.call(this);e[0]=a_(t,e[0]),e[1]=a_(t,e[1]);var n=this._originalScale.getExtent();return this._fixMin&&(e[0]=h_(e[0],n[0])),this._fixMax&&(e[1]=h_(e[1],n[1])),e},e.prototype.unionExtent=function(t){this._originalScale.unionExtent(t);var e=this.base;t[0]=s_(t[0])/s_(e),t[1]=s_(t[1])/s_(e),e_.unionExtent.call(this,t)},e.prototype.unionExtentFromData=function(t,e){this.unionExtent(t.getApproximateExtent(e))},e.prototype.calcNiceTicks=function(t){t=t||10;var e=this._extent,n=e[1]-e[0];if(!(n===1/0||n<=0)){var i=oo(n);for(t/n*i<=.5&&(i*=10);!isNaN(i)&&Math.abs(i)<1&&Math.abs(i)>0;)i*=10;var r=[Zr(o_(e[0]/i)*i),Zr(r_(e[1]/i)*i)];this._interval=i,this._niceExtent=r}},e.prototype.calcNiceExtent=function(t){n_.calcNiceExtent.call(this,t),this._fixMin=t.fixMin,this._fixMax=t.fixMax},e.prototype.parse=function(t){return t},e.prototype.contain=function(t){return Dx(t=s_(t)/s_(this.base),this._extent)},e.prototype.normalize=function(t){return 
Ax(t=s_(t)/s_(this.base),this._extent)},e.prototype.scale=function(t){return t=kx(t,this._extent),a_(this.base,t)},e.type="log",e}(mx),u_=l_.prototype;function h_(t,e){return i_(t,qr(e))}u_.getMinorTicks=n_.getMinorTicks,u_.getLabel=n_.getLabel,mx.registerClass(l_);var c_=function(){function t(t,e,n){this._prepareParams(t,e,n)}return t.prototype._prepareParams=function(t,e,n){n[1]0&&s>0&&!l&&(a=0),a<0&&s<0&&!u&&(s=0));var c=this._determinedMin,p=this._determinedMax;return null!=c&&(a=c,l=!0),null!=p&&(s=p,u=!0),{min:a,max:s,minFixed:l,maxFixed:u,isBlank:h}},t.prototype.modifyDataMinMax=function(t,e){this[d_[t]]=e},t.prototype.setDeterminedMinMax=function(t,e){var n=p_[t];this[n]=e},t.prototype.freeze=function(){this.frozen=!0},t}(),p_={min:"_determinedMin",max:"_determinedMax"},d_={min:"_dataMin",max:"_dataMax"};function f_(t,e,n){var i=t.rawExtentInfo;return i||(i=new c_(t,e,n),t.rawExtentInfo=i,i)}function g_(t,e){return null==e?null:nt(e)?NaN:t.parse(e)}function y_(t,e){var n=t.type,i=f_(t,e,t.getExtent()).calculate();t.setBlank(i.isBlank);var r=i.min,o=i.max,a=e.ecModel;if(a&&"time"===n){var s=Fx("bar",a),l=!1;if(E(s,(function(t){l=l||t.getBaseAxis()===e.axis})),l){var u=Gx(s),h=function(t,e,n,i){var r=n.axis.getExtent(),o=r[1]-r[0],a=function(t,e,n){if(t&&e){var i=t[Bx(e)];return null!=i&&null!=n?i[Vx(n)]:i}}(i,n.axis);if(void 0===a)return{min:t,max:e};var s=1/0;E(a,(function(t){s=Math.min(t.offset,s)}));var l=-1/0;E(a,(function(t){l=Math.max(t.offset+t.width,l)})),s=Math.abs(s),l=Math.abs(l);var u=s+l,h=e-t,c=h/(1-(s+l)/o)-h;return e+=c*(l/u),t-=c*(s/u),{min:t,max:e}}(r,o,e,u);r=h.min,o=h.max}}return{extent:[r,o],fixMin:i.minFixed,fixMax:i.maxFixed}}function v_(t,e){var n=e,i=y_(t,n),r=i.extent,o=n.get("splitNumber");t instanceof l_&&(t.base=n.get("logBase"));var 
a=t.type,s=n.get("interval"),l="interval"===a||"time"===a;t.setExtent(r[0],r[1]),t.calcNiceExtent({splitNumber:o,fixMin:i.fixMin,fixMax:i.fixMax,minInterval:l?n.get("minInterval"):null,maxInterval:l?n.get("maxInterval"):null}),null!=s&&t.setInterval&&t.setInterval(s)}function m_(t,e){if(e=e||t.get("type"))switch(e){case"category":return new Lx({ordinalMeta:t.getOrdinalMeta?t.getOrdinalMeta():t.getCategories(),extent:[1/0,-1/0]});case"time":return new Zx({locale:t.ecModel.getLocaleModel(),useUTC:t.ecModel.get("useUTC")});default:return new(mx.getClass(e)||Ox)}}function x_(t){var e,n,i=t.getLabelModel().get("formatter"),r="category"===t.type?t.scale.getExtent()[0]:null;return"time"===t.scale.type?(n=i,function(e,i){return t.scale.getFormattedLabel(e,i,n)}):U(i)?function(e){return function(n){var i=t.scale.getLabel(n);return e.replace("{value}",null!=i?i:"")}}(i):X(i)?(e=i,function(n,i){return null!=r&&(i=n.value-r),e(__(t,n),i,null!=n.level?{level:n.level}:null)}):function(e){return t.scale.getLabel(e)}}function __(t,e){return"category"===t.type?t.scale.getLabel(e):e.value}function b_(t,e){var n=e*Math.PI/180,i=t.width,r=t.height,o=i*Math.abs(Math.cos(n))+Math.abs(r*Math.sin(n)),a=i*Math.abs(Math.sin(n))+Math.abs(r*Math.cos(n));return new ze(t.x,t.y,o,a)}function w_(t){var e=t.get("interval");return null==e?"auto":e}function S_(t){return"category"===t.type&&0===w_(t.getLabelModel())}function M_(t,e){var n={};return E(t.mapDimensionsAll(e),(function(e){n[yx(t,e)]=!0})),G(n)}var I_=function(){function t(){}return t.prototype.getNeedCrossZero=function(){return!this.option.scale},t.prototype.getCoordSysModel=function(){},t}();var T_={isDimensionStacked:gx,enableDataStack:fx,getStackedDimension:yx};var C_=Object.freeze({__proto__:null,createList:function(t){return vx(null,t)},getLayoutRect:Cp,dataStack:T_,createScale:function(t,e){var n=e;e instanceof Mc||(n=new Mc(e));var i=m_(n);return 
i.setExtent(t[0],t[1]),v_(i,n),i},mixinAxisModelCommonMethods:function(t){R(t,I_)},getECData:Qs,createTextStyle:function(t,e){return nc(t,null,null,"normal"!==(e=e||{}).state)},createDimensions:function(t,e){return ux(t,e).dimensions},createSymbol:Wy,enableHoverEmphasis:Hl});function D_(t,e){return Math.abs(t-e)<1e-8}function A_(t,e,n){var i=0,r=t[0];if(!r)return!1;for(var o=1;on&&(t=r,n=a)}if(t)return function(t){for(var e=0,n=0,i=0,r=t.length,o=t[r-1][0],a=t[r-1][1],s=0;s>1^-(1&s),l=l>>1^-(1&l),r=s+=r,o=l+=o,i.push([s/n,l/n])}return i}function F_(t,e){return z(B((t=function(t){if(!t.UTF8Encoding)return t;var e=t,n=e.UTF8Scale;return null==n&&(n=1024),E(e.features,(function(t){var e=t.geometry,i=e.encodeOffsets,r=e.coordinates;if(i)switch(e.type){case"LineString":e.coordinates=B_(r,i,n);break;case"Polygon":case"MultiLineString":V_(r,i,n);break;case"MultiPolygon":E(r,(function(t,e){return V_(t,i[e],n)}))}})),e.UTF8Encoding=!1,e}(t)).features,(function(t){return t.geometry&&t.properties&&t.geometry.coordinates.length>0})),(function(t){var n=t.properties,i=t.geometry,r=[];switch(i.type){case"Polygon":var o=i.coordinates;r.push(new R_(o[0],o.slice(1)));break;case"MultiPolygon":E(i.coordinates,(function(t){t[0]&&r.push(new R_(t[0],t.slice(1)))}));break;case"LineString":r.push(new N_([i.coordinates]));break;case"MultiLineString":r.push(new N_(i.coordinates))}var a=new E_(n[e||"name"],r,n.cp);return a.properties=n,a}))}var G_=Object.freeze({__proto__:null,linearMap:Xr,round:Zr,asc:jr,getPrecision:qr,getPrecisionSafe:Kr,getPixelPrecision:$r,getPercentWithPrecision:function(t,e,n){return 
t[e]&&Jr(t,n)[e]||0},MAX_SAFE_INTEGER:to,remRadian:eo,isRadianAroundZero:no,parseDate:ro,quantity:oo,quantityExponent:ao,nice:so,quantile:lo,reformIntervals:uo,isNumeric:co,numericToNumber:ho}),W_=Object.freeze({__proto__:null,parse:ro,format:qc}),H_=Object.freeze({__proto__:null,extendShape:Mh,extendPath:Th,makePath:Ah,makeImage:kh,mergePath:Ph,resizePath:Oh,createIcon:Hh,updateProps:fh,initProps:gh,getTransform:Eh,clipPointsByRect:Gh,clipRectByRect:Wh,registerShape:Ch,getShapeClass:Dh,Group:zr,Image:ks,Text:Fs,Circle:_u,Ellipse:wu,Sector:zu,Ring:Bu,Polygon:Wu,Polyline:Yu,Rect:zs,Line:Zu,BezierCurve:$u,Arc:Qu,IncrementalDisplayable:hh,CompoundPath:th,LinearGradient:nh,RadialGradient:ih,BoundingRect:ze}),Y_=Object.freeze({__proto__:null,addCommas:pp,toCamelCase:dp,normalizeCssArray:fp,encodeHTML:re,formatTpl:mp,getTooltipMarker:xp,formatTime:function(t,e,n){"week"!==t&&"month"!==t&&"quarter"!==t&&"half-year"!==t&&"year"!==t||(t="MM-dd\nyyyy");var i=ro(e),r=n?"getUTC":"get",o=i[r+"FullYear"](),a=i[r+"Month"]()+1,s=i[r+"Date"](),l=i[r+"Hours"](),u=i[r+"Minutes"](),h=i[r+"Seconds"](),c=i[r+"Milliseconds"]();return t=t.replace("MM",Uc(a,2)).replace("M",a).replace("yyyy",o).replace("yy",Uc(o%100+"",2)).replace("dd",Uc(s,2)).replace("d",s).replace("hh",Uc(l,2)).replace("h",l).replace("mm",Uc(u,2)).replace("m",u).replace("ss",Uc(h,2)).replace("s",h).replace("SSS",Uc(c,3))},capitalFirst:function(t){return t?t.charAt(0).toUpperCase()+t.substr(1):t},truncateText:sa,getTextRect:function(t,e,n,i,r,o,a,s){return new Fs({style:{text:t,font:e,align:n,verticalAlign:i,padding:r,rich:o,overflow:a?"truncate":null,lineHeight:s}}).getBoundingRect()}}),X_=Object.freeze({__proto__:null,map:z,each:E,indexOf:P,inherits:O,reduce:V,filter:B,bind:W,curry:H,isArray:Y,isString:U,isObject:q,isFunction:X,extend:A,defaults:k,clone:T,merge:C}),U_=Oo();function Z_(t){return"category"===t.type?function(t){var 
e=t.getLabelModel(),n=q_(t,e);return!e.get("show")||t.scale.isBlank()?{labels:[],labelCategoryInterval:n.labelCategoryInterval}:n}(t):function(t){var e=t.scale.getTicks(),n=x_(t);return{labels:z(e,(function(e,i){return{level:e.level,formattedLabel:n(e,i),rawLabel:t.scale.getLabel(e),tickValue:e.value}}))}}(t)}function j_(t,e){return"category"===t.type?function(t,e){var n,i,r=K_(t,"ticks"),o=w_(e),a=$_(r,o);if(a)return a;e.get("show")&&!t.scale.isBlank()||(n=[]);if(X(o))n=tb(t,o,!0);else if("auto"===o){var s=q_(t,t.getLabelModel());i=s.labelCategoryInterval,n=z(s.labels,(function(t){return t.tickValue}))}else n=Q_(t,i=o,!0);return J_(r,o,{ticks:n,tickCategoryInterval:i})}(t,e):{ticks:z(t.scale.getTicks(),(function(t){return t.value}))}}function q_(t,e){var n,i,r=K_(t,"labels"),o=w_(e),a=$_(r,o);return a||(X(o)?n=tb(t,o):(i="auto"===o?function(t){var e=U_(t).autoInterval;return null!=e?e:U_(t).autoInterval=t.calculateCategoryInterval()}(t):o,n=Q_(t,i)),J_(r,o,{labels:n,labelCategoryInterval:i}))}function K_(t,e){return U_(t)[e]||(U_(t)[e]=[])}function $_(t,e){for(var n=0;n1&&h/l>2&&(u=Math.round(Math.ceil(u/l)*l));var c=S_(t),p=a.get("showMinLabel")||c,d=a.get("showMaxLabel")||c;p&&u!==o[0]&&g(o[0]);for(var f=u;f<=o[1];f+=l)g(f);function g(t){var e={value:t};s.push(n?t:{formattedLabel:i(e),rawLabel:r.getLabel(e),tickValue:t})}return d&&f-l!==o[1]&&g(o[1]),s}function tb(t,e,n){var i=t.scale,r=x_(t),o=[];return E(i.getTicks(),(function(t){var a=i.getLabel(t),s=t.value;e(t.value,a)&&o.push(n?s:{formattedLabel:r(t),rawLabel:a,tickValue:s})})),o}var eb=[0,1],nb=function(){function t(t,e,n){this.onBand=!1,this.inverse=!1,this.dim=t,this.scale=e,this._extent=n||[0,0]}return t.prototype.contain=function(t){var e=this._extent,n=Math.min(e[0],e[1]),i=Math.max(e[0],e[1]);return t>=n&&t<=i},t.prototype.containData=function(t){return this.scale.contain(t)},t.prototype.getExtent=function(){return this._extent.slice()},t.prototype.getPixelPrecision=function(t){return 
$r(t||this.scale.getExtent(),this._extent)},t.prototype.setExtent=function(t,e){var n=this._extent;n[0]=t,n[1]=e},t.prototype.dataToCoord=function(t,e){var n=this._extent,i=this.scale;return t=i.normalize(t),this.onBand&&"ordinal"===i.type&&ib(n=n.slice(),i.count()),Xr(t,eb,n,e)},t.prototype.coordToData=function(t,e){var n=this._extent,i=this.scale;this.onBand&&"ordinal"===i.type&&ib(n=n.slice(),i.count());var r=Xr(t,n,eb,e);return this.scale.scale(r)},t.prototype.pointToData=function(t,e){},t.prototype.getTicksCoords=function(t){var e=(t=t||{}).tickModel||this.getTickModel(),n=z(j_(this,e).ticks,(function(t){return{coord:this.dataToCoord("ordinal"===this.scale.type?this.scale.getRawOrdinalNumber(t):t),tickValue:t}}),this);return function(t,e,n,i){var r=e.length;if(!t.onBand||n||!r)return;var o,a,s=t.getExtent();if(1===r)e[0].coord=s[0],o=e[1]={coord:s[1]};else{var l=e[r-1].tickValue-e[0].tickValue,u=(e[r-1].coord-e[0].coord)/l;E(e,(function(t){t.coord-=u/2})),a=1+t.scale.getExtent()[1]-e[r-1].tickValue,o={coord:e[r-1].coord+u*a},e.push(o)}var h=s[0]>s[1];c(e[0].coord,s[0])&&(i?e[0].coord=s[0]:e.shift());i&&c(s[0],e[0].coord)&&e.unshift({coord:s[0]});c(s[1],o.coord)&&(i?o.coord=s[1]:e.pop());i&&c(o.coord,s[1])&&e.push({coord:s[1]});function c(t,e){return t=Zr(t),e=Zr(e),h?t>e:t0&&t<100||(t=5),z(this.scale.getMinorTicks(t),(function(t){return z(t,(function(t){return{coord:this.dataToCoord(t),tickValue:t}}),this)}),this)},t.prototype.getViewLabels=function(){return Z_(this).labels},t.prototype.getLabelModel=function(){return this.model.getModel("axisLabel")},t.prototype.getTickModel=function(){return this.model.getModel("axisTick")},t.prototype.getBandWidth=function(){var t=this._extent,e=this.scale.getExtent(),n=e[1]-e[0]+(this.onBand?1:0);0===n&&(n=1);var i=Math.abs(t[1]-t[0]);return Math.abs(i)/n},t.prototype.calculateCategoryInterval=function(){return function(t){var e=function(t){var 
e=t.getLabelModel();return{axisRotate:t.getRotate?t.getRotate():t.isHorizontal&&!t.isHorizontal()?90:0,labelRotate:e.get("rotate")||0,font:e.getFont()}}(t),n=x_(t),i=(e.axisRotate-e.labelRotate)/180*Math.PI,r=t.scale,o=r.getExtent(),a=r.count();if(o[1]-o[0]<1)return 0;var s=1;a>40&&(s=Math.max(1,Math.floor(a/40)));for(var l=o[0],u=t.dataToCoord(l+1)-t.dataToCoord(l),h=Math.abs(u*Math.cos(i)),c=Math.abs(u*Math.sin(i)),p=0,d=0;l<=o[1];l+=s){var f,g,y=br(n({value:l}),e.font,"center","top");f=1.3*y.width,g=1.3*y.height,p=Math.max(p,f,7),d=Math.max(d,g,7)}var v=p/h,m=d/c;isNaN(v)&&(v=1/0),isNaN(m)&&(m=1/0);var x=Math.max(0,Math.floor(Math.min(v,m))),_=U_(t.model),b=t.getExtent(),w=_.lastAutoInterval,S=_.lastTickCount;return null!=w&&null!=S&&Math.abs(w-x)<=1&&Math.abs(S-a)<=1&&w>x&&_.axisExtent0===b[0]&&_.axisExtent1===b[1]?x=w:(_.lastTickCount=a,_.lastAutoInterval=x,_.axisExtent0=b[0],_.axisExtent1=b[1]),x}(this)},t}();function ib(t,e){var n=(t[1]-t[0])/e/2;t[0]+=n,t[1]-=n}var rb=2*Math.PI,ob=os.CMD,ab=["top","right","bottom","left"];function sb(t,e,n,i,r){var o=n.width,a=n.height;switch(t){case"top":i.set(n.x+o/2,n.y-e),r.set(0,-1);break;case"bottom":i.set(n.x+o/2,n.y+a+e),r.set(0,1);break;case"left":i.set(n.x-e,n.y+a/2),r.set(-1,0);break;case"right":i.set(n.x+o+e,n.y+a/2),r.set(1,0)}}function lb(t,e,n,i,r,o,a,s,l){a-=t,s-=e;var u=Math.sqrt(a*a+s*s),h=(a/=u)*n+t,c=(s/=u)*n+e;if(Math.abs(i-r)%rb<1e-4)return l[0]=h,l[1]=c,u-n;if(o){var p=i;i=hs(r),r=hs(p)}else i=hs(i),r=hs(r);i>r&&(r+=rb);var d=Math.atan2(s,a);if(d<0&&(d+=rb),d>=i&&d<=r||d+rb>=i&&d+rb<=r)return l[0]=h,l[1]=c,u-n;var f=n*Math.cos(i)+t,g=n*Math.sin(i)+e,y=n*Math.cos(r)+t,v=n*Math.sin(r)+e,m=(f-a)*(f-a)+(g-s)*(g-s),x=(y-a)*(y-a)+(v-s)*(v-s);return m0){e=e/180*Math.PI,fb.fromArray(t[0]),gb.fromArray(t[1]),yb.fromArray(t[2]),De.sub(vb,fb,gb),De.sub(mb,yb,gb);var n=vb.len(),i=mb.len();if(!(n<.001||i<.001)){vb.scale(1/n),mb.scale(1/i);var 
r=vb.dot(mb);if(Math.cos(e)1&&De.copy(bb,yb),bb.toArray(t[1])}}}}function Sb(t,e,n){if(n<=180&&n>0){n=n/180*Math.PI,fb.fromArray(t[0]),gb.fromArray(t[1]),yb.fromArray(t[2]),De.sub(vb,gb,fb),De.sub(mb,yb,gb);var i=vb.len(),r=mb.len();if(!(i<.001||r<.001))if(vb.scale(1/i),mb.scale(1/r),vb.dot(e)=a)De.copy(bb,yb);else{bb.scaleAndAdd(mb,o/Math.tan(Math.PI/2-s));var l=yb.x!==gb.x?(bb.x-gb.x)/(yb.x-gb.x):(bb.y-gb.y)/(yb.y-gb.y);if(isNaN(l))return;l<0?De.copy(bb,gb):l>1&&De.copy(bb,yb)}bb.toArray(t[1])}}}function Mb(t,e,n,i){var r="normal"===n,o=r?t:t.ensureState(n);o.ignore=e;var a=i.get("smooth");a&&!0===a&&(a=.3),o.shape=o.shape||{},a>0&&(o.shape.smooth=a);var s=i.getModel("lineStyle").getLineStyle();r?t.useStyle(s):o.style=s}function Ib(t,e){var n=e.smooth,i=e.points;if(i)if(t.moveTo(i[0][0],i[0][1]),n>0&&i.length>=3){var r=Vt(i[0],i[1]),o=Vt(i[1],i[2]);if(!r||!o)return t.lineTo(i[1][0],i[1][1]),void t.lineTo(i[2][0],i[2][1]);var a=Math.min(r,o)*n,s=Gt([],i[1],i[0],a/r),l=Gt([],i[1],i[2],a/o),u=Gt([],s,l,.5);t.bezierCurveTo(s[0],s[1],s[0],s[1],u[0],u[1]),t.bezierCurveTo(l[0],l[1],l[0],l[1],i[2][0],i[2][1])}else for(var h=1;h0&&o&&_(-h/a,0,a);var f,g,y=t[0],v=t[a-1];return m(),f<0&&b(-f,.8),g<0&&b(g,.8),m(),x(f,g,1),x(g,f,-1),m(),f<0&&w(-f),g<0&&w(g),u}function m(){f=y.rect[e]-i,g=r-v.rect[e]-v.rect[n]}function x(t,e,n){if(t<0){var i=Math.min(e,-t);if(i>0){_(i*n,0,a);var r=i+t;r<0&&b(-r*n,1)}else b(-t*n,1)}}function _(n,i,r){0!==n&&(u=!0);for(var o=i;o0)for(l=0;l0;l--){_(-(o[l-1]*c),l,a)}}}function w(t){var e=t<0?-1:1;t=Math.abs(t);for(var n=Math.ceil(t/(a-1)),i=0;i0?_(n,0,i+1):_(-n,a-i-1,a),(t-=n)<=0)return}}function kb(t,e,n,i){return Ab(t,"y","height",e,n,i)}function Lb(t){var e=[];t.sort((function(t,e){return e.priority-t.priority}));var n=new ze(0,0,0,0);function i(t){if(!t.ignore){var e=t.ensureState("emphasis");null==e.ignore&&(e.ignore=!1)}t.ignore=!0}for(var r=0;r=0&&n.attr(d.oldLayoutSelect),P(u,"emphasis")>=0&&n.attr(d.oldLayoutEmphasis)),fh(n,s,e,a)}else 
if(n.attr(s),!uc(n).valueAnimation){var h=rt(n.style.opacity,1);n.style.opacity=0,gh(n,{style:{opacity:h}},e,a)}if(d.oldLayout=s,n.states.select){var c=d.oldLayoutSelect={};Vb(c,s,Bb),Vb(c,n.states.select,Bb)}if(n.states.emphasis){var p=d.oldLayoutEmphasis={};Vb(p,s,Bb),Vb(p,n.states.emphasis,Bb)}cc(n,a,l,e,e)}if(i&&!i.ignore&&!i.invisible){r=(d=zb(i)).oldLayout;var d,f={points:i.shape.points};r?(i.attr({shape:r}),fh(i,{shape:f},e)):(i.setShape(f),i.style.strokePercent=0,gh(i,{style:{strokePercent:1}},e)),d.oldLayout=f}},t}(),Gb=Oo();var Wb=Math.sin,Hb=Math.cos,Yb=Math.PI,Xb=2*Math.PI,Ub=180/Yb,Zb=function(){function t(){}return t.prototype.reset=function(t){this._start=!0,this._d=[],this._str="",this._p=Math.pow(10,t||4)},t.prototype.moveTo=function(t,e){this._add("M",t,e)},t.prototype.lineTo=function(t,e){this._add("L",t,e)},t.prototype.bezierCurveTo=function(t,e,n,i,r,o){this._add("C",t,e,n,i,r,o)},t.prototype.quadraticCurveTo=function(t,e,n,i){this._add("Q",t,e,n,i)},t.prototype.arc=function(t,e,n,i,r,o){this.ellipse(t,e,n,n,0,i,r,o)},t.prototype.ellipse=function(t,e,n,i,r,o,a,s){var l=a-o,u=!s,h=Math.abs(l),c=hi(h-Xb)||(u?l>=Xb:-l>=Xb),p=l>0?l%Xb:l%Xb+Xb,d=!1;d=!!c||!hi(h)&&p>=Yb==!!u;var f=t+n*Hb(o),g=e+i*Wb(o);this._start&&this._add("M",f,g);var y=Math.round(r*Ub);if(c){var v=1/this._p,m=(u?1:-1)*(Xb-v);this._add("A",n,i,y,1,+u,t+n*Hb(o+m),e+i*Wb(o+m)),v>.01&&this._add("A",n,i,y,0,+u,f,g)}else{var x=t+n*Hb(a),_=e+i*Wb(a);this._add("A",n,i,y,+d,+u,x,_)}},t.prototype.rect=function(t,e,n,i){this._add("M",t,e),this._add("l",n,0),this._add("l",0,i),this._add("l",-n,0),this._add("Z")},t.prototype.closePath=function(){this._d.length>0&&this._add("Z")},t.prototype._add=function(t,e,n,i,r,o,a,s,l){for(var u=[],h=this._p,c=1;c"}(r,o)+("style"!==r?re(a):a||"")+(i?""+n+z(i,(function(e){return t(e)})).join(n)+n:"")+("")}(t)}function 
rw(t){return{zrId:t,shadowCache:{},patternCache:{},gradientCache:{},clipPathCache:{},defs:{},cssNodes:{},cssAnims:{},cssClassIdx:0,cssAnimIdx:0,shadowIdx:0,gradientIdx:0,patternIdx:0,clipPathIdx:0}}function ow(t,e,n,i){return nw("svg","root",{width:t,height:e,xmlns:Qb,"xmlns:xlink":tw,version:"1.1",baseProfile:"full",viewBox:!!i&&"0 0 "+t+" "+e},n)}var aw={cubicIn:"0.32,0,0.67,0",cubicOut:"0.33,1,0.68,1",cubicInOut:"0.65,0,0.35,1",quadraticIn:"0.11,0,0.5,0",quadraticOut:"0.5,1,0.89,1",quadraticInOut:"0.45,0,0.55,1",quarticIn:"0.5,0,0.75,0",quarticOut:"0.25,1,0.5,1",quarticInOut:"0.76,0,0.24,1",quinticIn:"0.64,0,0.78,0",quinticOut:"0.22,1,0.36,1",quinticInOut:"0.83,0,0.17,1",sinusoidalIn:"0.12,0,0.39,0",sinusoidalOut:"0.61,1,0.88,1",sinusoidalInOut:"0.37,0,0.63,1",exponentialIn:"0.7,0,0.84,0",exponentialOut:"0.16,1,0.3,1",exponentialInOut:"0.87,0,0.13,1",circularIn:"0.55,0,1,0.45",circularOut:"0,0.55,0.45,1",circularInOut:"0.85,0,0.15,1"},sw="transform-origin";function lw(t,e,n){var i=A({},t.shape);A(i,e),t.buildPath(n,i);var r=new Zb;return r.reset(_i(t)),n.rebuildPath(r,1),r.generateStr(),r.getStr()}function uw(t,e){var n=e.originX,i=e.originY;(n||i)&&(t[sw]=n+"px "+i+"px")}var hw={fill:"fill",opacity:"opacity",lineWidth:"stroke-width",lineDashOffset:"stroke-dashoffset"};function cw(t,e){var n=e.zrId+"-ani-"+e.cssAnimIdx++;return e.cssAnims[n]=t,n}function pw(t){return U(t)?aw[t]?"cubic-bezier("+aw[t]+")":Pn(t)?t:"":""}function dw(t,e,n,i){var r=t.animators,o=r.length,a=[];if(t instanceof th){var s=function(t,e,n){var i,r,o=t.shape.paths,a={};if(E(o,(function(t){var e=rw(n.zrId);e.animation=!0,dw(t,{},e,!0);var o=e.cssAnims,s=e.cssNodes,l=G(o),u=l.length;if(u){var h=o[r=l[u-1]];for(var c in h){var p=h[c];a[c]=a[c]||{d:""},a[c].d+=p.d||""}for(var d in s){var f=s[d].animation;f.indexOf(r)>=0&&(i=f)}}})),i){e.d=!1;var s=cw(a,n);return i.replace(r,s)}}(t,e,n);if(s)a.push(s);else if(!o)return}else if(!o)return;for(var l={},u=0;u0})).length)return cw(h,n)+" "+r[0]+" 
both"}for(var y in l){(s=g(l[y]))&&a.push(s)}if(a.length){var v=n.zrId+"-cls-"+n.cssClassIdx++;n.cssNodes["."+v]={animation:a.join(",")},e.class=v}}var fw=Math.round;function gw(t){return t&&U(t.src)}function yw(t){return t&&X(t.toDataURL)}function vw(t,e,n,i){Jb((function(r,o){var a="fill"===r||"stroke"===r;a&&mi(o)?Cw(e,t,r,i):a&&gi(o)?Dw(n,t,r,i):t[r]=o}),e,n,!1),function(t,e,n){var i=t.style;if(function(t){return t&&(t.shadowBlur||t.shadowOffsetX||t.shadowOffsetY)}(i)){var r=function(t){var e=t.style,n=t.getGlobalScale();return[e.shadowColor,(e.shadowBlur||0).toFixed(2),(e.shadowOffsetX||0).toFixed(2),(e.shadowOffsetY||0).toFixed(2),n[0],n[1]].join(",")}(t),o=n.shadowCache,a=o[r];if(!a){var s=t.getGlobalScale(),l=s[0],u=s[1];if(!l||!u)return;var h=i.shadowOffsetX||0,c=i.shadowOffsetY||0,p=i.shadowBlur,d=li(i.shadowColor),f=d.opacity,g=d.color,y=p/2/l+" "+p/2/u;a=n.zrId+"-s"+n.shadowIdx++,n.defs[a]=nw("filter",a,{id:a,x:"-100%",y:"-100%",width:"300%",height:"300%"},[nw("feDropShadow","",{dx:h/l,dy:c/u,stdDeviation:y,"flood-color":g,"flood-opacity":f})]),o[r]=a}e.filter=xi(a)}}(n,t,i)}function mw(t){return hi(t[0]-1)&&hi(t[1])&&hi(t[2])&&hi(t[3]-1)}function xw(t,e,n){if(e&&(!function(t){return hi(t[4])&&hi(t[5])}(e)||!mw(e))){var i=n?10:1e4;t.transform=mw(e)?"translate("+fw(e[4]*i)/i+" "+fw(e[5]*i)/i+")":function(t){return"matrix("+ci(t[0])+","+ci(t[1])+","+ci(t[2])+","+ci(t[3])+","+pi(t[4])+","+pi(t[5])+")"}(e)}}function _w(t,e,n){for(var i=t.points,r=[],o=0;ol?Hw(t,null==n[c+1]?null:n[c+1].elm,n,s,c):Yw(t,e,a,l))}(n,i,r):Bw(r)?(Bw(t.text)&&Ew(n,""),Hw(n,null,r,0,r.length-1)):Bw(i)?Yw(n,i,0,i.length-1):Bw(t.text)&&Ew(n,""):t.text!==e.text&&(Bw(i)&&Yw(n,i,0,i.length-1),Ew(n,e.text)))}var Zw=0,jw=function(){function t(t,e,n){if(this.type="svg",this.refreshHover=qw("refreshHover"),this.configLayer=qw("configLayer"),this.storage=e,this._opts=n=A({},n),this.root=t,this._id="zr"+Zw++,this._oldVNode=ow(n.width,n.height),t&&!n.ssr){var 
i=this._viewport=document.createElement("div");i.style.cssText="position:relative;overflow:hidden";var r=this._svgDom=this._oldVNode.elm=ew("svg");Xw(null,this._oldVNode),i.appendChild(r),t.appendChild(i)}this.resize(n.width,n.height)}return t.prototype.getType=function(){return this.type},t.prototype.getViewportRoot=function(){return this._viewport},t.prototype.getViewportRootOffset=function(){var t=this.getViewportRoot();if(t)return{offsetLeft:t.offsetLeft||0,offsetTop:t.offsetTop||0}},t.prototype.getSvgDom=function(){return this._svgDom},t.prototype.refresh=function(){if(this.root){var t=this.renderToVNode({willUpdate:!0});t.attrs.style="position:absolute;left:0;top:0;user-select:none",function(t,e){if(Gw(t,e))Uw(t,e);else{var n=t.elm,i=Rw(n);Ww(e),null!==i&&(Lw(i,e.elm,Nw(n)),Yw(i,[t],0,0))}}(this._oldVNode,t),this._oldVNode=t}},t.prototype.renderOneToVNode=function(t){return Tw(t,rw(this._id))},t.prototype.renderToVNode=function(t){t=t||{};var e=this.storage.getDisplayList(!0),n=this._width,i=this._height,r=rw(this._id);r.animation=t.animation,r.willUpdate=t.willUpdate,r.compress=t.compress;var o=[],a=this._bgVNode=function(t,e,n,i){var r;if(n&&"none"!==n)if(r=nw("rect","bg",{width:t,height:e,x:"0",y:"0",id:"0"}),mi(n))Cw({fill:n},r.attrs,"fill",i);else if(gi(n))Dw({style:{fill:n},dirty:bt,getBoundingRect:function(){return{width:t,height:e}}},r.attrs,"fill",i);else{var o=li(n),a=o.color,s=o.opacity;r.attrs.fill=a,s<1&&(r.attrs["fill-opacity"]=s)}return r}(n,i,this._backgroundColor,r);a&&o.push(a);var s=t.compress?null:this._mainVNode=nw("g","main",{},[]);this._paintList(e,r,s?s.children:o),s&&o.push(s);var l=z(G(r.defs),(function(t){return r.defs[t]}));if(l.length&&o.push(nw("defs","defs",{},l)),t.animation){var u=function(t,e,n){var i=(n=n||{}).newline?"\n":"",r=" {"+i,o=i+"}",a=z(G(t),(function(e){return e+r+z(G(t[e]),(function(n){return n+":"+t[e][n]+";"})).join(i)+o})).join(i),s=z(G(e),(function(t){return"@keyframes "+t+r+z(G(e[t]),(function(n){return 
n+r+z(G(e[t][n]),(function(i){var r=e[t][n][i];return"d"===i&&(r='path("'+r+'")'),i+":"+r+";"})).join(i)+o})).join(i)+o})).join(i);return a||s?[""].join(i):""}(r.cssNodes,r.cssAnims,{newline:!0});if(u){var h=nw("style","stl",{},[],u);o.push(h)}}return ow(n,i,o,t.useViewBox)},t.prototype.renderToString=function(t){return t=t||{},iw(this.renderToVNode({animation:rt(t.cssAnimation,!0),willUpdate:!1,compress:!0,useViewBox:rt(t.useViewBox,!0)}),{newline:!0})},t.prototype.setBackgroundColor=function(t){this._backgroundColor=t},t.prototype.getSvgRoot=function(){return this._mainVNode&&this._mainVNode.elm},t.prototype._paintList=function(t,e,n){for(var i,r,o=t.length,a=[],s=0,l=0,u=0;u=0&&(!c||!r||c[f]!==r[f]);f--);for(var g=d-1;g>f;g--)i=a[--s-1];for(var y=f+1;y=a)}}for(var h=this.__startIndex;h15)break}n.prevElClipPaths&&u.restore()};if(p)if(0===p.length)s=l.__endIndex;else for(var _=d.dpr,b=0;b0&&t>i[0]){for(s=0;st);s++);a=n[i[s]]}if(i.splice(s+1,0,t),n[t]=e,!e.virtual)if(a){var l=a.dom;l.nextSibling?o.insertBefore(e.dom,l.nextSibling):o.appendChild(e.dom)}else o.firstChild?o.insertBefore(e.dom,o.firstChild):o.appendChild(e.dom);e.__painter=this}},t.prototype.eachLayer=function(t,e){for(var n=this._zlevelList,i=0;i0?tS:0),this._needsManuallyCompositing),u.__builtin__||I("ZLevel "+l+" has been used by unkown layer "+u.id),u!==o&&(u.__used=!0,u.__startIndex!==r&&(u.__dirty=!0),u.__startIndex=r,u.incremental?u.__drawIndex=-1:u.__drawIndex=r,e(r),o=u),1&s.__dirty&&!s.__inHover&&(u.__dirty=!0,u.incremental&&u.__drawIndex<0&&(u.__drawIndex=r))}e(r),this.eachBuiltinLayer((function(t,e){!t.__used&&t.getElementCount()>0&&(t.__dirty=!0,t.__startIndex=t.__endIndex=t.__drawIndex=0),t.__dirty&&t.__drawIndex<0&&(t.__drawIndex=t.__startIndex)}))},t.prototype.clear=function(){return 
this.eachBuiltinLayer(this._clearLayer),this},t.prototype._clearLayer=function(t){t.clear()},t.prototype.setBackgroundColor=function(t){this._backgroundColor=t,E(this._layers,(function(t){t.setUnpainted()}))},t.prototype.configLayer=function(t,e){if(e){var n=this._layerConfig;n[t]?C(n[t],e,!0):n[t]=e;for(var i=0;i-1&&(s.style.stroke=s.style.fill,s.style.fill="#fff",s.style.lineWidth=2),e},e.type="series.line",e.dependencies=["grid","polar"],e.defaultOption={z:3,coordinateSystem:"cartesian2d",legendHoverLink:!0,clip:!0,label:{position:"top"},endLabel:{show:!1,valueAnimation:!0,distance:8},lineStyle:{width:2,type:"solid"},emphasis:{scale:!0},step:!1,smooth:!1,smoothMonotone:null,symbol:"emptyCircle",symbolSize:4,symbolRotate:null,showSymbol:!0,showAllSymbol:"auto",connectNulls:!1,sampling:"none",animationEasing:"linear",progressive:0,hoverLayerThreshold:1/0,universalTransition:{divideShape:"clone"},triggerLineEvent:!1},e}(mg);function iS(t,e){var n=t.mapDimensionsAll("defaultedLabel"),i=n.length;if(1===i){var r=gf(t,e,n[0]);return null!=r?r+"":null}if(i){for(var o=[],a=0;a=0&&i.push(e[o])}return i.join(" ")}var oS=function(t){function e(e,n,i,r){var o=t.call(this)||this;return o.updateData(e,n,i,r),o}return n(e,t),e.prototype._createSymbol=function(t,e,n,i,r){this.removeAll();var o=Wy(t,-1,-1,2,2,null,r);o.attr({z2:100,culling:!0,scaleX:i[0]/2,scaleY:i[1]/2}),o.drift=aS,this._symbolType=t,this.add(o)},e.prototype.stopSymbolAnimation=function(t){this.childAt(0).stopAnimation(null,t)},e.prototype.getSymbolType=function(){return this._symbolType},e.prototype.getSymbolPath=function(){return this.childAt(0)},e.prototype.highlight=function(){kl(this.childAt(0))},e.prototype.downplay=function(){Ll(this.childAt(0))},e.prototype.setZ=function(t,e){var n=this.childAt(0);n.zlevel=t,n.z=e},e.prototype.setDraggable=function(t,e){var n=this.childAt(0);n.draggable=t,n.cursor=!e&&t?"move":n.cursor},e.prototype.updateData=function(t,n,i,r){this.silent=!1;var 
o=t.getItemVisual(n,"symbol")||"circle",a=t.hostModel,s=e.getSymbolSize(t,n),l=o!==this._symbolType,u=r&&r.disableAnimation;if(l){var h=t.getItemVisual(n,"symbolKeepAspect");this._createSymbol(o,t,n,s,h)}else{(p=this.childAt(0)).silent=!1;var c={scaleX:s[0]/2,scaleY:s[1]/2};u?p.attr(c):fh(p,c,a,n),_h(p)}if(this._updateCommon(t,n,s,i,r),l){var p=this.childAt(0);if(!u){c={scaleX:this._sizeX,scaleY:this._sizeY,style:{opacity:p.style.opacity}};p.scaleX=p.scaleY=0,p.style.opacity=0,gh(p,c,a,n)}}u&&this.childAt(0).stopAnimation("leave")},e.prototype._updateCommon=function(t,e,n,i,r){var o,a,s,l,u,h,c,p,d,f=this.childAt(0),g=t.hostModel;if(i&&(o=i.emphasisItemStyle,a=i.blurItemStyle,s=i.selectItemStyle,l=i.focus,u=i.blurScope,c=i.labelStatesModels,p=i.hoverScale,d=i.cursorStyle,h=i.emphasisDisabled),!i||t.hasItemOption){var y=i&&i.itemModel?i.itemModel:t.getItemModel(e),v=y.getModel("emphasis");o=v.getModel("itemStyle").getItemStyle(),s=y.getModel(["select","itemStyle"]).getItemStyle(),a=y.getModel(["blur","itemStyle"]).getItemStyle(),l=v.get("focus"),u=v.get("blurScope"),h=v.get("disabled"),c=ec(y),p=v.getShallow("scale"),d=y.getShallow("cursor")}var m=t.getItemVisual(e,"symbolRotate");f.attr("rotation",(m||0)*Math.PI/180||0);var x=Yy(t.getItemVisual(e,"symbolOffset"),n);x&&(f.x=x[0],f.y=x[1]),d&&f.attr("cursor",d);var _=t.getItemVisual(e,"style"),b=_.fill;if(f instanceof ks){var w=f.style;f.useStyle(A({image:w.image,x:w.x,y:w.y,width:w.width,height:w.height},_))}else f.__isEmptyBrush?f.useStyle(A({},_)):f.useStyle(_),f.style.decal=null,f.setColor(b,r&&r.symbolInnerColor),f.style.strokeNoScale=!0;var S=t.getItemVisual(e,"liftZ"),M=this._z2;null!=S?null==M&&(this._z2=f.z2,f.z2+=S):null!=M&&(f.z2=M,this._z2=null);var I=r&&r.useNameLabel;tc(f,c,{labelFetcher:g,labelDataIndex:e,defaultText:function(e){return I?t.getName(e):iS(t,e)},inheritColor:b,defaultOpacity:_.opacity}),this._sizeX=n[0]/2,this._sizeY=n[1]/2;var 
T=f.ensureState("emphasis");T.style=o,f.ensureState("select").style=s,f.ensureState("blur").style=a;var C=null==p||!0===p?Math.max(1.1,3/this._sizeY):isFinite(p)&&p>0?+p:1;T.scaleX=this._sizeX*C,T.scaleY=this._sizeY*C,this.setSymbolScale(1),Yl(this,l,u,h)},e.prototype.setSymbolScale=function(t){this.scaleX=this.scaleY=t},e.prototype.fadeOut=function(t,e,n){var i=this.childAt(0),r=Qs(this).dataIndex,o=n&&n.animation;if(this.silent=i.silent=!0,n&&n.fadeLabel){var a=i.getTextContent();a&&vh(a,{style:{opacity:0}},e,{dataIndex:r,removeOpt:o,cb:function(){i.removeTextContent()}})}else i.removeTextContent();vh(i,{style:{opacity:0},scaleX:0,scaleY:0},e,{dataIndex:r,cb:t,removeOpt:o})},e.getSymbolSize=function(t,e){return Hy(t.getItemVisual(e,"symbolSize"))},e}(zr);function aS(t,e){this.parent.drift(t,e)}function sS(t,e,n,i){return e&&!isNaN(e[0])&&!isNaN(e[1])&&!(i.isIgnore&&i.isIgnore(n))&&!(i.clipShape&&!i.clipShape.contain(e[0],e[1]))&&"none"!==t.getItemVisual(n,"symbol")}function lS(t){return null==t||q(t)||(t={isIgnore:t}),t||{}}function uS(t){var e=t.hostModel,n=e.getModel("emphasis");return{emphasisItemStyle:n.getModel("itemStyle").getItemStyle(),blurItemStyle:e.getModel(["blur","itemStyle"]).getItemStyle(),selectItemStyle:e.getModel(["select","itemStyle"]).getItemStyle(),focus:n.get("focus"),blurScope:n.get("blurScope"),emphasisDisabled:n.get("disabled"),hoverScale:n.get("scale"),labelStatesModels:ec(e),cursorStyle:e.get("cursor")}}var hS=function(){function t(t){this.group=new zr,this._SymbolCtor=t||oS}return t.prototype.updateData=function(t,e){this._progressiveEls=null,e=lS(e);var n=this.group,i=t.hostModel,r=this._data,o=this._SymbolCtor,a=e.disableAnimation,s=uS(t),l={disableAnimation:a},u=e.getSymbolPoint||function(e){return t.getItemLayout(e)};r||n.removeAll(),t.diff(r).add((function(i){var r=u(i);if(sS(t,r,i,e)){var a=new o(t,i,s,l);a.setPosition(r),t.setItemGraphicEl(i,a),n.add(a)}})).update((function(h,c){var 
p=r.getItemGraphicEl(c),d=u(h);if(sS(t,d,h,e)){var f=t.getItemVisual(h,"symbol")||"circle",g=p&&p.getSymbolType&&p.getSymbolType();if(!p||g&&g!==f)n.remove(p),(p=new o(t,h,s,l)).setPosition(d);else{p.updateData(t,h,s,l);var y={x:d[0],y:d[1]};a?p.attr(y):fh(p,y,i)}n.add(p),t.setItemGraphicEl(h,p)}else n.remove(p)})).remove((function(t){var e=r.getItemGraphicEl(t);e&&e.fadeOut((function(){n.remove(e)}),i)})).execute(),this._getSymbolPoint=u,this._data=t},t.prototype.updateLayout=function(){var t=this,e=this._data;e&&e.eachItemGraphicEl((function(e,n){var i=t._getSymbolPoint(n);e.setPosition(i),e.markRedraw()}))},t.prototype.incrementalPrepareUpdate=function(t){this._seriesScope=uS(t),this._data=null,this.group.removeAll()},t.prototype.incrementalUpdate=function(t,e,n){function i(t){t.isGroup||(t.incremental=!0,t.ensureState("emphasis").hoverLayer=!0)}this._progressiveEls=[],n=lS(n);for(var r=t.start;r0?n=i[0]:i[1]<0&&(n=i[1]);return n}(r,n),a=i.dim,s=r.dim,l=e.mapDimension(s),u=e.mapDimension(a),h="x"===s||"radius"===s?1:0,c=z(t.dimensions,(function(t){return e.mapDimension(t)})),p=!1,d=e.getCalculationInfo("stackResultDimension");return gx(e,c[0])&&(p=!0,c[0]=d),gx(e,c[1])&&(p=!0,c[1]=d),{dataDimsForPoint:c,valueStart:o,valueAxisDim:s,baseAxisDim:a,stacked:!!p,valueDim:l,baseDim:u,baseDataOffset:h,stackedOverDimension:e.getCalculationInfo("stackedOverDimension")}}function pS(t,e,n,i){var r=NaN;t.stacked&&(r=n.get(n.getCalculationInfo("stackedOverDimension"),i)),isNaN(r)&&(r=t.valueStart);var o=t.baseDataOffset,a=[];return a[o]=n.get(t.baseDim,i),a[1-o]=r,e.dataToPoint(a)}var dS=Math.min,fS=Math.max;function gS(t,e){return isNaN(t)||isNaN(e)}function yS(t,e,n,i,r,o,a,s,l){for(var u,h,c,p,d,f,g=n,y=0;y=r||g<0)break;if(gS(v,m)){if(l){g+=o;continue}break}if(g===n)t[o>0?"moveTo":"lineTo"](v,m),c=v,p=m;else{var x=v-u,_=m-h;if(x*x+_*_<.5){g+=o;continue}if(a>0){for(var b=g+o,w=e[2*b],S=e[2*b+1];w===v&&S===m&&y=i||gS(w,S))d=v,f=m;else{T=w-u,C=S-h;var 
k=v-u,L=w-v,P=m-h,O=S-m,R=void 0,N=void 0;if("x"===s){var E=T>0?1:-1;d=v-E*(R=Math.abs(k))*a,f=m,D=v+E*(N=Math.abs(L))*a,A=m}else if("y"===s){var z=C>0?1:-1;d=v,f=m-z*(R=Math.abs(P))*a,D=v,A=m+z*(N=Math.abs(O))*a}else R=Math.sqrt(k*k+P*P),d=v-T*a*(1-(I=(N=Math.sqrt(L*L+O*O))/(N+R))),f=m-C*a*(1-I),A=m+C*a*I,D=dS(D=v+T*a*I,fS(w,v)),A=dS(A,fS(S,m)),D=fS(D,dS(w,v)),f=m-(C=(A=fS(A,dS(S,m)))-m)*R/N,d=dS(d=v-(T=D-v)*R/N,fS(u,v)),f=dS(f,fS(h,m)),D=v+(T=v-(d=fS(d,dS(u,v))))*N/R,A=m+(C=m-(f=fS(f,dS(h,m))))*N/R}t.bezierCurveTo(c,p,d,f,v,m),c=D,p=A}else t.lineTo(v,m)}u=v,h=m,g+=o}return y}var vS=function(){this.smooth=0,this.smoothConstraint=!0},mS=function(t){function e(e){var n=t.call(this,e)||this;return n.type="ec-polyline",n}return n(e,t),e.prototype.getDefaultStyle=function(){return{stroke:"#000",fill:null}},e.prototype.getDefaultShape=function(){return new vS},e.prototype.buildPath=function(t,e){var n=e.points,i=0,r=n.length/2;if(e.connectNulls){for(;r>0&&gS(n[2*r-2],n[2*r-1]);r--);for(;i=0){var y=a?(h-i)*g+i:(u-n)*g+n;return a?[t,y]:[y,t]}n=u,i=h;break;case o.C:u=r[l++],h=r[l++],c=r[l++],p=r[l++],d=r[l++],f=r[l++];var v=a?_n(n,u,c,d,t,s):_n(i,h,p,f,t,s);if(v>0)for(var m=0;m=0){y=a?mn(i,h,p,f,x):mn(n,u,c,d,x);return a?[t,y]:[y,t]}}n=d,i=f}}},e}(Is),xS=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e}(vS),_S=function(t){function e(e){var n=t.call(this,e)||this;return n.type="ec-polygon",n}return n(e,t),e.prototype.getDefaultShape=function(){return new xS},e.prototype.buildPath=function(t,e){var n=e.points,i=e.stackedOnPoints,r=0,o=n.length/2,a=e.smoothMonotone;if(e.connectNulls){for(;o>0&&gS(n[2*o-2],n[2*o-1]);o--);for(;r=0;a--){var s=t.getDimensionInfo(i[a].dimension);if("x"===(r=s&&s.coordDim)||"y"===r){o=i[a];break}}if(o){var l=e.getAxis(r),u=z(o.stops,(function(t){return{coord:l.toGlobalCoord(l.dataToCoord(t.value)),color:t.color}})),h=u.length,c=o.outerColors.slice();h&&u[0].coord>u[h-1].coord&&(u.reverse(),c.reverse());var 
p=function(t,e){var n,i,r=[],o=t.length;function a(t,e,n){var i=t.coord;return{coord:n,color:ti((n-i)/(e.coord-i),[t.color,e.color])}}for(var s=0;se){i?r.push(a(i,l,e)):n&&r.push(a(n,l,0),a(n,l,e));break}n&&(r.push(a(n,l,0)),n=null),r.push(l),i=l}}return r}(u,"x"===r?n.getWidth():n.getHeight()),d=p.length;if(!d&&h)return u[0].coord<0?c[1]?c[1]:u[h-1].color:c[0]?c[0]:u[0].color;var f=p[0].coord-10,g=p[d-1].coord+10,y=g-f;if(y<.001)return"transparent";E(p,(function(t){t.offset=(t.coord-f)/y})),p.push({offset:d?p[d-1].offset:.5,color:c[1]||"transparent"}),p.unshift({offset:d?p[0].offset:.5,color:c[0]||"transparent"});var v=new nh(0,0,0,0,p,!0);return v[r]=f,v[r+"2"]=g,v}}}function LS(t,e,n){var i=t.get("showAllSymbol"),r="auto"===i;if(!i||r){var o=n.getAxesByScale("ordinal")[0];if(o&&(!r||!function(t,e){var n=t.getExtent(),i=Math.abs(n[1]-n[0])/t.scale.count();isNaN(i)&&(i=0);for(var r=e.count(),o=Math.max(1,Math.round(r/5)),a=0;ai)return!1;return!0}(o,e))){var a=e.mapDimension(o.dim),s={};return E(o.getViewLabels(),(function(t){var e=o.scale.getRawOrdinalNumber(t.tickValue);s[e]=1})),function(t){return!s.hasOwnProperty(e.get(a,t))}}}}function PS(t,e){return[t[2*e],t[2*e+1]]}function OS(t){if(t.get(["endLabel","show"]))return!0;for(var e=0;e0&&"bolder"===t.get(["emphasis","lineStyle","width"]))&&(d.getState("emphasis").style.lineWidth=+d.style.lineWidth+1);Qs(d).seriesIndex=t.seriesIndex,Yl(d,L,P,O);var R=DS(t.get("smooth")),N=t.get("smoothMonotone");if(d.setShape({smooth:R,smoothMonotone:N,connectNulls:w}),f){var E=a.getCalculationInfo("stackedOnSeries"),z=0;f.useStyle(k(l.getAreaStyle(),{fill:C,opacity:.7,lineJoin:"bevel",decal:a.getVisual("style").decal})),E&&(z=DS(E.get("smooth"))),f.setShape({smooth:R,stackedOnSmooth:z,smoothMonotone:N,connectNulls:w}),jl(f,t,"areaStyle"),Qs(f).seriesIndex=t.seriesIndex,Yl(f,L,P,O)}var 
V=function(t){i._changePolyState(t)};a.eachItemGraphicEl((function(t){t&&(t.onHoverStateChange=V)})),this._polyline.onHoverStateChange=V,this._data=a,this._coordSys=r,this._stackedOnPoints=_,this._points=u,this._step=T,this._valueOrigin=m,t.get("triggerLineEvent")&&(this.packEventData(t,d),f&&this.packEventData(t,f))},e.prototype.packEventData=function(t,e){Qs(e).eventData={componentType:"series",componentSubType:"line",componentIndex:t.componentIndex,seriesIndex:t.seriesIndex,seriesName:t.name,seriesType:"line"}},e.prototype.highlight=function(t,e,n,i){var r=t.getData(),o=Po(r,i);if(this._changePolyState("emphasis"),!(o instanceof Array)&&null!=o&&o>=0){var a=r.getLayout("points"),s=r.getItemGraphicEl(o);if(!s){var l=a[2*o],u=a[2*o+1];if(isNaN(l)||isNaN(u))return;if(this._clipShapeForSymbol&&!this._clipShapeForSymbol.contain(l,u))return;var h=t.get("zlevel")||0,c=t.get("z")||0;(s=new oS(r,o)).x=l,s.y=u,s.setZ(h,c);var p=s.getSymbolPath().getTextContent();p&&(p.zlevel=h,p.z=c,p.z2=this._polyline.z2+1),s.__temp=!0,r.setItemGraphicEl(o,s),s.stopSymbolAnimation(!0),this.group.add(s)}s.highlight()}else kg.prototype.highlight.call(this,t,e,n,i)},e.prototype.downplay=function(t,e,n,i){var r=t.getData(),o=Po(r,i);if(this._changePolyState("normal"),null!=o&&o>=0){var a=r.getItemGraphicEl(o);a&&(a.__temp?(r.setItemGraphicEl(o,null),this.group.remove(a)):a.downplay())}else kg.prototype.downplay.call(this,t,e,n,i)},e.prototype._changePolyState=function(t){var e=this._polygon;Il(this._polyline,t),e&&Il(e,t)},e.prototype._newPolyline=function(t){var e=this._polyline;return e&&this._lineGroup.remove(e),e=new mS({shape:{points:t},segmentIgnoreThreshold:2,z2:10}),this._lineGroup.add(e),this._polyline=e,e},e.prototype._newPolygon=function(t,e){var n=this._polygon;return n&&this._lineGroup.remove(n),n=new _S({shape:{points:t,stackedOnPoints:e},segmentIgnoreThreshold:2}),this._lineGroup.add(n),this._polygon=n,n},e.prototype._initSymbolLabelAnimation=function(t,e,n){var 
i,r,o=e.getBaseAxis(),a=o.inverse;"cartesian2d"===e.type?(i=o.isHorizontal(),r=!1):"polar"===e.type&&(i="angle"===o.dim,r=!0);var s=t.hostModel,l=s.get("animationDuration");X(l)&&(l=l(null));var u=s.get("animationDelay")||0,h=X(u)?u(null):u;t.eachItemGraphicEl((function(t,o){var s=t;if(s){var c=[t.x,t.y],p=void 0,d=void 0,f=void 0;if(n)if(r){var g=n,y=e.pointToCoord(c);i?(p=g.startAngle,d=g.endAngle,f=-y[1]/180*Math.PI):(p=g.r0,d=g.r,f=y[0])}else{var v=n;i?(p=v.x,d=v.x+v.width,f=t.x):(p=v.y+v.height,d=v.y,f=t.y)}var m=d===p?0:(f-p)/(d-p);a&&(m=1-m);var x=X(u)?u(o):l*m+h,_=s.getSymbolPath(),b=_.getTextContent();s.attr({scaleX:0,scaleY:0}),s.animateTo({scaleX:1,scaleY:1},{duration:200,setToFinal:!0,delay:x}),b&&b.animateFrom({style:{opacity:0}},{duration:300,delay:x}),_.disableLabelAnimation=!0}}))},e.prototype._initOrUpdateEndLabel=function(t,e,n){var i=t.getModel("endLabel");if(OS(t)){var r=t.getData(),o=this._polyline,a=r.getLayout("points");if(!a)return o.removeTextContent(),void(this._endLabel=null);var s=this._endLabel;s||((s=this._endLabel=new Fs({z2:200})).ignoreClip=!0,o.setTextContent(this._endLabel),o.disableLabelAnimation=!0);var l=function(t){for(var e,n,i=t.length/2;i>0&&(e=t[2*i-2],n=t[2*i-1],isNaN(e)||isNaN(n));i--);return i-1}(a);l>=0&&(tc(o,ec(t,"endLabel"),{inheritColor:n,labelFetcher:t,labelDataIndex:l,defaultText:function(t,e,n){return null!=n?rS(r,n):iS(r,t)},enableTextSetter:!0},function(t,e){var n=e.getBaseAxis(),i=n.isHorizontal(),r=n.inverse,o=i?r?"right":"left":"center",a=i?"middle":r?"top":"bottom";return{normal:{align:t.get("align")||o,verticalAlign:t.get("verticalAlign")||a}}}(i,e)),o.textConfig.position=null)}else this._endLabel&&(this._polyline.removeTextContent(),this._endLabel=null)},e.prototype._endLabelOnDuring=function(t,e,n,i,r,o,a){var s=this._endLabel,l=this._polyline;if(s){t<1&&null==i.originalX&&(i.originalX=s.x,i.originalY=s.y);var 
u=n.getLayout("points"),h=n.hostModel,c=h.get("connectNulls"),p=o.get("precision"),d=o.get("distance")||0,f=a.getBaseAxis(),g=f.isHorizontal(),y=f.inverse,v=e.shape,m=y?g?v.x:v.y+v.height:g?v.x+v.width:v.y,x=(g?d:0)*(y?-1:1),_=(g?0:-d)*(y?-1:1),b=g?"x":"y",w=function(t,e,n){for(var i,r,o=t.length/2,a="x"===n?0:1,s=0,l=-1,u=0;u=e||i>=e&&r<=e){l=u;break}s=u,i=r}else i=r;return{range:[s,l],t:(e-i)/(r-i)}}(u,m,b),S=w.range,M=S[1]-S[0],I=void 0;if(M>=1){if(M>1&&!c){var T=PS(u,S[0]);s.attr({x:T[0]+x,y:T[1]+_}),r&&(I=h.getRawValue(S[0]))}else{(T=l.getPointOn(m,b))&&s.attr({x:T[0]+x,y:T[1]+_});var C=h.getRawValue(S[0]),D=h.getRawValue(S[1]);r&&(I=Wo(n,p,C,D,w.t))}i.lastFrameIndex=S[0]}else{var A=1===t||i.lastFrameIndex>0?S[0]:0;T=PS(u,A);r&&(I=h.getRawValue(A)),s.attr({x:T[0]+x,y:T[1]+_})}if(r){var k=uc(s);"function"==typeof k.setLabelText&&k.setLabelText(I)}}},e.prototype._doUpdateAnimation=function(t,e,n,i,r,o,a){var s=this._polyline,l=this._polygon,u=t.hostModel,h=function(t,e,n,i,r,o,a,s){for(var l=function(t,e){var n=[];return e.diff(t).add((function(t){n.push({cmd:"+",idx:t})})).update((function(t,e){n.push({cmd:"=",idx:e,idx1:t})})).remove((function(t){n.push({cmd:"-",idx:t})})).execute(),n}(t,e),u=[],h=[],c=[],p=[],d=[],f=[],g=[],y=cS(r,e,a),v=t.getLayout("points")||[],m=e.getLayout("points")||[],x=0;x3e3||l&&CS(p,f)>3e3)return s.stopAnimation(),s.setShape({points:d}),void(l&&(l.stopAnimation(),l.setShape({points:d,stackedOnPoints:f})));s.shape.__points=h.current,s.shape.points=c;var g={shape:{points:d}};h.current!==c&&(g.shape.__points=h.next),s.stopAnimation(),fh(s,g,u),l&&(l.setShape({points:c,stackedOnPoints:p}),l.stopAnimation(),fh(l,{shape:{stackedOnPoints:f}},u),s.shape.points!==l.shape.points&&(l.shape.points=s.shape.points));for(var y=[],v=h.status,m=0;me&&(e=t[n]);return isFinite(e)?e:NaN},min:function(t){for(var e=1/0,n=0;n10&&"cartesian2d"===o.type&&r){var 
s=o.getBaseAxis(),l=o.getOtherAxis(s),u=s.getExtent(),h=n.getDevicePixelRatio(),c=Math.abs(u[1]-u[0])*(h||1),p=Math.round(a/c);if(isFinite(p)&&p>1){"lttb"===r&&t.setData(i.lttbDownSample(i.mapDimension(l.dim),1/p));var d=void 0;U(r)?d=zS[r]:X(r)&&(d=r),d&&t.setData(i.downSample(i.mapDimension(l.dim),1/p,d,VS))}}}}}var FS=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.getInitialData=function(t,e){return vx(null,this,{useEncodeDefaulter:!0})},e.prototype.getMarkerPosition=function(t,e,n){var i=this.coordinateSystem;if(i&&i.clampData){var r=i.clampData(t),o=i.dataToPoint(r);if(n)E(i.getAxes(),(function(t,n){if("category"===t.type&&null!=e){var i=t.getTicksCoords(),a=r[n],s="x1"===e[n]||"y1"===e[n];if(s&&(a+=1),i.length<2)return;if(2===i.length)return void(o[n]=t.toGlobalCoord(t.getExtent()[s?1:0]));for(var l=void 0,u=void 0,h=1,c=0;ca){u=(p+l)/2;break}1===c&&(h=d-i[0].tickValue)}null==u&&(l?l&&(u=i[i.length-1].coord):u=i[0].coord),o[n]=t.toGlobalCoord(u)}}));else{var a=this.getData(),s=a.getLayout("offset"),l=a.getLayout("size"),u=i.getBaseAxis().isHorizontal()?0:1;o[u]+=s+l/2}return o}return[NaN,NaN]},e.type="series.__base_bar__",e.defaultOption={z:2,coordinateSystem:"cartesian2d",legendHoverLink:!0,barMinHeight:0,barMinAngle:0,large:!1,largeThreshold:400,progressive:3e3,progressiveChunkMode:"mod"},e}(mg);mg.registerClass(FS);var GS=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.getInitialData=function(){return vx(null,this,{useEncodeDefaulter:!0,createInvertedIndices:!!this.get("realtimeSort",!0)||null})},e.prototype.getProgressive=function(){return!!this.get("large")&&this.get("progressive")},e.prototype.getProgressiveThreshold=function(){var t=this.get("progressiveThreshold"),e=this.get("largeThreshold");return e>t&&(t=e),t},e.prototype.brushSelector=function(t,e,n){return 
n.rect(e.getItemLayout(t))},e.type="series.bar",e.dependencies=["grid","polar"],e.defaultOption=Cc(FS.defaultOption,{clip:!0,roundCap:!1,showBackground:!1,backgroundStyle:{color:"rgba(180, 180, 180, 0.2)",borderColor:null,borderWidth:0,borderType:"solid",borderRadius:0,shadowBlur:0,shadowColor:null,shadowOffsetX:0,shadowOffsetY:0,opacity:1},select:{itemStyle:{borderColor:"#212121"}},realtimeSort:!1}),e}(FS),WS=function(){this.cx=0,this.cy=0,this.r0=0,this.r=0,this.startAngle=0,this.endAngle=2*Math.PI,this.clockwise=!0},HS=function(t){function e(e){var n=t.call(this,e)||this;return n.type="sausage",n}return n(e,t),e.prototype.getDefaultShape=function(){return new WS},e.prototype.buildPath=function(t,e){var n=e.cx,i=e.cy,r=Math.max(e.r0||0,0),o=Math.max(e.r,0),a=.5*(o-r),s=r+a,l=e.startAngle,u=e.endAngle,h=e.clockwise,c=2*Math.PI,p=h?u-lo)return!0;o=u}return!1},e.prototype._isOrderDifferentInView=function(t,e){for(var n=e.scale,i=n.getExtent(),r=Math.max(0,i[0]),o=Math.min(i[1],n.getOrdinalMeta().categories.length-1);r<=o;++r)if(t.ordinalNumbers[r]!==n.getRawOrdinalNumber(r))return!0},e.prototype._updateSortWithinSameData=function(t,e,n,i){if(this._isOrderChangedWithinSameData(t,e,n)){var r=this._dataSort(t,n,e);this._isOrderDifferentInView(r,n)&&(this._removeOnRenderedListener(i),i.dispatchAction({type:"changeAxisOrder",componentType:n.dim+"Axis",axisId:n.index,sortInfo:r}))}},e.prototype._dispatchInitSort=function(t,e,n){var i=e.baseAxis,r=this._dataSort(t,i,(function(n){return t.get(t.mapDimension(e.otherAxis.dim),n)}));n.dispatchAction({type:"changeAxisOrder",componentType:i.dim+"Axis",isInitSort:!0,axisId:i.index,sortInfo:r})},e.prototype.remove=function(t,e){this._clear(this._model),this._removeOnRenderedListener(e)},e.prototype.dispose=function(t,e){this._removeOnRenderedListener(e)},e.prototype._removeOnRenderedListener=function(t){this._onRendered&&(t.getZr().off("rendered",this._onRendered),this._onRendered=null)},e.prototype._clear=function(t){var 
e=this.group,n=this._data;t&&t.isAnimationEnabled()&&n&&!this._isLargeDraw?(this._removeBackground(),this._backgroundEls=[],n.eachItemGraphicEl((function(e){xh(e,t,Qs(e).dataIndex)}))):e.removeAll(),this._data=null,this._isFirstFrame=!0},e.prototype._removeBackground=function(){this.group.remove(this._backgroundGroup),this._backgroundGroup=null},e.type="bar",e}(kg),KS={cartesian2d:function(t,e){var n=e.width<0?-1:1,i=e.height<0?-1:1;n<0&&(e.x+=e.width,e.width=-e.width),i<0&&(e.y+=e.height,e.height=-e.height);var r=t.x+t.width,o=t.y+t.height,a=ZS(e.x,t.x),s=jS(e.x+e.width,r),l=ZS(e.y,t.y),u=jS(e.y+e.height,o),h=sr?s:a,e.y=c&&l>o?u:l,e.width=h?0:s-a,e.height=c?0:u-l,n<0&&(e.x+=e.width,e.width=-e.width),i<0&&(e.y+=e.height,e.height=-e.height),h||c},polar:function(t,e){var n=e.r0<=e.r?1:-1;if(n<0){var i=e.r;e.r=e.r0,e.r0=i}var r=jS(e.r,t.r),o=ZS(e.r0,t.r0);e.r=r,e.r0=o;var a=r-o<0;if(n<0){i=e.r;e.r=e.r0,e.r0=i}return a}},$S={cartesian2d:function(t,e,n,i,r,o,a,s,l){var u=new zs({shape:A({},i),z2:1});(u.__dataIndex=n,u.name="item",o)&&(u.shape[r?"height":"width"]=0);return u},polar:function(t,e,n,i,r,o,a,s,l){var u=!r&&l?HS:zu,h=new u({shape:i,z2:1});h.name="item";var c,p,d=rM(r);if(h.calculateTextPosition=(c=d,p=({isRoundCap:u===HS}||{}).isRoundCap,function(t,e,n){var i=e.position;if(!i||i instanceof Array)return Tr(t,e,n);var 
r=c(i),o=null!=e.distance?e.distance:5,a=this.shape,s=a.cx,l=a.cy,u=a.r,h=a.r0,d=(u+h)/2,f=a.startAngle,g=a.endAngle,y=(f+g)/2,v=p?Math.abs(u-h)/2:0,m=Math.cos,x=Math.sin,_=s+u*m(f),b=l+u*x(f),w="left",S="top";switch(r){case"startArc":_=s+(h-o)*m(y),b=l+(h-o)*x(y),w="center",S="top";break;case"insideStartArc":_=s+(h+o)*m(y),b=l+(h+o)*x(y),w="center",S="bottom";break;case"startAngle":_=s+d*m(f)+YS(f,o+v,!1),b=l+d*x(f)+XS(f,o+v,!1),w="right",S="middle";break;case"insideStartAngle":_=s+d*m(f)+YS(f,-o+v,!1),b=l+d*x(f)+XS(f,-o+v,!1),w="left",S="middle";break;case"middle":_=s+d*m(y),b=l+d*x(y),w="center",S="middle";break;case"endArc":_=s+(u+o)*m(y),b=l+(u+o)*x(y),w="center",S="bottom";break;case"insideEndArc":_=s+(u-o)*m(y),b=l+(u-o)*x(y),w="center",S="top";break;case"endAngle":_=s+d*m(g)+YS(g,o+v,!0),b=l+d*x(g)+XS(g,o+v,!0),w="left",S="middle";break;case"insideEndAngle":_=s+d*m(g)+YS(g,-o+v,!0),b=l+d*x(g)+XS(g,-o+v,!0),w="right",S="middle";break;default:return Tr(t,e,n)}return(t=t||{}).x=_,t.y=b,t.align=w,t.verticalAlign=S,t}),o){var f=r?"r":"endAngle",g={};h.shape[f]=r?i.r0:i.startAngle,g[f]=i[f],(s?fh:gh)(h,{shape:g},o)}return h}};function JS(t,e,n,i,r,o,a,s){var l,u;o?(u={x:i.x,width:i.width},l={y:i.y,height:i.height}):(u={y:i.y,height:i.height},l={x:i.x,width:i.width}),s||(a?fh:gh)(n,{shape:l},e,r,null),(a?fh:gh)(n,{shape:u},e?t.baseAxis.model:null,r)}function QS(t,e){for(var n=0;n0?1:-1,a=i.height>0?1:-1;return{x:i.x+o*r/2,y:i.y+a*r/2,width:i.width-o*r,height:i.height-a*r}},polar:function(t,e,n){var i=t.getItemLayout(e);return{cx:i.cx,cy:i.cy,r0:i.r0,r:i.r,startAngle:i.startAngle,endAngle:i.endAngle,clockwise:i.clockwise}}};function rM(t){return function(t){var e=t?"Arc":"Angle";return function(t){switch(t){case"start":case"insideStart":case"end":case"insideEnd":return t+e;default:return t}}}(t)}function oM(t,e,n,i,r,o,a,s){var l=e.getItemVisual(n,"style");if(s){if(!o.get("roundCap")){var u=t.shape;A(u,US(i.getModel("itemStyle"),u,!0)),t.setShape(u)}}else{var 
h=i.get(["itemStyle","borderRadius"])||0;t.setShape("r",h)}t.useStyle(l);var c=i.getShallow("cursor");c&&t.attr("cursor",c);var p=s?a?r.r>=r.r0?"endArc":"startArc":r.endAngle>=r.startAngle?"endAngle":"startAngle":a?r.height>=0?"bottom":"top":r.width>=0?"right":"left",d=ec(i);tc(t,d,{labelFetcher:o,labelDataIndex:n,defaultText:iS(o.getData(),n),inheritColor:l.fill,defaultOpacity:l.opacity,defaultOutsidePosition:p});var f=t.getTextContent();if(s&&f){var g=i.get(["label","position"]);t.textConfig.inside="middle"===g||null,function(t,e,n,i){if(j(i))t.setTextConfig({rotation:i});else if(Y(e))t.setTextConfig({rotation:0});else{var r,o=t.shape,a=o.clockwise?o.startAngle:o.endAngle,s=o.clockwise?o.endAngle:o.startAngle,l=(a+s)/2,u=n(e);switch(u){case"startArc":case"insideStartArc":case"middle":case"insideEndArc":case"endArc":r=l;break;case"startAngle":case"insideStartAngle":r=a;break;case"endAngle":case"insideEndAngle":r=s;break;default:return void t.setTextConfig({rotation:0})}var h=1.5*Math.PI-r;"middle"===u&&h>Math.PI/2&&h<1.5*Math.PI&&(h-=Math.PI),t.setTextConfig({rotation:h})}}(t,"outside"===g?p:g,rM(a),i.get(["label","rotate"]))}hc(f,d,o.getRawValue(n),(function(t){return rS(e,t)}));var y=i.getModel(["emphasis"]);Yl(t,y.get("focus"),y.get("blurScope"),y.get("disabled")),jl(t,i),function(t){return null!=t.startAngle&&null!=t.endAngle&&t.startAngle===t.endAngle}(r)&&(t.style.fill="none",t.style.stroke="none",E(t.states,(function(t){t.style&&(t.style.fill=t.style.stroke="none")})))}var aM=function(){},sM=function(t){function e(e){var n=t.call(this,e)||this;return n.type="largeBar",n}return n(e,t),e.prototype.getDefaultShape=function(){return new aM},e.prototype.buildPath=function(t,e){for(var n=e.points,i=this.baseDimIdx,r=1-this.baseDimIdx,o=[],a=[],s=this.barWidth,l=0;l=s[0]&&e<=s[0]+l[0]&&n>=s[1]&&n<=s[1]+l[1])return a[h]}return-1}(this,t.offsetX,t.offsetY);Qs(this).dataIndex=e>=0?e:null}),30,!1);function hM(t,e,n){if(MS(n,"cartesian2d")){var 
i=e,r=n.getArea();return{x:t?i.x:r.x,y:t?r.y:i.y,width:t?i.width:r.width,height:t?r.height:i.height}}var o=e;return{cx:(r=n.getArea()).cx,cy:r.cy,r0:t?r.r0:o.r0,r:t?r.r:o.r,startAngle:t?o.startAngle:0,endAngle:t?o.endAngle:2*Math.PI}}var cM=2*Math.PI,pM=Math.PI/180;function dM(t,e){return Cp(t.getBoxLayoutParams(),{width:e.getWidth(),height:e.getHeight()})}function fM(t,e){var n=dM(t,e),i=t.get("center"),r=t.get("radius");Y(r)||(r=[0,r]);var o,a,s=Ur(n.width,e.getWidth()),l=Ur(n.height,e.getHeight()),u=Math.min(s,l),h=Ur(r[0],u/2),c=Ur(r[1],u/2),p=t.coordinateSystem;if(p){var d=p.dataToPoint(i);o=d[0]||0,a=d[1]||0}else Y(i)||(i=[i,i]),o=Ur(i[0],s)+n.x,a=Ur(i[1],l)+n.y;return{cx:o,cy:a,r0:h,r:c}}function gM(t,e,n){e.eachSeriesByType(t,(function(t){var e=t.getData(),i=e.mapDimension("value"),r=dM(t,n),o=fM(t,n),a=o.cx,s=o.cy,l=o.r,u=o.r0,h=-t.get("startAngle")*pM,c=t.get("minAngle")*pM,p=0;e.each(i,(function(t){!isNaN(t)&&p++}));var d=e.getSum(i),f=Math.PI/(d||p)*2,g=t.get("clockwise"),y=t.get("roseType"),v=t.get("stillShowZeroSum"),m=e.getDataExtent(i);m[0]=0;var x=cM,_=0,b=h,w=g?1:-1;if(e.setLayout({viewRect:r,r:l}),e.each(i,(function(t,n){var i;if(isNaN(t))e.setItemLayout(n,{angle:NaN,startAngle:NaN,endAngle:NaN,clockwise:g,cx:a,cy:s,r0:u,r:y?NaN:l});else{(i="area"!==y?0===d&&v?f:t*f:cM/p)n?a:o,h=Math.abs(l.label.y-n);if(h>=u.maxY){var c=l.label.x-e-l.len2*r,p=i+l.len,f=Math.abs(c)t.unconstrainedWidth?null:d:null;i.setStyle("width",f)}var g=i.getBoundingRect();o.width=g.width;var y=(i.style.margin||0)+2.1;o.height=g.height+y,o.y-=(o.height-c)/2}}}function _M(t){return"center"===t.position}function bM(t){var e,n,i=t.getData(),r=[],o=!1,a=(t.get("minShowLabelAngle")||0)*vM,s=i.getLayout("viewRect"),l=i.getLayout("r"),u=s.width,h=s.x,c=s.y,p=s.height;function d(t){t.ignore=!0}i.each((function(t){var 
s=i.getItemGraphicEl(t),c=s.shape,p=s.getTextContent(),f=s.getTextGuideLine(),g=i.getItemModel(t),y=g.getModel("label"),v=y.get("position")||g.get(["emphasis","label","position"]),m=y.get("distanceToLabelLine"),x=y.get("alignTo"),_=Ur(y.get("edgeDistance"),u),b=y.get("bleedMargin"),w=g.getModel("labelLine"),S=w.get("length");S=Ur(S,u);var M=w.get("length2");if(M=Ur(M,u),Math.abs(c.endAngle-c.startAngle)0?"right":"left":k>0?"left":"right"}var B=Math.PI,F=0,G=y.get("rotate");if(j(G))F=G*(B/180);else if("center"===v)F=0;else if("radial"===G||!0===G){F=k<0?-A+B:-A}else if("tangential"===G&&"outside"!==v&&"outer"!==v){var W=Math.atan2(k,L);W<0&&(W=2*B+W),L>0&&(W=B+W),F=W-B}if(o=!!F,p.x=I,p.y=T,p.rotation=F,p.setStyle({verticalAlign:"middle"}),P){p.setStyle({align:D});var H=p.states.select;H&&(H.x+=p.x,H.y+=p.y)}else{var Y=p.getBoundingRect().clone();Y.applyTransform(p.getComputedTransform());var X=(p.style.margin||0)+2.1;Y.y-=X/2,Y.height+=X,r.push({label:p,labelLine:f,position:v,len:S,len2:M,minTurnAngle:w.get("minTurnAngle"),maxSurfaceAngle:w.get("maxSurfaceAngle"),surfaceNormal:new De(k,L),linePoints:C,textAlign:D,labelDistance:m,labelAlignTo:x,edgeDistance:_,bleedMargin:b,rect:Y,unconstrainedWidth:Y.width,labelStyleWidth:p.style.width})}s.setTextConfig({inside:P})}})),!o&&t.get("avoidLabelOverlap")&&function(t,e,n,i,r,o,a,s){for(var l=[],u=[],h=Number.MAX_VALUE,c=-Number.MAX_VALUE,p=0;p0){for(var l=o.getItemLayout(0),u=1;isNaN(l&&l.startAngle)&&u=n.r0}},e.type="pie",e}(kg);function MM(t,e,n){e=Y(e)&&{coordDimensions:e}||A({encodeDefine:t.getEncode()},e);var i=t.getSource(),r=ux(i,e).dimensions,o=new lx(r,t);return o.initData(i,n),o}var IM=function(){function t(t,e){this._getDataWithEncodedVisual=t,this._getRawData=e}return t.prototype.getAllNames=function(){var t=this._getRawData();return t.mapArray(t.getName)},t.prototype.containName=function(t){return this._getRawData().indexOfName(t)>=0},t.prototype.indexOfName=function(t){return 
this._getDataWithEncodedVisual().indexOfName(t)},t.prototype.getItemVisual=function(t,e){return this._getDataWithEncodedVisual().getItemVisual(t,e)},t}(),TM=Oo(),CM=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.init=function(e){t.prototype.init.apply(this,arguments),this.legendVisualProvider=new IM(W(this.getData,this),W(this.getRawData,this)),this._defaultLabelLine(e)},e.prototype.mergeOption=function(){t.prototype.mergeOption.apply(this,arguments)},e.prototype.getInitialData=function(){return MM(this,{coordDimensions:["value"],encodeDefaulter:H(Jp,this)})},e.prototype.getDataParams=function(e){var n=this.getData(),i=TM(n),r=i.seats;if(!r){var o=[];n.each(n.mapDimension("value"),(function(t){o.push(t)})),r=i.seats=Jr(o,n.hostModel.get("percentPrecision"))}var a=t.prototype.getDataParams.call(this,e);return a.percent=r[e]||0,a.$vars.push("percent"),a},e.prototype._defaultLabelLine=function(t){wo(t,"labelLine",["show"]);var e=t.labelLine,n=t.emphasis.labelLine;e.show=e.show&&t.label.show,n.show=n.show&&t.emphasis.label.show},e.type="series.pie",e.defaultOption={z:2,legendHoverLink:!0,colorBy:"data",center:["50%","50%"],radius:[0,"75%"],clockwise:!0,startAngle:90,minAngle:0,minShowLabelAngle:0,selectedOffset:10,percentPrecision:2,stillShowZeroSum:!0,left:0,top:0,right:0,bottom:0,width:null,height:null,label:{rotate:0,show:!0,overflow:"truncate",position:"outer",alignTo:"none",edgeDistance:"25%",bleedMargin:10,distanceToLabelLine:5},labelLine:{show:!0,length:15,length2:15,smooth:!1,minTurnAngle:90,maxSurfaceAngle:90,lineStyle:{width:1,type:"solid"}},itemStyle:{borderWidth:1,borderJoin:"round"},showEmptyCircle:!0,emptyCircleStyle:{color:"lightgray",opacity:1},labelLayout:{hideOverlap:!0},emphasis:{scale:!0,scaleSize:5},avoidLabelOverlap:!0,animationType:"expansion",animationDuration:1e3,animationTypeUpdate:"transition",animationEasingUpdate:"cubicInOut",animationDurationUpdate:500,animationEasing:"cubicInOut"},e}(mg);v
ar DM=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.hasSymbolVisual=!0,n}return n(e,t),e.prototype.getInitialData=function(t,e){return vx(null,this,{useEncodeDefaulter:!0})},e.prototype.getProgressive=function(){var t=this.option.progressive;return null==t?this.option.large?5e3:this.get("progressive"):t},e.prototype.getProgressiveThreshold=function(){var t=this.option.progressiveThreshold;return null==t?this.option.large?1e4:this.get("progressiveThreshold"):t},e.prototype.brushSelector=function(t,e,n){return n.point(e.getItemLayout(t))},e.prototype.getZLevelKey=function(){return this.getData().count()>this.getProgressiveThreshold()?this.id:""},e.type="series.scatter",e.dependencies=["grid","polar","geo","singleAxis","calendar"],e.defaultOption={coordinateSystem:"cartesian2d",z:2,legendHoverLink:!0,symbolSize:10,large:!1,largeThreshold:2e3,itemStyle:{opacity:.8},emphasis:{scale:!0},clip:!0,select:{itemStyle:{borderColor:"#212121"}},universalTransition:{divideShape:"clone"}},e}(mg),AM=function(){},kM=function(t){function e(e){var n=t.call(this,e)||this;return n._off=0,n.hoverDataIdx=-1,n}return n(e,t),e.prototype.getDefaultShape=function(){return new AM},e.prototype.reset=function(){this.notClear=!1,this._off=0},e.prototype.buildPath=function(t,e){var n,i=e.points,r=e.size,o=this.symbolProxy,a=o.shape,s=t.getContext?t.getContext():t,l=s&&r[0]<4,u=this.softClipShape;if(l)this._ctx=s;else{for(this._ctx=null,n=this._off;n=0;s--){var l=2*s,u=i[l]-o/2,h=i[l+1]-a/2;if(t>=u&&e>=h&&t<=u+o&&e<=h+a)return s}return-1},e.prototype.contain=function(t,e){var n=this.transformCoordToLocal(t,e),i=this.getBoundingRect();return t=n[0],e=n[1],i.contain(t,e)?(this.hoverDataIdx=this.findDataIndex(t,e))>=0:(this.hoverDataIdx=-1,!1)},e.prototype.getBoundingRect=function(){var t=this._rect;if(!t){for(var 
e=this.shape,n=e.points,i=e.size,r=i[0],o=i[1],a=1/0,s=1/0,l=-1/0,u=-1/0,h=0;h=0&&(l.dataIndex=n+(t.startIndex||0))}))},t.prototype.remove=function(){this._clear()},t.prototype._clear=function(){this._newAdded=[],this.group.removeAll()},t}(),PM=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){var i=t.getData();this._updateSymbolDraw(i,t).updateData(i,{clipShape:this._getClipShape(t)}),this._finished=!0},e.prototype.incrementalPrepareRender=function(t,e,n){var i=t.getData();this._updateSymbolDraw(i,t).incrementalPrepareUpdate(i),this._finished=!1},e.prototype.incrementalRender=function(t,e,n){this._symbolDraw.incrementalUpdate(t,e.getData(),{clipShape:this._getClipShape(e)}),this._finished=t.end===e.getData().count()},e.prototype.updateTransform=function(t,e,n){var i=t.getData();if(this.group.dirty(),!this._finished||i.count()>1e4)return{update:!0};var r=ES("").reset(t,e,n);r.progress&&r.progress({start:0,end:i.count(),count:i.count()},i),this._symbolDraw.updateLayout(i)},e.prototype.eachRendered=function(t){this._symbolDraw&&this._symbolDraw.eachRendered(t)},e.prototype._getClipShape=function(t){var e=t.coordinateSystem,n=e&&e.getArea&&e.getArea();return t.get("clip",!0)?n:null},e.prototype._updateSymbolDraw=function(t,e){var n=this._symbolDraw,i=e.pipelineContext.large;return n&&i===this._isLargeDraw||(n&&n.remove(),n=this._symbolDraw=i?new LM:new hS,this._isLargeDraw=i,this.group.removeAll()),this.group.add(n.group),n},e.prototype.remove=function(t,e){this._symbolDraw&&this._symbolDraw.remove(!0),this._symbolDraw=null},e.prototype.dispose=function(){},e.type="scatter",e}(kg),OM=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return 
n(e,t),e.type="grid",e.dependencies=["xAxis","yAxis"],e.layoutMode="box",e.defaultOption={show:!1,z:0,left:"10%",top:60,right:"10%",bottom:70,containLabel:!1,backgroundColor:"rgba(0,0,0,0)",borderWidth:1,borderColor:"#ccc"},e}(Rp),RM=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.getCoordSysModel=function(){return this.getReferringComponents("grid",zo).models[0]},e.type="cartesian2dAxis",e}(Rp);R(RM,I_);var NM={show:!0,z:0,inverse:!1,name:"",nameLocation:"end",nameRotate:null,nameTruncate:{maxWidth:null,ellipsis:"...",placeholder:"."},nameTextStyle:{},nameGap:15,silent:!1,triggerEvent:!1,tooltip:{show:!1},axisPointer:{},axisLine:{show:!0,onZero:!0,onZeroAxisIndex:null,lineStyle:{color:"#6E7079",width:1,type:"solid"},symbol:["none","none"],symbolSize:[10,15]},axisTick:{show:!0,inside:!1,length:5,lineStyle:{width:1}},axisLabel:{show:!0,inside:!1,rotate:0,showMinLabel:null,showMaxLabel:null,margin:8,fontSize:12},splitLine:{show:!0,lineStyle:{color:["#E0E6F1"],width:1,type:"solid"}},splitArea:{show:!1,areaStyle:{color:["rgba(250,250,250,0.2)","rgba(210,219,238,0.2)"]}}},EM=C({boundaryGap:!0,deduplication:null,splitLine:{show:!1},axisTick:{alignWithLabel:!1,interval:"auto"},axisLabel:{interval:"auto"}},NM),zM=C({boundaryGap:[0,0],axisLine:{show:"auto"},axisTick:{show:"auto"},splitNumber:5,minorTick:{show:!1,splitNumber:5,length:3,lineStyle:{}},minorSplitLine:{show:!1,lineStyle:{color:"#F4F7FD",width:1}}},NM),VM={category:EM,value:zM,time:C({splitNumber:6,axisLabel:{showMinLabel:!1,showMaxLabel:!1,rich:{primary:{fontWeight:"bold"}}},splitLine:{show:!1}},zM),log:k({logBase:10},zM)},BM={value:1,category:1,time:1,log:1};function FM(t,e,i,r){E(BM,(function(o,a){var s=C(C({},VM[a],!0),r,!0),l=function(t){function i(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e+"Axis."+a,n}return n(i,t),i.prototype.mergeDefaultAndTheme=function(t,e){var 
n=Ap(this),i=n?Lp(t):{};C(t,e.getTheme().get(a+"Axis")),C(t,this.getDefaultOption()),t.type=GM(t),n&&kp(t,i,n)},i.prototype.optionUpdated=function(){"category"===this.option.type&&(this.__ordinalMeta=_x.createByAxisModel(this))},i.prototype.getCategories=function(t){var e=this.option;if("category"===e.type)return t?e.data:this.__ordinalMeta.categories},i.prototype.getOrdinalMeta=function(){return this.__ordinalMeta},i.type=e+"Axis."+a,i.defaultOption=s,i}(i);t.registerComponentModel(l)})),t.registerSubTypeDefaulter(e+"Axis",GM)}function GM(t){return t.type||(t.data?"category":"value")}var WM=function(){function t(t){this.type="cartesian",this._dimList=[],this._axes={},this.name=t||""}return t.prototype.getAxis=function(t){return this._axes[t]},t.prototype.getAxes=function(){return z(this._dimList,(function(t){return this._axes[t]}),this)},t.prototype.getAxesByScale=function(t){return t=t.toLowerCase(),B(this.getAxes(),(function(e){return e.scale.type===t}))},t.prototype.addAxis=function(t){var e=t.dim;this._axes[e]=t,this._dimList.push(e)},t}(),HM=["x","y"];function YM(t){return"interval"===t.type||"time"===t.type}var XM=function(t){function e(){var e=null!==t&&t.apply(this,arguments)||this;return e.type="cartesian2d",e.dimensions=HM,e}return n(e,t),e.prototype.calcAffineTransform=function(){this._transform=this._invTransform=null;var t=this.getAxis("x").scale,e=this.getAxis("y").scale;if(YM(t)&&YM(e)){var n=t.getExtent(),i=e.getExtent(),r=this.dataToPoint([n[0],i[0]]),o=this.dataToPoint([n[1],i[1]]),a=n[1]-n[0],s=i[1]-i[0];if(a&&s){var l=(o[0]-r[0])/a,u=(o[1]-r[1])/s,h=r[0]-n[0]*l,c=r[1]-i[0]*u,p=this._transform=[l,0,0,u,h,c];this._invTransform=Ie([],p)}}},e.prototype.getBaseAxis=function(){return this.getAxesByScale("ordinal")[0]||this.getAxesByScale("time")[0]||this.getAxis("x")},e.prototype.containPoint=function(t){var e=this.getAxis("x"),n=this.getAxis("y");return 
e.contain(e.toLocalCoord(t[0]))&&n.contain(n.toLocalCoord(t[1]))},e.prototype.containData=function(t){return this.getAxis("x").containData(t[0])&&this.getAxis("y").containData(t[1])},e.prototype.containZone=function(t,e){var n=this.dataToPoint(t),i=this.dataToPoint(e),r=this.getArea(),o=new ze(n[0],n[1],i[0]-n[0],i[1]-n[1]);return r.intersect(o)},e.prototype.dataToPoint=function(t,e,n){n=n||[];var i=t[0],r=t[1];if(this._transform&&null!=i&&isFinite(i)&&null!=r&&isFinite(r))return Wt(n,t,this._transform);var o=this.getAxis("x"),a=this.getAxis("y");return n[0]=o.toGlobalCoord(o.dataToCoord(i,e)),n[1]=a.toGlobalCoord(a.dataToCoord(r,e)),n},e.prototype.clampData=function(t,e){var n=this.getAxis("x").scale,i=this.getAxis("y").scale,r=n.getExtent(),o=i.getExtent(),a=n.parse(t[0]),s=i.parse(t[1]);return(e=e||[])[0]=Math.min(Math.max(Math.min(r[0],r[1]),a),Math.max(r[0],r[1])),e[1]=Math.min(Math.max(Math.min(o[0],o[1]),s),Math.max(o[0],o[1])),e},e.prototype.pointToData=function(t,e){var n=[];if(this._invTransform)return Wt(n,t,this._invTransform);var i=this.getAxis("x"),r=this.getAxis("y");return n[0]=i.coordToData(i.toLocalCoord(t[0]),e),n[1]=r.coordToData(r.toLocalCoord(t[1]),e),n},e.prototype.getOtherAxis=function(t){return this.getAxis("x"===t.dim?"y":"x")},e.prototype.getArea=function(){var t=this.getAxis("x").getGlobalExtent(),e=this.getAxis("y").getGlobalExtent(),n=Math.min(t[0],t[1]),i=Math.min(e[0],e[1]),r=Math.max(t[0],t[1])-n,o=Math.max(e[0],e[1])-i;return new ze(n,i,r,o)},e}(WM),UM=function(t){function e(e,n,i,r,o){var a=t.call(this,e,n,i)||this;return a.index=0,a.type=r||"value",a.position=o||"bottom",a}return n(e,t),e.prototype.isHorizontal=function(){var t=this.position;return"top"===t||"bottom"===t},e.prototype.getGlobalExtent=function(t){var e=this.getExtent();return e[0]=this.toGlobalCoord(e[0]),e[1]=this.toGlobalCoord(e[1]),t&&e[0]>e[1]&&e.reverse(),e},e.prototype.pointToData=function(t,e){return 
this.coordToData(this.toLocalCoord(t["x"===this.dim?0:1]),e)},e.prototype.setCategorySortInfo=function(t){if("category"!==this.type)return!1;this.model.option.categorySortInfo=t,this.scale.setSortInfo(t)},e}(nb);function ZM(t,e,n){n=n||{};var i=t.coordinateSystem,r=e.axis,o={},a=r.getAxesOnZeroOf()[0],s=r.position,l=a?"onZero":s,u=r.dim,h=i.getRect(),c=[h.x,h.x+h.width,h.y,h.y+h.height],p={left:0,right:1,top:0,bottom:1,onZero:2},d=e.get("offset")||0,f="x"===u?[c[2]-d,c[3]+d]:[c[0]-d,c[1]+d];if(a){var g=a.toGlobalCoord(a.dataToCoord(0));f[p.onZero]=Math.max(Math.min(g,f[1]),f[0])}o.position=["y"===u?f[p[l]]:c[0],"x"===u?f[p[l]]:c[3]],o.rotation=Math.PI/2*("x"===u?0:1);o.labelDirection=o.tickDirection=o.nameDirection={top:-1,bottom:1,left:-1,right:1}[s],o.labelOffset=a?f[p[s]]-f[p.onZero]:0,e.get(["axisTick","inside"])&&(o.tickDirection=-o.tickDirection),it(n.labelInside,e.get(["axisLabel","inside"]))&&(o.labelDirection=-o.labelDirection);var y=e.get(["axisLabel","rotate"]);return o.labelRotate="top"===l?-y:y,o.z2=1,o}function jM(t){return"cartesian2d"===t.get("coordinateSystem")}function qM(t){var e={xAxisModel:null,yAxisModel:null};return E(e,(function(n,i){var r=i.replace(/Model$/,""),o=t.getReferringComponents(r,zo).models[0];e[i]=o})),e}var KM=Math.log;function $M(t,e,n){var i=Ox.prototype,r=i.getTicks.call(n),o=i.getTicks.call(n,!0),a=r.length-1,s=i.getInterval.call(n),l=y_(t,e),u=l.extent,h=l.fixMin,c=l.fixMax;if("log"===t.type){var p=KM(t.base);u=[KM(u[0])/p,KM(u[1])/p]}t.setExtent(u[0],u[1]),t.calcNiceExtent({splitNumber:a,fixMin:h,fixMax:c});var d=i.getExtent.call(t);h&&(u[0]=d[0]),c&&(u[1]=d[1]);var f=i.getInterval.call(t),g=u[0],y=u[1];if(h&&c)f=(y-g)/a;else if(h)for(y=u[0]+f*a;yu[0]&&isFinite(g)&&isFinite(u[0]);)f=Ix(f),g=u[1]-f*a;else{t.getTicks().length-1>a&&(f=Ix(f));var v=f*a;(g=Zr((y=Math.ceil(u[1]/f)*f)-v))<0&&u[0]>=0?(g=0,y=Zr(v)):y>0&&u[1]<=0&&(y=0,g=-Zr(v))}var 
m=(r[0].value-o[0].value)/s,x=(r[a].value-o[a].value)/s;i.setExtent.call(t,g+f*m,y+f*x),i.setInterval.call(t,f),(m||x)&&i.setNiceExtent.call(t,g+f,y-f)}var JM=function(){function t(t,e,n){this.type="grid",this._coordsMap={},this._coordsList=[],this._axesMap={},this._axesList=[],this.axisPointerEnabled=!0,this.dimensions=HM,this._initCartesian(t,e,n),this.model=t}return t.prototype.getRect=function(){return this._rect},t.prototype.update=function(t,e){var n=this._axesMap;function i(t){var e,n=G(t),i=n.length;if(i){for(var r=[],o=i-1;o>=0;o--){var a=t[+n[o]],s=a.model,l=a.scale;Sx(l)&&s.get("alignTicks")&&null==s.get("interval")?r.push(a):(v_(l,s),Sx(l)&&(e=a))}r.length&&(e||v_((e=r.pop()).scale,e.model),E(r,(function(t){$M(t.scale,t.model,e.scale)})))}}this._updateScale(t,this.model),i(n.x),i(n.y);var r={};E(n.x,(function(t){tI(n,"y",t,r)})),E(n.y,(function(t){tI(n,"x",t,r)})),this.resize(this.model,e)},t.prototype.resize=function(t,e,n){var i=t.getBoxLayoutParams(),r=!n&&t.get("containLabel"),o=Cp(i,{width:e.getWidth(),height:e.getHeight()});this._rect=o;var a=this._axesList;function s(){E(a,(function(t){var e=t.isHorizontal(),n=e?[0,o.width]:[0,o.height],i=t.inverse?1:0;t.setExtent(n[i],n[1-i]),function(t,e){var n=t.getExtent(),i=n[0]+n[1];t.toGlobalCoord="x"===t.dim?function(t){return t+e}:function(t){return i-t+e},t.toLocalCoord="x"===t.dim?function(t){return t-e}:function(t){return i-t+e}}(t,e?o.x:o.y)}))}s(),r&&(E(a,(function(t){if(!t.model.get(["axisLabel","inside"])){var e=function(t){var e=t.model,n=t.scale;if(e.get(["axisLabel","show"])&&!n.isBlank()){var i,r,o=n.getExtent();r=n instanceof Lx?n.count():(i=n.getTicks()).length;var a,s=t.getLabelModel(),l=x_(t),u=1;r>40&&(u=Math.ceil(r/40));for(var h=0;h0&&i>0||n<0&&i<0)}(t)}var nI=Math.PI,iI=function(){function t(t,e){this.group=new zr,this.opt=e,this.axisModel=t,k(e,{labelOffset:0,nameDirection:1,tickDirection:1,labelDirection:1,silent:!0,handleAutoShown:function(){return!0}});var n=new 
zr({x:e.position[0],y:e.position[1],rotation:e.rotation});n.updateTransform(),this._transformGroup=n}return t.prototype.hasBuilder=function(t){return!!rI[t]},t.prototype.add=function(t){rI[t](this.opt,this.axisModel,this.group,this._transformGroup)},t.prototype.getGroup=function(){return this.group},t.innerTextLayout=function(t,e,n){var i,r,o=eo(e-t);return no(o)?(r=n>0?"top":"bottom",i="center"):no(o-nI)?(r=n>0?"bottom":"top",i="center"):(r="middle",i=o>0&&o0?"right":"left":n>0?"left":"right"),{rotation:o,textAlign:i,textVerticalAlign:r}},t.makeAxisEventDataBase=function(t){var e={componentType:t.mainType,componentIndex:t.componentIndex};return e[t.mainType+"Index"]=t.componentIndex,e},t.isLabelSilent=function(t){var e=t.get("tooltip");return t.get("silent")||!(t.get("triggerEvent")||e&&e.show)},t}(),rI={axisLine:function(t,e,n,i){var r=e.get(["axisLine","show"]);if("auto"===r&&t.handleAutoShown&&(r=t.handleAutoShown("axisLine")),r){var o=e.axis.getExtent(),a=i.transform,s=[o[0],0],l=[o[1],0],u=s[0]>l[0];a&&(Wt(s,s,a),Wt(l,l,a));var h=A({lineCap:"round"},e.getModel(["axisLine","lineStyle"]).getLineStyle()),c=new Zu({shape:{x1:s[0],y1:s[1],x2:l[0],y2:l[1]},style:h,strokeContainThreshold:t.strokeContainThreshold||5,silent:!0,z2:1});Rh(c.shape,c.style.lineWidth),c.anid="line",n.add(c);var p=e.get(["axisLine","symbol"]);if(null!=p){var d=e.get(["axisLine","symbolSize"]);U(p)&&(p=[p,p]),(U(d)||j(d))&&(d=[d,d]);var f=Yy(e.get(["axisLine","symbolOffset"])||0,d),g=d[0],y=d[1];E([{rotate:t.rotation+Math.PI/2,offset:f[0],r:0},{rotate:t.rotation-Math.PI/2,offset:f[1],r:Math.sqrt((s[0]-l[0])*(s[0]-l[0])+(s[1]-l[1])*(s[1]-l[1]))}],(function(e,i){if("none"!==p[i]&&null!=p[i]){var r=Wy(p[i],-g/2,-y/2,g,y,h.stroke,!0),o=e.r+e.offset,a=u?l:s;r.attr({rotation:e.rotate,x:a[0]+o*Math.cos(t.rotation),y:a[1]-o*Math.sin(t.rotation),silent:!0,z2:11}),n.add(r)}}))}}},axisTickLabel:function(t,e,n,i){var r=function(t,e,n,i){var 
r=n.axis,o=n.getModel("axisTick"),a=o.get("show");"auto"===a&&i.handleAutoShown&&(a=i.handleAutoShown("axisTick"));if(!a||r.scale.isBlank())return;for(var s=o.getModel("lineStyle"),l=i.tickDirection*o.get("length"),u=lI(r.getTicksCoords(),e.transform,l,k(s.getLineStyle(),{stroke:n.get(["axisLine","lineStyle","color"])}),"ticks"),h=0;hc[1]?-1:1,d=["start"===s?c[0]-p*h:"end"===s?c[1]+p*h:(c[0]+c[1])/2,sI(s)?t.labelOffset+l*h:0],f=e.get("nameRotate");null!=f&&(f=f*nI/180),sI(s)?o=iI.innerTextLayout(t.rotation,null!=f?f:t.rotation,l):(o=function(t,e,n,i){var r,o,a=eo(n-t),s=i[0]>i[1],l="start"===e&&!s||"start"!==e&&s;no(a-nI/2)?(o=l?"bottom":"top",r="center"):no(a-1.5*nI)?(o=l?"top":"bottom",r="center"):(o="middle",r=a<1.5*nI&&a>nI/2?l?"left":"right":l?"right":"left");return{rotation:a,textAlign:r,textVerticalAlign:o}}(t.rotation,s,f||0,c),null!=(a=t.axisNameAvailableWidth)&&(a=Math.abs(a/Math.sin(o.rotation)),!isFinite(a)&&(a=null)));var g=u.getFont(),y=e.get("nameTruncate",!0)||{},v=y.ellipsis,m=it(t.nameTruncateMaxWidth,y.maxWidth,a),x=new Fs({x:d[0],y:d[1],rotation:o.rotation,silent:iI.isLabelSilent(e),style:nc(u,{text:r,font:g,overflow:"truncate",width:m,ellipsis:v,fill:u.getTextColor()||e.get(["axisLine","lineStyle","color"]),align:u.get("align")||o.textAlign,verticalAlign:u.get("verticalAlign")||o.textVerticalAlign}),z2:1});if(Zh({el:x,componentModel:e,itemName:r}),x.__fullText=r,x.anid="name",e.get("triggerEvent")){var _=iI.makeAxisEventDataBase(e);_.targetType="axisName",_.name=r,Qs(x).eventData=_}i.add(x),x.updateTransform(),n.add(x),x.decomposeTransform()}}};function oI(t){t&&(t.ignore=!0)}function aI(t,e){var n=t&&t.getBoundingRect().clone(),i=e&&e.getBoundingRect().clone();if(n&&i){var r=xe([]);return Se(r,r,-t.rotation),n.applyTransform(be([],r,t.getLocalTransform())),i.applyTransform(be([],r,e.getLocalTransform())),n.intersect(i)}}function sI(t){return"middle"===t||"center"===t}function lI(t,e,n,i,r){for(var o=[],a=[],s=[],l=0;l=0||t===e}function 
cI(t){var e=pI(t);if(e){var n=e.axisPointerModel,i=e.axis.scale,r=n.option,o=n.get("status"),a=n.get("value");null!=a&&(a=i.parse(a));var s=dI(n);null==o&&(r.status=s?"show":"hide");var l=i.getExtent().slice();l[0]>l[1]&&l.reverse(),(null==a||a>l[1])&&(a=l[1]),a0&&!c.min?c.min=0:null!=c.min&&c.min<0&&!c.max&&(c.max=0);var p=a;null!=c.color&&(p=k({color:c.color},a));var d=C(T(c),{boundaryGap:t,splitNumber:e,scale:n,axisLine:i,axisTick:r,axisLabel:o,name:c.text,showName:s,nameLocation:"end",nameGap:u,nameTextStyle:p,triggerEvent:h},!1);if(U(l)){var f=d.name;d.name=l.replace("{value}",null!=f?f:"")}else X(l)&&(d.name=l(d.name,d));var g=new Mc(d,null,this.ecModel);return R(g,I_.prototype),g.mainType="radar",g.componentIndex=this.componentIndex,g}),this);this._indicatorModels=c},e.prototype.getIndicatorModels=function(){return this._indicatorModels},e.type="radar",e.defaultOption={z:0,center:["50%","50%"],radius:"75%",startAngle:90,axisName:{show:!0},boundaryGap:[0,0],splitNumber:5,axisNameGap:15,scale:!1,shape:"polygon",axisLine:C({lineStyle:{color:"#bbb"}},NI.axisLine),axisLabel:EI(NI.axisLabel,!1),axisTick:EI(NI.axisTick,!1),splitLine:EI(NI.splitLine,!0),splitArea:EI(NI.splitArea,!0),indicator:[]},e}(Rp),VI=["axisLine","axisTickLabel","axisName"],BI=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){this.group.removeAll(),this._buildAxes(t),this._buildSplitLineAndArea(t)},e.prototype._buildAxes=function(t){var e=t.coordinateSystem;E(z(e.getIndicatorAxes(),(function(t){var n=t.model.get("showName")?t.name:"";return new iI(t.model,{axisName:n,position:[e.cx,e.cy],rotation:t.angle,labelDirection:-1,tickDirection:-1,nameDirection:1})})),(function(t){E(VI,t.add,t),this.group.add(t.getGroup())}),this)},e.prototype._buildSplitLineAndArea=function(t){var e=t.coordinateSystem,n=e.getIndicatorAxes();if(n.length){var 
i=t.get("shape"),r=t.getModel("splitLine"),o=t.getModel("splitArea"),a=r.getModel("lineStyle"),s=o.getModel("areaStyle"),l=r.get("show"),u=o.get("show"),h=a.get("color"),c=s.get("color"),p=Y(h)?h:[h],d=Y(c)?c:[c],f=[],g=[];if("circle"===i)for(var y=n[0].getTicksCoords(),v=e.cx,m=e.cy,x=0;x3?1.4:r>1?1.2:1.1;ZI(this,"zoom","zoomOnMouseWheel",t,{scale:i>0?s:1/s,originX:o,originY:a,isAvailableBehavior:null})}if(n){var l=Math.abs(i);ZI(this,"scrollMove","moveOnMouseWheel",t,{scrollDelta:(i>0?1:-1)*(l>3?.4:l>1?.15:.05),originX:o,originY:a,isAvailableBehavior:null})}}},e.prototype._pinchHandler=function(t){YI(this._zr,"globalPan")||ZI(this,"zoom",null,t,{scale:t.pinchScale>1?1.1:1/1.1,originX:t.pinchX,originY:t.pinchY,isAvailableBehavior:null})},e}(jt);function ZI(t,e,n,i,r){t.pointerChecker&&t.pointerChecker(i,r.originX,r.originY)&&(de(i.event),jI(t,e,n,i,r))}function jI(t,e,n,i,r){r.isAvailableBehavior=W(qI,null,n,i),t.trigger(e,r)}function qI(t,e,n){var i=n[t];return!t||i&&(!U(i)||e.event[i+"Key"])}function KI(t,e,n){var i=t.target;i.x+=e,i.y+=n,i.dirty()}function $I(t,e,n,i){var r=t.target,o=t.zoomLimit,a=t.zoom=t.zoom||1;if(a*=e,o){var s=o.min||0,l=o.max||1/0;a=Math.max(Math.min(l,a),s)}var u=a/t.zoom;t.zoom=a,r.x-=(n-r.x)*(u-1),r.y-=(i-r.y)*(u-1),r.scaleX*=u,r.scaleY*=u,r.dirty()}var JI,QI={axisPointer:1,tooltip:1,brush:1};function tT(t,e,n){var i=e.getComponentByElement(t.topTarget),r=i&&i.coordinateSystem;return i&&i!==n&&!QI.hasOwnProperty(i.mainType)&&r&&r.model!==n}function eT(t){U(t)&&(t=(new DOMParser).parseFromString(t,"text/xml"));var e=t;for(9===e.nodeType&&(e=e.firstChild);"svg"!==e.nodeName.toLowerCase()||1!==e.nodeType;)e=e.nextSibling;return e}var 
nT={fill:"fill",stroke:"stroke","stroke-width":"lineWidth",opacity:"opacity","fill-opacity":"fillOpacity","stroke-opacity":"strokeOpacity","stroke-dasharray":"lineDash","stroke-dashoffset":"lineDashOffset","stroke-linecap":"lineCap","stroke-linejoin":"lineJoin","stroke-miterlimit":"miterLimit","font-family":"fontFamily","font-size":"fontSize","font-style":"fontStyle","font-weight":"fontWeight","text-anchor":"textAlign",visibility:"visibility",display:"display"},iT=G(nT),rT={"alignment-baseline":"textBaseline","stop-color":"stopColor"},oT=G(rT),aT=function(){function t(){this._defs={},this._root=null}return t.prototype.parse=function(t,e){e=e||{};var n=eT(t);this._defsUsePending=[];var i=new zr;this._root=i;var r=[],o=n.getAttribute("viewBox")||"",a=parseFloat(n.getAttribute("width")||e.width),s=parseFloat(n.getAttribute("height")||e.height);isNaN(a)&&(a=null),isNaN(s)&&(s=null),pT(n,i,null,!0,!1);for(var l,u,h=n.firstChild;h;)this._parseNode(h,i,r,null,!1,!1),h=h.nextSibling;if(function(t,e){for(var n=0;n=4&&(l={x:parseFloat(c[0]||0),y:parseFloat(c[1]||0),width:parseFloat(c[2]),height:parseFloat(c[3])})}if(l&&null!=a&&null!=s&&(u=bT(l,{x:0,y:0,width:a,height:s}),!e.ignoreViewBox)){var p=i;(i=new zr).add(p),p.scaleX=p.scaleY=u.scale,p.x=u.x,p.y=u.y}return e.ignoreRootClip||null==a||null==s||i.setClipPath(new zs({shape:{x:0,y:0,width:a,height:s}})),{root:i,width:a,height:s,viewBoxRect:l,viewBoxTransform:u,named:r}},t.prototype._parseNode=function(t,e,n,i,r,o){var a,s=t.nodeName.toLowerCase(),l=i;if("defs"===s&&(r=!0),"text"===s&&(o=!0),"defs"===s||"switch"===s)a=e;else{if(!r){var u=JI[s];if(u&&_t(JI,s)){a=u.call(this,t,e);var h=t.getAttribute("name");if(h){var c={name:h,namedFrom:null,svgNodeTagLower:s,el:a};n.push(c),"g"===s&&(l=c)}else i&&n.push({name:i.name,namedFrom:i,svgNodeTagLower:s,el:a});e.add(a)}}var p=sT[s];if(p&&_t(sT,s)){var d=p.call(this,t),f=t.getAttribute("id");f&&(this._defs[f]=d)}}if(a&&a.isGroup)for(var 
g=t.firstChild;g;)1===g.nodeType?this._parseNode(g,a,n,l,r,o):3===g.nodeType&&o&&this._parseText(g,a),g=g.nextSibling},t.prototype._parseText=function(t,e){var n=new Cs({style:{text:t.textContent},silent:!0,x:this._textX||0,y:this._textY||0});hT(e,n),pT(t,n,this._defsUsePending,!1,!1),function(t,e){var n=e.__selfStyle;if(n){var i=n.textBaseline,r=i;i&&"auto"!==i?"baseline"===i?r="alphabetic":"before-edge"===i||"text-before-edge"===i?r="top":"after-edge"===i||"text-after-edge"===i?r="bottom":"central"!==i&&"mathematical"!==i||(r="middle"):r="alphabetic",t.style.textBaseline=r}var o=e.__inheritedStyle;if(o){var a=o.textAlign,s=a;a&&("middle"===a&&(s="center"),t.style.textAlign=s)}}(n,e);var i=n.style,r=i.fontSize;r&&r<9&&(i.fontSize=9,n.scaleX*=r/9,n.scaleY*=r/9);var o=(i.fontSize||i.fontFamily)&&[i.fontStyle,i.fontWeight,(i.fontSize||12)+"px",i.fontFamily||"sans-serif"].join(" ");i.font=o;var a=n.getBoundingRect();return this._textX+=a.width,e.add(n),n},t.internalField=void(JI={g:function(t,e){var n=new zr;return hT(e,n),pT(t,n,this._defsUsePending,!1,!1),n},rect:function(t,e){var n=new zs;return hT(e,n),pT(t,n,this._defsUsePending,!1,!1),n.setShape({x:parseFloat(t.getAttribute("x")||"0"),y:parseFloat(t.getAttribute("y")||"0"),width:parseFloat(t.getAttribute("width")||"0"),height:parseFloat(t.getAttribute("height")||"0")}),n.silent=!0,n},circle:function(t,e){var n=new _u;return hT(e,n),pT(t,n,this._defsUsePending,!1,!1),n.setShape({cx:parseFloat(t.getAttribute("cx")||"0"),cy:parseFloat(t.getAttribute("cy")||"0"),r:parseFloat(t.getAttribute("r")||"0")}),n.silent=!0,n},line:function(t,e){var n=new Zu;return hT(e,n),pT(t,n,this._defsUsePending,!1,!1),n.setShape({x1:parseFloat(t.getAttribute("x1")||"0"),y1:parseFloat(t.getAttribute("y1")||"0"),x2:parseFloat(t.getAttribute("x2")||"0"),y2:parseFloat(t.getAttribute("y2")||"0")}),n.silent=!0,n},ellipse:function(t,e){var n=new wu;return 
hT(e,n),pT(t,n,this._defsUsePending,!1,!1),n.setShape({cx:parseFloat(t.getAttribute("cx")||"0"),cy:parseFloat(t.getAttribute("cy")||"0"),rx:parseFloat(t.getAttribute("rx")||"0"),ry:parseFloat(t.getAttribute("ry")||"0")}),n.silent=!0,n},polygon:function(t,e){var n,i=t.getAttribute("points");i&&(n=cT(i));var r=new Wu({shape:{points:n||[]},silent:!0});return hT(e,r),pT(t,r,this._defsUsePending,!1,!1),r},polyline:function(t,e){var n,i=t.getAttribute("points");i&&(n=cT(i));var r=new Yu({shape:{points:n||[]},silent:!0});return hT(e,r),pT(t,r,this._defsUsePending,!1,!1),r},image:function(t,e){var n=new ks;return hT(e,n),pT(t,n,this._defsUsePending,!1,!1),n.setStyle({image:t.getAttribute("xlink:href")||t.getAttribute("href"),x:+t.getAttribute("x"),y:+t.getAttribute("y"),width:+t.getAttribute("width"),height:+t.getAttribute("height")}),n.silent=!0,n},text:function(t,e){var n=t.getAttribute("x")||"0",i=t.getAttribute("y")||"0",r=t.getAttribute("dx")||"0",o=t.getAttribute("dy")||"0";this._textX=parseFloat(n)+parseFloat(r),this._textY=parseFloat(i)+parseFloat(o);var a=new zr;return hT(e,a),pT(t,a,this._defsUsePending,!1,!0),a},tspan:function(t,e){var n=t.getAttribute("x"),i=t.getAttribute("y");null!=n&&(this._textX=parseFloat(n)),null!=i&&(this._textY=parseFloat(i));var r=t.getAttribute("dx")||"0",o=t.getAttribute("dy")||"0",a=new zr;return hT(e,a),pT(t,a,this._defsUsePending,!1,!0),this._textX+=parseFloat(r),this._textY+=parseFloat(o),a},path:function(t,e){var n=vu(t.getAttribute("d")||"");return hT(e,n),pT(t,n,this._defsUsePending,!1,!1),n.silent=!0,n}}),t}(),sT={lineargradient:function(t){var e=parseInt(t.getAttribute("x1")||"0",10),n=parseInt(t.getAttribute("y1")||"0",10),i=parseInt(t.getAttribute("x2")||"10",10),r=parseInt(t.getAttribute("y2")||"0",10),o=new nh(e,n,i,r);return lT(t,o),uT(t,o),o},radialgradient:function(t){var e=parseInt(t.getAttribute("cx")||"0",10),n=parseInt(t.getAttribute("cy")||"0",10),i=parseInt(t.getAttribute("r")||"0",10),r=new ih(e,n,i);return 
lT(t,r),uT(t,r),r}};function lT(t,e){"userSpaceOnUse"===t.getAttribute("gradientUnits")&&(e.global=!0)}function uT(t,e){for(var n=t.firstChild;n;){if(1===n.nodeType&&"stop"===n.nodeName.toLocaleLowerCase()){var i=n.getAttribute("offset"),r=void 0;r=i&&i.indexOf("%")>0?parseInt(i,10)/100:i?parseFloat(i):0;var o={};_T(n,o,o);var a=o.stopColor||n.getAttribute("stop-color")||"#000000";e.colorStops.push({offset:r,color:a})}n=n.nextSibling}}function hT(t,e){t&&t.__inheritedStyle&&(e.__inheritedStyle||(e.__inheritedStyle={}),k(e.__inheritedStyle,t.__inheritedStyle))}function cT(t){for(var e=yT(t),n=[],i=0;i0;o-=2){var a=i[o],s=i[o-1],l=yT(a);switch(r=r||[1,0,0,1,0,0],s){case"translate":we(r,r,[parseFloat(l[0]),parseFloat(l[1]||"0")]);break;case"scale":Me(r,r,[parseFloat(l[0]),parseFloat(l[1]||l[0])]);break;case"rotate":Se(r,r,-parseFloat(l[0])*mT);break;case"skewX":be(r,[1,0,Math.tan(parseFloat(l[0])*mT),1,0,0],r);break;case"skewY":be(r,[1,Math.tan(parseFloat(l[0])*mT),0,1,0,0],r);break;case"matrix":r[0]=parseFloat(l[0]),r[1]=parseFloat(l[1]),r[2]=parseFloat(l[2]),r[3]=parseFloat(l[3]),r[4]=parseFloat(l[4]),r[5]=parseFloat(l[5])}}e.setLocalTransform(r)}}(t,e),_T(t,a,s),i||function(t,e,n){for(var i=0;i0,f={api:n,geo:s,mapOrGeoModel:t,data:a,isVisualEncodedByVisualMap:d,isGeo:o,transformInfoRaw:c};"geoJSON"===s.resourceType?this._buildGeoJSON(f):"geoSVG"===s.resourceType&&this._buildSVG(f),this._updateController(t,e,n),this._updateMapSelectHandler(t,l,n,i)},t.prototype._buildGeoJSON=function(t){var e=this._regionsGroupByName=yt(),n=yt(),i=this._regionsGroup,r=t.transformInfoRaw,o=t.mapOrGeoModel,a=t.data,s=t.geo.projection,l=s&&s.stream;function u(t,e){return e&&(t=e(t)),t&&[t[0]*r.scaleX+r.x,t[1]*r.scaleY+r.y]}function h(t){for(var e=[],n=!l&&s&&s.project,i=0;i=0)&&(p=r);var d=a?{normal:{align:"center",verticalAlign:"middle"}}:null;tc(e,ec(i),{labelFetcher:p,labelDataIndex:c,defaultText:n},d);var f=e.getTextContent();if(f&&(WT(f).ignore=f.ignore,e.textConfig&&a)){var 
g=e.getBoundingRect().clone();e.textConfig.layoutRect=g,e.textConfig.position=[(a[0]-g.x)/g.width*100+"%",(a[1]-g.y)/g.height*100+"%"]}e.disableLabelAnimation=!0}else e.removeTextContent(),e.removeTextConfig(),e.disableLabelAnimation=null}function jT(t,e,n,i,r,o){t.data?t.data.setItemGraphicEl(o,e):Qs(e).eventData={componentType:"geo",componentIndex:r.componentIndex,geoIndex:r.componentIndex,name:n,region:i&&i.option||{}}}function qT(t,e,n,i,r){t.data||Zh({el:e,componentModel:r,itemName:n,itemTooltipOption:i.get("tooltip")})}function KT(t,e,n,i,r){e.highDownSilentOnTouch=!!r.get("selectedMode");var o=i.getModel("emphasis"),a=o.get("focus");return Yl(e,a,o.get("blurScope"),o.get("disabled")),t.isGeo&&function(t,e,n){var i=Qs(t);i.componentMainType=e.mainType,i.componentIndex=e.componentIndex,i.componentHighDownName=n}(e,r,n),a}function $T(t,e,n){var i,r=[];function o(){i=[]}function a(){i.length&&(r.push(i),i=[])}var s=e({polygonStart:o,polygonEnd:a,lineStart:o,lineEnd:a,point:function(t,e){isFinite(t)&&isFinite(e)&&i.push([t,e])},sphere:function(){}});return!n&&s.polygonStart(),E(t,(function(t){s.lineStart();for(var e=0;e-1&&(n.style.stroke=n.style.fill,n.style.fill="#fff",n.style.lineWidth=2),n},e.type="series.map",e.dependencies=["geo"],e.layoutMode="box",e.defaultOption={z:2,coordinateSystem:"geo",map:"",left:"center",top:"center",aspectScale:null,showLegendSymbol:!0,boundingCoords:null,center:null,zoom:1,scaleLimit:null,selectedMode:!0,label:{show:!1,color:"#000"},itemStyle:{borderWidth:.5,borderColor:"#444",areaColor:"#eee"},emphasis:{label:{show:!0,color:"rgb(100,0,0)"},itemStyle:{areaColor:"rgba(255,215,0,0.8)"}},select:{label:{show:!0,color:"rgb(100,0,0)"},itemStyle:{color:"rgba(255,215,0,0.8)"}},nameProperty:"name"},e}(mg);function tC(t){var e={};t.eachSeriesByType("map",(function(t){var n=t.getHostGeoModel(),i=n?"o"+n.id:"i"+t.getMapType();(e[i]=e[i]||[]).push(t)})),E(e,(function(t,e){for(var n,i,r,o=(n=z(t,(function(t){return 
t.getData()})),i=t[0].get("mapValueCalculation"),r={},E(n,(function(t){t.each(t.mapDimension("value"),(function(e,n){var i="ec-"+t.getName(n);r[i]=r[i]||[],isNaN(e)||r[i].push(e)}))})),n[0].map(n[0].mapDimension("value"),(function(t,e){for(var o="ec-"+n[0].getName(e),a=0,s=1/0,l=-1/0,u=r[o].length,h=0;h1?(d.width=p,d.height=p/x):(d.height=p,d.width=p*x),d.y=c[1]-d.height/2,d.x=c[0]-d.width/2;else{var b=t.getBoxLayoutParams();b.aspect=x,d=Cp(b,{width:v,height:m})}this.setViewRect(d.x,d.y,d.width,d.height),this.setCenter(t.get("center"),e),this.setZoom(t.get("zoom"))}R(sC,iC);var hC=function(){function t(){this.dimensions=aC}return t.prototype.create=function(t,e){var n=[];function i(t){return{nameProperty:t.get("nameProperty"),aspectScale:t.get("aspectScale"),projection:t.get("projection")}}t.eachComponent("geo",(function(t,r){var o=t.get("map"),a=new sC(o+r,o,A({nameMap:t.get("nameMap")},i(t)));a.zoomLimit=t.get("scaleLimit"),n.push(a),t.coordinateSystem=a,a.model=t,a.resize=uC,a.resize(t,e)})),t.eachSeries((function(t){if("geo"===t.get("coordinateSystem")){var e=t.get("geoIndex")||0;t.coordinateSystem=n[e]}}));var r={};return t.eachSeriesByType("map",(function(t){if(!t.getHostGeoModel()){var e=t.getMapType();r[e]=r[e]||[],r[e].push(t)}})),E(r,(function(t,r){var o=z(t,(function(t){return t.get("nameMap")})),a=new sC(r,r,A({nameMap:D(o)},i(t[0])));a.zoomLimit=it.apply(null,z(t,(function(t){return t.get("scaleLimit")}))),n.push(a),a.resize=uC,a.resize(t[0],e),E(t,(function(t){t.coordinateSystem=a,function(t,e){E(e.get("geoCoord"),(function(e,n){t.addGeoCoord(n,e)}))}(a,t)}))})),n},t.prototype.getFilledRegions=function(t,e,n,i){for(var r=(t||[]).slice(),o=yt(),a=0;a=0;){var o=e[n];o.hierNode.prelim+=i,o.hierNode.modifier+=i,r+=o.hierNode.change,i+=o.hierNode.shift+r}}(t);var o=(n[0].hierNode.prelim+n[n.length-1].hierNode.prelim)/2;r?(t.hierNode.prelim=r.hierNode.prelim+e(t,r),t.hierNode.modifier=t.hierNode.prelim-o):t.hierNode.prelim=o}else 
r&&(t.hierNode.prelim=r.hierNode.prelim+e(t,r));t.parentNode.hierNode.defaultAncestor=function(t,e,n,i){if(e){for(var r=t,o=t,a=o.parentNode.children[0],s=e,l=r.hierNode.modifier,u=o.hierNode.modifier,h=a.hierNode.modifier,c=s.hierNode.modifier;s=wC(s),o=SC(o),s&&o;){r=wC(r),a=SC(a),r.hierNode.ancestor=t;var p=s.hierNode.prelim+c-o.hierNode.prelim-u+i(s,o);p>0&&(IC(MC(s,t,n),t,p),u+=p,l+=p),c+=s.hierNode.modifier,u+=o.hierNode.modifier,l+=r.hierNode.modifier,h+=a.hierNode.modifier}s&&!wC(r)&&(r.hierNode.thread=s,r.hierNode.modifier+=c-l),o&&!SC(a)&&(a.hierNode.thread=o,a.hierNode.modifier+=u-h,n=t)}return n}(t,r,t.parentNode.hierNode.defaultAncestor||i[0],e)}function xC(t){var e=t.hierNode.prelim+t.parentNode.hierNode.modifier;t.setLayout({x:e},!0),t.hierNode.modifier+=t.parentNode.hierNode.modifier}function _C(t){return arguments.length?t:TC}function bC(t,e){return t-=Math.PI/2,{x:e*Math.cos(t),y:e*Math.sin(t)}}function wC(t){var e=t.children;return e.length&&t.isExpand?e[e.length-1]:t.hierNode.thread}function SC(t){var e=t.children;return e.length&&t.isExpand?e[0]:t.hierNode.thread}function MC(t,e,n){return t.hierNode.ancestor.parentNode===e.parentNode?t.hierNode.ancestor:n}function IC(t,e,n){var i=n/(e.hierNode.i-t.hierNode.i);e.hierNode.change-=i,e.hierNode.shift+=n,e.hierNode.modifier+=n,e.hierNode.prelim+=n,t.hierNode.change+=i}function TC(t,e){return t.parentNode===e.parentNode?1:2}var CC=function(){this.parentPoint=[],this.childPoints=[]},DC=function(t){function e(e){return t.call(this,e)||this}return n(e,t),e.prototype.getDefaultStyle=function(){return{stroke:"#000",fill:null}},e.prototype.getDefaultShape=function(){return new CC},e.prototype.buildPath=function(t,e){var n=e.childPoints,i=n.length,r=e.parentPoint,o=n[0],a=n[i-1];if(1===i)return t.moveTo(r[0],r[1]),void t.lineTo(o[0],o[1]);var 
s=e.orient,l="TB"===s||"BT"===s?0:1,u=1-l,h=Ur(e.forkPosition,1),c=[];c[l]=r[l],c[u]=r[u]+(a[u]-r[u])*h,t.moveTo(r[0],r[1]),t.lineTo(c[0],c[1]),t.moveTo(o[0],o[1]),c[l]=o[l],t.lineTo(c[0],c[1]),c[l]=a[l],t.lineTo(c[0],c[1]),t.lineTo(a[0],a[1]);for(var p=1;pm.x)||(_-=Math.PI);var S=b?"left":"right",M=s.getModel("label"),I=M.get("rotate"),T=I*(Math.PI/180),C=y.getTextContent();C&&(y.setTextConfig({position:M.get("position")||S,rotation:null==I?-_:T,origin:"center"}),C.setStyle("verticalAlign","middle"))}var D=s.get(["emphasis","focus"]),A="relative"===D?vt(a.getAncestorsIndices(),a.getDescendantIndices()):"ancestor"===D?a.getAncestorsIndices():"descendant"===D?a.getDescendantIndices():null;A&&(Qs(n).focus=A),function(t,e,n,i,r,o,a,s){var l=e.getModel(),u=t.get("edgeShape"),h=t.get("layout"),c=t.getOrient(),p=t.get(["lineStyle","curveness"]),d=t.get("edgeForkPosition"),f=l.getModel("lineStyle").getLineStyle(),g=i.__edge;if("curve"===u)e.parentNode&&e.parentNode!==n&&(g||(g=i.__edge=new $u({shape:NC(h,c,p,r,r)})),fh(g,{shape:NC(h,c,p,o,a)},t));else if("polyline"===u)if("orthogonal"===h){if(e!==n&&e.children&&0!==e.children.length&&!0===e.isExpand){for(var y=e.children,v=[],m=0;me&&(e=i.height)}this.height=e+1},t.prototype.getNodeById=function(t){if(this.getId()===t)return this;for(var e=0,n=this.children,i=n.length;e=0&&this.hostTree.data.setItemLayout(this.dataIndex,t,e)},t.prototype.getLayout=function(){return this.hostTree.data.getItemLayout(this.dataIndex)},t.prototype.getModel=function(t){if(!(this.dataIndex<0))return this.hostTree.data.getItemModel(this.dataIndex).getModel(t)},t.prototype.getLevelModel=function(){return(this.hostTree.levelModels||[])[this.depth]},t.prototype.setVisual=function(t,e){this.dataIndex>=0&&this.hostTree.data.setItemVisual(this.dataIndex,t,e)},t.prototype.getVisual=function(t){return this.hostTree.data.getItemVisual(this.dataIndex,t)},t.prototype.getRawIndex=function(){return 
this.hostTree.data.getRawIndex(this.dataIndex)},t.prototype.getId=function(){return this.hostTree.data.getId(this.dataIndex)},t.prototype.getChildIndex=function(){if(this.parentNode){for(var t=this.parentNode.children,e=0;e=0){var i=n.getData().tree.root,r=t.targetNode;if(U(r)&&(r=i.getNodeById(r)),r&&i.contains(r))return{node:r};var o=t.targetNodeId;if(null!=o&&(r=i.getNodeById(o)))return{node:r}}}function jC(t){for(var e=[];t;)(t=t.parentNode)&&e.push(t);return e.reverse()}function qC(t,e){return P(jC(t),e)>=0}function KC(t,e){for(var n=[];t;){var i=t.dataIndex;n.push({name:t.name,dataIndex:i,value:e.getRawValue(i)}),t=t.parentNode}return n.reverse(),n}var $C=function(t){function e(){var e=null!==t&&t.apply(this,arguments)||this;return e.hasSymbolVisual=!0,e.ignoreStyleOnData=!0,e}return n(e,t),e.prototype.getInitialData=function(t){var e={name:t.name,children:t.data},n=t.leaves||{},i=new Mc(n,this,this.ecModel),r=UC.createTree(e,this,(function(t){t.wrapMethod("getItemModel",(function(t,e){var n=r.getNodeByDataIndex(e);return n&&n.children.length&&n.isExpand||(t.parentModel=i),t}))}));var o=0;r.eachNode("preorder",(function(t){t.depth>o&&(o=t.depth)}));var a=t.expandAndCollapse&&t.initialTreeDepth>=0?t.initialTreeDepth:o;return r.root.eachNode("preorder",(function(t){var e=t.hostTree.data.getRawDataItem(t.dataIndex);t.isExpand=e&&null!=e.collapsed?!e.collapsed:t.depth<=a})),r.data},e.prototype.getOrient=function(){var t=this.get("orient");return"horizontal"===t?t="LR":"vertical"===t&&(t="TB"),t},e.prototype.setZoom=function(t){this.option.zoom=t},e.prototype.setCenter=function(t){this.option.center=t},e.prototype.formatTooltip=function(t,e,n){for(var i=this.getData().tree,r=i.root.children[0],o=i.getNodeByDataIndex(t),a=o.getValue(),s=o.name;o&&o!==r;)s=o.parentNode.name+"."+s,o=o.parentNode;return ng("nameValue",{name:s,value:a,noValue:isNaN(a)||null==a})},e.prototype.getDataParams=function(e){var 
n=t.prototype.getDataParams.apply(this,arguments),i=this.getData().tree.getNodeByDataIndex(e);return n.treeAncestors=KC(i,this),n.collapsed=!i.isExpand,n},e.type="series.tree",e.layoutMode="box",e.defaultOption={z:2,coordinateSystem:"view",left:"12%",top:"12%",right:"12%",bottom:"12%",layout:"orthogonal",edgeShape:"curve",edgeForkPosition:"50%",roam:!1,nodeScaleRatio:.4,center:null,zoom:1,orient:"LR",symbol:"emptyCircle",symbolSize:7,expandAndCollapse:!0,initialTreeDepth:2,lineStyle:{color:"#ccc",width:1.5,curveness:.5},itemStyle:{color:"lightsteelblue",borderWidth:1.5},label:{show:!0},animationEasing:"linear",animationDuration:700,animationDurationUpdate:500},e}(mg);function JC(t,e){for(var n,i=[t];n=i.pop();)if(e(n),n.isExpand){var r=n.children;if(r.length)for(var o=r.length-1;o>=0;o--)i.push(r[o])}}function QC(t,e){t.eachSeriesByType("tree",(function(t){!function(t,e){var n=function(t,e){return Cp(t.getBoxLayoutParams(),{width:e.getWidth(),height:e.getHeight()})}(t,e);t.layoutInfo=n;var i=t.get("layout"),r=0,o=0,a=null;"radial"===i?(r=2*Math.PI,o=Math.min(n.height,n.width)/2,a=_C((function(t,e){return(t.parentNode===e.parentNode?1:2)/t.depth}))):(r=n.width,o=n.height,a=_C());var s=t.getData().tree.root,l=s.children[0];if(l){!function(t){var e=t;e.hierNode={defaultAncestor:null,ancestor:e,prelim:0,modifier:0,change:0,shift:0,i:0,thread:null};for(var n,i,r=[e];n=r.pop();)if(i=n.children,n.isExpand&&i.length)for(var o=i.length-1;o>=0;o--){var a=i[o];a.hierNode={defaultAncestor:null,ancestor:a,prelim:0,modifier:0,change:0,shift:0,i:o,thread:null},r.push(a)}}(s),function(t,e,n){for(var i,r=[t],o=[];i=r.pop();)if(o.push(i),i.isExpand){var a=i.children;if(a.length)for(var s=0;sh.getLayout().x&&(h=t),t.depth>c.depth&&(c=t)}));var p=u===h?1:a(u,h)/2,d=p-u.getLayout().x,f=0,g=0,y=0,v=0;if("radial"===i)f=r/(h.getLayout().x+p+d),g=o/(c.depth-1||1),JC(l,(function(t){y=(t.getLayout().x+d)*f,v=(t.depth-1)*g;var e=bC(y,v);t.setLayout({x:e.x,y:e.y,rawX:y,rawY:v},!0)}));else{var 
m=t.getOrient();"RL"===m||"LR"===m?(g=o/(h.getLayout().x+p+d),f=r/(c.depth-1||1),JC(l,(function(t){v=(t.getLayout().x+d)*g,y="LR"===m?(t.depth-1)*f:r-(t.depth-1)*f,t.setLayout({x:y,y:v},!0)}))):"TB"!==m&&"BT"!==m||(f=r/(h.getLayout().x+p+d),g=o/(c.depth-1||1),JC(l,(function(t){y=(t.getLayout().x+d)*f,v="TB"===m?(t.depth-1)*g:o-(t.depth-1)*g,t.setLayout({x:y,y:v},!0)})))}}}(t,e)}))}function tD(t){t.eachSeriesByType("tree",(function(t){var e=t.getData();e.tree.eachNode((function(t){var n=t.getModel().getModel("itemStyle").getItemStyle();A(e.ensureUniqueItemVisual(t.dataIndex,"style"),n)}))}))}var eD=["treemapZoomToNode","treemapRender","treemapMove"];function nD(t){var e=t.getData().tree,n={};e.eachNode((function(e){for(var i=e;i&&i.depth>1;)i=i.parentNode;var r=ud(t.ecModel,i.name||i.dataIndex+"",n);e.setVisual("decal",r)}))}var iD=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.preventUsingHoverLayer=!0,n}return n(e,t),e.prototype.getInitialData=function(t,e){var n={name:t.name,children:t.data};rD(n);var i=t.levels||[],r=this.designatedVisualItemStyle={},o=new Mc({itemStyle:r},this,e);i=t.levels=function(t,e){var n,i,r=bo(e.get("color")),o=bo(e.get(["aria","decal","decals"]));if(!r)return;t=t||[],E(t,(function(t){var e=new Mc(t),r=e.get("color"),o=e.get("decal");(e.get(["itemStyle","color"])||r&&"none"!==r)&&(n=!0),(e.get(["itemStyle","decal"])||o&&"none"!==o)&&(i=!0)}));var a=t[0]||(t[0]={});n||(a.color=r.slice());!i&&o&&(a.decal=o.slice());return t}(i,e);var a=z(i||[],(function(t){return new Mc(t,o,e)}),this),s=UC.createTree(n,this,(function(t){t.wrapMethod("getItemModel",(function(t,e){var n=s.getNodeByDataIndex(e),i=n?a[n.depth]:null;return t.parentModel=i||o,t}))}));return s.data},e.prototype.optionUpdated=function(){this.resetViewRoot()},e.prototype.formatTooltip=function(t,e,n){var i=this.getData(),r=this.getRawValue(t);return ng("nameValue",{name:i.getName(t),value:r})},e.prototype.getDataParams=function(e){var 
n=t.prototype.getDataParams.apply(this,arguments),i=this.getData().tree.getNodeByDataIndex(e);return n.treeAncestors=KC(i,this),n.treePathInfo=n.treeAncestors,n},e.prototype.setLayoutInfo=function(t){this.layoutInfo=this.layoutInfo||{},A(this.layoutInfo,t)},e.prototype.mapIdToIndex=function(t){var e=this._idIndexMap;e||(e=this._idIndexMap=yt(),this._idIndexMapCount=0);var n=e.get(t);return null==n&&e.set(t,n=this._idIndexMapCount++),n},e.prototype.getViewRoot=function(){return this._viewRoot},e.prototype.resetViewRoot=function(t){t?this._viewRoot=t:t=this._viewRoot;var e=this.getRawData().tree.root;t&&(t===e||e.contains(t))||(this._viewRoot=e)},e.prototype.enableAriaDecal=function(){nD(this)},e.type="series.treemap",e.layoutMode="box",e.defaultOption={progressive:0,left:"center",top:"middle",width:"80%",height:"80%",sort:!0,clipWindow:"origin",squareRatio:.5*(1+Math.sqrt(5)),leafDepth:null,drillDownIcon:"▶",zoomToNodeRatio:.1024,roam:!0,nodeClick:"zoomToNode",animation:!0,animationDurationUpdate:900,animationEasing:"quinticInOut",breadcrumb:{show:!0,height:22,left:"center",top:"bottom",emptyItemWidth:25,itemStyle:{color:"rgba(0,0,0,0.7)",textStyle:{color:"#fff"}},emphasis:{itemStyle:{color:"rgba(0,0,0,0.9)"}}},label:{show:!0,distance:0,padding:5,position:"inside",color:"#fff",overflow:"truncate"},upperLabel:{show:!1,position:[0,"50%"],height:20,overflow:"truncate",verticalAlign:"middle"},itemStyle:{color:null,colorAlpha:null,colorSaturation:null,borderWidth:0,gapWidth:0,borderColor:"#fff",borderColorSaturation:null},emphasis:{upperLabel:{show:!0,position:[0,"50%"],overflow:"truncate",verticalAlign:"middle"}},visualDimension:0,visualMin:null,visualMax:null,color:[],colorAlpha:null,colorSaturation:null,colorMappingBy:"index",visibleMin:10,childrenVisibleMin:null,levels:[]},e}(mg);function rD(t){var e=0;E(t.children,(function(t){rD(t);var n=t.value;Y(n)&&(n=n[0]),e+=n}));var 
n=t.value;Y(n)&&(n=n[0]),(null==n||isNaN(n))&&(n=e),n<0&&(n=0),Y(t.value)?t.value[0]=n:t.value=n}var oD=function(){function t(t){this.group=new zr,t.add(this.group)}return t.prototype.render=function(t,e,n,i){var r=t.getModel("breadcrumb"),o=this.group;if(o.removeAll(),r.get("show")&&n){var a=r.getModel("itemStyle"),s=r.getModel("emphasis"),l=a.getModel("textStyle"),u=s.getModel(["itemStyle","textStyle"]),h={pos:{left:r.get("left"),right:r.get("right"),top:r.get("top"),bottom:r.get("bottom")},box:{width:e.getWidth(),height:e.getHeight()},emptyItemWidth:r.get("emptyItemWidth"),totalWidth:0,renderList:[]};this._prepare(n,h,l),this._renderContent(t,h,a,s,l,u,i),Dp(o,h.pos,h.box)}},t.prototype._prepare=function(t,e,n){for(var i=t;i;i=i.parentNode){var r=Ao(i.getModel().get("name"),""),o=n.getTextRect(r),a=Math.max(o.width+16,e.emptyItemWidth);e.totalWidth+=a+8,e.renderList.push({node:i,text:r,width:a})}},t.prototype._renderContent=function(t,e,n,i,r,o,a){for(var s,l,u,h,c,p,d,f,g,y=0,v=e.emptyItemWidth,m=t.get(["breadcrumb","height"]),x=(s=e.pos,l=e.box,h=l.width,c=l.height,p=Ur(s.left,h),d=Ur(s.top,c),f=Ur(s.right,h),g=Ur(s.bottom,c),(isNaN(p)||isNaN(parseFloat(s.left)))&&(p=0),(isNaN(f)||isNaN(parseFloat(s.right)))&&(f=h),(isNaN(d)||isNaN(parseFloat(s.top)))&&(d=0),(isNaN(g)||isNaN(parseFloat(s.bottom)))&&(g=c),u=fp(u||0),{width:Math.max(f-p-u[1]-u[3],0),height:Math.max(g-d-u[0]-u[2],0)}),_=e.totalWidth,b=e.renderList,w=i.getModel("itemStyle").getItemStyle(),S=b.length-1;S>=0;S--){var M=b[S],I=M.node,T=M.width,C=M.text;_>x.width&&(_-=T-v,T=v,C=null);var D=new Wu({shape:{points:aD(y,0,T,m,S===b.length-1,0===S)},style:k(n.getItemStyle(),{lineJoin:"bevel"}),textContent:new 
Fs({style:nc(r,{text:C})}),textConfig:{position:"inside"},z2:1e5,onclick:H(a,I)});D.disableLabelAnimation=!0,D.getTextContent().ensureState("emphasis").style=nc(o,{text:C}),D.ensureState("emphasis").style=w,Yl(D,i.get("focus"),i.get("blurScope"),i.get("disabled")),this.group.add(D),sD(D,t,I),y+=T+8}},t.prototype.remove=function(){this.group.removeAll()},t}();function aD(t,e,n,i,r,o){var a=[[r?t:t-5,e],[t+n,e],[t+n,e+i],[r?t:t-5,e+i]];return!o&&a.splice(2,0,[t+n+5,e+i/2]),!r&&a.push([t,e+i/2]),a}function sD(t,e,n){Qs(t).eventData={componentType:"series",componentSubType:"treemap",componentIndex:e.componentIndex,seriesIndex:e.seriesIndex,seriesName:e.name,seriesType:"treemap",selfType:"breadcrumb",nodeData:{dataIndex:n&&n.dataIndex,name:n&&n.name},treePathInfo:n&&KC(n,e)}}var lD=function(){function t(){this._storage=[],this._elExistsMap={}}return t.prototype.add=function(t,e,n,i,r){return!this._elExistsMap[t.id]&&(this._elExistsMap[t.id]=!0,this._storage.push({el:t,target:e,duration:n,delay:i,easing:r}),!0)},t.prototype.finished=function(t){return this._finishedCallback=t,this},t.prototype.start=function(){for(var t=this,e=this._storage.length,n=function(){--e<=0&&(t._storage.length=0,t._elExistsMap={},t._finishedCallback&&t._finishedCallback())},i=0,r=this._storage.length;i3||Math.abs(t.dy)>3)){var e=this.seriesModel.getData().tree.root;if(!e)return;var n=e.getLayout();if(!n)return;this.api.dispatchAction({type:"treemapMove",from:this.uid,seriesId:this.seriesModel.id,rootRect:{x:n.x+t.dx,y:n.y+t.dy,width:n.width,height:n.height}})}},e.prototype._onZoom=function(t){var e=t.originX,n=t.originY;if("animating"!==this._state){var i=this.seriesModel.getData().tree.root;if(!i)return;var r=i.getLayout();if(!r)return;var o=new 
ze(r.x,r.y,r.width,r.height),a=this.seriesModel.layoutInfo,s=[1,0,0,1,0,0];we(s,s,[-(e-=a.x),-(n-=a.y)]),Me(s,s,[t.scale,t.scale]),we(s,s,[e,n]),o.applyTransform(s),this.api.dispatchAction({type:"treemapRender",from:this.uid,seriesId:this.seriesModel.id,rootRect:{x:o.x,y:o.y,width:o.width,height:o.height}})}},e.prototype._initEvents=function(t){var e=this;t.on("click",(function(t){if("ready"===e._state){var n=e.seriesModel.get("nodeClick",!0);if(n){var i=e.findTarget(t.offsetX,t.offsetY);if(i){var r=i.node;if(r.getLayout().isLeafRoot)e._rootToNode(i);else if("zoomToNode"===n)e._zoomToNode(i);else if("link"===n){var o=r.hostTree.data.getItemModel(r.dataIndex),a=o.get("link",!0),s=o.get("target",!0)||"blank";a&&bp(a,s)}}}}}),this)},e.prototype._renderBreadcrumb=function(t,e,n){var i=this;n||(n=null!=t.get("leafDepth",!0)?{node:t.getViewRoot()}:this.findTarget(e.getWidth()/2,e.getHeight()/2))||(n={node:t.getData().tree.root}),(this._breadcrumb||(this._breadcrumb=new oD(this.group))).render(t,e,n.node,(function(e){"animating"!==i._state&&(qC(t.getViewRoot(),e)?i._rootToNode({node:e}):i._zoomToNode({node:e}))}))},e.prototype.remove=function(){this._clearController(),this._containerGroup&&this._containerGroup.removeAll(),this._storage={nodeGroup:[],background:[],content:[]},this._state="ready",this._breadcrumb&&this._breadcrumb.remove()},e.prototype.dispose=function(){this._clearController()},e.prototype._zoomToNode=function(t){this.api.dispatchAction({type:"treemapZoomToNode",from:this.uid,seriesId:this.seriesModel.id,targetNode:t.node})},e.prototype._rootToNode=function(t){this.api.dispatchAction({type:"treemapRootToNode",from:this.uid,seriesId:this.seriesModel.id,targetNode:t.node})},e.prototype.findTarget=function(t,e){var n;return this.seriesModel.getViewRoot().eachNode({attr:"viewChildren",order:"preorder"},(function(i){var r=this._storage.background[i.getRawIndex()];if(r){var 
o=r.transformCoordToLocal(t,e),a=r.shape;if(!(a.x<=o[0]&&o[0]<=a.x+a.width&&a.y<=o[1]&&o[1]<=a.y+a.height))return!1;n={node:i,offsetX:o[0],offsetY:o[1]}}}),this),n},e.type="treemap",e}(kg);var vD=E,mD=q,xD=-1,_D=function(){function t(e){var n=e.mappingMethod,i=e.type,r=this.option=T(e);this.type=i,this.mappingMethod=n,this._normalizeData=kD[n];var o=t.visualHandlers[i];this.applyVisual=o.applyVisual,this.getColorMapper=o.getColorMapper,this._normalizedToVisual=o._normalizedToVisual[n],"piecewise"===n?(bD(r),function(t){var e=t.pieceList;t.hasSpecialVisual=!1,E(e,(function(e,n){e.originIndex=n,null!=e.visual&&(t.hasSpecialVisual=!0)}))}(r)):"category"===n?r.categories?function(t){var e=t.categories,n=t.categoryMap={},i=t.visual;if(vD(e,(function(t,e){n[t]=e})),!Y(i)){var r=[];q(i)?vD(i,(function(t,e){var i=n[e];r[null!=i?i:xD]=t})):r[-1]=i,i=AD(t,r)}for(var o=e.length-1;o>=0;o--)null==i[o]&&(delete n[e[o]],e.pop())}(r):bD(r,!0):(lt("linear"!==n||r.dataExtent),bD(r))}return t.prototype.mapValueToVisual=function(t){var e=this._normalizeData(t);return this._normalizedToVisual(e,t)},t.prototype.getNormalizer=function(){return W(this._normalizeData,this)},t.listVisualTypes=function(){return G(t.visualHandlers)},t.isValidType=function(e){return t.visualHandlers.hasOwnProperty(e)},t.eachVisual=function(t,e,n){q(t)?E(t,e,n):e.call(n,t)},t.mapVisual=function(e,n,i){var r,o=Y(e)?[]:q(e)?{}:(r=!0,null);return t.eachVisual(e,(function(t,e){var a=n.call(i,t,e);r?o=a:o[e]=a})),o},t.retrieveVisuals=function(e){var n,i={};return e&&vD(t.visualHandlers,(function(t,r){e.hasOwnProperty(r)&&(i[r]=e[r],n=!0)})),n?i:null},t.prepareVisualTypes=function(t){if(Y(t))t=t.slice();else{if(!mD(t))return[];var e=[];vD(t,(function(t,n){e.push(n)})),t=e}return t.sort((function(t,e){return"color"===e&&"color"!==t&&0===t.indexOf("color")?1:-1})),t},t.dependsOn=function(t,e){return"color"===e?!(!t||0!==t.indexOf(e)):t===e},t.findPieceIndex=function(t,e,n){for(var 
i,r=1/0,o=0,a=e.length;ou[1]&&(u[1]=l);var h=e.get("colorMappingBy"),c={type:a.name,dataExtent:u,visual:a.range};"color"!==c.type||"index"!==h&&"id"!==h?c.mappingMethod="linear":(c.mappingMethod="category",c.loop=!0);var p=new _D(c);return PD(p).drColorMappingBy=h,p}(0,r,o,0,u,d);E(d,(function(t,e){if(t.depth>=n.length||t===n[t.depth]){var o=function(t,e,n,i,r,o){var a=A({},e);if(r){var s=r.type,l="color"===s&&PD(r).drColorMappingBy,u="index"===l?i:"id"===l?o.mapIdToIndex(n.getId()):n.getValue(t.get("visualDimension"));a[s]=r.mapValueToVisual(u)}return a}(r,u,t,e,f,i);RD(t,o,n,i)}}))}else s=ND(u),h.fill=s}}function ND(t){var e=ED(t,"color");if(e){var n=ED(t,"colorAlpha"),i=ED(t,"colorSaturation");return i&&(e=ni(e,null,null,i)),n&&(e=ii(e,n)),e}}function ED(t,e){var n=t[e];if(null!=n&&"none"!==n)return n}function zD(t,e){var n=t.get(e);return Y(n)&&n.length?{name:e,range:n}:null}var VD=Math.max,BD=Math.min,FD=it,GD=E,WD=["itemStyle","borderWidth"],HD=["itemStyle","gapWidth"],YD=["upperLabel","show"],XD=["upperLabel","height"],UD={seriesType:"treemap",reset:function(t,e,n,i){var r=n.getWidth(),o=n.getHeight(),a=t.option,s=Cp(t.getBoxLayoutParams(),{width:n.getWidth(),height:n.getHeight()}),l=a.size||[],u=Ur(FD(s.width,l[0]),r),h=Ur(FD(s.height,l[1]),o),c=i&&i.type,p=ZC(i,["treemapZoomToNode","treemapRootToNode"],t),d="treemapRender"===c||"treemapMove"===c?i.rootRect:null,f=t.getViewRoot(),g=jC(f);if("treemapMove"!==c){var y="treemapZoomToNode"===c?function(t,e,n,i,r){var o,a=(e||{}).node,s=[i,r];if(!a||a===n)return s;var l=i*r,u=l*t.option.zoomToNodeRatio;for(;o=a.parentNode;){for(var h=0,c=o.children,p=0,d=c.length;pto&&(u=to),a=o}ua[1]&&(a[1]=e)}))):a=[NaN,NaN];return{sum:i,dataExtent:a}}(e,a,s);if(0===u.sum)return t.viewChildren=[];if(u.sum=function(t,e,n,i,r){if(!i)return n;for(var o=t.get("visibleMin"),a=r.length,s=a,l=a-1;l>=0;l--){var u=r["asc"===i?a-l-1:l].getValue();u/n*ei&&(i=a));var l=t.area*t.area,u=e*e*n;return l?VD(u*i/l,l/(u*r)):1/0}function 
qD(t,e,n,i,r){var o=e===n.width?0:1,a=1-o,s=["x","y"],l=["width","height"],u=n[s[o]],h=e?t.area/e:0;(r||h>n[l[a]])&&(h=n[l[a]]);for(var c=0,p=t.length;ci&&(i=e);var o=i%2?i+2:i+3;r=[];for(var a=0;a0&&(m[0]=-m[0],m[1]=-m[1]);var _=v[0]<0?-1:1;if("start"!==i.__position&&"end"!==i.__position){var b=-Math.atan2(v[1],v[0]);u[0].8?"left":h[0]<-.8?"right":"center",p=h[1]>.8?"top":h[1]<-.8?"bottom":"middle";break;case"start":i.x=-h[0]*f+l[0],i.y=-h[1]*g+l[1],c=h[0]>.8?"right":h[0]<-.8?"left":"center",p=h[1]>.8?"bottom":h[1]<-.8?"top":"middle";break;case"insideStartTop":case"insideStart":case"insideStartBottom":i.x=f*_+l[0],i.y=l[1]+w,c=v[0]<0?"right":"left",i.originX=-f*_,i.originY=-w;break;case"insideMiddleTop":case"insideMiddle":case"insideMiddleBottom":case"middle":i.x=x[0],i.y=x[1]+w,c="center",i.originY=-w;break;case"insideEndTop":case"insideEnd":case"insideEndBottom":i.x=-f*_+u[0],i.y=u[1]+w,c=v[0]>=0?"right":"left",i.originX=f*_,i.originY=-w}i.scaleX=i.scaleY=r,i.setStyle({verticalAlign:i.__verticalAlign||p,align:i.__align||c})}}}function S(t,e){var n=t.__specifiedRotation;if(null==n){var i=a.tangentAt(e);t.attr("rotation",(1===e?-1:1)*Math.PI/2-Math.atan2(i[1],i[0]))}else t.attr("rotation",n)}},e}(zr),RA=function(){function t(t){this.group=new zr,this._LineCtor=t||OA}return t.prototype.updateData=function(t){var e=this;this._progressiveEls=null;var n=this,i=n.group,r=n._lineData;n._lineData=t,r||i.removeAll();var o=NA(t);t.diff(r).add((function(n){e._doAdd(t,n,o)})).update((function(n,i){e._doUpdate(r,t,i,n,o)})).remove((function(t){i.remove(r.getItemGraphicEl(t))})).execute()},t.prototype.updateLayout=function(){var t=this._lineData;t&&t.eachItemGraphicEl((function(e,n){e.updateLayout(t,n)}),this)},t.prototype.incrementalPrepareUpdate=function(t){this._seriesScope=NA(t),this._lineData=null,this.group.removeAll()},t.prototype.incrementalUpdate=function(t,e){function n(t){t.isGroup||function(t){return 
t.animators&&t.animators.length>0}(t)||(t.incremental=!0,t.ensureState("emphasis").hoverLayer=!0)}this._progressiveEls=[];for(var i=t.start;i=0?i+=u:i-=u:f>=0?i-=u:i+=u}return i}function XA(t,e){var n=[],i=Dn,r=[[],[],[]],o=[[],[]],a=[];e/=2,t.eachEdge((function(t,s){var l=t.getLayout(),u=t.getVisual("fromSymbol"),h=t.getVisual("toSymbol");l.__original||(l.__original=[Tt(l[0]),Tt(l[1])],l[2]&&l.__original.push(Tt(l[2])));var c=l.__original;if(null!=l[2]){if(It(r[0],c[0]),It(r[1],c[2]),It(r[2],c[1]),u&&"none"!==u){var p=dA(t.node1),d=YA(r,c[0],p*e);i(r[0][0],r[1][0],r[2][0],d,n),r[0][0]=n[3],r[1][0]=n[4],i(r[0][1],r[1][1],r[2][1],d,n),r[0][1]=n[3],r[1][1]=n[4]}if(h&&"none"!==h){p=dA(t.node2),d=YA(r,c[1],p*e);i(r[0][0],r[1][0],r[2][0],d,n),r[1][0]=n[1],r[2][0]=n[2],i(r[0][1],r[1][1],r[2][1],d,n),r[1][1]=n[1],r[2][1]=n[2]}It(l[0],r[0]),It(l[1],r[2]),It(l[2],r[1])}else{if(It(o[0],c[0]),It(o[1],c[1]),kt(a,o[1],o[0]),Et(a,a),u&&"none"!==u){p=dA(t.node1);At(o[0],o[0],a,p*e)}if(h&&"none"!==h){p=dA(t.node2);At(o[1],o[1],a,-p*e)}It(l[0],o[0]),It(l[1],o[1])}}))}function UA(t){return"view"===t.type}var ZA=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.init=function(t,e){var n=new hS,i=new RA,r=this.group;this._controller=new UI(e.getZr()),this._controllerHost={target:r},r.add(n.group),r.add(i.group),this._symbolDraw=n,this._lineDraw=i,this._firstRender=!0},e.prototype.render=function(t,e,n){var i=this,r=t.coordinateSystem;this._model=t;var o=this._symbolDraw,a=this._lineDraw,s=this.group;if(UA(r)){var l={x:r.x,y:r.y,scaleX:r.scaleX,scaleY:r.scaleY};this._firstRender?s.attr(l):fh(s,l,t)}XA(t.getGraph(),pA(t));var u=t.getData();o.updateData(u);var h=t.getEdgeData();a.updateData(h),this._updateNodeAndLinkScale(),this._updateController(t,e,n),clearTimeout(this._layoutTimeout);var c=t.forceLayout,p=t.get(["force","layoutAnimation"]);c&&this._startForceLayoutIteration(c,p);var 
d=t.get("layout");u.graph.eachNode((function(e){var n=e.dataIndex,r=e.getGraphicEl(),o=e.getModel();if(r){r.off("drag").off("dragend");var a=o.get("draggable");a&&r.on("drag",(function(o){switch(d){case"force":c.warmUp(),!i._layouting&&i._startForceLayoutIteration(c,p),c.setFixed(n),u.setItemLayout(n,[r.x,r.y]);break;case"circular":u.setItemLayout(n,[r.x,r.y]),e.setLayout({fixed:!0},!0),yA(t,"symbolSize",e,[o.offsetX,o.offsetY]),i.updateLayout(t);break;default:u.setItemLayout(n,[r.x,r.y]),hA(t.getGraph(),t),i.updateLayout(t)}})).on("dragend",(function(){c&&c.setUnfixed(n)})),r.setDraggable(a,!!o.get("cursor")),"adjacency"===o.get(["emphasis","focus"])&&(Qs(r).focus=e.getAdjacentDataIndices())}})),u.graph.eachEdge((function(t){var e=t.getGraphicEl(),n=t.getModel().get(["emphasis","focus"]);e&&"adjacency"===n&&(Qs(e).focus={edge:[t.dataIndex],node:[t.node1.dataIndex,t.node2.dataIndex]})}));var f="circular"===t.get("layout")&&t.get(["circular","rotateLabel"]),g=u.getLayout("cx"),y=u.getLayout("cy");u.graph.eachNode((function(t){mA(t,f,g,y)})),this._firstRender=!1},e.prototype.dispose=function(){this._controller&&this._controller.dispose(),this._controllerHost=null},e.prototype._startForceLayoutIteration=function(t,e){var n=this;!function i(){t.step((function(t){n.updateLayout(n._model),(n._layouting=!t)&&(e?n._layoutTimeout=setTimeout(i,16):i())}))}()},e.prototype._updateController=function(t,e,n){var i=this,r=this._controller,o=this._controllerHost,a=this.group;r.setPointerChecker((function(e,i,r){var o=a.getBoundingRect();return 
o.applyTransform(a.transform),o.contain(i,r)&&!tT(e,n,t)})),UA(t.coordinateSystem)?(r.enable(t.get("roam")),o.zoomLimit=t.get("scaleLimit"),o.zoom=t.coordinateSystem.getZoom(),r.off("pan").off("zoom").on("pan",(function(e){KI(o,e.dx,e.dy),n.dispatchAction({seriesId:t.id,type:"graphRoam",dx:e.dx,dy:e.dy})})).on("zoom",(function(e){$I(o,e.scale,e.originX,e.originY),n.dispatchAction({seriesId:t.id,type:"graphRoam",zoom:e.scale,originX:e.originX,originY:e.originY}),i._updateNodeAndLinkScale(),XA(t.getGraph(),pA(t)),i._lineDraw.updateLayout(),n.updateLabelLayout()}))):r.disable()},e.prototype._updateNodeAndLinkScale=function(){var t=this._model,e=t.getData(),n=pA(t);e.eachItemGraphicEl((function(t,e){t&&t.setSymbolScale(n)}))},e.prototype.updateLayout=function(t){XA(t.getGraph(),pA(t)),this._symbolDraw.updateLayout(),this._lineDraw.updateLayout()},e.prototype.remove=function(t,e){this._symbolDraw&&this._symbolDraw.remove(),this._lineDraw&&this._lineDraw.remove()},e.type="graph",e}(kg);function jA(t){return"_EC_"+t}var qA=function(){function t(t){this.type="graph",this.nodes=[],this.edges=[],this._nodesMap={},this._edgesMap={},this._directed=t||!1}return t.prototype.isDirected=function(){return this._directed},t.prototype.addNode=function(t,e){t=null==t?""+e:""+t;var n=this._nodesMap;if(!n[jA(t)]){var i=new KA(t,e);return i.hostGraph=this,this.nodes.push(i),n[jA(t)]=i,i}},t.prototype.getNodeByIndex=function(t){var e=this.data.getRawIndex(t);return this.nodes[e]},t.prototype.getNodeById=function(t){return this._nodesMap[jA(t)]},t.prototype.addEdge=function(t,e,n){var i=this._nodesMap,r=this._edgesMap;if(j(t)&&(t=this.nodes[t]),j(e)&&(e=this.nodes[e]),t instanceof KA||(t=i[jA(t)]),e instanceof KA||(e=i[jA(e)]),t&&e){var o=t.id+"-"+e.id,a=new $A(t,e,n);return a.hostGraph=this,this._directed&&(t.outEdges.push(a),e.inEdges.push(a)),t.edges.push(a),t!==e&&e.edges.push(a),this.edges.push(a),r[o]=a,a}},t.prototype.getEdgeByIndex=function(t){var 
e=this.edgeData.getRawIndex(t);return this.edges[e]},t.prototype.getEdge=function(t,e){t instanceof KA&&(t=t.id),e instanceof KA&&(e=e.id);var n=this._edgesMap;return this._directed?n[t+"-"+e]:n[t+"-"+e]||n[e+"-"+t]},t.prototype.eachNode=function(t,e){for(var n=this.nodes,i=n.length,r=0;r=0&&t.call(e,n[r],r)},t.prototype.eachEdge=function(t,e){for(var n=this.edges,i=n.length,r=0;r=0&&n[r].node1.dataIndex>=0&&n[r].node2.dataIndex>=0&&t.call(e,n[r],r)},t.prototype.breadthFirstTraverse=function(t,e,n,i){if(e instanceof KA||(e=this._nodesMap[jA(e)]),e){for(var r="out"===n?"outEdges":"in"===n?"inEdges":"edges",o=0;o=0&&n.node2.dataIndex>=0}));for(r=0,o=i.length;r=0&&this[t][e].setItemVisual(this.dataIndex,n,i)},getVisual:function(n){return this[t][e].getItemVisual(this.dataIndex,n)},setLayout:function(n,i){this.dataIndex>=0&&this[t][e].setItemLayout(this.dataIndex,n,i)},getLayout:function(){return this[t][e].getItemLayout(this.dataIndex)},getGraphicEl:function(){return this[t][e].getItemGraphicEl(this.dataIndex)},getRawIndex:function(){return this[t][e].getRawIndex(this.dataIndex)}}}function QA(t,e,n,i,r){for(var o=new qA(i),a=0;a "+p)),u++)}var d,f=n.get("coordinateSystem");if("cartesian2d"===f||"polar"===f)d=vx(t,n);else{var g=xd.get(f),y=g&&g.dimensions||[];P(y,"value")<0&&y.concat(["value"]);var v=ux(t,{coordDimensions:y,encodeDefine:n.getEncode()}).dimensions;(d=new lx(v,n)).initData(t)}var m=new lx(["value"],n);return m.initData(l,s),r&&r(d,m),zC({mainData:d,struct:o,structAttr:"graph",datas:{node:d,edge:m},datasAttr:{node:"data",edge:"edgeData"}}),o.update(),o}R(KA,JA("hostGraph","data")),R($A,JA("hostGraph","edgeData"));var tk=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.hasSymbolVisual=!0,n}return n(e,t),e.prototype.init=function(e){t.prototype.init.apply(this,arguments);var n=this;function i(){return n._categoriesData}this.legendVisualProvider=new 
IM(i,i),this.fillDataTextStyle(e.edges||e.links),this._updateCategoriesData()},e.prototype.mergeOption=function(e){t.prototype.mergeOption.apply(this,arguments),this.fillDataTextStyle(e.edges||e.links),this._updateCategoriesData()},e.prototype.mergeDefaultAndTheme=function(e){t.prototype.mergeDefaultAndTheme.apply(this,arguments),wo(e,"edgeLabel",["show"])},e.prototype.getInitialData=function(t,e){var n,i=t.edges||t.links||[],r=t.data||t.nodes||[],o=this;if(r&&i){iA(n=this)&&(n.__curvenessList=[],n.__edgeMap={},rA(n));var a=QA(r,i,this,!0,(function(t,e){t.wrapMethod("getItemModel",(function(t){var e=o._categoriesModels[t.getShallow("category")];return e&&(e.parentModel=t.parentModel,t.parentModel=e),t}));var n=Mc.prototype.getModel;function i(t,e){var i=n.call(this,t,e);return i.resolveParentPath=r,i}function r(t){if(t&&("label"===t[0]||"label"===t[1])){var e=t.slice();return"label"===t[0]?e[0]="edgeLabel":"label"===t[1]&&(e[1]="edgeLabel"),e}return t}e.wrapMethod("getItemModel",(function(t){return t.resolveParentPath=r,t.getModel=i,t}))}));return E(a.edges,(function(t){!function(t,e,n,i){if(iA(n)){var r=oA(t,e,n),o=n.__edgeMap,a=o[aA(r)];o[r]&&!a?o[r].isForward=!0:a&&o[r]&&(a.isForward=!0,o[r].isForward=!1),o[r]=o[r]||[],o[r].push(i)}}(t.node1,t.node2,this,t.dataIndex)}),this),a.data}},e.prototype.getGraph=function(){return this.getData().graph},e.prototype.getEdgeData=function(){return this.getGraph().edgeData},e.prototype.getCategoriesData=function(){return this._categoriesData},e.prototype.formatTooltip=function(t,e,n){if("edge"===n){var i=this.getData(),r=this.getDataParams(t,n),o=i.graph.getEdgeByIndex(t),a=i.getName(o.node1.dataIndex),s=i.getName(o.node2.dataIndex),l=[];return null!=a&&l.push(a),null!=s&&l.push(s),ng("nameValue",{name:l.join(" > "),value:r.value,noValue:null==r.value})}return fg({series:this,dataIndex:t,multipleSeries:e})},e.prototype._updateCategoriesData=function(){var t=z(this.option.categories||[],(function(t){return 
null!=t.value?t:A({value:0},t)})),e=new lx(["value"],this);e.initData(t),this._categoriesData=e,this._categoriesModels=e.mapArray((function(t){return e.getItemModel(t)}))},e.prototype.setZoom=function(t){this.option.zoom=t},e.prototype.setCenter=function(t){this.option.center=t},e.prototype.isAnimationEnabled=function(){return t.prototype.isAnimationEnabled.call(this)&&!("force"===this.get("layout")&&this.get(["force","layoutAnimation"]))},e.type="series.graph",e.dependencies=["grid","polar","geo","singleAxis","calendar"],e.defaultOption={z:2,coordinateSystem:"view",legendHoverLink:!0,layout:null,circular:{rotateLabel:!1},force:{initLayout:null,repulsion:[0,50],gravity:.1,friction:.6,edgeLength:30,layoutAnimation:!0},left:"center",top:"center",symbol:"circle",symbolSize:10,edgeSymbol:["none","none"],edgeSymbolSize:10,edgeLabel:{position:"middle",distance:5},draggable:!1,roam:!1,center:null,zoom:1,nodeScaleRatio:.6,label:{show:!1,formatter:"{b}"},itemStyle:{},lineStyle:{color:"#aaa",width:1,opacity:.5},emphasis:{scale:!0,label:{show:!0}},select:{itemStyle:{borderColor:"#212121"}}},e}(mg),ek={type:"graphRoam",event:"graphRoam",update:"none"};var nk=function(){this.angle=0,this.width=10,this.r=10,this.x=0,this.y=0},ik=function(t){function e(e){var n=t.call(this,e)||this;return n.type="pointer",n}return n(e,t),e.prototype.getDefaultShape=function(){return new nk},e.prototype.buildPath=function(t,e){var n=Math.cos,i=Math.sin,r=e.r,o=e.width,a=e.angle,s=e.x-n(a)*o*(o>=r/3?1:2),l=e.y-i(a)*o*(o>=r/3?1:2);a=e.angle-Math.PI/2,t.moveTo(s,l),t.lineTo(e.x+n(a)*o,e.y+i(a)*o),t.lineTo(e.x+n(e.angle)*r,e.y+i(e.angle)*r),t.lineTo(e.x-n(a)*o,e.y-i(a)*o),t.lineTo(s,l)},e}(Is);function rk(t,e){var n=null==t?"":t+"";return e&&(U(e)?n=e.replace("{value}",n):X(e)&&(n=e(t))),n}var ok=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){this.group.removeAll();var 
i=t.get(["axisLine","lineStyle","color"]),r=function(t,e){var n=t.get("center"),i=e.getWidth(),r=e.getHeight(),o=Math.min(i,r);return{cx:Ur(n[0],e.getWidth()),cy:Ur(n[1],e.getHeight()),r:Ur(t.get("radius"),o/2)}}(t,n);this._renderMain(t,e,n,i,r),this._data=t.getData()},e.prototype.dispose=function(){},e.prototype._renderMain=function(t,e,n,i,r){var o=this.group,a=t.get("clockwise"),s=-t.get("startAngle")/180*Math.PI,l=-t.get("endAngle")/180*Math.PI,u=t.getModel("axisLine"),h=u.get("roundCap")?HS:zu,c=u.get("show"),p=u.getModel("lineStyle"),d=p.get("width"),f=[s,l];rs(f,!a);for(var g=(l=f[1])-(s=f[0]),y=s,v=[],m=0;c&&m=t&&(0===e?0:i[e-1][0])Math.PI/2&&(V+=Math.PI):"tangential"===z?V=-M-Math.PI/2:j(z)&&(V=z*Math.PI/180),0===V?c.add(new Fs({style:nc(x,{text:O,x:N,y:E,verticalAlign:h<-.8?"top":h>.8?"bottom":"middle",align:u<-.4?"left":u>.4?"right":"center"},{inheritColor:R}),silent:!0})):c.add(new Fs({style:nc(x,{text:O,x:N,y:E,verticalAlign:"middle",align:"center"},{inheritColor:R}),silent:!0,originX:N,originY:E,rotation:V}))}if(m.get("show")&&k!==_){P=(P=m.get("distance"))?P+l:l;for(var B=0;B<=b;B++){u=Math.cos(M),h=Math.sin(M);var F=new Zu({shape:{x1:u*(f-P)+p,y1:h*(f-P)+d,x2:u*(f-S-P)+p,y2:h*(f-S-P)+d},silent:!0,style:D});"auto"===D.stroke&&F.setStyle({stroke:i((k+B/b)/_)}),c.add(F),M+=T}M-=T}else M+=I}},e.prototype._renderPointer=function(t,e,n,i,r,o,a,s,l){var u=this.group,h=this._data,c=this._progressEls,p=[],d=t.get(["pointer","show"]),f=t.getModel("progress"),g=f.get("show"),y=t.getData(),v=y.mapDimension("value"),m=+t.get("min"),x=+t.get("max"),_=[m,x],b=[o,a];function w(e,n){var i,o=y.getItemModel(e).getModel("pointer"),a=Ur(o.get("width"),r.r),s=Ur(o.get("length"),r.r),l=t.get(["pointer","icon"]),u=o.get("offsetCenter"),h=Ur(u[0],r.r),c=Ur(u[1],r.r),p=o.get("keepAspect");return(i=l?Wy(l,h-a/2,c-s,a,s,null,p):new ik({shape:{angle:-Math.PI/2,width:a,r:s,x:h,y:c}})).rotation=-(n+Math.PI/2),i.x=r.cx,i.y=r.cy,i}function S(t,e){var 
n=f.get("roundCap")?HS:zu,i=f.get("overlap"),a=i?f.get("width"):l/y.count(),u=i?r.r-a:r.r-(t+1)*a,h=i?r.r:r.r-t*a,c=new n({shape:{startAngle:o,endAngle:e,cx:r.cx,cy:r.cy,clockwise:s,r0:u,r:h}});return i&&(c.z2=x-y.get(v,t)%x),c}(g||d)&&(y.diff(h).add((function(e){var n=y.get(v,e);if(d){var i=w(e,o);gh(i,{rotation:-((isNaN(+n)?b[0]:Xr(n,_,b,!0))+Math.PI/2)},t),u.add(i),y.setItemGraphicEl(e,i)}if(g){var r=S(e,o),a=f.get("clip");gh(r,{shape:{endAngle:Xr(n,_,b,a)}},t),u.add(r),tl(t.seriesIndex,y.dataType,e,r),p[e]=r}})).update((function(e,n){var i=y.get(v,e);if(d){var r=h.getItemGraphicEl(n),a=r?r.rotation:o,s=w(e,a);s.rotation=a,fh(s,{rotation:-((isNaN(+i)?b[0]:Xr(i,_,b,!0))+Math.PI/2)},t),u.add(s),y.setItemGraphicEl(e,s)}if(g){var l=c[n],m=S(e,l?l.shape.endAngle:o),x=f.get("clip");fh(m,{shape:{endAngle:Xr(i,_,b,x)}},t),u.add(m),tl(t.seriesIndex,y.dataType,e,m),p[e]=m}})).execute(),y.each((function(t){var e=y.getItemModel(t),n=e.getModel("emphasis"),r=n.get("focus"),o=n.get("blurScope"),a=n.get("disabled");if(d){var s=y.getItemGraphicEl(t),l=y.getItemVisual(t,"style"),u=l.fill;if(s instanceof ks){var h=s.style;s.useStyle(A({image:h.image,x:h.x,y:h.y,width:h.width,height:h.height},l))}else s.useStyle(l),"pointer"!==s.type&&s.setColor(u);s.setStyle(e.getModel(["pointer","itemStyle"]).getItemStyle()),"auto"===s.style.fill&&s.setStyle("fill",i(Xr(y.get(v,t),_,[0,1],!0))),s.z2EmphasisLift=0,jl(s,e),Yl(s,r,o,a)}if(g){var c=p[t];c.useStyle(y.getItemVisual(t,"style")),c.setStyle(e.getModel(["progress","itemStyle"]).getItemStyle()),c.z2EmphasisLift=0,jl(c,e),Yl(c,r,o,a)}})),this._progressEls=p)},e.prototype._renderAnchor=function(t,e){var n=t.getModel("anchor");if(n.get("show")){var i=n.get("size"),r=n.get("icon"),o=n.get("offsetCenter"),a=n.get("keepAspect"),s=Wy(r,e.cx-i/2+Ur(o[0],e.r),e.cy-i/2+Ur(o[1],e.r),i,i,null,a);s.z2=n.get("showAbove")?1:0,s.setStyle(n.getModel("itemStyle").getItemStyle()),this.group.add(s)}},e.prototype._renderTitleAndDetail=function(t,e,n,i,r){var 
o=this,a=t.getData(),s=a.mapDimension("value"),l=+t.get("min"),u=+t.get("max"),h=new zr,c=[],p=[],d=t.isAnimationEnabled(),f=t.get(["pointer","showAbove"]);a.diff(this._data).add((function(t){c[t]=new Fs({silent:!0}),p[t]=new Fs({silent:!0})})).update((function(t,e){c[t]=o._titleEls[e],p[t]=o._detailEls[e]})).execute(),a.each((function(e){var n=a.getItemModel(e),o=a.get(s,e),g=new zr,y=i(Xr(o,[l,u],[0,1],!0)),v=n.getModel("title");if(v.get("show")){var m=v.get("offsetCenter"),x=r.cx+Ur(m[0],r.r),_=r.cy+Ur(m[1],r.r);(D=c[e]).attr({z2:f?0:2,style:nc(v,{x:x,y:_,text:a.getName(e),align:"center",verticalAlign:"middle"},{inheritColor:y})}),g.add(D)}var b=n.getModel("detail");if(b.get("show")){var w=b.get("offsetCenter"),S=r.cx+Ur(w[0],r.r),M=r.cy+Ur(w[1],r.r),I=Ur(b.get("width"),r.r),T=Ur(b.get("height"),r.r),C=t.get(["progress","show"])?a.getItemVisual(e,"style").fill:y,D=p[e],A=b.get("formatter");D.attr({z2:f?0:2,style:nc(b,{x:S,y:M,text:rk(o,A),width:isNaN(I)?null:I,height:isNaN(T)?null:T,align:"center",verticalAlign:"middle"},{inheritColor:C})}),hc(D,{normal:b},o,(function(t){return rk(t,A)})),d&&cc(D,e,a,t,{getFormattedLabel:function(t,e,n,i,r,a){return rk(a?a.interpolatedValue:o,A)}}),g.add(D)}h.add(g)})),this.group.add(h),this._titleEls=c,this._detailEls=p},e.type="gauge",e}(kg),ak=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.visualStyleAccessPath="itemStyle",n}return n(e,t),e.prototype.getInitialData=function(t,e){return 
MM(this,["value"])},e.type="series.gauge",e.defaultOption={z:2,colorBy:"data",center:["50%","50%"],legendHoverLink:!0,radius:"75%",startAngle:225,endAngle:-45,clockwise:!0,min:0,max:100,splitNumber:10,axisLine:{show:!0,roundCap:!1,lineStyle:{color:[[1,"#E6EBF8"]],width:10}},progress:{show:!1,overlap:!0,width:10,roundCap:!1,clip:!0},splitLine:{show:!0,length:10,distance:10,lineStyle:{color:"#63677A",width:3,type:"solid"}},axisTick:{show:!0,splitNumber:5,length:6,distance:10,lineStyle:{color:"#63677A",width:1,type:"solid"}},axisLabel:{show:!0,distance:15,color:"#464646",fontSize:12,rotate:0},pointer:{icon:null,offsetCenter:[0,0],show:!0,showAbove:!0,length:"60%",width:6,keepAspect:!1},anchor:{show:!1,showAbove:!1,size:6,icon:"circle",offsetCenter:[0,0],keepAspect:!1,itemStyle:{color:"#fff",borderWidth:0,borderColor:"#5470c6"}},title:{show:!0,offsetCenter:[0,"20%"],color:"#464646",fontSize:16,valueAnimation:!1},detail:{show:!0,backgroundColor:"rgba(0,0,0,0)",borderWidth:0,borderColor:"#ccc",width:100,height:null,padding:[5,10],offsetCenter:[0,"40%"],color:"#464646",fontSize:30,fontWeight:"bold",lineHeight:30,valueAnimation:!1}},e}(mg);var sk=["itemStyle","opacity"],lk=function(t){function e(e,n){var i=t.call(this)||this,r=i,o=new Yu,a=new Fs;return r.setTextContent(a),i.setTextGuideLine(o),i.updateData(e,n,!0),i}return n(e,t),e.prototype.updateData=function(t,e,n){var i=this,r=t.hostModel,o=t.getItemModel(e),a=t.getItemLayout(e),s=o.getModel("emphasis"),l=o.get(sk);l=null==l?1:l,n||_h(i),i.useStyle(t.getItemVisual(e,"style")),i.style.lineJoin="round",n?(i.setShape({points:a.points}),i.style.opacity=0,gh(i,{style:{opacity:l}},r,e)):fh(i,{style:{opacity:l},shape:{points:a.points}},r,e),jl(i,o),this._updateLabel(t,e),Yl(this,s.get("focus"),s.get("blurScope"),s.get("disabled"))},e.prototype._updateLabel=function(t,e){var 
n=this,i=this.getTextGuideLine(),r=n.getTextContent(),o=t.hostModel,a=t.getItemModel(e),s=t.getItemLayout(e).label,l=t.getItemVisual(e,"style"),u=l.fill;tc(r,ec(a),{labelFetcher:t.hostModel,labelDataIndex:e,defaultOpacity:l.opacity,defaultText:t.getName(e)},{normal:{align:s.textAlign,verticalAlign:s.verticalAlign}}),n.setTextConfig({local:!0,inside:!!s.inside,insideStroke:u,outsideFill:u});var h=s.linePoints;i.setShape({points:h}),n.textGuideLineConfig={anchor:h?new De(h[0][0],h[0][1]):null},fh(r,{style:{x:s.x,y:s.y}},o,e),r.attr({rotation:s.rotation,originX:s.x,originY:s.y,z2:10}),Tb(n,Cb(a),{stroke:u})},e}(Wu),uk=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.ignoreLabelLineUpdate=!0,n}return n(e,t),e.prototype.render=function(t,e,n){var i=t.getData(),r=this._data,o=this.group;i.diff(r).add((function(t){var e=new lk(i,t);i.setItemGraphicEl(t,e),o.add(e)})).update((function(t,e){var n=r.getItemGraphicEl(e);n.updateData(i,t),o.add(n),i.setItemGraphicEl(t,n)})).remove((function(e){xh(r.getItemGraphicEl(e),t,e)})).execute(),this._data=i},e.prototype.remove=function(){this.group.removeAll(),this._data=null},e.prototype.dispose=function(){},e.type="funnel",e}(kg),hk=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.init=function(e){t.prototype.init.apply(this,arguments),this.legendVisualProvider=new IM(W(this.getData,this),W(this.getRawData,this)),this._defaultLabelLine(e)},e.prototype.getInitialData=function(t,e){return MM(this,{coordDimensions:["value"],encodeDefaulter:H(Jp,this)})},e.prototype._defaultLabelLine=function(t){wo(t,"labelLine",["show"]);var e=t.labelLine,n=t.emphasis.labelLine;e.show=e.show&&t.label.show,n.show=n.show&&t.emphasis.label.show},e.prototype.getDataParams=function(e){var n=this.getData(),i=t.prototype.getDataParams.call(this,e),r=n.mapDimension("value"),o=n.getSum(r);return 
i.percent=o?+(n.get(r,e)/o*100).toFixed(2):0,i.$vars.push("percent"),i},e.type="series.funnel",e.defaultOption={z:2,legendHoverLink:!0,colorBy:"data",left:80,top:60,right:80,bottom:60,minSize:"0%",maxSize:"100%",sort:"descending",orient:"vertical",gap:0,funnelAlign:"center",label:{show:!0,position:"outer"},labelLine:{show:!0,length:20,lineStyle:{width:1}},itemStyle:{borderColor:"#fff",borderWidth:1},emphasis:{label:{show:!0}},select:{itemStyle:{borderColor:"#212121"}}},e}(mg);function ck(t,e){t.eachSeriesByType("funnel",(function(t){var n=t.getData(),i=n.mapDimension("value"),r=t.get("sort"),o=function(t,e){return Cp(t.getBoxLayoutParams(),{width:e.getWidth(),height:e.getHeight()})}(t,e),a=t.get("orient"),s=o.width,l=o.height,u=function(t,e){for(var n=t.mapDimension("value"),i=t.mapArray(n,(function(t){return t})),r=[],o="ascending"===e,a=0,s=t.count();a5)return;var i=this._model.coordinateSystem.getSlidedAxisExpandWindow([t.offsetX,t.offsetY]);"none"!==i.behavior&&this._dispatchExpand({axisExpandWindow:i.axisExpandWindow})}this._mouseDownPoint=null},mousemove:function(t){if(!this._mouseDownPoint&&Mk(this,"mousemove")){var e=this._model,n=e.coordinateSystem.getSlidedAxisExpandWindow([t.offsetX,t.offsetY]),i=n.behavior;"jump"===i&&this._throttledDispatchExpand.debounceNextCall(e.get("axisExpandDebounce")),this._throttledDispatchExpand("none"===i?null:{axisExpandWindow:n.axisExpandWindow,animation:"jump"===i?null:{duration:0}})}}};function Mk(t,e){var n=t._model;return n.get("axisExpandable")&&n.get("axisExpandTriggerOn")===e}var Ik=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.init=function(){t.prototype.init.apply(this,arguments),this.mergeOption({})},e.prototype.mergeOption=function(t){var e=this.option;t&&C(e,t,!0),this._initDimensions()},e.prototype.contains=function(t,e){var n=t.get("parallelIndex");return 
null!=n&&e.getComponent("parallel",n)===this},e.prototype.setAxisExpand=function(t){E(["axisExpandable","axisExpandCenter","axisExpandCount","axisExpandWidth","axisExpandWindow"],(function(e){t.hasOwnProperty(e)&&(this.option[e]=t[e])}),this)},e.prototype._initDimensions=function(){var t=this.dimensions=[],e=this.parallelAxisIndex=[];E(B(this.ecModel.queryComponents({mainType:"parallelAxis"}),(function(t){return(t.get("parallelIndex")||0)===this.componentIndex}),this),(function(n){t.push("dim"+n.get("dim")),e.push(n.componentIndex)}))},e.type="parallel",e.dependencies=["parallelAxis"],e.layoutMode="box",e.defaultOption={z:0,left:80,top:60,right:80,bottom:60,layout:"horizontal",axisExpandable:!1,axisExpandCenter:null,axisExpandCount:0,axisExpandWidth:50,axisExpandRate:17,axisExpandDebounce:50,axisExpandSlideTriggerArea:[-.15,.05,.4],axisExpandTriggerOn:"click",parallelAxisDefault:null},e}(Rp),Tk=function(t){function e(e,n,i,r,o){var a=t.call(this,e,n,i)||this;return a.type=r||"value",a.axisIndex=o,a}return n(e,t),e.prototype.isHorizontal=function(){return"horizontal"!==this.coordinateSystem.getModel().get("layout")},e}(nb);function Ck(t,e,n,i,r,o){t=t||0;var a=n[1]-n[0];if(null!=r&&(r=Ak(r,[0,a])),null!=o&&(o=Math.max(o,null!=r?r:0)),"all"===i){var s=Math.abs(e[1]-e[0]);s=Ak(s,[0,a]),r=o=Ak(s,[r,o]),i=0}e[0]=Ak(e[0],n),e[1]=Ak(e[1],n);var l=Dk(e,i);e[i]+=t;var u,h=r||0,c=n.slice();return l.sign<0?c[0]+=h:c[1]-=h,e[i]=Ak(e[i],c),u=Dk(e,i),null!=r&&(u.sign!==l.sign||u.spano&&(e[1-i]=e[i]+u.sign*o),e}function Dk(t,e){var n=t[e]-t[1-e];return{span:Math.abs(n),sign:n>0?-1:n<0?1:e?-1:1}}function Ak(t,e){return Math.min(null!=e[1]?e[1]:1/0,Math.max(null!=e[0]?e[0]:-1/0,t))}var kk=E,Lk=Math.min,Pk=Math.max,Ok=Math.floor,Rk=Math.ceil,Nk=Zr,Ek=Math.PI,zk=function(){function t(t,e,n){this.type="parallel",this._axesMap=yt(),this._axesLayout={},this.dimensions=t.dimensions,this._model=t,this._init(t,e,n)}return t.prototype._init=function(t,e,n){var 
i=t.dimensions,r=t.parallelAxisIndex;kk(i,(function(t,n){var i=r[n],o=e.getComponent("parallelAxis",i),a=this._axesMap.set(t,new Tk(t,m_(o),[0,0],o.get("type"),i)),s="category"===a.type;a.onBand=s&&o.get("boundaryGap"),a.inverse=o.get("inverse"),o.axis=a,a.model=o,a.coordinateSystem=o.coordinateSystem=this}),this)},t.prototype.update=function(t,e){this._updateAxesFromSeries(this._model,t)},t.prototype.containPoint=function(t){var e=this._makeLayoutInfo(),n=e.axisBase,i=e.layoutBase,r=e.pixelDimIndex,o=t[1-r],a=t[r];return o>=n&&o<=n+e.axisLength&&a>=i&&a<=i+e.layoutLength},t.prototype.getModel=function(){return this._model},t.prototype._updateAxesFromSeries=function(t,e){e.eachSeries((function(n){if(t.contains(n,e)){var i=n.getData();kk(this.dimensions,(function(t){var e=this._axesMap.get(t);e.scale.unionExtentFromData(i,i.mapDimension(t)),v_(e.scale,e.model)}),this)}}),this)},t.prototype.resize=function(t,e){this._rect=Cp(t.getBoxLayoutParams(),{width:e.getWidth(),height:e.getHeight()}),this._layoutAxes()},t.prototype.getRect=function(){return this._rect},t.prototype._makeLayoutInfo=function(){var t,e=this._model,n=this._rect,i=["x","y"],r=["width","height"],o=e.get("layout"),a="horizontal"===o?0:1,s=n[r[a]],l=[0,s],u=this.dimensions.length,h=Vk(e.get("axisExpandWidth"),l),c=Vk(e.get("axisExpandCount")||0,[0,u]),p=e.get("axisExpandable")&&u>3&&u>c&&c>1&&h>0&&s>0,d=e.get("axisExpandWindow");d?(t=Vk(d[1]-d[0],l),d[1]=d[0]+t):(t=Vk(h*(c-1),l),(d=[h*(e.get("axisExpandCenter")||Ok(u/2))-t/2])[1]=d[0]+t);var f=(s-t)/(u-c);f<3&&(f=0);var g=[Ok(Nk(d[0]/h,1))+1,Rk(Nk(d[1]/h,1))-1],y=f/h*d[0];return{layout:o,pixelDimIndex:a,layoutBase:n[i[a]],layoutLength:s,axisBase:n[i[1-a]],axisLength:n[r[1-a]],axisExpandable:p,axisExpandWidth:h,axisCollapseWidth:f,axisExpandWindow:d,axisCount:u,winInnerIndices:g,axisExpandWindow0Pos:y}},t.prototype._layoutAxes=function(){var t=this._rect,e=this._axesMap,n=this.dimensions,i=this._makeLayoutInfo(),r=i.layout;e.each((function(t){var 
e=[0,i.axisLength],n=t.inverse?1:0;t.setExtent(e[n],e[1-n])})),kk(n,(function(e,n){var o=(i.axisExpandable?Fk:Bk)(n,i),a={horizontal:{x:o.position,y:i.axisLength},vertical:{x:0,y:o.position}},s={horizontal:Ek/2,vertical:0},l=[a[r].x+t.x,a[r].y+t.y],u=s[r],h=[1,0,0,1,0,0];Se(h,h,u),we(h,h,l),this._axesLayout[e]={position:l,rotation:u,transform:h,axisNameAvailableWidth:o.axisNameAvailableWidth,axisLabelShow:o.axisLabelShow,nameTruncateMaxWidth:o.nameTruncateMaxWidth,tickDirection:1,labelDirection:1}}),this)},t.prototype.getAxis=function(t){return this._axesMap.get(t)},t.prototype.dataToPoint=function(t,e){return this.axisCoordToPoint(this._axesMap.get(e).dataToCoord(t),e)},t.prototype.eachActiveState=function(t,e,n,i){null==n&&(n=0),null==i&&(i=t.count());var r=this._axesMap,o=this.dimensions,a=[],s=[];E(o,(function(e){a.push(t.mapDimension(e)),s.push(r.get(e).model)}));for(var l=this.hasAxisBrushed(),u=n;ur*(1-h[0])?(l="jump",a=s-r*(1-h[2])):(a=s-r*h[1])>=0&&(a=s-r*(1-h[1]))<=0&&(a=0),(a*=e.axisExpandWidth/u)?Ck(a,i,o,"all"):l="none";else{var p=i[1]-i[0];(i=[Pk(0,o[1]*s/p-p/2)])[1]=Lk(o[1],i[0]+p),i[0]=i[1]-p}return{axisExpandWindow:i,behavior:l}},t}();function Vk(t,e){return Lk(Pk(t,e[0]),e[1])}function Bk(t,e){var n=e.layoutLength/(e.axisCount-1);return{position:n*t,axisNameAvailableWidth:n,axisLabelShow:!0}}function Fk(t,e){var n,i,r=e.layoutLength,o=e.axisExpandWidth,a=e.axisCount,s=e.axisCollapseWidth,l=e.winInnerIndices,u=s,h=!1;return t=0;n--)jr(e[n])},e.prototype.getActiveState=function(t){var e=this.activeIntervals;if(!e.length)return"normal";if(null==t||isNaN(+t))return"inactive";if(1===e.length){var n=e[0];if(n[0]<=t&&t<=n[1])return"active"}else for(var i=0,r=e.length;i6}(t)||o){if(a&&!o){"single"===s.brushMode&&sL(t);var l=T(s);l.brushType=ML(l.brushType,a),l.panelId=a===Hk?null:a.panelId,o=t._creatingCover=Qk(t,l),t._covers.push(o)}if(o){var 
u=CL[ML(t._brushType,a)];o.__brushOption.range=u.getCreatingRange(_L(t,o,t._track)),i&&(tL(t,o),u.updateCommon(t,o)),eL(t,o),r={isEnd:i}}}else i&&"single"===s.brushMode&&s.removeOnClick&&oL(t,e,n)&&sL(t)&&(r={isEnd:i,removeOnClick:!0});return r}function ML(t,e){return"auto"===t?e.defaultBrushType:t}var IL={mousedown:function(t){if(this._dragging)TL(this,t);else if(!t.target||!t.target.draggable){bL(t);var e=this.group.transformCoordToLocal(t.offsetX,t.offsetY);this._creatingCover=null,(this._creatingPanel=oL(this,t,e))&&(this._dragging=!0,this._track=[e.slice()])}},mousemove:function(t){var e=t.offsetX,n=t.offsetY,i=this.group.transformCoordToLocal(e,n);if(function(t,e,n){if(t._brushType&&!function(t,e,n){var i=t._zr;return e<0||e>i.getWidth()||n<0||n>i.getHeight()}(t,e.offsetX,e.offsetY)){var i=t._zr,r=t._covers,o=oL(t,e,n);if(!t._dragging)for(var a=0;a=0&&(o[r[a].depth]=new Mc(r[a],this,e));if(i&&n){var s=QA(i,n,this,!0,(function(t,e){t.wrapMethod("getItemModel",(function(t,e){var n=t.parentModel,i=n.getData().getItemLayout(e);if(i){var r=i.depth,o=n.levelModels[r];o&&(t.parentModel=o)}return t})),e.wrapMethod("getItemModel",(function(t,e){var n=t.parentModel,i=n.getGraph().getEdgeByIndex(e).node1.getLayout();if(i){var r=i.depth,o=n.levelModels[r];o&&(t.parentModel=o)}return t}))}));return s.data}},e.prototype.setNodePosition=function(t,e){var n=(this.option.data||this.option.nodes)[t];n.localX=e[0],n.localY=e[1]},e.prototype.getGraph=function(){return this.getData().graph},e.prototype.getEdgeData=function(){return this.getGraph().edgeData},e.prototype.formatTooltip=function(t,e,n){function i(t){return isNaN(t)||null==t}if("edge"===n){var r=this.getDataParams(t,n),o=r.data,a=r.value;return ng("nameValue",{name:o.source+" -- "+o.target,value:a,noValue:i(a)})}var s=this.getGraph().getNodeByIndex(t).getLayout().value,l=this.getDataParams(t,n).data.name;return 
ng("nameValue",{name:null!=l?l+"":null,value:s,noValue:i(s)})},e.prototype.optionUpdated=function(){},e.prototype.getDataParams=function(e,n){var i=t.prototype.getDataParams.call(this,e,n);if(null==i.value&&"node"===n){var r=this.getGraph().getNodeByIndex(e).getLayout().value;i.value=r}return i},e.type="series.sankey",e.defaultOption={z:2,coordinateSystem:"view",left:"5%",top:"5%",right:"20%",bottom:"5%",orient:"horizontal",nodeWidth:20,nodeGap:8,draggable:!0,layoutIterations:32,label:{show:!0,position:"right",fontSize:12},edgeLabel:{show:!1,fontSize:12},levels:[],nodeAlign:"justify",lineStyle:{color:"#314656",opacity:.2,curveness:.5},emphasis:{label:{show:!0},lineStyle:{opacity:.5}},select:{itemStyle:{borderColor:"#212121"}},animationEasing:"linear",animationDuration:1e3},e}(mg);function HL(t,e){t.eachSeriesByType("sankey",(function(t){var n=t.get("nodeWidth"),i=t.get("nodeGap"),r=function(t,e){return Cp(t.getBoxLayoutParams(),{width:e.getWidth(),height:e.getHeight()})}(t,e);t.layoutInfo=r;var o=r.width,a=r.height,s=t.getGraph(),l=s.nodes,u=s.edges;!function(t){E(t,(function(t){var e=QL(t.outEdges,JL),n=QL(t.inEdges,JL),i=t.getValue()||0,r=Math.max(e,n,i);t.setLayout({value:r},!0)}))}(l),function(t,e,n,i,r,o,a,s,l){(function(t,e,n,i,r,o,a){for(var s=[],l=[],u=[],h=[],c=0,p=0;p=0;v&&y.depth>d&&(d=y.depth),g.setLayout({depth:v?y.depth:c},!0),"vertical"===o?g.setLayout({dy:n},!0):g.setLayout({dx:n},!0);for(var m=0;mc-1?d:c-1;a&&"left"!==a&&function(t,e,n,i){if("right"===e){for(var r=[],o=t,a=0;o.length;){for(var s=0;s0;o--)UL(s,l*=.99,a),XL(s,r,n,i,a),tP(s,l,a),XL(s,r,n,i,a)}(t,e,o,r,i,a,s),function(t,e){var n="vertical"===e?"x":"y";E(t,(function(t){t.outEdges.sort((function(t,e){return t.node2.getLayout()[n]-e.node2.getLayout()[n]})),t.inEdges.sort((function(t,e){return t.node1.getLayout()[n]-e.node1.getLayout()[n]}))})),E(t,(function(t){var 
e=0,n=0;E(t.outEdges,(function(t){t.setLayout({sy:e},!0),e+=t.getLayout().dy})),E(t.inEdges,(function(t){t.setLayout({ty:n},!0),n+=t.getLayout().dy}))}))}(t,s)}(l,u,n,i,o,a,0!==B(l,(function(t){return 0===t.getLayout().value})).length?0:t.get("layoutIterations"),t.get("orient"),t.get("nodeAlign"))}))}function YL(t){var e=t.hostGraph.data.getRawDataItem(t.dataIndex);return null!=e.depth&&e.depth>=0}function XL(t,e,n,i,r){var o="vertical"===r?"x":"y";E(t,(function(t){var a,s,l;t.sort((function(t,e){return t.getLayout()[o]-e.getLayout()[o]}));for(var u=0,h=t.length,c="vertical"===r?"dx":"dy",p=0;p0&&(a=s.getLayout()[o]+l,"vertical"===r?s.setLayout({x:a},!0):s.setLayout({y:a},!0)),u=s.getLayout()[o]+s.getLayout()[c]+e;if((l=u-e-("vertical"===r?i:n))>0){a=s.getLayout()[o]-l,"vertical"===r?s.setLayout({x:a},!0):s.setLayout({y:a},!0),u=a;for(p=h-2;p>=0;--p)(l=(s=t[p]).getLayout()[o]+s.getLayout()[c]+e-u)>0&&(a=s.getLayout()[o]-l,"vertical"===r?s.setLayout({x:a},!0):s.setLayout({y:a},!0)),u=s.getLayout()[o]}}))}function UL(t,e,n){E(t.slice().reverse(),(function(t){E(t,(function(t){if(t.outEdges.length){var i=QL(t.outEdges,ZL,n)/QL(t.outEdges,JL);if(isNaN(i)){var r=t.outEdges.length;i=r?QL(t.outEdges,jL,n)/r:0}if("vertical"===n){var o=t.getLayout().x+(i-$L(t,n))*e;t.setLayout({x:o},!0)}else{var a=t.getLayout().y+(i-$L(t,n))*e;t.setLayout({y:a},!0)}}}))}))}function ZL(t,e){return $L(t.node2,e)*t.getValue()}function jL(t,e){return $L(t.node2,e)}function qL(t,e){return $L(t.node1,e)*t.getValue()}function KL(t,e){return $L(t.node1,e)}function $L(t,e){return"vertical"===e?t.getLayout().x+t.getLayout().dx/2:t.getLayout().y+t.getLayout().dy/2}function JL(t){return t.getValue()}function QL(t,e,n){for(var i=0,r=t.length,o=-1;++oo&&(o=e)})),E(n,(function(e){var n=new 
_D({type:"color",mappingMethod:"linear",dataExtent:[r,o],visual:t.get("color")}).mapValueToVisual(e.getLayout().value),i=e.getModel().get(["itemStyle","color"]);null!=i?(e.setVisual("color",i),e.setVisual("style",{fill:i})):(e.setVisual("color",n),e.setVisual("style",{fill:n}))}))}i.length&&E(i,(function(t){var e=t.getModel().get("lineStyle");t.setVisual("style",e)}))}))}var nP=function(){function t(){}return t.prototype.getInitialData=function(t,e){var n,i,r=e.getComponent("xAxis",this.get("xAxisIndex")),o=e.getComponent("yAxis",this.get("yAxisIndex")),a=r.get("type"),s=o.get("type");"category"===a?(t.layout="horizontal",n=r.getOrdinalMeta(),i=!0):"category"===s?(t.layout="vertical",n=o.getOrdinalMeta(),i=!0):t.layout=t.layout||"horizontal";var l=["x","y"],u="horizontal"===t.layout?0:1,h=this._baseAxisDim=l[u],c=l[1-u],p=[r,o],d=p[u].get("type"),f=p[1-u].get("type"),g=t.data;if(g&&i){var y=[];E(g,(function(t,e){var n;Y(t)?(n=t.slice(),t.unshift(e)):Y(t.value)?((n=A({},t)).value=n.value.slice(),t.value.unshift(e)):n=t,y.push(n)})),t.data=y}var v=this.defaultValueDimensions,m=[{name:h,type:Gm(d),ordinalMeta:n,otherDims:{tooltip:!1,itemName:0},dimsDef:["base"]},{name:c,type:Gm(f),dimsDef:v.slice()}];return MM(this,{coordDimensions:m,dimensionsCount:v.length+1,encodeDefaulter:H($p,m,this)})},t.prototype.getBaseAxis=function(){var t=this._baseAxisDim;return this.ecModel.getComponent(t+"Axis",this.get(t+"AxisIndex")).axis},t}(),iP=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.defaultValueDimensions=[{name:"min",defaultTooltip:!0},{name:"Q1",defaultTooltip:!0},{name:"median",defaultTooltip:!0},{name:"Q3",defaultTooltip:!0},{name:"max",defaultTooltip:!0}],n.visualDrawType="stroke",n}return 
n(e,t),e.type="series.boxplot",e.dependencies=["xAxis","yAxis","grid"],e.defaultOption={z:2,coordinateSystem:"cartesian2d",legendHoverLink:!0,layout:null,boxWidth:[7,50],itemStyle:{color:"#fff",borderWidth:1},emphasis:{scale:!0,itemStyle:{borderWidth:2,shadowBlur:5,shadowOffsetX:1,shadowOffsetY:1,shadowColor:"rgba(0,0,0,0.2)"}},animationDuration:800},e}(mg);R(iP,nP,!0);var rP=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){var i=t.getData(),r=this.group,o=this._data;this._data||r.removeAll();var a="horizontal"===t.get("layout")?1:0;i.diff(o).add((function(t){if(i.hasValue(t)){var e=sP(i.getItemLayout(t),i,t,a,!0);i.setItemGraphicEl(t,e),r.add(e)}})).update((function(t,e){var n=o.getItemGraphicEl(e);if(i.hasValue(t)){var s=i.getItemLayout(t);n?(_h(n),lP(s,n,i,t)):n=sP(s,i,t,a),r.add(n),i.setItemGraphicEl(t,n)}else r.remove(n)})).remove((function(t){var e=o.getItemGraphicEl(t);e&&r.remove(e)})).execute(),this._data=i},e.prototype.remove=function(t){var e=this.group,n=this._data;this._data=null,n&&n.eachItemGraphicEl((function(t){t&&e.remove(t)}))},e.type="boxplot",e}(kg),oP=function(){},aP=function(t){function e(e){var n=t.call(this,e)||this;return n.type="boxplotBoxPath",n}return n(e,t),e.prototype.getDefaultShape=function(){return new oP},e.prototype.buildPath=function(t,e){var n=e.points,i=0;for(t.moveTo(n[i][0],n[i][1]),i++;i<4;i++)t.lineTo(n[i][0],n[i][1]);for(t.closePath();ig){var _=[v,x];i.push(_)}}}return{boxData:n,outliers:i}}(e.getRawData(),t.config);return[{dimensions:["ItemName","Low","Q1","Q2","Q3","High"],data:i.boxData},{data:i.outliers}]}};var dP=["color","borderColor"],fP=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return 
n(e,t),e.prototype.render=function(t,e,n){this.group.removeClipPath(),this._progressiveEls=null,this._updateDrawMode(t),this._isLargeDraw?this._renderLarge(t):this._renderNormal(t)},e.prototype.incrementalPrepareRender=function(t,e,n){this._clear(),this._updateDrawMode(t)},e.prototype.incrementalRender=function(t,e,n,i){this._progressiveEls=[],this._isLargeDraw?this._incrementalRenderLarge(t,e):this._incrementalRenderNormal(t,e)},e.prototype.eachRendered=function(t){qh(this._progressiveEls||this.group,t)},e.prototype._updateDrawMode=function(t){var e=t.pipelineContext.large;null!=this._isLargeDraw&&e===this._isLargeDraw||(this._isLargeDraw=e,this._clear())},e.prototype._renderNormal=function(t){var e=t.getData(),n=this._data,i=this.group,r=e.getLayout("isSimpleBox"),o=t.get("clip",!0),a=t.coordinateSystem,s=a.getArea&&a.getArea();this._data||i.removeAll(),e.diff(n).add((function(n){if(e.hasValue(n)){var a=e.getItemLayout(n);if(o&&mP(s,a))return;var l=vP(a,n,!0);gh(l,{shape:{points:a.ends}},t,n),xP(l,e,n,r),i.add(l),e.setItemGraphicEl(n,l)}})).update((function(a,l){var u=n.getItemGraphicEl(l);if(e.hasValue(a)){var h=e.getItemLayout(a);o&&mP(s,h)?i.remove(u):(u?(fh(u,{shape:{points:h.ends}},t,a),_h(u)):u=vP(h),xP(u,e,a,r),i.add(u),e.setItemGraphicEl(a,u))}else i.remove(u)})).remove((function(t){var e=n.getItemGraphicEl(t);e&&i.remove(e)})).execute(),this._data=e},e.prototype._renderLarge=function(t){this._clear(),SP(t,this.group);var e=t.get("clip",!0)?SS(t.coordinateSystem,!1,t):null;e?this.group.setClipPath(e):this.group.removeClipPath()},e.prototype._incrementalRenderNormal=function(t,e){for(var n,i=e.getData(),r=i.getLayout("isSimpleBox");null!=(n=t.next());){var 
o=vP(i.getItemLayout(n));xP(o,i,n,r),o.incremental=!0,this.group.add(o),this._progressiveEls.push(o)}},e.prototype._incrementalRenderLarge=function(t,e){SP(e,this.group,this._progressiveEls,!0)},e.prototype.remove=function(t){this._clear()},e.prototype._clear=function(){this.group.removeAll(),this._data=null},e.type="candlestick",e}(kg),gP=function(){},yP=function(t){function e(e){var n=t.call(this,e)||this;return n.type="normalCandlestickBox",n}return n(e,t),e.prototype.getDefaultShape=function(){return new gP},e.prototype.buildPath=function(t,e){var n=e.points;this.__simpleBox?(t.moveTo(n[4][0],n[4][1]),t.lineTo(n[6][0],n[6][1])):(t.moveTo(n[0][0],n[0][1]),t.lineTo(n[1][0],n[1][1]),t.lineTo(n[2][0],n[2][1]),t.lineTo(n[3][0],n[3][1]),t.closePath(),t.moveTo(n[4][0],n[4][1]),t.lineTo(n[5][0],n[5][1]),t.moveTo(n[6][0],n[6][1]),t.lineTo(n[7][0],n[7][1]))},e}(Is);function vP(t,e,n){var i=t.ends;return new yP({shape:{points:n?_P(i,t):i},z2:100})}function mP(t,e){for(var n=!0,i=0;i0?"borderColor":"borderColor0"])||n.get(["itemStyle",t>0?"color":"color0"]);0===t&&(r=n.get(["itemStyle","borderColorDoji"]));var o=n.getModel("itemStyle").getItemStyle(dP);e.useStyle(o),e.style.fill=null,e.style.stroke=r}var IP=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.defaultValueDimensions=[{name:"open",defaultTooltip:!0},{name:"close",defaultTooltip:!0},{name:"lowest",defaultTooltip:!0},{name:"highest",defaultTooltip:!0}],n}return n(e,t),e.prototype.getShadowDim=function(){return"open"},e.prototype.brushSelector=function(t,e,n){var i=e.getItemLayout(t);return 
i&&n.rect(i.brushRect)},e.type="series.candlestick",e.dependencies=["xAxis","yAxis","grid"],e.defaultOption={z:2,coordinateSystem:"cartesian2d",legendHoverLink:!0,layout:null,clip:!0,itemStyle:{color:"#eb5454",color0:"#47b262",borderColor:"#eb5454",borderColor0:"#47b262",borderColorDoji:null,borderWidth:1},emphasis:{scale:!0,itemStyle:{borderWidth:2}},barMaxWidth:null,barMinWidth:null,barWidth:null,large:!0,largeThreshold:600,progressive:3e3,progressiveThreshold:1e4,progressiveChunkMode:"mod",animationEasing:"linear",animationDuration:300},e}(mg);function TP(t){t&&Y(t.series)&&E(t.series,(function(t){q(t)&&"k"===t.type&&(t.type="candlestick")}))}R(IP,nP,!0);var CP=["itemStyle","borderColor"],DP=["itemStyle","borderColor0"],AP=["itemStyle","borderColorDoji"],kP=["itemStyle","color"],LP=["itemStyle","color0"],PP={seriesType:"candlestick",plan:Cg(),performRawSeries:!0,reset:function(t,e){function n(t,e){return e.get(t>0?kP:LP)}function i(t,e){return e.get(0===t?AP:t>0?CP:DP)}if(!e.isSeriesFiltered(t))return!t.pipelineContext.large&&{progress:function(t,e){for(var r;null!=(r=t.next());){var o=e.getItemModel(r),a=e.getItemLayout(r).sign,s=o.getItemStyle();s.fill=n(a,o),s.stroke=i(a,o)||s.fill,A(e.ensureUniqueItemVisual(r,"style"),s)}}}}},OP={seriesType:"candlestick",plan:Cg(),reset:function(t){var e=t.coordinateSystem,n=t.getData(),i=function(t,e){var n,i=t.getBaseAxis(),r="category"===i.type?i.getBandWidth():(n=i.getExtent(),Math.abs(n[1]-n[0])/e.count()),o=Ur(rt(t.get("barMaxWidth"),r),r),a=Ur(rt(t.get("barMinWidth"),1),r),s=t.get("barWidth");return null!=s?Ur(s,r):Math.max(Math.min(r/2,o),a)}(t,n),r=["x","y"],o=n.getDimensionIndex(n.mapDimension(r[0])),a=z(n.mapDimensionsAll(r[1]),n.getDimensionIndex,n),s=a[0],l=a[1],u=a[2],h=a[3];if(n.setLayout({candleWidth:i,isSimpleBox:i<=1.3}),!(o<0||a.length<4))return{progress:t.pipelineContext.large?function(n,i){var 
r,a,c=Ex(4*n.count),p=0,d=[],f=[],g=i.getStore(),y=!!t.get(["itemStyle","borderColorDoji"]);for(;null!=(a=n.next());){var v=g.get(o,a),m=g.get(s,a),x=g.get(l,a),_=g.get(u,a),b=g.get(h,a);isNaN(v)||isNaN(_)||isNaN(b)?(c[p++]=NaN,p+=3):(c[p++]=RP(g,a,m,x,l,y),d[0]=v,d[1]=_,r=e.dataToPoint(d,null,f),c[p++]=r?r[0]:NaN,c[p++]=r?r[1]:NaN,d[1]=b,r=e.dataToPoint(d,null,f),c[p++]=r?r[1]:NaN)}i.setLayout("largePoints",c)}:function(t,n){var r,a=n.getStore();for(;null!=(r=t.next());){var c=a.get(o,r),p=a.get(s,r),d=a.get(l,r),f=a.get(u,r),g=a.get(h,r),y=Math.min(p,d),v=Math.max(p,d),m=M(y,c),x=M(v,c),_=M(f,c),b=M(g,c),w=[];I(w,x,0),I(w,m,1),w.push(C(b),C(x),C(_),C(m));var S=!!n.getItemModel(r).get(["itemStyle","borderColorDoji"]);n.setItemLayout(r,{sign:RP(a,r,p,d,l,S),initBaseline:p>d?x[1]:m[1],ends:w,brushRect:T(f,g,c)})}function M(t,n){var i=[];return i[0]=n,i[1]=t,isNaN(n)||isNaN(t)?[NaN,NaN]:e.dataToPoint(i)}function I(t,e,n){var r=e.slice(),o=e.slice();r[0]=Nh(r[0]+i/2,1,!1),o[0]=Nh(o[0]-i/2,1,!0),n?t.push(r,o):t.push(o,r)}function T(t,e,n){var r=M(t,n),o=M(e,n);return r[0]-=i/2,o[0]-=i/2,{x:r[0],y:r[1],width:i,height:o[1]-r[1]}}function C(t){return t[0]=Nh(t[0],1),t}}}}};function RP(t,e,n,i,r,o){return n>i?-1:n0?t.get(r,e-1)<=i?1:-1:1}function NP(t,e){var n=e.rippleEffectColor||e.color;t.eachChild((function(t){t.attr({z:e.z,zlevel:e.zlevel,style:{stroke:"stroke"===e.brushType?n:null,fill:"fill"===e.brushType?n:null}})}))}var EP=function(t){function e(e,n){var i=t.call(this)||this,r=new oS(e,n),o=new zr;return i.add(r),i.add(o),i.updateData(e,n),i}return n(e,t),e.prototype.stopEffectAnimation=function(){this.childAt(1).removeAll()},e.prototype.startEffectAnimation=function(t){for(var e=t.symbolType,n=t.color,i=t.rippleNumber,r=this.childAt(1),o=0;o0&&(o=this._getLineLength(i)/l*1e3),o!==this._period||a!==this._loop||s!==this._roundTrip){i.stopAnimation();var h=void 
0;h=X(u)?u(n):u,i.__t>0&&(h=-o*i.__t),this._animateSymbol(i,o,h,a,s)}this._period=o,this._loop=a,this._roundTrip=s}},e.prototype._animateSymbol=function(t,e,n,i,r){if(e>0){t.__t=0;var o=this,a=t.animate("",i).when(r?2*e:e,{__t:r?2:1}).delay(n).during((function(){o._updateSymbolPosition(t)}));i||a.done((function(){o.remove(t)})),a.start()}},e.prototype._getLineLength=function(t){return Vt(t.__p1,t.__cp1)+Vt(t.__cp1,t.__p2)},e.prototype._updateAnimationPoints=function(t,e){t.__p1=e[0],t.__p2=e[1],t.__cp1=e[2]||[(e[0][0]+e[1][0])/2,(e[0][1]+e[1][1])/2]},e.prototype.updateData=function(t,e,n){this.childAt(0).updateData(t,e,n),this._updateEffectSymbol(t,e)},e.prototype._updateSymbolPosition=function(t){var e=t.__p1,n=t.__p2,i=t.__cp1,r=t.__t<1?t.__t:2-t.__t,o=[t.x,t.y],a=o.slice(),s=In,l=Tn;o[0]=s(e[0],i[0],n[0],r),o[1]=s(e[1],i[1],n[1],r);var u=t.__t<1?l(e[0],i[0],n[0],r):l(n[0],i[0],e[0],1-r),h=t.__t<1?l(e[1],i[1],n[1],r):l(n[1],i[1],e[1],1-r);t.rotation=-Math.atan2(h,u)-Math.PI/2,"line"!==this._symbolType&&"rect"!==this._symbolType&&"roundRect"!==this._symbolType||(void 0!==t.__lastT&&t.__lastT=0&&!(i[o]<=e);o--);o=Math.min(o,r-2)}else{for(o=a;oe);o++);o=Math.min(o-1,r-2)}var s=(e-i[o])/(i[o+1]-i[o]),l=n[o],u=n[o+1];t.x=l[0]*(1-s)+s*u[0],t.y=l[1]*(1-s)+s*u[1];var h=t.__t<1?u[0]-l[0]:l[0]-u[0],c=t.__t<1?u[1]-l[1]:l[1]-u[1];t.rotation=-Math.atan2(c,h)-Math.PI/2,this._lastFrame=o,this._lastFramePercent=e,t.ignore=!1}},e}(BP),WP=function(){this.polyline=!1,this.curveness=0,this.segs=[]},HP=function(t){function e(e){var n=t.call(this,e)||this;return n._off=0,n.hoverDataIdx=-1,n}return n(e,t),e.prototype.reset=function(){this.notClear=!1,this._off=0},e.prototype.getDefaultStyle=function(){return{stroke:"#000",fill:null}},e.prototype.getDefaultShape=function(){return new WP},e.prototype.buildPath=function(t,e){var n,i=e.segs,r=e.curveness;if(e.polyline)for(n=this._off;n0){t.moveTo(i[n++],i[n++]);for(var a=1;a0){var 
c=(s+u)/2-(l-h)*r,p=(l+h)/2-(u-s)*r;t.quadraticCurveTo(c,p,u,h)}else t.lineTo(u,h)}this.incremental&&(this._off=n,this.notClear=!0)},e.prototype.findDataIndex=function(t,e){var n=this.shape,i=n.segs,r=n.curveness,o=this.style.lineWidth;if(n.polyline)for(var a=0,s=0;s0)for(var u=i[s++],h=i[s++],c=1;c0){if(ls(u,h,(u+p)/2-(h-d)*r,(h+d)/2-(p-u)*r,p,d,o,t,e))return a}else if(as(u,h,p,d,o,t,e))return a;a++}return-1},e.prototype.contain=function(t,e){var n=this.transformCoordToLocal(t,e),i=this.getBoundingRect();return t=n[0],e=n[1],i.contain(t,e)?(this.hoverDataIdx=this.findDataIndex(t,e))>=0:(this.hoverDataIdx=-1,!1)},e.prototype.getBoundingRect=function(){var t=this._rect;if(!t){for(var e=this.shape.segs,n=1/0,i=1/0,r=-1/0,o=-1/0,a=0;a0&&(o.dataIndex=n+t.__startIndex)}))},t.prototype._clear=function(){this._newAdded=[],this.group.removeAll()},t}(),XP={seriesType:"lines",plan:Cg(),reset:function(t){var e=t.coordinateSystem;if(e){var n=t.get("polyline"),i=t.pipelineContext.large;return{progress:function(r,o){var a=[];if(i){var s=void 0,l=r.end-r.start;if(n){for(var u=0,h=r.start;h0&&(l||s.configLayer(o,{motionBlur:!0,lastFrameAlpha:Math.max(Math.min(a/10+.9,1),0)})),r.updateData(i);var u=t.get("clip",!0)&&SS(t.coordinateSystem,!1,t);u?this.group.setClipPath(u):this.group.removeClipPath(),this._lastZlevel=o,this._finished=!0},e.prototype.incrementalPrepareRender=function(t,e,n){var i=t.getData();this._updateLineDraw(i,t).incrementalPrepareUpdate(i),this._clearLayer(n),this._finished=!1},e.prototype.incrementalRender=function(t,e,n){this._lineDraw.incrementalUpdate(t,e.getData()),this._finished=t.end===e.getData().count()},e.prototype.eachRendered=function(t){this._lineDraw&&this._lineDraw.eachRendered(t)},e.prototype.updateTransform=function(t,e,n){var i=t.getData(),r=t.pipelineContext;if(!this._finished||r.large||r.progressiveRender)return{update:!0};var 
o=XP.reset(t,e,n);o.progress&&o.progress({start:0,end:i.count(),count:i.count()},i),this._lineDraw.updateLayout(),this._clearLayer(n)},e.prototype._updateLineDraw=function(t,e){var n=this._lineDraw,i=this._showEffect(e),r=!!e.get("polyline"),o=e.pipelineContext.large;return n&&i===this._hasEffet&&r===this._isPolyline&&o===this._isLargeDraw||(n&&n.remove(),n=this._lineDraw=o?new YP:new RA(r?i?GP:FP:i?BP:OA),this._hasEffet=i,this._isPolyline=r,this._isLargeDraw=o),this.group.add(n.group),n},e.prototype._showEffect=function(t){return!!t.get(["effect","show"])},e.prototype._clearLayer=function(t){var e=t.getZr();"svg"===e.painter.getType()||null==this._lastZlevel||e.painter.getLayer(this._lastZlevel).clear(!0)},e.prototype.remove=function(t,e){this._lineDraw&&this._lineDraw.remove(),this._lineDraw=null,this._clearLayer(e)},e.prototype.dispose=function(t,e){this.remove(t,e)},e.type="lines",e}(kg),ZP="undefined"==typeof Uint32Array?Array:Uint32Array,jP="undefined"==typeof Float64Array?Array:Float64Array;function qP(t){var e=t.data;e&&e[0]&&e[0][0]&&e[0][0].coord&&(t.data=z(e,(function(t){var e={coords:[t[0].coord,t[1].coord]};return t[0].name&&(e.fromName=t[0].name),t[1].name&&(e.toName=t[1].name),D([e,t[0],t[1]])})))}var KP=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.visualStyleAccessPath="lineStyle",n.visualDrawType="stroke",n}return n(e,t),e.prototype.init=function(e){e.data=e.data||[],qP(e);var n=this._processFlatCoordsArray(e.data);this._flatCoords=n.flatCoords,this._flatCoordsOffset=n.flatCoordsOffset,n.flatCoords&&(e.data=new Float32Array(n.count)),t.prototype.init.apply(this,arguments)},e.prototype.mergeOption=function(e){if(qP(e),e.data){var n=this._processFlatCoordsArray(e.data);this._flatCoords=n.flatCoords,this._flatCoordsOffset=n.flatCoordsOffset,n.flatCoords&&(e.data=new Float32Array(n.count))}t.prototype.mergeOption.apply(this,arguments)},e.prototype.appendData=function(t){var 
e=this._processFlatCoordsArray(t.data);e.flatCoords&&(this._flatCoords?(this._flatCoords=vt(this._flatCoords,e.flatCoords),this._flatCoordsOffset=vt(this._flatCoordsOffset,e.flatCoordsOffset)):(this._flatCoords=e.flatCoords,this._flatCoordsOffset=e.flatCoordsOffset),t.data=new Float32Array(e.count)),this.getRawData().appendData(t.data)},e.prototype._getCoordsFromItemModel=function(t){var e=this.getData().getItemModel(t),n=e.option instanceof Array?e.option:e.getShallow("coords");return n},e.prototype.getLineCoordsCount=function(t){return this._flatCoordsOffset?this._flatCoordsOffset[2*t+1]:this._getCoordsFromItemModel(t).length},e.prototype.getLineCoords=function(t,e){if(this._flatCoordsOffset){for(var n=this._flatCoordsOffset[2*t],i=this._flatCoordsOffset[2*t+1],r=0;r ")})},e.prototype.preventIncremental=function(){return!!this.get(["effect","show"])},e.prototype.getProgressive=function(){var t=this.option.progressive;return null==t?this.option.large?1e4:this.get("progressive"):t},e.prototype.getProgressiveThreshold=function(){var t=this.option.progressiveThreshold;return null==t?this.option.large?2e4:this.get("progressiveThreshold"):t},e.prototype.getZLevelKey=function(){var t=this.getModel("effect"),e=t.get("trailLength");return this.getData().count()>this.getProgressiveThreshold()?this.id:t.get("show")&&e>0?e+"":""},e.type="series.lines",e.dependencies=["grid","polar","geo","calendar"],e.defaultOption={coordinateSystem:"geo",z:2,legendHoverLink:!0,xAxisIndex:0,yAxisIndex:0,symbol:["none","none"],symbolSize:[10,10],geoIndex:0,effect:{show:!1,period:4,constantSpeed:0,symbol:"circle",symbolSize:3,loop:!0,trailLength:.2},large:!1,largeThreshold:2e3,polyline:!1,clip:!0,label:{show:!1,position:"end"},lineStyle:{opacity:.5}},e}(mg);function $P(t){return t instanceof Array||(t=[t,t]),t}var JP={seriesType:"lines",reset:function(t){var e=$P(t.get("symbol")),n=$P(t.get("symbolSize")),i=t.getData();return 
i.setVisual("fromSymbol",e&&e[0]),i.setVisual("toSymbol",e&&e[1]),i.setVisual("fromSymbolSize",n&&n[0]),i.setVisual("toSymbolSize",n&&n[1]),{dataEach:i.hasItemOption?function(t,e){var n=t.getItemModel(e),i=$P(n.getShallow("symbol",!0)),r=$P(n.getShallow("symbolSize",!0));i[0]&&t.setItemVisual(e,"fromSymbol",i[0]),i[1]&&t.setItemVisual(e,"toSymbol",i[1]),r[0]&&t.setItemVisual(e,"fromSymbolSize",r[0]),r[1]&&t.setItemVisual(e,"toSymbolSize",r[1])}:null}}};var QP=function(){function t(){this.blurSize=30,this.pointSize=20,this.maxOpacity=1,this.minOpacity=0,this._gradientPixels={inRange:null,outOfRange:null};var t=h.createCanvas();this.canvas=t}return t.prototype.update=function(t,e,n,i,r,o){var a=this._getBrush(),s=this._getGradient(r,"inRange"),l=this._getGradient(r,"outOfRange"),u=this.pointSize+this.blurSize,h=this.canvas,c=h.getContext("2d"),p=t.length;h.width=e,h.height=n;for(var d=0;d0){var I=o(v)?s:l;v>0&&(v=v*S+w),x[_++]=I[M],x[_++]=I[M+1],x[_++]=I[M+2],x[_++]=I[M+3]*v*256}else _+=4}return c.putImageData(m,0,0),h},t.prototype._getBrush=function(){var t=this._brushCanvas||(this._brushCanvas=h.createCanvas()),e=this.pointSize+this.blurSize,n=2*e;t.width=n,t.height=n;var i=t.getContext("2d");return i.clearRect(0,0,n,n),i.shadowOffsetX=n,i.shadowBlur=this.blurSize,i.shadowColor="#000",i.beginPath(),i.arc(-e,e,this.pointSize,0,2*Math.PI,!0),i.closePath(),i.fill(),t},t.prototype._getGradient=function(t,e){for(var n=this._gradientPixels,i=n[e]||(n[e]=new Uint8ClampedArray(1024)),r=[0,0,0,0],o=0,a=0;a<256;a++)t[e](a/255,!0,r),i[o++]=r[0],i[o++]=r[1],i[o++]=r[2],i[o++]=r[3];return i},t}();function tO(t){var e=t.dimensions;return"lng"===e[0]&&"lat"===e[1]}var eO=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){var i;e.eachComponent("visualMap",(function(e){e.eachTargetSeries((function(n){n===t&&(i=e)}))})),this._progressiveEls=null,this.group.removeAll();var 
r=t.coordinateSystem;"cartesian2d"===r.type||"calendar"===r.type?this._renderOnCartesianAndCalendar(t,n,0,t.getData().count()):tO(r)&&this._renderOnGeo(r,t,i,n)},e.prototype.incrementalPrepareRender=function(t,e,n){this.group.removeAll()},e.prototype.incrementalRender=function(t,e,n,i){var r=e.coordinateSystem;r&&(tO(r)?this.render(e,n,i):(this._progressiveEls=[],this._renderOnCartesianAndCalendar(e,i,t.start,t.end,!0)))},e.prototype.eachRendered=function(t){qh(this._progressiveEls||this.group,t)},e.prototype._renderOnCartesianAndCalendar=function(t,e,n,i,r){var o,a,s,l,u=t.coordinateSystem,h=MS(u,"cartesian2d");if(h){var c=u.getAxis("x"),p=u.getAxis("y");0,o=c.getBandWidth()+.5,a=p.getBandWidth()+.5,s=c.scale.getExtent(),l=p.scale.getExtent()}for(var d=this.group,f=t.getData(),g=t.getModel(["emphasis","itemStyle"]).getItemStyle(),y=t.getModel(["blur","itemStyle"]).getItemStyle(),v=t.getModel(["select","itemStyle"]).getItemStyle(),m=t.get(["itemStyle","borderRadius"]),x=ec(t),_=t.getModel("emphasis"),b=_.get("focus"),w=_.get("blurScope"),S=_.get("disabled"),M=h?[f.mapDimension("x"),f.mapDimension("y"),f.mapDimension("value")]:[f.mapDimension("time"),f.mapDimension("value")],I=n;Is[1]||Al[1])continue;var k=u.dataToPoint([D,A]);T=new zs({shape:{x:k[0]-o/2,y:k[1]-a/2,width:o,height:a},style:C})}else{if(isNaN(f.get(M[1],I)))continue;T=new zs({z2:1,shape:u.dataToRect([f.get(M[0],I)]).contentShape,style:C})}if(f.hasItemOption){var L=f.getItemModel(I),P=L.getModel("emphasis");g=P.getModel("itemStyle").getItemStyle(),y=L.getModel(["blur","itemStyle"]).getItemStyle(),v=L.getModel(["select","itemStyle"]).getItemStyle(),m=L.get(["itemStyle","borderRadius"]),b=P.get("focus"),w=P.get("blurScope"),S=P.get("disabled"),x=ec(L)}T.shape.r=m;var 
O=t.getRawValue(I),R="-";O&&null!=O[2]&&(R=O[2]+""),tc(T,x,{labelFetcher:t,labelDataIndex:I,defaultOpacity:C.opacity,defaultText:R}),T.ensureState("emphasis").style=g,T.ensureState("blur").style=y,T.ensureState("select").style=v,Yl(T,b,w,S),T.incremental=r,r&&(T.states.emphasis.hoverLayer=!0),d.add(T),f.setItemGraphicEl(I,T),this._progressiveEls&&this._progressiveEls.push(T)}},e.prototype._renderOnGeo=function(t,e,n,i){var r=n.targetVisuals.inRange,o=n.targetVisuals.outOfRange,a=e.getData(),s=this._hmLayer||this._hmLayer||new QP;s.blurSize=e.get("blurSize"),s.pointSize=e.get("pointSize"),s.minOpacity=e.get("minOpacity"),s.maxOpacity=e.get("maxOpacity");var l=t.getViewRect().clone(),u=t.getRoamTransform();l.applyTransform(u);var h=Math.max(l.x,0),c=Math.max(l.y,0),p=Math.min(l.width+l.x,i.getWidth()),d=Math.min(l.height+l.y,i.getHeight()),f=p-h,g=d-c,y=[a.mapDimension("lng"),a.mapDimension("lat"),a.mapDimension("value")],v=a.mapArray(y,(function(e,n,i){var r=t.dataToPoint([e,n]);return r[0]-=h,r[1]-=c,r.push(i),r})),m=n.getExtent(),x="visualMap.continuous"===n.type?function(t,e){var n=t[1]-t[0];return e=[(e[0]-t[0])/n,(e[1]-t[0])/n],function(t){return t>=e[0]&&t<=e[1]}}(m,n.option.range):function(t,e,n){var i=t[1]-t[0],r=(e=z(e,(function(e){return{interval:[(e.interval[0]-t[0])/i,(e.interval[1]-t[0])/i]}}))).length,o=0;return function(t){var i;for(i=o;i=0;i--){var a;if((a=e[i].interval)[0]<=t&&t<=a[1]){o=i;break}}return i>=0&&i0?1:-1}(n,o,r,i,c),function(t,e,n,i,r,o,a,s,l,u){var h,c=l.valueDim,p=l.categoryDim,d=Math.abs(n[p.wh]),f=t.getItemVisual(e,"symbolSize");h=Y(f)?f.slice():null==f?["100%","100%"]:[f,f];h[p.index]=Ur(h[p.index],d),h[c.index]=Ur(h[c.index],i?d:Math.abs(o)),u.symbolSize=h;var g=u.symbolScale=[h[0]/s,h[1]/s];g[c.index]*=(l.isHorizontal?-1:1)*a}(t,e,r,o,0,c.boundingLength,c.pxSign,u,i,c),function(t,e,n,i,r){var 
o=t.get(iO)||0;o&&(oO.attr({scaleX:e[0],scaleY:e[1],rotation:n}),oO.updateTransform(),o/=oO.getLineScale(),o*=e[i.valueDim.index]);r.valueLineWidth=o||0}(n,c.symbolScale,l,i,c);var p=c.symbolSize,d=Yy(n.get("symbolOffset"),p);return function(t,e,n,i,r,o,a,s,l,u,h,c){var p=h.categoryDim,d=h.valueDim,f=c.pxSign,g=Math.max(e[d.index]+s,0),y=g;if(i){var v=Math.abs(l),m=it(t.get("symbolMargin"),"15%")+"",x=!1;m.lastIndexOf("!")===m.length-1&&(x=!0,m=m.slice(0,m.length-1));var _=Ur(m,e[d.index]),b=Math.max(g+2*_,0),w=x?0:2*_,S=co(i),M=S?i:SO((v+w)/b);b=g+2*(_=(v-M*g)/2/(x?M:Math.max(M-1,1))),w=x?0:2*_,S||"fixed"===i||(M=u?SO((Math.abs(u)+w)/b):0),y=M*b-w,c.repeatTimes=M,c.symbolMargin=_}var I=f*(y/2),T=c.pathPosition=[];T[p.index]=n[p.wh]/2,T[d.index]="start"===a?I:"end"===a?l-I:l/2,o&&(T[0]+=o[0],T[1]+=o[1]);var C=c.bundlePosition=[];C[p.index]=n[p.xy],C[d.index]=n[d.xy];var D=c.barRectShape=A({},n);D[d.wh]=f*Math.max(Math.abs(n[d.wh]),Math.abs(T[d.index]+I)),D[p.wh]=n[p.wh];var k=c.clipShape={};k[p.xy]=-n[p.xy],k[p.wh]=h.ecSize[p.wh],k[d.xy]=0,k[d.wh]=n[d.wh]}(n,p,r,o,0,d,s,c.valueLineWidth,c.boundingLength,c.repeatCutLength,i,c),c}function lO(t,e){return t.toGlobalCoord(t.dataToCoord(t.scale.parse(e)))}function uO(t){var e=t.symbolPatternSize,n=Wy(t.symbolType,-e/2,-e/2,e,e);return n.attr({culling:!0}),"image"!==n.type&&n.setStyle({strokeNoScale:!0}),n}function hO(t,e,n,i){var r=t.__pictorialBundle,o=n.symbolSize,a=n.valueLineWidth,s=n.pathPosition,l=e.valueDim,u=n.repeatTimes||0,h=0,c=o[e.valueDim.index]+a+2*n.symbolMargin;for(_O(t,(function(t){t.__pictorialAnimationIndex=h,t.__pictorialRepeatTimes=u,h0:i<0)&&(r=u-1-t),e[l.index]=c*(r-u/2+.5)+s[l.index],{x:e[0],y:e[1],scaleX:n.symbolScale[0],scaleY:n.symbolScale[1],rotation:n.rotation}}}function cO(t,e,n,i){var 
r=t.__pictorialBundle,o=t.__pictorialMainPath;o?bO(o,null,{x:n.pathPosition[0],y:n.pathPosition[1],scaleX:n.symbolScale[0],scaleY:n.symbolScale[1],rotation:n.rotation},n,i):(o=t.__pictorialMainPath=uO(n),r.add(o),bO(o,{x:n.pathPosition[0],y:n.pathPosition[1],scaleX:0,scaleY:0,rotation:n.rotation},{scaleX:n.symbolScale[0],scaleY:n.symbolScale[1]},n,i))}function pO(t,e,n){var i=A({},e.barRectShape),r=t.__pictorialBarRect;r?bO(r,null,{shape:i},e,n):((r=t.__pictorialBarRect=new zs({z2:2,shape:i,silent:!0,style:{stroke:"transparent",fill:"transparent",lineWidth:0}})).disableMorphing=!0,t.add(r))}function dO(t,e,n,i){if(n.symbolClip){var r=t.__pictorialClipPath,o=A({},n.clipShape),a=e.valueDim,s=n.animationModel,l=n.dataIndex;if(r)fh(r,{shape:o},s,l);else{o[a.wh]=0,r=new zs({shape:o}),t.__pictorialBundle.setClipPath(r),t.__pictorialClipPath=r;var u={};u[a.wh]=n.clipShape[a.wh],Kh[i?"updateProps":"initProps"](r,{shape:u},s,l)}}}function fO(t,e){var n=t.getItemModel(e);return n.getAnimationDelayParams=gO,n.isAnimationEnabled=yO,n}function gO(t){return{index:t.__pictorialAnimationIndex,count:t.__pictorialRepeatTimes}}function yO(){return this.parentModel.isAnimationEnabled()&&!!this.getShallow("animation")}function vO(t,e,n,i){var r=new zr,o=new zr;return r.add(o),r.__pictorialBundle=o,o.x=n.bundlePosition[0],o.y=n.bundlePosition[1],n.symbolRepeat?hO(r,e,n):cO(r,0,n),pO(r,n,i),dO(r,e,n,i),r.__pictorialShapeStr=xO(t,n),r.__pictorialSymbolMeta=n,r}function mO(t,e,n,i){var r=i.__pictorialBarRect;r&&r.removeTextContent();var o=[];_O(i,(function(t){o.push(t)})),i.__pictorialMainPath&&o.push(i.__pictorialMainPath),i.__pictorialClipPath&&(n=null),E(o,(function(t){vh(t,{scaleX:0,scaleY:0},n,e,(function(){i.parent&&i.parent.remove(i)}))})),t.setItemGraphicEl(e,null)}function xO(t,e){return[t.getItemVisual(e.dataIndex,"symbol")||"none",!!e.symbolRepeat,!!e.symbolClip].join(":")}function 
_O(t,e,n){E(t.__pictorialBundle.children(),(function(i){i!==t.__pictorialBarRect&&e.call(n,i)}))}function bO(t,e,n,i,r,o){e&&t.attr(e),i.symbolClip&&!r?n&&t.attr(n):n&&Kh[r?"updateProps":"initProps"](t,n,i.animationModel,i.dataIndex,o)}function wO(t,e,n){var i=n.dataIndex,r=n.itemModel,o=r.getModel("emphasis"),a=o.getModel("itemStyle").getItemStyle(),s=r.getModel(["blur","itemStyle"]).getItemStyle(),l=r.getModel(["select","itemStyle"]).getItemStyle(),u=r.getShallow("cursor"),h=o.get("focus"),c=o.get("blurScope"),p=o.get("scale");_O(t,(function(t){if(t instanceof ks){var e=t.style;t.useStyle(A({image:e.image,x:e.x,y:e.y,width:e.width,height:e.height},n.style))}else t.useStyle(n.style);var i=t.ensureState("emphasis");i.style=a,p&&(i.scaleX=1.1*t.scaleX,i.scaleY=1.1*t.scaleY),t.ensureState("blur").style=s,t.ensureState("select").style=l,u&&(t.cursor=u),t.z2=n.z2}));var d=e.valueDim.posDesc[+(n.boundingLength>0)];tc(t.__pictorialBarRect,ec(r),{labelFetcher:e.seriesModel,labelDataIndex:i,defaultText:iS(e.seriesModel.getData(),i),inheritColor:n.style.fill,defaultOpacity:n.style.opacity,defaultOutsidePosition:d}),Yl(t,h,c,o.get("disabled"))}function SO(t){var e=Math.round(t);return Math.abs(t-e)<1e-4?e:Math.ceil(t)}var MO=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.hasSymbolVisual=!0,n.defaultSymbol="roundRect",n}return n(e,t),e.prototype.getInitialData=function(e){return e.stack=null,t.prototype.getInitialData.apply(this,arguments)},e.type="series.pictorialBar",e.dependencies=["grid"],e.defaultOption=Cc(FS.defaultOption,{symbol:"circle",symbolSize:null,symbolRotate:null,symbolPosition:null,symbolOffset:null,symbolMargin:null,symbolRepeat:!1,symbolRepeatDirection:"end",symbolClip:!1,symbolBoundingData:null,symbolPatternSize:400,barGap:"-100%",progressive:0,emphasis:{scale:!1},select:{itemStyle:{borderColor:"#212121"}}}),e}(FS);var IO=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return 
n.type=e.type,n._layers=[],n}return n(e,t),e.prototype.render=function(t,e,n){var i=t.getData(),r=this,o=this.group,a=t.getLayerSeries(),s=i.getLayout("layoutInfo"),l=s.rect,u=s.boundaryGap;function h(t){return t.name}o.x=0,o.y=l.y+u[0];var c=new Vm(this._layersSeries||[],a,h,h),p=[];function d(e,n,s){var l=r._layers;if("remove"!==e){for(var u,h,c=[],d=[],f=a[n].indices,g=0;go&&(o=s),i.push(s)}for(var u=0;uo&&(o=c)}return{y0:r,max:o}}(l),h=u.y0,c=n/u.max,p=o.length,d=o[0].indices.length,f=0;fMath.PI/2?"right":"left"):S&&"center"!==S?"left"===S?(m=r.r0+w,a>Math.PI/2&&(S="right")):"right"===S&&(m=r.r-w,a>Math.PI/2&&(S="left")):(m=o===2*Math.PI&&0===r.r0?0:(r.r+r.r0)/2,S="center"),g.style.align=S,g.style.verticalAlign=f(p,"verticalAlign")||"middle",g.x=m*s+r.cx,g.y=m*l+r.cy;var M=f(p,"rotate"),I=0;"radial"===M?(I=hs(-a))>Math.PI/2&&I<1.5*Math.PI&&(I+=Math.PI):"tangential"===M?(I=Math.PI/2-a)>Math.PI/2?I-=Math.PI:I<-Math.PI/2&&(I+=Math.PI):j(M)&&(I=M*Math.PI/180),g.rotation=hs(I)})),h.dirtyStyle()},e}(zu),kO="sunburstRootToNode",LO="sunburstHighlight";var PO=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n,i){var r=this;this.seriesModel=t,this.api=n,this.ecModel=e;var o=t.getData(),a=o.tree.root,s=t.getViewRoot(),l=this.group,u=t.get("renderLabelForZeroData"),h=[];s.eachNode((function(t){h.push(t)}));var c=this._oldChildren||[];!function(i,r){if(0===i.length&&0===r.length)return;function s(t){return t.getId()}function h(s,h){!function(i,r){u||!i||i.getValue()||(i=null);if(i!==a&&r!==a)if(r&&r.piece)i?(r.piece.updateData(!1,i,t,e,n),o.setItemGraphicEl(i.dataIndex,r.piece)):function(t){if(!t)return;t.piece&&(l.remove(t.piece),t.piece=null)}(r);else if(i){var s=new AO(i,t,e,n);l.add(s),o.setItemGraphicEl(i.dataIndex,s)}}(null==s?null:i[s],null==h?null:r[h])}new 
Vm(r,i,s,s).add(h).update(h).remove(H(h,null)).execute()}(h,c),function(i,o){o.depth>0?(r.virtualPiece?r.virtualPiece.updateData(!1,i,t,e,n):(r.virtualPiece=new AO(i,t,e,n),l.add(r.virtualPiece)),o.piece.off("click"),r.virtualPiece.on("click",(function(t){r._rootToNode(o.parentNode)}))):r.virtualPiece&&(l.remove(r.virtualPiece),r.virtualPiece=null)}(a,s),this._initEvents(),this._oldChildren=h},e.prototype._initEvents=function(){var t=this;this.group.off("click"),this.group.on("click",(function(e){var n=!1;t.seriesModel.getViewRoot().eachNode((function(i){if(!n&&i.piece&&i.piece===e.target){var r=i.getModel().get("nodeClick");if("rootToNode"===r)t._rootToNode(i);else if("link"===r){var o=i.getModel(),a=o.get("link");if(a)bp(a,o.get("target",!0)||"_blank")}n=!0}}))}))},e.prototype._rootToNode=function(t){t!==this.seriesModel.getViewRoot()&&this.api.dispatchAction({type:kO,from:this.uid,seriesId:this.seriesModel.id,targetNode:t})},e.prototype.containPoint=function(t,e){var n=e.getData().getItemLayout(0);if(n){var i=t[0]-n.cx,r=t[1]-n.cy,o=Math.sqrt(i*i+r*r);return o<=n.r&&o>=n.r0}},e.type="sunburst",e}(kg),OO=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.ignoreStyleOnData=!0,n}return n(e,t),e.prototype.getInitialData=function(t,e){var n={name:t.name,children:t.data};RO(n);var i=this._levelModels=z(t.levels||[],(function(t){return new Mc(t,this,e)}),this),r=UC.createTree(n,this,(function(t){t.wrapMethod("getItemModel",(function(t,e){var n=r.getNodeByDataIndex(e),o=i[n.depth];return o&&(t.parentModel=o),t}))}));return r.data},e.prototype.optionUpdated=function(){this.resetViewRoot()},e.prototype.getDataParams=function(e){var n=t.prototype.getDataParams.apply(this,arguments),i=this.getData().tree.getNodeByDataIndex(e);return n.treePathInfo=KC(i,this),n},e.prototype.getLevelModel=function(t){return this._levelModels&&this._levelModels[t.depth]},e.prototype.getViewRoot=function(){return 
this._viewRoot},e.prototype.resetViewRoot=function(t){t?this._viewRoot=t:t=this._viewRoot;var e=this.getRawData().tree.root;t&&(t===e||e.contains(t))||(this._viewRoot=e)},e.prototype.enableAriaDecal=function(){nD(this)},e.type="series.sunburst",e.defaultOption={z:2,center:["50%","50%"],radius:[0,"75%"],clockwise:!0,startAngle:90,minAngle:0,stillShowZeroSum:!0,nodeClick:"rootToNode",renderLabelForZeroData:!1,label:{rotate:"radial",show:!0,opacity:1,align:"center",position:"inside",distance:5,silent:!0},itemStyle:{borderWidth:1,borderColor:"white",borderType:"solid",shadowBlur:0,shadowColor:"rgba(0, 0, 0, 0.2)",shadowOffsetX:0,shadowOffsetY:0,opacity:1},emphasis:{focus:"descendant"},blur:{itemStyle:{opacity:.2},label:{opacity:.1}},animationType:"expansion",animationDuration:1e3,animationDurationUpdate:500,data:[],sort:"desc"},e}(mg);function RO(t){var e=0;E(t.children,(function(t){RO(t);var n=t.value;Y(n)&&(n=n[0]),e+=n}));var n=t.value;Y(n)&&(n=n[0]),(null==n||isNaN(n))&&(n=e),n<0&&(n=0),Y(t.value)?t.value[0]=n:t.value=n}var NO=Math.PI/180;function EO(t,e,n){e.eachSeriesByType(t,(function(t){var e=t.get("center"),i=t.get("radius");Y(i)||(i=[0,i]),Y(e)||(e=[e,e]);var r=n.getWidth(),o=n.getHeight(),a=Math.min(r,o),s=Ur(e[0],r),l=Ur(e[1],o),u=Ur(i[0],a/2),h=Ur(i[1],a/2),c=-t.get("startAngle")*NO,p=t.get("minAngle")*NO,d=t.getData().tree.root,f=t.getViewRoot(),g=f.depth,y=t.get("sort");null!=y&&zO(f,y);var v=0;E(f.children,(function(t){!isNaN(t.getValue())&&v++}));var m=f.getValue(),x=Math.PI/(m||v)*2,_=f.depth>0,b=f.height-(_?-1:1),w=(h-u)/(b||1),S=t.get("clockwise"),M=t.get("stillShowZeroSum"),I=S?1:-1,T=function(e,n){if(e){var i=n;if(e!==d){var r=e.getValue(),o=0===m&&M?x:r*x;o1;)r=r.parentNode;var o=n.getColorFromPalette(r.name||r.dataIndex+"",e);return t.depth>1&&U(o)&&(o=$n(o,(t.depth-1)/(i-1)*.5)),o}(r,t,i.root.height)),A(n.ensureUniqueItemVisual(r.dataIndex,"style"),o)}))}))}var 
BO={color:"fill",borderColor:"stroke"},FO={symbol:1,symbolSize:1,symbolKeepAspect:1,legendIcon:1,visualMeta:1,liftZ:1,decal:1},GO=Oo(),WO=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.optionUpdated=function(){this.currentZLevel=this.get("zlevel",!0),this.currentZ=this.get("z",!0)},e.prototype.getInitialData=function(t,e){return vx(null,this)},e.prototype.getDataParams=function(e,n,i){var r=t.prototype.getDataParams.call(this,e,n);return i&&(r.info=GO(i).info),r},e.type="series.custom",e.dependencies=["grid","polar","geo","singleAxis","calendar"],e.defaultOption={coordinateSystem:"cartesian2d",z:2,legendHoverLink:!0,clip:!1},e}(mg);function HO(t,e){return e=e||[0,0],z(["x","y"],(function(n,i){var r=this.getAxis(n),o=e[i],a=t[i]/2;return"category"===r.type?r.getBandWidth():Math.abs(r.dataToCoord(o-a)-r.dataToCoord(o+a))}),this)}function YO(t,e){return e=e||[0,0],z([0,1],(function(n){var i=e[n],r=t[n]/2,o=[],a=[];return o[n]=i-r,a[n]=i+r,o[1-n]=a[1-n]=e[1-n],Math.abs(this.dataToPoint(o)[n]-this.dataToPoint(a)[n])}),this)}function XO(t,e){var n=this.getAxis(),i=e instanceof Array?e[0]:e,r=(t instanceof Array?t[0]:t)/2;return"category"===n.type?n.getBandWidth():Math.abs(n.dataToCoord(i-r)-n.dataToCoord(i+r))}function UO(t,e){return e=e||[0,0],z(["Radius","Angle"],(function(n,i){var r=this["get"+n+"Axis"](),o=e[i],a=t[i]/2,s="category"===r.type?r.getBandWidth():Math.abs(r.dataToCoord(o-a)-r.dataToCoord(o+a));return"Angle"===n&&(s=s*Math.PI/180),s}),this)}function ZO(t,e,n,i){return t&&(t.legacy||!1!==t.legacy&&!n&&!i&&"tspan"!==e&&("text"===e||_t(t,"text")))}function jO(t,e,n){var 
i,r,o,a=t;if("text"===e)o=a;else{o={},_t(a,"text")&&(o.text=a.text),_t(a,"rich")&&(o.rich=a.rich),_t(a,"textFill")&&(o.fill=a.textFill),_t(a,"textStroke")&&(o.stroke=a.textStroke),_t(a,"fontFamily")&&(o.fontFamily=a.fontFamily),_t(a,"fontSize")&&(o.fontSize=a.fontSize),_t(a,"fontStyle")&&(o.fontStyle=a.fontStyle),_t(a,"fontWeight")&&(o.fontWeight=a.fontWeight),r={type:"text",style:o,silent:!0},i={};var s=_t(a,"textPosition");n?i.position=s?a.textPosition:"inside":s&&(i.position=a.textPosition),_t(a,"textPosition")&&(i.position=a.textPosition),_t(a,"textOffset")&&(i.offset=a.textOffset),_t(a,"textRotation")&&(i.rotation=a.textRotation),_t(a,"textDistance")&&(i.distance=a.textDistance)}return qO(o,t),E(o.rich,(function(t){qO(t,t)})),{textConfig:i,textContent:r}}function qO(t,e){e&&(e.font=e.textFont||e.font,_t(e,"textStrokeWidth")&&(t.lineWidth=e.textStrokeWidth),_t(e,"textAlign")&&(t.align=e.textAlign),_t(e,"textVerticalAlign")&&(t.verticalAlign=e.textVerticalAlign),_t(e,"textLineHeight")&&(t.lineHeight=e.textLineHeight),_t(e,"textWidth")&&(t.width=e.textWidth),_t(e,"textHeight")&&(t.height=e.textHeight),_t(e,"textBackgroundColor")&&(t.backgroundColor=e.textBackgroundColor),_t(e,"textPadding")&&(t.padding=e.textPadding),_t(e,"textBorderColor")&&(t.borderColor=e.textBorderColor),_t(e,"textBorderWidth")&&(t.borderWidth=e.textBorderWidth),_t(e,"textBorderRadius")&&(t.borderRadius=e.textBorderRadius),_t(e,"textBoxShadowColor")&&(t.shadowColor=e.textBoxShadowColor),_t(e,"textBoxShadowBlur")&&(t.shadowBlur=e.textBoxShadowBlur),_t(e,"textBoxShadowOffsetX")&&(t.shadowOffsetX=e.textBoxShadowOffsetX),_t(e,"textBoxShadowOffsetY")&&(t.shadowOffsetY=e.textBoxShadowOffsetY))}function KO(t,e,n){var i=t;i.textPosition=i.textPosition||n.position||"inside",null!=n.offset&&(i.textOffset=n.offset),null!=n.rotation&&(i.textRotation=n.rotation),null!=n.distance&&(i.textDistance=n.distance);var r=i.textPosition.indexOf("inside")>=0,o=t.fill||"#000";$O(i,e);var a=null==i.textFill;return 
r?a&&(i.textFill=n.insideFill||"#fff",!i.textStroke&&n.insideStroke&&(i.textStroke=n.insideStroke),!i.textStroke&&(i.textStroke=o),null==i.textStrokeWidth&&(i.textStrokeWidth=2)):(a&&(i.textFill=t.fill||n.outsideFill||"#000"),!i.textStroke&&n.outsideStroke&&(i.textStroke=n.outsideStroke)),i.text=e.text,i.rich=e.rich,E(e.rich,(function(t){$O(t,t)})),i}function $O(t,e){e&&(_t(e,"fill")&&(t.textFill=e.fill),_t(e,"stroke")&&(t.textStroke=e.fill),_t(e,"lineWidth")&&(t.textStrokeWidth=e.lineWidth),_t(e,"font")&&(t.font=e.font),_t(e,"fontStyle")&&(t.fontStyle=e.fontStyle),_t(e,"fontWeight")&&(t.fontWeight=e.fontWeight),_t(e,"fontSize")&&(t.fontSize=e.fontSize),_t(e,"fontFamily")&&(t.fontFamily=e.fontFamily),_t(e,"align")&&(t.textAlign=e.align),_t(e,"verticalAlign")&&(t.textVerticalAlign=e.verticalAlign),_t(e,"lineHeight")&&(t.textLineHeight=e.lineHeight),_t(e,"width")&&(t.textWidth=e.width),_t(e,"height")&&(t.textHeight=e.height),_t(e,"backgroundColor")&&(t.textBackgroundColor=e.backgroundColor),_t(e,"padding")&&(t.textPadding=e.padding),_t(e,"borderColor")&&(t.textBorderColor=e.borderColor),_t(e,"borderWidth")&&(t.textBorderWidth=e.borderWidth),_t(e,"borderRadius")&&(t.textBorderRadius=e.borderRadius),_t(e,"shadowColor")&&(t.textBoxShadowColor=e.shadowColor),_t(e,"shadowBlur")&&(t.textBoxShadowBlur=e.shadowBlur),_t(e,"shadowOffsetX")&&(t.textBoxShadowOffsetX=e.shadowOffsetX),_t(e,"shadowOffsetY")&&(t.textBoxShadowOffsetY=e.shadowOffsetY),_t(e,"textShadowColor")&&(t.textShadowColor=e.textShadowColor),_t(e,"textShadowBlur")&&(t.textShadowBlur=e.textShadowBlur),_t(e,"textShadowOffsetX")&&(t.textShadowOffsetX=e.textShadowOffsetX),_t(e,"textShadowOffsetY")&&(t.textShadowOffsetY=e.textShadowOffsetY))}var JO={position:["x","y"],scale:["scaleX","scaleY"],origin:["originX","originY"]},QO=G(JO),tR=(V(yr,(function(t,e){return t[e]=1,t}),{}),yr.join(", "),["","style","shape","extra"]),eR=Oo();function nR(t,e,n,i,r){var o=t+"Animation",a=ph(t,i,r)||{},s=eR(e).userDuring;return 
a.duration>0&&(a.during=s?W(uR,{el:e,userDuring:s}):null,a.setToFinal=!0,a.scope=t),A(a,n[o]),a}function iR(t,e,n,i){var r=(i=i||{}).dataIndex,o=i.isInit,a=i.clearStyle,s=n.isAnimationEnabled(),l=eR(t),u=e.style;l.userDuring=e.during;var h={},c={};if(function(t,e,n){for(var i=0;i=0)){var c=t.getAnimationStyleProps(),p=c?c.style:null;if(p){!r&&(r=i.style={});var d=G(n);for(u=0;u0&&t.animateFrom(p,d)}else!function(t,e,n,i,r){if(r){var o=nR("update",t,e,i,n);o.duration>0&&t.animateFrom(r,o)}}(t,e,r||0,n,h);rR(t,e),u?t.dirty():t.markRedraw()}function rR(t,e){for(var n=eR(t).leaveToProps,i=0;i=0){!o&&(o=i[t]={});var p=G(a);for(h=0;hi[1]&&i.reverse(),{coordSys:{type:"polar",cx:t.cx,cy:t.cy,r:i[1],r0:i[0]},api:{coord:function(i){var r=e.dataToRadius(i[0]),o=n.dataToAngle(i[1]),a=t.coordToPoint([r,o]);return a.push(r,o*Math.PI/180),a},size:W(UO,t)}}},calendar:function(t){var e=t.getRect(),n=t.getRangeInfo();return{coordSys:{type:"calendar",x:e.x,y:e.y,width:e.width,height:e.height,cellWidth:t.getCellWidth(),cellHeight:t.getCellHeight(),rangeInfo:{start:n.start,end:n.end,weeks:n.weeks,dayCount:n.allDay}},api:{coord:function(e,n){return t.dataToPoint(e,n)}}}}};function CR(t){return t instanceof Is}function DR(t){return t instanceof Sa}var AR=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n,i){this._progressiveEls=null;var r=this._data,o=t.getData(),a=this.group,s=RR(t,o,e,n);r||a.removeAll(),o.diff(r).add((function(e){ER(n,null,e,s(e,i),t,a,o)})).remove((function(e){var n=r.getItemGraphicEl(e);n&&oR(n,GO(n).option,t)})).update((function(e,l){var u=r.getItemGraphicEl(l);ER(n,u,e,s(e,i),t,a,o)})).execute();var l=t.get("clip",!0)?SS(t.coordinateSystem,!1,t):null;l?a.setClipPath(l):a.removeClipPath(),this._data=o},e.prototype.incrementalPrepareRender=function(t,e,n){this.group.removeAll(),this._data=null},e.prototype.incrementalRender=function(t,e,n,i,r){var 
o=e.getData(),a=RR(e,o,n,i),s=this._progressiveEls=[];function l(t){t.isGroup||(t.incremental=!0,t.ensureState("emphasis").hoverLayer=!0)}for(var u=t.start;u=0?e.getStore().get(r,n):void 0}var o=e.get(i.name,n),a=i&&i.ordinalMeta;return a?a.categories[o]:o},styleEmphasis:function(n,i){0;null==i&&(i=s);var r=m(i,vR).getItemStyle(),o=x(i,vR),a=nc(o,null,null,!0,!0);a.text=o.getShallow("show")?ot(t.getFormattedLabel(i,vR),t.getFormattedLabel(i,mR),iS(e,i)):null;var l=ic(o,null,!0);return b(n,r),r=KO(r,a,l),n&&_(r,n),r.legacy=!0,r},visual:function(t,n){if(null==n&&(n=s),_t(BO,t)){var i=e.getItemVisual(n,"style");return i?i[BO[t]]:null}if(_t(FO,t))return e.getItemVisual(n,t)},barLayout:function(t){if("cartesian2d"===o.type){return function(t){var e=[],n=t.axis,i="axis0";if("category"===n.type){for(var r=n.getBandWidth(),o=0;o=c;f--){var g=e.childAt(f);WR(e,g,r)}}(t,c,n,i,r),a>=0?o.replaceAt(c,a):o.add(c),c}function VR(t,e,n){var i,r=GO(t),o=e.type,a=e.shape,s=e.style;return n.isUniversalTransitionEnabled()||null!=o&&o!==r.customGraphicType||"path"===o&&((i=a)&&(_t(i,"pathData")||_t(i,"d")))&&UR(a)!==r.customPathData||"image"===o&&_t(s,"image")&&s.image!==r.customImagePath}function BR(t,e,n){var i=e?FR(t,e):t,r=e?GR(t,i,vR):t.style,o=t.type,a=i?i.textConfig:null,s=t.textContent,l=s?e?FR(s,e):s:null;if(r&&(n.isLegacy||ZO(r,o,!!a,!!l))){n.isLegacy=!0;var u=jO(r,o,!e);!a&&u.textConfig&&(a=u.textConfig),!l&&u.textContent&&(l=u.textContent)}if(!e&&l){var h=l;!h.type&&(h.type="text")}var c=e?n[e]:n.normal;c.cfg=a,c.conOpt=l}function FR(t,e){return e?t?t[e]:null:t}function GR(t,e,n){var i=e&&e.style;return null==i&&n===vR&&t&&(i=t.styleEmphasis),i}function WR(t,e,n){e&&oR(e,GO(t).option,n)}function HR(t,e){var n=t&&t.name;return null!=n?n:"e\0\0"+e}function YR(t,e){var n=this.context,i=null!=t?n.newChildren[t]:null,r=null!=e?n.oldChildren[e]:null;zR(n.api,r,n.dataIndex,i,n.seriesModel,n.group)}function XR(t){var 
e=this.context,n=e.oldChildren[t];n&&oR(n,GO(n).option,e.seriesModel)}function UR(t){return t&&(t.pathData||t.d)}var ZR=Oo(),jR=T,qR=W,KR=function(){function t(){this._dragging=!1,this.animationThreshold=15}return t.prototype.render=function(t,e,n,i){var r=e.get("value"),o=e.get("status");if(this._axisModel=t,this._axisPointerModel=e,this._api=n,i||this._lastValue!==r||this._lastStatus!==o){this._lastValue=r,this._lastStatus=o;var a=this._group,s=this._handle;if(!o||"hide"===o)return a&&a.hide(),void(s&&s.hide());a&&a.show(),s&&s.show();var l={};this.makeElOption(l,r,t,e,n);var u=l.graphicKey;u!==this._lastGraphicKey&&this.clear(n),this._lastGraphicKey=u;var h=this._moveAnimation=this.determineAnimation(t,e);if(a){var c=H($R,e,h);this.updatePointerEl(a,l,c),this.updateLabelEl(a,l,c,e)}else a=this._group=new zr,this.createPointerEl(a,l,t,e),this.createLabelEl(a,l,t,e),n.getZr().add(a);eN(a,e,!0),this._renderHandle(r)}},t.prototype.remove=function(t){this.clear(t)},t.prototype.dispose=function(t){this.clear(t)},t.prototype.determineAnimation=function(t,e){var n=e.get("animation"),i=t.axis,r="category"===i.type,o=e.get("snap");if(!o&&!r)return!1;if("auto"===n||null==n){var a=this.animationThreshold;if(r&&i.getBandWidth()>a)return!0;if(o){var s=pI(t).seriesDataCount,l=i.getExtent();return Math.abs(l[0]-l[1])/s>a}return!1}return!0===n},t.prototype.makeElOption=function(t,e,n,i,r){},t.prototype.createPointerEl=function(t,e,n,i){var r=e.pointer;if(r){var o=ZR(t).pointerEl=new Kh[r.type](jR(e.pointer));t.add(o)}},t.prototype.createLabelEl=function(t,e,n,i){if(e.label){var r=ZR(t).labelEl=new Fs(jR(e.label));t.add(r),QR(r,i)}},t.prototype.updatePointerEl=function(t,e,n){var i=ZR(t).pointerEl;i&&e.pointer&&(i.setStyle(e.pointer.style),n(i,{shape:e.pointer.shape}))},t.prototype.updateLabelEl=function(t,e,n,i){var 
r=ZR(t).labelEl;r&&(r.setStyle(e.label.style),n(r,{x:e.label.x,y:e.label.y}),QR(r,i))},t.prototype._renderHandle=function(t){if(!this._dragging&&this.updateHandleTransform){var e,n=this._axisPointerModel,i=this._api.getZr(),r=this._handle,o=n.getModel("handle"),a=n.get("status");if(!o.get("show")||!a||"hide"===a)return r&&i.remove(r),void(this._handle=null);this._handle||(e=!0,r=this._handle=Hh(o.get("icon"),{cursor:"move",draggable:!0,onmousemove:function(t){de(t.event)},onmousedown:qR(this._onHandleDragMove,this,0,0),drift:qR(this._onHandleDragMove,this),ondragend:qR(this._onHandleDragEnd,this)}),i.add(r)),eN(r,n,!1),r.setStyle(o.getItemStyle(null,["color","borderColor","borderWidth","opacity","shadowColor","shadowBlur","shadowOffsetX","shadowOffsetY"]));var s=o.get("size");Y(s)||(s=[s,s]),r.scaleX=s[0]/2,r.scaleY=s[1]/2,Fg(this,"_doDispatchAxisPointer",o.get("throttle")||0,"fixRate"),this._moveHandleToValue(t,e)}},t.prototype._moveHandleToValue=function(t,e){$R(this._axisPointerModel,!e&&this._moveAnimation,this._handle,tN(this.getHandleTransform(t,this._axisModel,this._axisPointerModel)))},t.prototype._onHandleDragMove=function(t,e){var n=this._handle;if(n){this._dragging=!0;var i=this.updateHandleTransform(tN(n),[t,e],this._axisModel,this._axisPointerModel);this._payloadInfo=i,n.stopAnimation(),n.attr(tN(i)),ZR(n).lastProp=null,this._doDispatchAxisPointer()}},t.prototype._doDispatchAxisPointer=function(){if(this._handle){var t=this._payloadInfo,e=this._axisModel;this._api.dispatchAction({type:"updateAxisPointer",x:t.cursorPoint[0],y:t.cursorPoint[1],tooltipOption:t.tooltipOption,axesInfo:[{axisDim:e.axis.dim,axisIndex:e.componentIndex}]})}},t.prototype._onHandleDragEnd=function(){if(this._dragging=!1,this._handle){var t=this._axisPointerModel.get("value");this._moveHandleToValue(t),this._api.dispatchAction({type:"hideTip"})}},t.prototype.clear=function(t){this._lastValue=null,this._lastStatus=null;var 
e=t.getZr(),n=this._group,i=this._handle;e&&n&&(this._lastGraphicKey=null,n&&e.remove(n),i&&e.remove(i),this._group=null,this._handle=null,this._payloadInfo=null),Gg(this,"_doDispatchAxisPointer")},t.prototype.doClear=function(){},t.prototype.buildLabel=function(t,e,n){return{x:t[n=n||0],y:t[1-n],width:e[n],height:e[1-n]}},t}();function $R(t,e,n,i){JR(ZR(n).lastProp,i)||(ZR(n).lastProp=i,e?fh(n,i,t):(n.stopAnimation(),n.attr(i)))}function JR(t,e){if(q(t)&&q(e)){var n=!0;return E(e,(function(e,i){n=n&&JR(t[i],e)})),!!n}return t===e}function QR(t,e){t[e.get(["label","show"])?"show":"hide"]()}function tN(t){return{x:t.x||0,y:t.y||0,rotation:t.rotation||0}}function eN(t,e,n){var i=e.get("z"),r=e.get("zlevel");t&&t.traverse((function(t){"group"!==t.type&&(null!=i&&(t.z=i),null!=r&&(t.zlevel=r),t.silent=n)}))}function nN(t){var e,n=t.get("type"),i=t.getModel(n+"Style");return"line"===n?(e=i.getLineStyle()).fill=null:"shadow"===n&&((e=i.getAreaStyle()).stroke=null),e}function iN(t,e,n,i,r){var o=rN(n.get("value"),e.axis,e.ecModel,n.get("seriesDataIndices"),{precision:n.get(["label","precision"]),formatter:n.get(["label","formatter"])}),a=n.getModel("label"),s=fp(a.get("padding")||0),l=a.getFont(),u=br(o,l),h=r.position,c=u.width+s[1]+s[3],p=u.height+s[0]+s[2],d=r.align;"right"===d&&(h[0]-=c),"center"===d&&(h[0]-=c/2);var f=r.verticalAlign;"bottom"===f&&(h[1]-=p),"middle"===f&&(h[1]-=p/2),function(t,e,n,i){var r=i.getWidth(),o=i.getHeight();t[0]=Math.min(t[0]+e,r)-e,t[1]=Math.min(t[1]+n,o)-n,t[0]=Math.max(t[0],0),t[1]=Math.max(t[1],0)}(h,c,p,i);var g=a.get("backgroundColor");g&&"auto"!==g||(g=e.get(["axisLine","lineStyle","color"])),t.label={x:h[0],y:h[1],style:nc(a,{text:o,font:l,fill:a.getTextColor(),padding:s,backgroundColor:g}),z2:10}}function rN(t,e,n,i,r){t=e.scale.parse(t);var o=e.scale.getLabel({value:t},{precision:r.precision}),a=r.formatter;if(a){var s={value:__(e,{value:t}),axisDimension:e.dim,axisIndex:e.index,seriesData:[]};E(i,(function(t){var 
e=n.getSeriesByIndex(t.seriesIndex),i=t.dataIndexInside,r=e&&e.getDataParams(i);r&&s.seriesData.push(r)})),U(a)?o=a.replace("{value}",o):X(a)&&(o=a(s))}return o}function oN(t,e,n){var i=[1,0,0,1,0,0];return Se(i,i,n.rotation),we(i,i,n.position),zh([t.dataToCoord(e),(n.labelOffset||0)+(n.labelDirection||1)*(n.labelMargin||0)],i)}function aN(t,e,n,i,r,o){var a=iI.innerTextLayout(n.rotation,0,n.labelDirection);n.labelMargin=r.get(["label","margin"]),iN(e,i,r,o,{position:oN(i.axis,t,n),align:a.textAlign,verticalAlign:a.textVerticalAlign})}function sN(t,e,n){return{x1:t[n=n||0],y1:t[1-n],x2:e[n],y2:e[1-n]}}function lN(t,e,n){return{x:t[n=n||0],y:t[1-n],width:e[n],height:e[1-n]}}function uN(t,e,n,i,r,o){return{cx:t,cy:e,r0:n,r:i,startAngle:r,endAngle:o,clockwise:!0}}var hN=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.makeElOption=function(t,e,n,i,r){var o=n.axis,a=o.grid,s=i.get("type"),l=cN(a,o).getOtherAxis(o).getGlobalExtent(),u=o.toGlobalCoord(o.dataToCoord(e,!0));if(s&&"none"!==s){var h=nN(i),c=pN[s](o,u,l);c.style=h,t.graphicKey=c.type,t.pointer=c}aN(e,t,ZM(a.model,n),n,i,r)},e.prototype.getHandleTransform=function(t,e,n){var i=ZM(e.axis.grid.model,e,{labelInside:!1});i.labelMargin=n.get(["handle","margin"]);var r=oN(e.axis,t,i);return{x:r[0],y:r[1],rotation:i.rotation+(i.labelDirection<0?Math.PI:0)}},e.prototype.updateHandleTransform=function(t,e,n,i){var r=n.axis,o=r.grid,a=r.getGlobalExtent(!0),s=cN(o,r).getOtherAxis(r).getGlobalExtent(),l="x"===r.dim?0:1,u=[t.x,t.y];u[l]+=e[l],u[l]=Math.min(a[1],u[l]),u[l]=Math.max(a[0],u[l]);var h=(s[1]+s[0])/2,c=[h,h];c[l]=u[l];return{x:u[0],y:u[1],rotation:t.rotation,cursorPoint:c,tooltipOption:[{verticalAlign:"middle"},{align:"center"}][l]}},e}(KR);function cN(t,e){var n={};return n[e.dim+"AxisIndex"]=e.index,t.getCartesian(n)}var pN={line:function(t,e,n){return{type:"Line",subPixelOptimize:!0,shape:sN([e,n[0]],[e,n[1]],dN(t))}},shadow:function(t,e,n){var 
i=Math.max(1,t.getBandWidth()),r=n[1]-n[0];return{type:"Rect",shape:lN([e-i/2,n[0]],[i,r],dN(t))}}};function dN(t){return"x"===t.dim?0:1}var fN=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="axisPointer",e.defaultOption={show:"auto",z:50,type:"line",snap:!1,triggerTooltip:!0,triggerEmphasis:!0,value:null,status:null,link:[],animation:null,animationDurationUpdate:200,lineStyle:{color:"#B9BEC9",width:1,type:"dashed"},shadowStyle:{color:"rgba(210,219,238,0.2)"},label:{show:!0,formatter:null,precision:"auto",margin:3,color:"#fff",padding:[5,7,5,7],backgroundColor:"auto",borderColor:null,borderWidth:0,borderRadius:3},handle:{show:!1,icon:"M10.7,11.9v-1.3H9.3v1.3c-4.9,0.3-8.8,4.4-8.8,9.4c0,5,3.9,9.1,8.8,9.4h1.3c4.9-0.3,8.8-4.4,8.8-9.4C19.5,16.3,15.6,12.2,10.7,11.9z M13.3,24.4H6.7v-1.2h6.6z M13.3,22H6.7v-1.2h6.6z M13.3,19.6H6.7v-1.2h6.6z",size:45,margin:50,color:"#333",shadowBlur:3,shadowColor:"#aaa",shadowOffsetX:0,shadowOffsetY:2,throttle:40}},e}(Rp),gN=Oo(),yN=E;function vN(t,e,n){if(!r.node){var i=e.getZr();gN(i).records||(gN(i).records={}),function(t,e){if(gN(t).initialized)return;function n(n,i){t.on(n,(function(n){var r=function(t){var e={showTip:[],hideTip:[]},n=function(i){var r=e[i.type];r?r.push(i):(i.dispatchAction=n,t.dispatchAction(i))};return{dispatchAction:n,pendings:e}}(e);yN(gN(t).records,(function(t){t&&i(t,n,r.dispatchAction)})),function(t,e){var n,i=t.showTip.length,r=t.hideTip.length;i?n=t.showTip[i-1]:r&&(n=t.hideTip[r-1]);n&&(n.dispatchAction=null,e.dispatchAction(n))}(r.pendings,e)}))}gN(t).initialized=!0,n("click",H(xN,"click")),n("mousemove",H(xN,"mousemove")),n("globalout",mN)}(i,e),(gN(i).records[t]||(gN(i).records[t]={})).handler=n}}function mN(t,e,n){t.handler("leave",null,n)}function xN(t,e,n,i){e.handler(t,n,i)}function _N(t,e){if(!r.node){var n=e.getZr();(gN(n).records||{})[t]&&(gN(n).records[t]=null)}}var bN=function(t){function e(){var 
n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){var i=e.getComponent("tooltip"),r=t.get("triggerOn")||i&&i.get("triggerOn")||"mousemove|click";vN("axisPointer",n,(function(t,e,n){"none"!==r&&("leave"===t||r.indexOf(t)>=0)&&n({type:"updateAxisPointer",currTrigger:t,x:e&&e.offsetX,y:e&&e.offsetY})}))},e.prototype.remove=function(t,e){_N("axisPointer",e)},e.prototype.dispose=function(t,e){_N("axisPointer",e)},e.type="axisPointer",e}(Tg);function wN(t,e){var n,i=[],r=t.seriesIndex;if(null==r||!(n=e.getSeriesByIndex(r)))return{point:[]};var o=n.getData(),a=Po(o,t);if(null==a||a<0||Y(a))return{point:[]};var s=o.getItemGraphicEl(a),l=n.coordinateSystem;if(n.getTooltipPosition)i=n.getTooltipPosition(a)||[];else if(l&&l.dataToPoint)if(t.isStacked){var u=l.getBaseAxis(),h=l.getOtherAxis(u).dim,c=u.dim,p="x"===h||"radius"===h?1:0,d=o.mapDimension(c),f=[];f[p]=o.get(d,a),f[1-p]=o.get(o.getCalculationInfo("stackResultDimension"),a),i=l.dataToPoint(f)||[]}else i=l.dataToPoint(o.getValues(z(l.dimensions,(function(t){return o.mapDimension(t)})),a))||[];else if(s){var g=s.getBoundingRect().clone();g.applyTransform(s.transform),i=[g.x+g.width/2,g.y+g.height/2]}return{point:i,el:s}}var SN=Oo();function MN(t,e,n){var i=t.currTrigger,r=[t.x,t.y],o=t,a=t.dispatchAction||W(n.dispatchAction,n),s=e.getComponent("axisPointer").coordSysAxesInfo;if(s){AN(r)&&(r=wN({seriesIndex:o.seriesIndex,dataIndex:o.dataIndex},e).point);var l=AN(r),u=o.axesInfo,h=s.axesInfo,c="leave"===i||AN(r),p={},d={},f={list:[],map:{}},g={showPointer:H(TN,d),showTooltip:H(CN,f)};E(s.coordSysMap,(function(t,e){var n=l||t.containPoint(r);E(s.coordSysAxesInfo[e],(function(t,e){var i=t.axis,o=function(t,e){for(var n=0;n<(t||[]).length;n++){var i=t[n];if(e.axis.dim===i.axisDim&&e.axis.model.componentIndex===i.axisIndex)return i}}(u,t);if(!c&&n&&(!u||o)){var a=o&&o.value;null!=a||l||(a=i.pointToData(r)),null!=a&&IN(t,a,g,!1,p)}}))}));var y={};return 
E(h,(function(t,e){var n=t.linkGroup;n&&!d[e]&&E(n.axesInfo,(function(e,i){var r=d[i];if(e!==t&&r){var o=r.value;n.mapper&&(o=t.axis.scale.parse(n.mapper(o,DN(e),DN(t)))),y[t.key]=o}}))})),E(y,(function(t,e){IN(h[e],t,g,!0,p)})),function(t,e,n){var i=n.axesInfo=[];E(e,(function(e,n){var r=e.axisPointerModel.option,o=t[n];o?(!e.useHandle&&(r.status="show"),r.value=o.value,r.seriesDataIndices=(o.payloadBatch||[]).slice()):!e.useHandle&&(r.status="hide"),"show"===r.status&&i.push({axisDim:e.axis.dim,axisIndex:e.axis.model.componentIndex,value:r.value})}))}(d,h,p),function(t,e,n,i){if(AN(e)||!t.list.length)return void i({type:"hideTip"});var r=((t.list[0].dataByAxis[0]||{}).seriesDataIndices||[])[0]||{};i({type:"showTip",escapeConnect:!0,x:e[0],y:e[1],tooltipOption:n.tooltipOption,position:n.position,dataIndexInside:r.dataIndexInside,dataIndex:r.dataIndex,seriesIndex:r.seriesIndex,dataByCoordSys:t.list})}(f,r,t,a),function(t,e,n){var i=n.getZr(),r="axisPointerLastHighlights",o=SN(i)[r]||{},a=SN(i)[r]={};E(t,(function(t,e){var n=t.axisPointerModel.option;"show"===n.status&&t.triggerEmphasis&&E(n.seriesDataIndices,(function(t){var e=t.seriesIndex+" | "+t.dataIndex;a[e]=t}))}));var s=[],l=[];E(o,(function(t,e){!a[e]&&l.push(t)})),E(a,(function(t,e){!o[e]&&s.push(t)})),l.length&&n.dispatchAction({type:"downplay",escapeConnect:!0,notBlur:!0,batch:l}),s.length&&n.dispatchAction({type:"highlight",escapeConnect:!0,notBlur:!0,batch:s})}(h,0,n),p}}function IN(t,e,n,i,r){var o=t.axis;if(!o.scale.isBlank()&&o.containData(e))if(t.involveSeries){var a=function(t,e){var n=e.axis,i=n.dim,r=t,o=[],a=Number.MAX_VALUE,s=-1;return E(e.seriesModels,(function(e,l){var u,h,c=e.getData().mapDimensionsAll(i);if(e.getAxisTooltipData){var p=e.getAxisTooltipData(c,t,n);h=p.dataIndices,u=p.nestestValue}else{if(!(h=e.getData().indicesOfNearest(c[0],t,"category"===n.type?.5:null)).length)return;u=e.getData().get(c[0],h[0])}if(null!=u&&isFinite(u)){var 
d=t-u,f=Math.abs(d);f<=a&&((f=0&&s<0)&&(a=f,s=d,r=u,o.length=0),E(h,(function(t){o.push({seriesIndex:e.seriesIndex,dataIndexInside:t,dataIndex:e.getData().getRawIndex(t)})})))}})),{payloadBatch:o,snapToValue:r}}(e,t),s=a.payloadBatch,l=a.snapToValue;s[0]&&null==r.seriesIndex&&A(r,s[0]),!i&&t.snap&&o.containData(l)&&null!=l&&(e=l),n.showPointer(t,e,s),n.showTooltip(t,a,l)}else n.showPointer(t,e)}function TN(t,e,n,i){t[e.key]={value:n,payloadBatch:i}}function CN(t,e,n,i){var r=n.payloadBatch,o=e.axis,a=o.model,s=e.axisPointerModel;if(e.triggerTooltip&&r.length){var l=e.coordSys.model,u=fI(l),h=t.map[u];h||(h=t.map[u]={coordSysId:l.id,coordSysIndex:l.componentIndex,coordSysType:l.type,coordSysMainType:l.mainType,dataByAxis:[]},t.list.push(h)),h.dataByAxis.push({axisDim:o.dim,axisIndex:a.componentIndex,axisType:a.type,axisId:a.id,value:i,valueLabelOpt:{precision:s.get(["label","precision"]),formatter:s.get(["label","formatter"])},seriesDataIndices:r.slice()})}}function DN(t){var e=t.axis.model,n={},i=n.axisDim=t.axis.dim;return n.axisIndex=n[i+"AxisIndex"]=e.componentIndex,n.axisName=n[i+"AxisName"]=e.name,n.axisId=n[i+"AxisId"]=e.id,n}function AN(t){return!t||null==t[0]||isNaN(t[0])||null==t[1]||isNaN(t[1])}function kN(t){yI.registerAxisPointerClass("CartesianAxisPointer",hN),t.registerComponentModel(fN),t.registerComponentView(bN),t.registerPreprocessor((function(t){if(t){(!t.axisPointer||0===t.axisPointer.length)&&(t.axisPointer={});var e=t.axisPointer.link;e&&!Y(e)&&(t.axisPointer.link=[e])}})),t.registerProcessor(t.PRIORITY.PROCESSOR.STATISTIC,(function(t,e){t.getComponent("axisPointer").coordSysAxesInfo=uI(t,e)})),t.registerAction({type:"updateAxisPointer",event:"updateAxisPointer",update:":updateAxisPointer"},MN)}var LN=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.makeElOption=function(t,e,n,i,r){var o=n.axis;"angle"===o.dim&&(this.animationThreshold=Math.PI/18);var 
a=o.polar,s=a.getOtherAxis(o).getExtent(),l=o.dataToCoord(e),u=i.get("type");if(u&&"none"!==u){var h=nN(i),c=PN[u](o,a,l,s);c.style=h,t.graphicKey=c.type,t.pointer=c}var p=function(t,e,n,i,r){var o=e.axis,a=o.dataToCoord(t),s=i.getAngleAxis().getExtent()[0];s=s/180*Math.PI;var l,u,h,c=i.getRadiusAxis().getExtent();if("radius"===o.dim){var p=[1,0,0,1,0,0];Se(p,p,s),we(p,p,[i.cx,i.cy]),l=zh([a,-r],p);var d=e.getModel("axisLabel").get("rotate")||0,f=iI.innerTextLayout(s,d*Math.PI/180,-1);u=f.textAlign,h=f.textVerticalAlign}else{var g=c[1];l=i.coordToPoint([g+r,a]);var y=i.cx,v=i.cy;u=Math.abs(l[0]-y)/g<.3?"center":l[0]>y?"left":"right",h=Math.abs(l[1]-v)/g<.3?"middle":l[1]>v?"top":"bottom"}return{position:l,align:u,verticalAlign:h}}(e,n,0,a,i.get(["label","margin"]));iN(t,n,i,r,p)},e}(KR);var PN={line:function(t,e,n,i){return"angle"===t.dim?{type:"Line",shape:sN(e.coordToPoint([i[0],n]),e.coordToPoint([i[1],n]))}:{type:"Circle",shape:{cx:e.cx,cy:e.cy,r:n}}},shadow:function(t,e,n,i){var r=Math.max(1,t.getBandWidth()),o=Math.PI/180;return"angle"===t.dim?{type:"Sector",shape:uN(e.cx,e.cy,i[0],i[1],(-n-r/2)*o,(r/2-n)*o)}:{type:"Sector",shape:uN(e.cx,e.cy,n-r/2,n+r/2,0,2*Math.PI)}}},ON=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.findAxisModel=function(t){var e;return this.ecModel.eachComponent(t,(function(t){t.getCoordSysModel()===this&&(e=t)}),this),e},e.type="polar",e.dependencies=["radiusAxis","angleAxis"],e.defaultOption={z:0,center:["50%","50%"],radius:"80%"},e}(Rp),RN=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.getCoordSysModel=function(){return this.getReferringComponents("polar",zo).models[0]},e.type="polarAxis",e}(Rp);R(RN,I_);var NN=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="angleAxis",e}(RN),EN=function(t){function e(){var 
n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="radiusAxis",e}(RN),zN=function(t){function e(e,n){return t.call(this,"radius",e,n)||this}return n(e,t),e.prototype.pointToData=function(t,e){return this.polar.pointToData(t,e)["radius"===this.dim?0:1]},e}(nb);zN.prototype.dataToRadius=nb.prototype.dataToCoord,zN.prototype.radiusToData=nb.prototype.coordToData;var VN=Oo(),BN=function(t){function e(e,n){return t.call(this,"angle",e,n||[0,360])||this}return n(e,t),e.prototype.pointToData=function(t,e){return this.polar.pointToData(t,e)["radius"===this.dim?0:1]},e.prototype.calculateCategoryInterval=function(){var t=this,e=t.getLabelModel(),n=t.scale,i=n.getExtent(),r=n.count();if(i[1]-i[0]<1)return 0;var o=i[0],a=t.dataToCoord(o+1)-t.dataToCoord(o),s=Math.abs(a),l=br(null==o?"":o+"",e.getFont(),"center","top"),u=Math.max(l.height,7)/s;isNaN(u)&&(u=1/0);var h=Math.max(0,Math.floor(u)),c=VN(t.model),p=c.lastAutoInterval,d=c.lastTickCount;return null!=p&&null!=d&&Math.abs(p-h)<=1&&Math.abs(d-r)<=1&&p>h?h=p:(c.lastTickCount=r,c.lastAutoInterval=h),h},e}(nb);BN.prototype.dataToAngle=nb.prototype.dataToCoord,BN.prototype.angleToData=nb.prototype.coordToData;var FN=["radius","angle"],GN=function(){function t(t){this.dimensions=FN,this.type="polar",this.cx=0,this.cy=0,this._radiusAxis=new zN,this._angleAxis=new BN,this.axisPointerEnabled=!0,this.name=t||"",this._radiusAxis.polar=this._angleAxis.polar=this}return t.prototype.containPoint=function(t){var e=this.pointToCoord(t);return this._radiusAxis.contain(e[0])&&this._angleAxis.contain(e[1])},t.prototype.containData=function(t){return this._radiusAxis.containData(t[0])&&this._angleAxis.containData(t[1])},t.prototype.getAxis=function(t){return this["_"+t+"Axis"]},t.prototype.getAxes=function(){return[this._radiusAxis,this._angleAxis]},t.prototype.getAxesByScale=function(t){var e=[],n=this._angleAxis,i=this._radiusAxis;return 
n.scale.type===t&&e.push(n),i.scale.type===t&&e.push(i),e},t.prototype.getAngleAxis=function(){return this._angleAxis},t.prototype.getRadiusAxis=function(){return this._radiusAxis},t.prototype.getOtherAxis=function(t){var e=this._angleAxis;return t===e?this._radiusAxis:e},t.prototype.getBaseAxis=function(){return this.getAxesByScale("ordinal")[0]||this.getAxesByScale("time")[0]||this.getAngleAxis()},t.prototype.getTooltipAxes=function(t){var e=null!=t&&"auto"!==t?this.getAxis(t):this.getBaseAxis();return{baseAxes:[e],otherAxes:[this.getOtherAxis(e)]}},t.prototype.dataToPoint=function(t,e){return this.coordToPoint([this._radiusAxis.dataToRadius(t[0],e),this._angleAxis.dataToAngle(t[1],e)])},t.prototype.pointToData=function(t,e){var n=this.pointToCoord(t);return[this._radiusAxis.radiusToData(n[0],e),this._angleAxis.angleToData(n[1],e)]},t.prototype.pointToCoord=function(t){var e=t[0]-this.cx,n=t[1]-this.cy,i=this.getAngleAxis(),r=i.getExtent(),o=Math.min(r[0],r[1]),a=Math.max(r[0],r[1]);i.inverse?o=a-360:a=o+360;var s=Math.sqrt(e*e+n*n);e/=s,n/=s;for(var l=Math.atan2(-n,e)/Math.PI*180,u=la;)l+=360*u;return[s,l]},t.prototype.coordToPoint=function(t){var e=t[0],n=t[1]/180*Math.PI;return[Math.cos(n)*e+this.cx,-Math.sin(n)*e+this.cy]},t.prototype.getArea=function(){var t=this.getAngleAxis(),e=this.getRadiusAxis().getExtent().slice();e[0]>e[1]&&e.reverse();var n=t.getExtent(),i=Math.PI/180;return{cx:this.cx,cy:this.cy,r0:e[0],r:e[1],startAngle:-n[0]*i,endAngle:-n[1]*i,clockwise:t.inverse,contain:function(t,e){var n=t-this.cx,i=e-this.cy,r=n*n+i*i-1e-4,o=this.r,a=this.r0;return r<=o*o&&r>=a*a}}},t.prototype.convertToPixel=function(t,e,n){return WN(e)===this?this.dataToPoint(n):null},t.prototype.convertFromPixel=function(t,e,n){return WN(e)===this?this.pointToData(n):null},t}();function WN(t){var e=t.seriesModel,n=t.polarModel;return n&&n.coordinateSystem||e&&e.coordinateSystem}function HN(t,e){var 
n=this,i=n.getAngleAxis(),r=n.getRadiusAxis();if(i.scale.setExtent(1/0,-1/0),r.scale.setExtent(1/0,-1/0),t.eachSeries((function(t){if(t.coordinateSystem===n){var e=t.getData();E(M_(e,"radius"),(function(t){r.scale.unionExtentFromData(e,t)})),E(M_(e,"angle"),(function(t){i.scale.unionExtentFromData(e,t)}))}})),v_(i.scale,i.model),v_(r.scale,r.model),"category"===i.type&&!i.onBand){var o=i.getExtent(),a=360/i.scale.count();i.inverse?o[1]+=a:o[1]-=a,i.setExtent(o[0],o[1])}}function YN(t,e){if(t.type=e.get("type"),t.scale=m_(e),t.onBand=e.get("boundaryGap")&&"category"===t.type,t.inverse=e.get("inverse"),function(t){return"angleAxis"===t.mainType}(e)){t.inverse=t.inverse!==e.get("clockwise");var n=e.get("startAngle");t.setExtent(n,n+(t.inverse?-360:360))}e.axis=t,t.model=e}var XN={dimensions:FN,create:function(t,e){var n=[];return t.eachComponent("polar",(function(t,i){var r=new GN(i+"");r.update=HN;var o=r.getRadiusAxis(),a=r.getAngleAxis(),s=t.findAxisModel("radiusAxis"),l=t.findAxisModel("angleAxis");YN(o,s),YN(a,l),function(t,e,n){var i=e.get("center"),r=n.getWidth(),o=n.getHeight();t.cx=Ur(i[0],r),t.cy=Ur(i[1],o);var a=t.getRadiusAxis(),s=Math.min(r,o)/2,l=e.get("radius");null==l?l=[0,"100%"]:Y(l)||(l=[0,l]);var u=[Ur(l[0],s),Ur(l[1],s)];a.inverse?a.setExtent(u[1],u[0]):a.setExtent(u[0],u[1])}(r,t,e),n.push(r),t.coordinateSystem=r,r.model=t})),t.eachSeries((function(t){if("polar"===t.get("coordinateSystem")){var e=t.getReferringComponents("polar",zo).models[0];0,t.coordinateSystem=e.coordinateSystem}})),n}},UN=["axisLine","axisLabel","axisTick","minorTick","splitLine","minorSplitLine","splitArea"];function ZN(t,e,n){e[1]>e[0]&&(e=e.slice().reverse());var i=t.coordToPoint([e[0],n]),r=t.coordToPoint([e[1],n]);return{x1:i[0],y1:i[1],x2:r[0],y2:r[1]}}function jN(t){return t.getRadiusAxis().inverse?0:1}function qN(t){var e=t[0],n=t[t.length-1];e&&n&&Math.abs(Math.abs(e.coord-n.coord)-360)<1e-4&&t.pop()}var KN=function(t){function e(){var 
n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.axisPointerClass="PolarAxisPointer",n}return n(e,t),e.prototype.render=function(t,e){if(this.group.removeAll(),t.get("show")){var n=t.axis,i=n.polar,r=i.getRadiusAxis().getExtent(),o=n.getTicksCoords(),a=n.getMinorTicksCoords(),s=z(n.getViewLabels(),(function(t){t=T(t);var e=n.scale,i="ordinal"===e.type?e.getRawOrdinalNumber(t.tickValue):t.tickValue;return t.coord=n.dataToCoord(i),t}));qN(s),qN(o),E(UN,(function(e){!t.get([e,"show"])||n.scale.isBlank()&&"axisLine"!==e||$N[e](this.group,t,i,o,a,r,s)}),this)}},e.type="angleAxis",e}(yI),$N={axisLine:function(t,e,n,i,r,o){var a,s=e.getModel(["axisLine","lineStyle"]),l=jN(n),u=l?0:1;(a=0===o[u]?new _u({shape:{cx:n.cx,cy:n.cy,r:o[l]},style:s.getLineStyle(),z2:1,silent:!0}):new Bu({shape:{cx:n.cx,cy:n.cy,r:o[l],r0:o[u]},style:s.getLineStyle(),z2:1,silent:!0})).style.fill=null,t.add(a)},axisTick:function(t,e,n,i,r,o){var a=e.getModel("axisTick"),s=(a.get("inside")?-1:1)*a.get("length"),l=o[jN(n)],u=z(i,(function(t){return new Zu({shape:ZN(n,[l,l+s],t.coord)})}));t.add(Ph(u,{style:k(a.getModel("lineStyle").getLineStyle(),{stroke:e.get(["axisLine","lineStyle","color"])})}))},minorTick:function(t,e,n,i,r,o){if(r.length){for(var a=e.getModel("axisTick"),s=e.getModel("minorTick"),l=(a.get("inside")?-1:1)*s.get("length"),u=o[jN(n)],h=[],c=0;cf?"left":"right",v=Math.abs(d[1]-g)/p<.3?"middle":d[1]>g?"top":"bottom";if(s&&s[c]){var m=s[c];q(m)&&m.textStyle&&(a=new Mc(m.textStyle,l,l.ecModel))}var x=new Fs({silent:iI.isLabelSilent(e),style:nc(a,{x:d[0],y:d[1],fill:a.getTextColor()||e.get(["axisLine","lineStyle","color"]),text:i.formattedLabel,align:y,verticalAlign:v})});if(t.add(x),h){var _=iI.makeAxisEventDataBase(e);_.targetType="axisLabel",_.value=i.rawLabel,Qs(x).eventData=_}}),this)},splitLine:function(t,e,n,i,r,o){var a=e.getModel("splitLine").getModel("lineStyle"),s=a.get("color"),l=0;s=s instanceof Array?s:[s];for(var 
u=[],h=0;h=0?"p":"n",T=_;m&&(i[s][M]||(i[s][M]={p:_,n:_}),T=i[s][M][I]);var C=void 0,D=void 0,A=void 0,k=void 0;if("radius"===c.dim){var L=c.dataToCoord(S)-_,P=o.dataToCoord(M);Math.abs(L)=k})}}}))}var oE={startAngle:90,clockwise:!0,splitNumber:12,axisLabel:{rotate:0}},aE={splitNumber:5},sE=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="polar",e}(Tg);function lE(t,e){e=e||{};var n=t.coordinateSystem,i=t.axis,r={},o=i.position,a=i.orient,s=n.getRect(),l=[s.x,s.x+s.width,s.y,s.y+s.height],u={horizontal:{top:l[2],bottom:l[3]},vertical:{left:l[0],right:l[1]}};r.position=["vertical"===a?u.vertical[o]:l[0],"horizontal"===a?u.horizontal[o]:l[3]];r.rotation=Math.PI/2*{horizontal:0,vertical:1}[a];r.labelDirection=r.tickDirection=r.nameDirection={top:-1,bottom:1,right:1,left:-1}[o],t.get(["axisTick","inside"])&&(r.tickDirection=-r.tickDirection),it(e.labelInside,t.get(["axisLabel","inside"]))&&(r.labelDirection=-r.labelDirection);var h=e.rotate;return null==h&&(h=t.get(["axisLabel","rotate"])),r.labelRotation="top"===o?-h:h,r.z2=1,r}var uE=["axisLine","axisTickLabel","axisName"],hE=["splitArea","splitLine"],cE=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.axisPointerClass="SingleAxisPointer",n}return n(e,t),e.prototype.render=function(e,n,i,r){var o=this.group;o.removeAll();var a=this._axisGroup;this._axisGroup=new zr;var s=lE(e),l=new iI(e,s);E(uE,l.add,l),o.add(this._axisGroup),o.add(l.getGroup()),E(hE,(function(t){e.get([t,"show"])&&pE[t](this,this.group,this._axisGroup,e)}),this),Fh(a,this._axisGroup,e),t.prototype.render.call(this,e,n,i,r)},e.prototype.remove=function(){xI(this)},e.type="singleAxis",e}(yI),pE={splitLine:function(t,e,n,i){var r=i.axis;if(!r.scale.isBlank()){var o=i.getModel("splitLine"),a=o.getModel("lineStyle"),s=a.get("color");s=s instanceof Array?s:[s];for(var 
l=a.get("width"),u=i.coordinateSystem.getRect(),h=r.isHorizontal(),c=[],p=0,d=r.getTicksCoords({tickModel:o}),f=[],g=[],y=0;y=e.y&&t[1]<=e.y+e.height:n.contain(n.toLocalCoord(t[1]))&&t[0]>=e.y&&t[0]<=e.y+e.height},t.prototype.pointToData=function(t){var e=this.getAxis();return[e.coordToData(e.toLocalCoord(t["horizontal"===e.orient?0:1]))]},t.prototype.dataToPoint=function(t){var e=this.getAxis(),n=this.getRect(),i=[],r="horizontal"===e.orient?0:1;return t instanceof Array&&(t=t[0]),i[r]=e.toGlobalCoord(e.dataToCoord(+t)),i[1-r]=0===r?n.y+n.height/2:n.x+n.width/2,i},t.prototype.convertToPixel=function(t,e,n){return vE(e)===this?this.dataToPoint(n):null},t.prototype.convertFromPixel=function(t,e,n){return vE(e)===this?this.pointToData(n):null},t}();function vE(t){var e=t.seriesModel,n=t.singleAxisModel;return n&&n.coordinateSystem||e&&e.coordinateSystem}var mE={create:function(t,e){var n=[];return t.eachComponent("singleAxis",(function(i,r){var o=new yE(i,t,e);o.name="single_"+r,o.resize(i,e),i.coordinateSystem=o,n.push(o)})),t.eachSeries((function(t){if("singleAxis"===t.get("coordinateSystem")){var e=t.getReferringComponents("singleAxis",zo).models[0];t.coordinateSystem=e&&e.coordinateSystem}})),n},dimensions:gE},xE=["x","y"],_E=["width","height"],bE=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.makeElOption=function(t,e,n,i,r){var o=n.axis,a=o.coordinateSystem,s=ME(a,1-SE(o)),l=a.dataToPoint(e)[0],u=i.get("type");if(u&&"none"!==u){var h=nN(i),c=wE[u](o,l,s);c.style=h,t.graphicKey=c.type,t.pointer=c}aN(e,t,lE(n),n,i,r)},e.prototype.getHandleTransform=function(t,e,n){var i=lE(e,{labelInside:!1});i.labelMargin=n.get(["handle","margin"]);var r=oN(e.axis,t,i);return{x:r[0],y:r[1],rotation:i.rotation+(i.labelDirection<0?Math.PI:0)}},e.prototype.updateHandleTransform=function(t,e,n,i){var r=n.axis,o=r.coordinateSystem,a=SE(r),s=ME(o,a),l=[t.x,t.y];l[a]+=e[a],l[a]=Math.min(s[1],l[a]),l[a]=Math.max(s[0],l[a]);var 
u=ME(o,1-a),h=(u[1]+u[0])/2,c=[h,h];return c[a]=l[a],{x:l[0],y:l[1],rotation:t.rotation,cursorPoint:c,tooltipOption:{verticalAlign:"middle"}}},e}(KR),wE={line:function(t,e,n){return{type:"Line",subPixelOptimize:!0,shape:sN([e,n[0]],[e,n[1]],SE(t))}},shadow:function(t,e,n){var i=t.getBandWidth(),r=n[1]-n[0];return{type:"Rect",shape:lN([e-i/2,n[0]],[i,r],SE(t))}}};function SE(t){return t.isHorizontal()?0:1}function ME(t,e){var n=t.getRect();return[n[xE[e]],n[xE[e]]+n[_E[e]]]}var IE=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="single",e}(Tg);var TE=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.init=function(e,n,i){var r=Lp(e);t.prototype.init.apply(this,arguments),CE(e,r)},e.prototype.mergeOption=function(e){t.prototype.mergeOption.apply(this,arguments),CE(this.option,e)},e.prototype.getCellSize=function(){return this.option.cellSize},e.type="calendar",e.defaultOption={z:2,left:80,top:60,cellSize:20,orient:"horizontal",splitLine:{show:!0,lineStyle:{color:"#000",width:1,type:"solid"}},itemStyle:{color:"#fff",borderWidth:1,borderColor:"#ccc"},dayLabel:{show:!0,firstDay:0,position:"start",margin:"50%",color:"#000"},monthLabel:{show:!0,position:"start",margin:5,align:"center",formatter:null,color:"#000"},yearLabel:{show:!0,position:null,margin:30,formatter:null,color:"#ccc",fontFamily:"sans-serif",fontWeight:"bolder",fontSize:20}},e}(Rp);function CE(t,e){var n,i=t.cellSize;1===(n=Y(i)?i:t.cellSize=[i,i]).length&&(n[1]=n[0]);var r=z([0,1],(function(t){return function(t,e){return null!=t[Mp[e][0]]||null!=t[Mp[e][1]]&&null!=t[Mp[e][2]]}(e,t)&&(n[t]="auto"),null!=n[t]&&"auto"!==n[t]}));kp(t,e,{type:"box",ignoreSize:r})}var DE=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){var i=this.group;i.removeAll();var 
r=t.coordinateSystem,o=r.getRangeInfo(),a=r.getOrient(),s=e.getLocaleModel();this._renderDayRect(t,o,i),this._renderLines(t,o,a,i),this._renderYearText(t,o,a,i),this._renderMonthText(t,s,a,i),this._renderWeekText(t,s,o,a,i)},e.prototype._renderDayRect=function(t,e,n){for(var i=t.coordinateSystem,r=t.getModel("itemStyle").getItemStyle(),o=i.getCellWidth(),a=i.getCellHeight(),s=e.start.time;s<=e.end.time;s=i.getNextNDay(s,1).time){var l=i.dataToRect([s],!1).tl,u=new zs({shape:{x:l[0],y:l[1],width:o,height:a},cursor:"default",style:r});n.add(u)}},e.prototype._renderLines=function(t,e,n,i){var r=this,o=t.coordinateSystem,a=t.getModel(["splitLine","lineStyle"]).getLineStyle(),s=t.get(["splitLine","show"]),l=a.lineWidth;this._tlpoints=[],this._blpoints=[],this._firstDayOfMonth=[],this._firstDayPoints=[];for(var u=e.start,h=0;u.time<=e.end.time;h++){p(u.formatedDate),0===h&&(u=o.getDateInfo(e.start.y+"-"+e.start.m));var c=u.date;c.setMonth(c.getMonth()+1),u=o.getDateInfo(c)}function p(e){r._firstDayOfMonth.push(o.getDateInfo(e)),r._firstDayPoints.push(o.dataToRect([e],!1).tl);var l=r._getLinePointsOfOneWeek(t,e,n);r._tlpoints.push(l[0]),r._blpoints.push(l[l.length-1]),s&&r._drawSplitline(l,a,i)}p(o.getNextNDay(e.end.time,1).formatedDate),s&&this._drawSplitline(r._getEdgesPoints(r._tlpoints,l,n),a,i),s&&this._drawSplitline(r._getEdgesPoints(r._blpoints,l,n),a,i)},e.prototype._getEdgesPoints=function(t,e,n){var i=[t[0].slice(),t[t.length-1].slice()],r="horizontal"===n?0:1;return i[0][r]=i[0][r]-e/2,i[1][r]=i[1][r]+e/2,i},e.prototype._drawSplitline=function(t,e,n){var i=new Yu({z2:20,shape:{points:t},style:e});n.add(i)},e.prototype._getLinePointsOfOneWeek=function(t,e,n){for(var i=t.coordinateSystem,r=i.getDateInfo(e),o=[],a=0;a<7;a++){var s=i.getNextNDay(r.time,a),l=i.dataToRect([s.time],!1);o[2*s.day]=l.tl,o[2*s.day+1]=l["horizontal"===n?"bl":"tr"]}return o},e.prototype._formatterLabel=function(t,e){return 
U(t)&&t?(n=t,E(e,(function(t,e){n=n.replace("{"+e+"}",i?re(t):t)})),n):X(t)?t(e):e.nameMap;var n,i},e.prototype._yearTextPositionControl=function(t,e,n,i,r){var o=e[0],a=e[1],s=["center","bottom"];"bottom"===i?(a+=r,s=["center","top"]):"left"===i?o-=r:"right"===i?(o+=r,s=["center","top"]):a-=r;var l=0;return"left"!==i&&"right"!==i||(l=Math.PI/2),{rotation:l,x:o,y:a,style:{align:s[0],verticalAlign:s[1]}}},e.prototype._renderYearText=function(t,e,n,i){var r=t.getModel("yearLabel");if(r.get("show")){var o=r.get("margin"),a=r.get("position");a||(a="horizontal"!==n?"top":"left");var s=[this._tlpoints[this._tlpoints.length-1],this._blpoints[0]],l=(s[0][0]+s[1][0])/2,u=(s[0][1]+s[1][1])/2,h="horizontal"===n?0:1,c={top:[l,s[h][1]],bottom:[l,s[1-h][1]],left:[s[1-h][0],u],right:[s[h][0],u]},p=e.start.y;+e.end.y>+e.start.y&&(p=p+"-"+e.end.y);var d=r.get("formatter"),f={start:e.start.y,end:e.end.y,nameMap:p},g=this._formatterLabel(d,f),y=new Fs({z2:30,style:nc(r,{text:g})});y.attr(this._yearTextPositionControl(y,c[a],n,a,o)),i.add(y)}},e.prototype._monthTextPositionControl=function(t,e,n,i,r){var o="left",a="top",s=t[0],l=t[1];return"horizontal"===n?(l+=r,e&&(o="center"),"start"===i&&(a="bottom")):(s+=r,e&&(a="middle"),"start"===i&&(o="right")),{x:s,y:l,align:o,verticalAlign:a}},e.prototype._renderMonthText=function(t,e,n,i){var r=t.getModel("monthLabel");if(r.get("show")){var o=r.get("nameMap"),a=r.get("margin"),s=r.get("position"),l=r.get("align"),u=[this._tlpoints,this._blpoints];o&&!U(o)||(o&&(e=Nc(o)||e),o=e.get(["time","monthAbbr"])||[]);var h="start"===s?0:1,c="horizontal"===n?0:1;a="start"===s?-a:a;for(var p="center"===l,d=0;d=i.start.time&&n.timea.end.time&&t.reverse(),t},t.prototype._getRangeInfo=function(t){var e,n=[this.getDateInfo(t[0]),this.getDateInfo(t[1])];n[0].time>n[1].time&&(e=!0,n.reverse());var i=Math.floor(n[1].time/AE)-Math.floor(n[0].time/AE)+1,r=new Date(n[0].time),o=r.getDate(),a=n[1].date.getDate();r.setDate(o+i-1);var s=r.getDate();if(s!==a)for(var 
l=r.getTime()-n[1].time>0?1:-1;(s=r.getDate())!==a&&(r.getTime()-n[1].time)*l>0;)i-=l,r.setDate(s-l);var u=Math.floor((i+n[0].day+6)/7),h=e?1-u:u-1;return e&&n.reverse(),{range:[n[0].formatedDate,n[1].formatedDate],start:n[0],end:n[1],allDay:i,weeks:u,nthWeek:h,fweek:n[0].day,lweek:n[1].day}},t.prototype._getDateByWeeksAndDay=function(t,e,n){var i=this._getRangeInfo(n);if(t>i.weeks||0===t&&ei.lweek)return null;var r=7*(t-1)-i.fweek+e,o=new Date(i.start.time);return o.setDate(+i.start.d+r),this.getDateInfo(o)},t.create=function(e,n){var i=[];return e.eachComponent("calendar",(function(r){var o=new t(r,e,n);i.push(o),r.coordinateSystem=o})),e.eachSeries((function(t){"calendar"===t.get("coordinateSystem")&&(t.coordinateSystem=i[t.get("calendarIndex")||0])})),i},t.dimensions=["time","value"],t}();function LE(t){var e=t.calendarModel,n=t.seriesModel;return e?e.coordinateSystem:n?n.coordinateSystem:null}function PE(t,e){var n;return E(e,(function(e){null!=t[e]&&"auto"!==t[e]&&(n=!0)})),n}var OE=["transition","enterFrom","leaveTo"],RE=OE.concat(["enterAnimation","updateAnimation","leaveAnimation"]);function NE(t,e,n){if(n&&(!t[n]&&e[n]&&(t[n]={}),t=t[n],e=e[n]),t&&e)for(var i=n?OE:RE,r=0;r=0;l--){var p,d,f;if(f=null!=(d=Ao((p=n[l]).id,null))?r.get(d):null){var g=f.parent,y=(c=VE(g),{}),v=Dp(f,p,g===i?{width:o,height:a}:{width:c.width,height:c.height},null,{hv:p.hv,boundingMode:p.bounding},y);if(!VE(f).isNew&&v){for(var m=p.transition,x={},_=0;_=0)?x[b]=w:f[b]=w}fh(f,x,t,0)}else f.attr(y)}}},e.prototype._clear=function(){var t=this,e=this._elMap;e.each((function(n){WE(n,VE(n).option,e,t._lastGraphicModel)})),this._elMap=yt()},e.prototype.dispose=function(){this._clear()},e.type="graphic",e}(Tg);function FE(t){var e=_t(zE,t)?zE[t]:Dh(t);var n=new e({});return VE(n).type=t,n}function GE(t,e,n,i){var r=FE(n);return e.add(r),i.set(t,r),VE(r).id=t,VE(r).isNew=!0,r}function 
WE(t,e,n,i){t&&t.parent&&("group"===t.type&&t.traverse((function(t){WE(t,e,n,i)})),oR(t,e,i),n.removeKey(VE(t).id))}function HE(t,e,n,i){t.isGroup||E([["cursor",Sa.prototype.cursor],["zlevel",i||0],["z",n||0],["z2",0]],(function(n){var i=n[0];_t(e,i)?t[i]=rt(e[i],n[1]):null==t[i]&&(t[i]=n[1])})),E(G(e),(function(n){if(0===n.indexOf("on")){var i=e[n];t[n]=X(i)?i:null}})),_t(e,"draggable")&&(t.draggable=e.draggable),null!=e.name&&(t.name=e.name),null!=e.id&&(t.id=e.id)}var YE=["x","y","radius","angle","single"],XE=["cartesian2d","polar","singleAxis"];function UE(t){return t+"Axis"}function ZE(t,e){var n,i=yt(),r=[],o=yt();t.eachComponent({mainType:"dataZoom",query:e},(function(t){o.get(t.uid)||s(t)}));do{n=!1,t.eachComponent("dataZoom",a)}while(n);function a(t){!o.get(t.uid)&&function(t){var e=!1;return t.eachTargetAxis((function(t,n){var r=i.get(t);r&&r[n]&&(e=!0)})),e}(t)&&(s(t),n=!0)}function s(t){o.set(t.uid,!0),r.push(t),t.eachTargetAxis((function(t,e){(i.get(t)||i.set(t,[]))[e]=!0}))}return r}function jE(t){var e=t.ecModel,n={infoList:[],infoMap:yt()};return t.eachTargetAxis((function(t,i){var r=e.getComponent(UE(t),i);if(r){var o=r.getCoordSysModel();if(o){var a=o.uid,s=n.infoMap.get(a);s||(s={model:o,axisModels:[]},n.infoList.push(s),n.infoMap.set(a,s)),s.axisModels.push(r)}}})),n}var qE=function(){function t(){this.indexList=[],this.indexMap=[]}return t.prototype.add=function(t){this.indexMap[t]||(this.indexList.push(t),this.indexMap[t]=!0)},t}(),KE=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n._autoThrottle=!0,n._noTarget=!0,n._rangePropMode=["percent","percent"],n}return n(e,t),e.prototype.init=function(t,e,n){var i=$E(t);this.settledOption=i,this.mergeDefaultAndTheme(t,n),this._doInit(i)},e.prototype.mergeOption=function(t){var e=$E(t);C(this.option,t,!0),C(this.settledOption,e,!0),this._doInit(e)},e.prototype._doInit=function(t){var e=this.option;this._setDefaultThrottle(t),this._updateRangeUse(t);var 
n=this.settledOption;E([["start","startValue"],["end","endValue"]],(function(t,i){"value"===this._rangePropMode[i]&&(e[t[0]]=n[t[0]]=null)}),this),this._resetTarget()},e.prototype._resetTarget=function(){var t=this.get("orient",!0),e=this._targetAxisInfoMap=yt();this._fillSpecifiedTargetAxis(e)?this._orient=t||this._makeAutoOrientByTargetAxis():(this._orient=t||"horizontal",this._fillAutoTargetAxisByOrient(e,this._orient)),this._noTarget=!0,e.each((function(t){t.indexList.length&&(this._noTarget=!1)}),this)},e.prototype._fillSpecifiedTargetAxis=function(t){var e=!1;return E(YE,(function(n){var i=this.getReferringComponents(UE(n),Vo);if(i.specified){e=!0;var r=new qE;E(i.models,(function(t){r.add(t.componentIndex)})),t.set(n,r)}}),this),e},e.prototype._fillAutoTargetAxisByOrient=function(t,e){var n=this.ecModel,i=!0;if(i){var r="vertical"===e?"y":"x";o(n.findComponents({mainType:r+"Axis"}),r)}i&&o(n.findComponents({mainType:"singleAxis",filter:function(t){return t.get("orient",!0)===e}}),"single");function o(e,n){var r=e[0];if(r){var o=new qE;if(o.add(r.componentIndex),t.set(n,o),i=!1,"x"===n||"y"===n){var a=r.getReferringComponents("grid",zo).models[0];a&&E(e,(function(t){r.componentIndex!==t.componentIndex&&a===t.getReferringComponents("grid",zo).models[0]&&o.add(t.componentIndex)}))}}}i&&E(YE,(function(e){if(i){var r=n.findComponents({mainType:UE(e),filter:function(t){return"category"===t.get("type",!0)}});if(r[0]){var o=new qE;o.add(r[0].componentIndex),t.set(e,o),i=!1}}}),this)},e.prototype._makeAutoOrientByTargetAxis=function(){var t;return this.eachTargetAxis((function(e){!t&&(t=e)}),this),"y"===t?"vertical":"horizontal"},e.prototype._setDefaultThrottle=function(t){if(t.hasOwnProperty("throttle")&&(this._autoThrottle=!1),this._autoThrottle){var e=this.ecModel.option;this.option.throttle=e.animation&&e.animationDurationUpdate>0?100:20}},e.prototype._updateRangeUse=function(t){var 
e=this._rangePropMode,n=this.get("rangeMode");E([["start","startValue"],["end","endValue"]],(function(i,r){var o=null!=t[i[0]],a=null!=t[i[1]];o&&!a?e[r]="percent":!o&&a?e[r]="value":n?e[r]=n[r]:o&&(e[r]="percent")}))},e.prototype.noTarget=function(){return this._noTarget},e.prototype.getFirstTargetAxisModel=function(){var t;return this.eachTargetAxis((function(e,n){null==t&&(t=this.ecModel.getComponent(UE(e),n))}),this),t},e.prototype.eachTargetAxis=function(t,e){this._targetAxisInfoMap.each((function(n,i){E(n.indexList,(function(n){t.call(e,i,n)}))}))},e.prototype.getAxisProxy=function(t,e){var n=this.getAxisModel(t,e);if(n)return n.__dzAxisProxy},e.prototype.getAxisModel=function(t,e){var n=this._targetAxisInfoMap.get(t);if(n&&n.indexMap[e])return this.ecModel.getComponent(UE(t),e)},e.prototype.setRawRange=function(t){var e=this.option,n=this.settledOption;E([["start","startValue"],["end","endValue"]],(function(i){null==t[i[0]]&&null==t[i[1]]||(e[i[0]]=n[i[0]]=t[i[0]],e[i[1]]=n[i[1]]=t[i[1]])}),this),this._updateRangeUse(t)},e.prototype.setCalculatedRange=function(t){var e=this.option;E(["start","startValue","end","endValue"],(function(n){e[n]=t[n]}))},e.prototype.getPercentRange=function(){var t=this.findRepresentativeAxisProxy();if(t)return t.getDataPercentWindow()},e.prototype.getValueRange=function(t,e){if(null!=t||null!=e)return this.getAxisProxy(t,e).getDataValueWindow();var n=this.findRepresentativeAxisProxy();return n?n.getDataValueWindow():void 0},e.prototype.findRepresentativeAxisProxy=function(t){if(t)return t.__dzAxisProxy;for(var e,n=this._targetAxisInfoMap.keys(),i=0;i=0}(e)){var n=UE(this._dimName),i=e.getReferringComponents(n,zo).models[0];i&&this._axisIndex===i.componentIndex&&t.push(e)}}),this),t},t.prototype.getAxisModel=function(){return this.ecModel.getComponent(this._dimName+"Axis",this._axisIndex)},t.prototype.getMinMaxSpan=function(){return T(this._minMaxSpan)},t.prototype.calculateDataWindow=function(t){var 
e,n=this._dataExtent,i=this.getAxisModel().axis.scale,r=this._dataZoomModel.getRangePropMode(),o=[0,100],a=[],s=[];ez(["start","end"],(function(l,u){var h=t[l],c=t[l+"Value"];"percent"===r[u]?(null==h&&(h=o[u]),c=i.parse(Xr(h,o,n))):(e=!0,h=Xr(c=null==c?n[u]:i.parse(c),n,o)),s[u]=null==c||isNaN(c)?n[u]:c,a[u]=null==h||isNaN(h)?o[u]:h})),nz(s),nz(a);var l=this._minMaxSpan;function u(t,e,n,r,o){var a=o?"Span":"ValueSpan";Ck(0,t,n,"all",l["min"+a],l["max"+a]);for(var s=0;s<2;s++)e[s]=Xr(t[s],n,r,!0),o&&(e[s]=i.parse(e[s]))}return e?u(s,a,n,o,!1):u(a,s,o,n,!0),{valueWindow:s,percentWindow:a}},t.prototype.reset=function(t){if(t===this._dataZoomModel){var e=this.getTargetSeriesModels();this._dataExtent=function(t,e,n){var i=[1/0,-1/0];ez(n,(function(t){!function(t,e,n){e&&E(M_(e,n),(function(n){var i=e.getApproximateExtent(n);i[0]t[1]&&(t[1]=i[1])}))}(i,t.getData(),e)}));var r=t.getAxisModel(),o=f_(r.axis.scale,r,i).calculate();return[o.min,o.max]}(this,this._dimName,e),this._updateMinMaxSpan();var n=this.calculateDataWindow(t.settledOption);this._valueWindow=n.valueWindow,this._percentWindow=n.percentWindow,this._setAxisModel()}},t.prototype.filterData=function(t,e){if(t===this._dataZoomModel){var n=this._dimName,i=this.getTargetSeriesModels(),r=t.get("filterMode"),o=this._valueWindow;"none"!==r&&ez(i,(function(t){var e=t.getData(),i=e.mapDimensionsAll(n);if(i.length){if("weakFilter"===r){var a=e.getStore(),s=z(i,(function(t){return e.getDimensionIndex(t)}),e);e.filterSelf((function(t){for(var e,n,r,l=0;lo[1];if(h&&!c&&!p)return!0;h&&(r=!0),c&&(e=!0),p&&(n=!0)}return r&&e&&n}))}else ez(i,(function(n){if("empty"===r)t.setData(e=e.map(n,(function(t){return function(t){return t>=o[0]&&t<=o[1]}(t)?t:NaN})));else{var i={};i[n]=o,e.selectRange(i)}}));ez(i,(function(t){e.setApproximateExtent(o,t)}))}}))}},t.prototype._updateMinMaxSpan=function(){var t=this._minMaxSpan={},e=this._dataZoomModel,n=this._dataExtent;ez(["min","max"],(function(i){var 
r=e.get(i+"Span"),o=e.get(i+"ValueSpan");null!=o&&(o=this.getAxisModel().axis.scale.parse(o)),null!=o?r=Xr(n[0]+o,n,[0,100],!0):null!=r&&(o=Xr(r,[0,100],n,!0)-n[0]),t[i+"Span"]=r,t[i+"ValueSpan"]=o}),this)},t.prototype._setAxisModel=function(){var t=this.getAxisModel(),e=this._percentWindow,n=this._valueWindow;if(e){var i=$r(n,[0,500]);i=Math.min(i,20);var r=t.axis.scale.rawExtentInfo;0!==e[0]&&r.setDeterminedMinMax("min",+n[0].toFixed(i)),100!==e[1]&&r.setDeterminedMinMax("max",+n[1].toFixed(i)),r.freeze()}},t}();var rz={getTargetSeries:function(t){function e(e){t.eachComponent("dataZoom",(function(n){n.eachTargetAxis((function(i,r){var o=t.getComponent(UE(i),r);e(i,r,o,n)}))}))}e((function(t,e,n,i){n.__dzAxisProxy=null}));var n=[];e((function(e,i,r,o){r.__dzAxisProxy||(r.__dzAxisProxy=new iz(e,i,o,t),n.push(r.__dzAxisProxy))}));var i=yt();return E(n,(function(t){E(t.getTargetSeriesModels(),(function(t){i.set(t.uid,t)}))})),i},overallReset:function(t,e){t.eachComponent("dataZoom",(function(t){t.eachTargetAxis((function(e,n){t.getAxisProxy(e,n).reset(t)})),t.eachTargetAxis((function(n,i){t.getAxisProxy(n,i).filterData(t,e)}))})),t.eachComponent("dataZoom",(function(t){var e=t.findRepresentativeAxisProxy();if(e){var n=e.getDataPercentWindow(),i=e.getDataValueWindow();t.setCalculatedRange({start:n[0],end:n[1],startValue:i[0],endValue:i[1]})}}))}};var oz=!1;function az(t){oz||(oz=!0,t.registerProcessor(t.PRIORITY.PROCESSOR.FILTER,rz),function(t){t.registerAction("dataZoom",(function(t,e){E(ZE(e,t),(function(e){e.setRawRange({start:t.start,end:t.end,startValue:t.startValue,endValue:t.endValue})}))}))}(t),t.registerSubTypeDefaulter("dataZoom",(function(){return"slider"})))}function sz(t){t.registerComponentModel(JE),t.registerComponentView(tz),az(t)}var lz=function(){},uz={};function hz(t,e){uz[t]=e}function cz(t){return uz[t]}var pz=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return 
n(e,t),e.prototype.optionUpdated=function(){t.prototype.optionUpdated.apply(this,arguments);var e=this.ecModel;E(this.option.feature,(function(t,n){var i=cz(n);i&&(i.getDefaultOption&&(i.defaultOption=i.getDefaultOption(e)),C(t,i.defaultOption))}))},e.type="toolbox",e.layoutMode={type:"box",ignoreSize:!0},e.defaultOption={show:!0,z:6,orient:"horizontal",left:"right",top:"top",backgroundColor:"transparent",borderColor:"#ccc",borderRadius:0,borderWidth:0,padding:5,itemSize:15,itemGap:8,showTitle:!0,iconStyle:{borderColor:"#666",color:"none"},emphasis:{iconStyle:{borderColor:"#3E98C5"}},tooltip:{show:!1,position:"bottom"}},e}(Rp);function dz(t,e){var n=fp(e.get("padding")),i=e.getItemStyle(["color","opacity"]);return i.fill=e.get("backgroundColor"),t=new zs({shape:{x:t.x-n[3],y:t.y-n[0],width:t.width+n[1]+n[3],height:t.height+n[0]+n[2],r:e.get("borderRadius")},style:i,silent:!0,z2:-1})}var fz=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.render=function(t,e,n,i){var r=this.group;if(r.removeAll(),t.get("show")){var o=+t.get("itemSize"),a="vertical"===t.get("orient"),s=t.get("feature")||{},l=this._features||(this._features={}),u=[];E(s,(function(t,e){u.push(e)})),new Vm(this._featureNames||[],u).add(h).update(h).remove(H(h,null)).execute(),this._featureNames=u,function(t,e,n){var i=e.getBoxLayoutParams(),r=e.get("padding"),o={width:n.getWidth(),height:n.getHeight()},a=Cp(i,o,r);Tp(e.get("orient"),t,e.get("itemGap"),a.width,a.height),Dp(t,i,o,r)}(r,t,n),r.add(dz(r.getBoundingRect(),t)),a||r.eachChild((function(t){var e=t.__title,i=t.ensureState("emphasis"),a=i.textConfig||(i.textConfig={}),s=t.getTextContent(),l=s&&s.ensureState("emphasis");if(l&&!X(l)&&e){var u=l.style||(l.style={}),h=br(e,Fs.makeFont(u)),c=t.x+r.x,p=!1;t.y+r.y+o+h.height>n.getHeight()&&(a.position="top",p=!0);var 
d=p?-5-h.height:o+10;c+h.width/2>n.getWidth()?(a.position=["100%",d],u.align="right"):c-h.width/2<0&&(a.position=[0,d],u.align="left")}}))}function h(h,c){var p,d=u[h],f=u[c],g=s[d],y=new Mc(g,t,t.ecModel);if(i&&null!=i.newTitle&&i.featureName===d&&(g.title=i.newTitle),d&&!f){if(function(t){return 0===t.indexOf("my")}(d))p={onclick:y.option.onclick,featureName:d};else{var v=cz(d);if(!v)return;p=new v}l[d]=p}else if(!(p=l[f]))return;p.uid=Tc("toolbox-feature"),p.model=y,p.ecModel=e,p.api=n;var m=p instanceof lz;d||!f?!y.get("show")||m&&p.unusable?m&&p.remove&&p.remove(e,n):(!function(i,s,l){var u,h,c=i.getModel("iconStyle"),p=i.getModel(["emphasis","iconStyle"]),d=s instanceof lz&&s.getIcons?s.getIcons():i.get("icon"),f=i.get("title")||{};U(d)?(u={})[l]=d:u=d;U(f)?(h={})[l]=f:h=f;var g=i.iconPaths={};E(u,(function(l,u){var d=Hh(l,{},{x:-o/2,y:-o/2,width:o,height:o});d.setStyle(c.getItemStyle()),d.ensureState("emphasis").style=p.getItemStyle();var f=new Fs({style:{text:h[u],align:p.get("textAlign"),borderRadius:p.get("textBorderRadius"),padding:p.get("textPadding"),fill:null},ignore:!0});d.setTextContent(f),Zh({el:d,componentModel:t,itemName:u,formatterParamsExtra:{title:h[u]}}),d.__title=h[u],d.on("mouseover",(function(){var e=p.getItemStyle(),i=a?null==t.get("right")&&"right"!==t.get("left")?"right":"left":null==t.get("bottom")&&"bottom"!==t.get("top")?"bottom":"top";f.setStyle({fill:p.get("textFill")||e.fill||e.stroke||"#000",backgroundColor:p.get("textBackgroundColor")}),d.setTextConfig({position:p.get("textPosition")||i}),f.ignore=!t.get("showTitle"),n.enterEmphasis(this)})).on("mouseout",(function(){"emphasis"!==i.get(["iconStatus",u])&&n.leaveEmphasis(this),f.hide()})),("emphasis"===i.get(["iconStatus",u])?kl:Ll)(d),r.add(d),d.on("click",W(s.onclick,s,e,n,u)),g[u]=d}))}(y,p,d),y.setIconStatus=function(t,e){var n=this.option,i=this.iconPaths;n.iconStatus=n.iconStatus||{},n.iconStatus[t]=e,i[t]&&("emphasis"===e?kl:Ll)(i[t])},p instanceof 
lz&&p.render&&p.render(y,e,n,i)):m&&p.dispose&&p.dispose(e,n)}},e.prototype.updateView=function(t,e,n,i){E(this._features,(function(t){t instanceof lz&&t.updateView&&t.updateView(t.model,e,n,i)}))},e.prototype.remove=function(t,e){E(this._features,(function(n){n instanceof lz&&n.remove&&n.remove(t,e)})),this.group.removeAll()},e.prototype.dispose=function(t,e){E(this._features,(function(n){n instanceof lz&&n.dispose&&n.dispose(t,e)}))},e.type="toolbox",e}(Tg);var gz=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.onclick=function(t,e){var n=this.model,i=n.get("name")||t.get("title.0.text")||"echarts",o="svg"===e.getZr().painter.getType(),a=o?"svg":n.get("type",!0)||"png",s=e.getConnectedDataURL({type:a,backgroundColor:n.get("backgroundColor",!0)||t.get("backgroundColor")||"#fff",connectedBackgroundColor:n.get("connectedBackgroundColor"),excludeComponents:n.get("excludeComponents"),pixelRatio:n.get("pixelRatio")}),l=r.browser;if(X(MouseEvent)&&(l.newEdge||!l.ie&&!l.edge)){var u=document.createElement("a");u.download=i+"."+a,u.target="_blank",u.href=s;var h=new MouseEvent("click",{view:document.defaultView,bubbles:!0,cancelable:!1});u.dispatchEvent(h)}else if(window.navigator.msSaveOrOpenBlob||o){var c=s.split(","),p=c[0].indexOf("base64")>-1,d=o?decodeURIComponent(c[1]):c[1];p&&(d=window.atob(d));var f=i+"."+a;if(window.navigator.msSaveOrOpenBlob){for(var g=d.length,y=new Uint8Array(g);g--;)y[g]=d.charCodeAt(g);var v=new Blob([y]);window.navigator.msSaveOrOpenBlob(v,f)}else{var m=document.createElement("iframe");document.body.appendChild(m);var x=m.contentWindow,_=x.document;_.open("image/svg+xml","replace"),_.write(d),_.close(),x.focus(),_.execCommand("SaveAs",!0,f),document.body.removeChild(m)}}else{var 
b=n.get("lang"),w='',S=window.open();S.document.write(w),S.document.title=i}},e.getDefaultOption=function(t){return{show:!0,icon:"M4.7,22.9L29.3,45.5L54.7,23.4M4.6,43.6L4.6,58L53.8,58L53.8,43.6M29.2,45.1L29.2,0",title:t.getLocaleModel().get(["toolbox","saveAsImage","title"]),type:"png",connectedBackgroundColor:"#fff",name:"",excludeComponents:["toolbox"],lang:t.getLocaleModel().get(["toolbox","saveAsImage","lang"])}},e}(lz),yz="__ec_magicType_stack__",vz=[["line","bar"],["stack"]],mz=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.getIcons=function(){var t=this.model,e=t.get("icon"),n={};return E(t.get("type"),(function(t){e[t]&&(n[t]=e[t])})),n},e.getDefaultOption=function(t){return{show:!0,type:[],icon:{line:"M4.1,28.9h7.1l9.3-22l7.4,38l9.7-19.7l3,12.8h14.9M4.1,58h51.4",bar:"M6.7,22.9h10V48h-10V22.9zM24.9,13h10v35h-10V13zM43.2,2h10v46h-10V2zM3.1,58h53.7",stack:"M8.2,38.4l-8.4,4.1l30.6,15.3L60,42.5l-8.1-4.1l-21.5,11L8.2,38.4z M51.9,30l-8.1,4.2l-13.4,6.9l-13.9-6.9L8.2,30l-8.4,4.2l8.4,4.2l22.2,11l21.5-11l8.1-4.2L51.9,30z M51.9,21.7l-8.1,4.2L35.7,30l-5.3,2.8L24.9,30l-8.4-4.1l-8.3-4.2l-8.4,4.2L8.2,30l8.3,4.2l13.9,6.9l13.4-6.9l8.1-4.2l8.1-4.1L51.9,21.7zM30.4,2.2L-0.2,17.5l8.4,4.1l8.3,4.2l8.4,4.2l5.5,2.7l5.3-2.7l8.1-4.2l8.1-4.2l8.1-4.1L30.4,2.2z"},title:t.getLocaleModel().get(["toolbox","magicType","title"]),option:{},seriesIndex:{}}},e.prototype.onclick=function(t,e,n){var i=this.model,r=i.get(["seriesIndex",n]);if(xz[n]){var o,a={series:[]};E(vz,(function(t){P(t,n)>=0&&E(t,(function(t){i.setIconStatus(t,"normal")}))})),i.setIconStatus(n,"emphasis"),t.eachComponent({mainType:"series",query:null==r?null:{seriesIndex:r}},(function(t){var e=t.subType,r=t.id,o=xz[n](e,r,t,i);o&&(k(o,t.option),a.series.push(o));var s=t.coordinateSystem;if(s&&"cartesian2d"===s.type&&("line"===n||"bar"===n)){var l=s.getAxesByScale("ordinal")[0];if(l){var 
u=l.dim+"Axis",h=t.getReferringComponents(u,zo).models[0].componentIndex;a[u]=a[u]||[];for(var c=0;c<=h;c++)a[u][h]=a[u][h]||{};a[u][h].boundaryGap="bar"===n}}}));var s=n;"stack"===n&&(o=C({stack:i.option.title.tiled,tiled:i.option.title.stack},i.option.title),"emphasis"!==i.get(["iconStatus",n])&&(s="tiled")),e.dispatchAction({type:"changeMagicType",currentType:s,newOption:a,newTitle:o,featureName:"magicType"})}},e}(lz),xz={line:function(t,e,n,i){if("bar"===t)return C({id:e,type:"line",data:n.get("data"),stack:n.get("stack"),markPoint:n.get("markPoint"),markLine:n.get("markLine")},i.get(["option","line"])||{},!0)},bar:function(t,e,n,i){if("line"===t)return C({id:e,type:"bar",data:n.get("data"),stack:n.get("stack"),markPoint:n.get("markPoint"),markLine:n.get("markLine")},i.get(["option","bar"])||{},!0)},stack:function(t,e,n,i){var r=n.get("stack")===yz;if("line"===t||"bar"===t)return i.setIconStatus("stack",r?"normal":"emphasis"),C({id:e,stack:r?"":yz},i.get(["option","stack"])||{},!0)}};Mm({type:"changeMagicType",event:"magicTypeChanged",update:"prepareAndUpdate"},(function(t,e){e.mergeOption(t.newOption)}));var _z=new Array(60).join("-"),bz="\t";function wz(t){return t.replace(/^\s\s*/,"").replace(/\s\s*$/,"")}var Sz=new RegExp("[\t]+","g");function Mz(t,e){var n=t.split(new RegExp("\n*"+_z+"\n*","g")),i={series:[]};return E(n,(function(t,n){if(function(t){if(t.slice(0,t.indexOf("\n")).indexOf(bz)>=0)return!0}(t)){var r=function(t){for(var e=t.split(/\n+/g),n=[],i=z(wz(e.shift()).split(Sz),(function(t){return{name:t,data:[]}})),r=0;r=0)&&t(r,i._targetInfoList)}))}return t.prototype.setOutputRanges=function(t,e){return this.matchOutputRanges(t,e,(function(t,e,n){if((t.coordRanges||(t.coordRanges=[])).push(e),!t.coordRange){t.coordRange=e;var i=Vz[t.brushType](0,n,e);t.__rangeOffset={offset:Fz[t.brushType](i.values,t.range,[1,1]),xyMinMax:i.xyMinMax}}})),t},t.prototype.matchOutputRanges=function(t,e,n){E(t,(function(t){var 
i=this.findTargetInfo(t,e);i&&!0!==i&&E(i.coordSyses,(function(i){var r=Vz[t.brushType](1,i,t.range,!0);n(t,r.values,i,e)}))}),this)},t.prototype.setInputRanges=function(t,e){E(t,(function(t){var n,i,r,o,a,s=this.findTargetInfo(t,e);if(t.range=t.range||[],s&&!0!==s){t.panelId=s.panelId;var l=Vz[t.brushType](0,s.coordSys,t.coordRange),u=t.__rangeOffset;t.range=u?Fz[t.brushType](l.values,u.offset,(n=l.xyMinMax,i=u.xyMinMax,r=Wz(n),o=Wz(i),a=[r[0]/o[0],r[1]/o[1]],isNaN(a[0])&&(a[0]=1),isNaN(a[1])&&(a[1]=1),a)):l.values}}),this)},t.prototype.makePanelOpts=function(t,e){return z(this._targetInfoList,(function(n){var i=n.getPanelRect();return{panelId:n.panelId,defaultBrushType:e?e(n):null,clipPath:AL(i),isTargetByCursor:LL(i,t,n.coordSysModel),getLinearBrushOtherExtent:kL(i)}}))},t.prototype.controlSeries=function(t,e,n){var i=this.findTargetInfo(t,n);return!0===i||i&&P(i.coordSyses,e.coordinateSystem)>=0},t.prototype.findTargetInfo=function(t,e){for(var n=this._targetInfoList,i=Rz(e,t),r=0;rt[1]&&t.reverse(),t}function Rz(t,e){return No(t,e,{includeMainTypes:Lz})}var Nz={grid:function(t,e){var n=t.xAxisModels,i=t.yAxisModels,r=t.gridModels,o=yt(),a={},s={};(n||i||r)&&(E(n,(function(t){var e=t.axis.grid.model;o.set(e.id,e),a[e.id]=!0})),E(i,(function(t){var e=t.axis.grid.model;o.set(e.id,e),s[e.id]=!0})),E(r,(function(t){o.set(t.id,t),a[t.id]=!0,s[t.id]=!0})),o.each((function(t){var r=t.coordinateSystem,o=[];E(r.getCartesians(),(function(t,e){(P(n,t.getAxis("x").model)>=0||P(i,t.getAxis("y").model)>=0)&&o.push(t)})),e.push({panelId:"grid--"+t.id,gridModel:t,coordSysModel:t,coordSys:o[0],coordSyses:o,getPanelRect:zz.grid,xAxisDeclared:a[t.id],yAxisDeclared:s[t.id]})})))},geo:function(t,e){E(t.geoModels,(function(t){var n=t.coordinateSystem;e.push({panelId:"geo--"+t.id,geoModel:t,coordSysModel:t,coordSys:n,coordSyses:[n],getPanelRect:zz.geo})}))}},Ez=[function(t,e){var 
n=t.xAxisModel,i=t.yAxisModel,r=t.gridModel;return!r&&n&&(r=n.axis.grid.model),!r&&i&&(r=i.axis.grid.model),r&&r===e.gridModel},function(t,e){var n=t.geoModel;return n&&n===e.geoModel}],zz={grid:function(){return this.coordSys.master.getRect().clone()},geo:function(){var t=this.coordSys,e=t.getBoundingRect().clone();return e.applyTransform(Eh(t)),e}},Vz={lineX:H(Bz,0),lineY:H(Bz,1),rect:function(t,e,n,i){var r=t?e.pointToData([n[0][0],n[1][0]],i):e.dataToPoint([n[0][0],n[1][0]],i),o=t?e.pointToData([n[0][1],n[1][1]],i):e.dataToPoint([n[0][1],n[1][1]],i),a=[Oz([r[0],o[0]]),Oz([r[1],o[1]])];return{values:a,xyMinMax:a}},polygon:function(t,e,n,i){var r=[[1/0,-1/0],[1/0,-1/0]];return{values:z(n,(function(n){var o=t?e.pointToData(n,i):e.dataToPoint(n,i);return r[0][0]=Math.min(r[0][0],o[0]),r[1][0]=Math.min(r[1][0],o[1]),r[0][1]=Math.max(r[0][1],o[0]),r[1][1]=Math.max(r[1][1],o[1]),o})),xyMinMax:r}}};function Bz(t,e,n,i){var r=n.getAxis(["x","y"][t]),o=Oz(z([0,1],(function(t){return e?r.coordToData(r.toLocalCoord(i[t]),!0):r.toGlobalCoord(r.dataToCoord(i[t]))}))),a=[];return a[t]=o,a[1-t]=[NaN,NaN],{values:o,xyMinMax:a}}var Fz={lineX:H(Gz,0),lineY:H(Gz,1),rect:function(t,e,n){return[[t[0][0]-n[0]*e[0][0],t[0][1]-n[0]*e[0][1]],[t[1][0]-n[1]*e[1][0],t[1][1]-n[1]*e[1][1]]]},polygon:function(t,e,n){return z(t,(function(t,i){return[t[0]-n[0]*e[i][0],t[1]-n[1]*e[i][1]]}))}};function Gz(t,e,n,i){return[e[0]-i[t]*n[0],e[1]-i[t]*n[1]]}function Wz(t){return t?[t[0][1]-t[0][0],t[1][1]-t[1][0]]:[NaN,NaN]}var Hz,Yz,Xz=E,Uz=_o+"toolbox-dataZoom_",Zz=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.render=function(t,e,n,i){this._brushController||(this._brushController=new Jk(n.getZr()),this._brushController.on("brush",W(this._onBrush,this)).mount()),function(t,e,n,i,r){var 
o=n._isZoomActive;i&&"takeGlobalCursor"===i.type&&(o="dataZoomSelect"===i.key&&i.dataZoomSelectActive);n._isZoomActive=o,t.setIconStatus("zoom",o?"emphasis":"normal");var a=new Pz(qz(t),e,{include:["grid"]}),s=a.makePanelOpts(r,(function(t){return t.xAxisDeclared&&!t.yAxisDeclared?"lineX":!t.xAxisDeclared&&t.yAxisDeclared?"lineY":"rect"}));n._brushController.setPanels(s).enableBrush(!(!o||!s.length)&&{brushType:"auto",brushStyle:t.getModel("brushStyle").getItemStyle()})}(t,e,this,i,n),function(t,e){t.setIconStatus("back",function(t){return Az(t).length}(e)>1?"emphasis":"normal")}(t,e)},e.prototype.onclick=function(t,e,n){jz[n].call(this)},e.prototype.remove=function(t,e){this._brushController&&this._brushController.unmount()},e.prototype.dispose=function(t,e){this._brushController&&this._brushController.dispose()},e.prototype._onBrush=function(t){var e=t.areas;if(t.isEnd&&e.length){var n={},i=this.ecModel;this._brushController.updateCovers([]),new Pz(qz(this.model),i,{include:["grid"]}).matchOutputRanges(e,i,(function(t,e,n){if("cartesian2d"===n.type){var i=t.brushType;"rect"===i?(r("x",n,e[0]),r("y",n,e[1])):r({lineX:"x",lineY:"y"}[i],n,e)}})),function(t,e){var n=Az(t);Cz(e,(function(e,i){for(var r=n.length-1;r>=0&&!n[r][i];r--);if(r<0){var o=t.queryComponents({mainType:"dataZoom",subType:"select",id:i})[0];if(o){var a=o.getPercentRange();n[0][i]={dataZoomId:i,start:a[0],end:a[1]}}}})),n.push(e)}(i,n),this._dispatchZoomAction(n)}function r(t,e,r){var o=e.getAxis(t),a=o.model,s=function(t,e,n){var i;return n.eachComponent({mainType:"dataZoom",subType:"select"},(function(n){n.getAxisModel(t,e.componentIndex)&&(i=n)})),i}(t,a,i),l=s.findRepresentativeAxisProxy(a).getMinMaxSpan();null==l.minValueSpan&&null==l.maxValueSpan||(r=Ck(0,r.slice(),o.scale.getExtent(),0,l.minValueSpan,l.maxValueSpan)),s&&(n[s.id]={dataZoomId:s.id,startValue:r[0],endValue:r[1]})}},e.prototype._dispatchZoomAction=function(t){var 
e=[];Xz(t,(function(t,n){e.push(T(t))})),e.length&&this.api.dispatchAction({type:"dataZoom",from:this.uid,batch:e})},e.getDefaultOption=function(t){return{show:!0,filterMode:"filter",icon:{zoom:"M0,13.5h26.9 M13.5,26.9V0 M32.1,13.5H58V58H13.5 V32.1",back:"M22,1.4L9.9,13.5l12.3,12.3 M10.3,13.5H54.9v44.6 H10.3v-26"},title:t.getLocaleModel().get(["toolbox","dataZoom","title"]),brushStyle:{borderWidth:0,color:"rgba(210,219,238,0.2)"}}},e}(lz),jz={zoom:function(){var t=!this._isZoomActive;this.api.dispatchAction({type:"takeGlobalCursor",key:"dataZoomSelect",dataZoomSelectActive:t})},back:function(){this._dispatchZoomAction(function(t){var e=Az(t),n=e[e.length-1];e.length>1&&e.pop();var i={};return Cz(n,(function(t,n){for(var r=e.length-1;r>=0;r--)if(t=e[r][n]){i[n]=t;break}})),i}(this.ecModel))}};function qz(t){var e={xAxisIndex:t.get("xAxisIndex",!0),yAxisIndex:t.get("yAxisIndex",!0),xAxisId:t.get("xAxisId",!0),yAxisId:t.get("yAxisId",!0)};return null==e.xAxisIndex&&null==e.xAxisId&&(e.xAxisIndex="all"),null==e.yAxisIndex&&null==e.yAxisId&&(e.yAxisIndex="all"),e}Hz="dataZoom",Yz=function(t){var e=t.getComponent("toolbox",0),n=["feature","dataZoom"];if(e&&null!=e.get(n)){var i=e.getModel(n),r=[],o=No(t,qz(i));return Xz(o.xAxisModels,(function(t){return a(t,"xAxis","xAxisIndex")})),Xz(o.yAxisModels,(function(t){return a(t,"yAxis","yAxisIndex")})),r}function a(t,e,n){var o=t.componentIndex,a={type:"select",$fromToolbox:!0,filterMode:i.get("filterMode",!0)||"filter",id:Uz+e+o};a[n]=o,r.push(a)}},lt(null==nd.get(Hz)&&Yz),nd.set(Hz,Yz);var Kz=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="tooltip",e.dependencies=["axisPointer"],e.defaultOption={z:60,show:!0,showContent:!0,trigger:"item",triggerOn:"mousemove|click",alwaysShowContent:!1,displayMode:"single",renderMode:"auto",confine:null,showDelay:0,hideDelay:100,transitionDuration:.4,enterable:!1,backgroundColor:"#fff",shadowBlur:10,shadowColor:"rgba(0, 0, 
0, .2)",shadowOffsetX:1,shadowOffsetY:2,borderRadius:4,borderWidth:1,padding:null,extraCssText:"",axisPointer:{type:"line",axis:"auto",animation:"auto",animationDurationUpdate:200,animationEasingUpdate:"exponentialOut",crossStyle:{color:"#999",width:1,type:"dashed",textStyle:{}}},textStyle:{color:"#666",fontSize:14}},e}(Rp);function $z(t){var e=t.get("confine");return null!=e?!!e:"richText"===t.get("renderMode")}function Jz(t){if(r.domSupported)for(var e=document.documentElement.style,n=0,i=t.length;n-1?(u+="top:50%",h+="translateY(-50%) rotate("+(a="left"===s?-225:-45)+"deg)"):(u+="left:50%",h+="translateX(-50%) rotate("+(a="top"===s?225:45)+"deg)");var c=a*Math.PI/180,p=l+r,d=p*Math.abs(Math.cos(c))+p*Math.abs(Math.sin(c)),f=e+" solid "+r+"px;";return'
'}(n,i,r)),U(t))o.innerHTML=t+a;else if(t){o.innerHTML="",Y(t)||(t=[t]);for(var s=0;s=0?this._tryShow(n,i):"leave"===e&&this._hide(i))}),this))},e.prototype._keepShow=function(){var t=this._tooltipModel,e=this._ecModel,n=this._api,i=t.get("triggerOn");if(null!=this._lastX&&null!=this._lastY&&"none"!==i&&"click"!==i){var r=this;clearTimeout(this._refreshUpdateTimeout),this._refreshUpdateTimeout=setTimeout((function(){!n.isDisposed()&&r.manuallyShowTip(t,e,n,{x:r._lastX,y:r._lastY,dataByCoordSys:r._lastDataByCoordSys})}))}},e.prototype.manuallyShowTip=function(t,e,n,i){if(i.from!==this.uid&&!r.node&&n.getDom()){var o=gV(i,n);this._ticket="";var a=i.dataByCoordSys,s=function(t,e,n){var i=Eo(t).queryOptionMap,r=i.keys()[0];if(!r||"series"===r)return;var o=Bo(e,r,i.get(r),{useDefault:!1,enableAll:!1,enableNone:!1}),a=o.models[0];if(!a)return;var s,l=n.getViewOfComponentModel(a);if(l.group.traverse((function(e){var n=Qs(e).tooltipConfig;if(n&&n.name===t.name)return s=e,!0})),s)return{componentMainType:r,componentIndex:a.componentIndex,el:s}}(i,e,n);if(s){var l=s.el.getBoundingRect().clone();l.applyTransform(s.el.transform),this._tryShow({offsetX:l.x+l.width/2,offsetY:l.y+l.height/2,target:s.el,position:i.position,positionDefault:"bottom"},o)}else if(i.tooltip&&null!=i.x&&null!=i.y){var u=pV;u.x=i.x,u.y=i.y,u.update(),Qs(u).tooltipConfig={name:null,option:i.tooltip},this._tryShow({offsetX:i.x,offsetY:i.y,target:u},o)}else if(a)this._tryShow({offsetX:i.x,offsetY:i.y,position:i.position,dataByCoordSys:a,tooltipOption:i.tooltipOption},o);else if(null!=i.seriesIndex){if(this._manuallyAxisShowTip(t,e,n,i))return;var h=wN(i,e),c=h.point[0],p=h.point[1];null!=c&&null!=p&&this._tryShow({offsetX:c,offsetY:p,target:h.el,position:i.position,positionDefault:"bottom"},o)}else 
null!=i.x&&null!=i.y&&(n.dispatchAction({type:"updateAxisPointer",x:i.x,y:i.y}),this._tryShow({offsetX:i.x,offsetY:i.y,position:i.position,target:n.getZr().findHover(i.x,i.y).target},o))}},e.prototype.manuallyHideTip=function(t,e,n,i){var r=this._tooltipContent;this._tooltipModel&&r.hideLater(this._tooltipModel.get("hideDelay")),this._lastX=this._lastY=this._lastDataByCoordSys=null,i.from!==this.uid&&this._hide(gV(i,n))},e.prototype._manuallyAxisShowTip=function(t,e,n,i){var r=i.seriesIndex,o=i.dataIndex,a=e.getComponent("axisPointer").coordSysAxesInfo;if(null!=r&&null!=o&&null!=a){var s=e.getSeriesByIndex(r);if(s)if("axis"===fV([s.getData().getItemModel(o),s,(s.coordinateSystem||{}).model],this._tooltipModel).get("trigger"))return n.dispatchAction({type:"updateAxisPointer",seriesIndex:r,dataIndex:o,position:i.position}),!0}},e.prototype._tryShow=function(t,e){var n=t.target;if(this._tooltipModel){this._lastX=t.offsetX,this._lastY=t.offsetY;var i=t.dataByCoordSys;if(i&&i.length)this._showAxisTooltip(i,t);else if(n){var r,o;this._lastDataByCoordSys=null,ky(n,(function(t){return null!=Qs(t).dataIndex?(r=t,!0):null!=Qs(t).tooltipConfig?(o=t,!0):void 0}),!0),r?this._showSeriesItemTooltip(t,r,e):o?this._showComponentItemTooltip(t,o,e):this._hide(e)}else this._lastDataByCoordSys=null,this._hide(e)}},e.prototype._showOrMove=function(t,e){var n=t.get("showDelay");e=W(e,this),clearTimeout(this._showTimout),n>0?this._showTimout=setTimeout(e,n):e()},e.prototype._showAxisTooltip=function(t,e){var n=this._ecModel,i=this._tooltipModel,r=[e.offsetX,e.offsetY],o=fV([e.tooltipOption],i),a=this._renderMode,s=[],l=ng("section",{blocks:[],noHeader:!0}),u=[],h=new dg;E(t,(function(t){E(t.dataByAxis,(function(t){var e=n.getComponent(t.axisDim+"Axis",t.axisIndex),r=t.value;if(e&&null!=r){var o=rN(r,e.axis,n,t.seriesDataIndices,t.valueLabelOpt),c=ng("section",{header:o,noHeader:!ut(o),sortBlocks:!0,blocks:[]});l.blocks.push(c),E(t.seriesDataIndices,(function(l){var 
p=n.getSeriesByIndex(l.seriesIndex),d=l.dataIndexInside,f=p.getDataParams(d);if(!(f.dataIndex<0)){f.axisDim=t.axisDim,f.axisIndex=t.axisIndex,f.axisType=t.axisType,f.axisId=t.axisId,f.axisValue=__(e.axis,{value:r}),f.axisValueLabel=o,f.marker=h.makeTooltipMarker("item",_p(f.color),a);var g=mf(p.formatTooltip(d,!0,null)),y=g.frag;if(y){var v=fV([p],i).get("valueFormatter");c.blocks.push(v?A({valueFormatter:v},y):y)}g.text&&u.push(g.text),s.push(f)}}))}}))})),l.blocks.reverse(),u.reverse();var c=e.position,p=o.get("order"),d=lg(l,h,a,p,n.get("useUTC"),o.get("textStyle"));d&&u.unshift(d);var f="richText"===a?"\n\n":"
",g=u.join(f);this._showOrMove(o,(function(){this._updateContentNotChangedOnAxis(t,s)?this._updatePosition(o,c,r[0],r[1],this._tooltipContent,s):this._showTooltipContent(o,g,s,Math.random()+"",r[0],r[1],c,null,h)}))},e.prototype._showSeriesItemTooltip=function(t,e,n){var i=this._ecModel,r=Qs(e),o=r.seriesIndex,a=i.getSeriesByIndex(o),s=r.dataModel||a,l=r.dataIndex,u=r.dataType,h=s.getData(u),c=this._renderMode,p=t.positionDefault,d=fV([h.getItemModel(l),s,a&&(a.coordinateSystem||{}).model],this._tooltipModel,p?{position:p}:null),f=d.get("trigger");if(null==f||"item"===f){var g=s.getDataParams(l,u),y=new dg;g.marker=y.makeTooltipMarker("item",_p(g.color),c);var v=mf(s.formatTooltip(l,!1,u)),m=d.get("order"),x=d.get("valueFormatter"),_=v.frag,b=_?lg(x?A({valueFormatter:x},_):_,y,c,m,i.get("useUTC"),d.get("textStyle")):v.text,w="item_"+s.name+"_"+l;this._showOrMove(d,(function(){this._showTooltipContent(d,b,g,w,t.offsetX,t.offsetY,t.position,t.target,y)})),n({type:"showTip",dataIndexInside:l,dataIndex:h.getRawIndex(l),seriesIndex:o,from:this.uid})}},e.prototype._showComponentItemTooltip=function(t,e,n){var i=Qs(e),r=i.tooltipConfig.option||{};if(U(r)){r={content:r,formatter:r}}var o=[r],a=this._ecModel.getComponent(i.componentMainType,i.componentIndex);a&&o.push(a),o.push({formatter:r.content});var s=t.positionDefault,l=fV(o,this._tooltipModel,s?{position:s}:null),u=l.get("content"),h=Math.random()+"",c=new dg;this._showOrMove(l,(function(){var n=T(l.get("formatterParams")||{});this._showTooltipContent(l,u,n,h,t.offsetX,t.offsetY,t.position,e,c)})),n({type:"showTip",from:this.uid})},e.prototype._showTooltipContent=function(t,e,n,i,r,o,a,s,l){if(this._ticket="",t.get("showContent")&&t.get("show")){var u=this._tooltipContent;u.setEnterable(t.get("enterable"));var h=t.get("formatter");a=a||t.get("position");var c=e,p=this._getNearestPoint([r,o],n,t.get("trigger"),t.get("borderColor")).color;if(h)if(U(h)){var 
d=t.ecModel.get("useUTC"),f=Y(n)?n[0]:n;c=h,f&&f.axisType&&f.axisType.indexOf("time")>=0&&(c=qc(f.axisValue,c,d)),c=mp(c,n,!0)}else if(X(h)){var g=W((function(e,i){e===this._ticket&&(u.setContent(i,l,t,p,a),this._updatePosition(t,a,r,o,u,n,s))}),this);this._ticket=i,c=h(n,i,g)}else c=h;u.setContent(c,l,t,p,a),u.show(t,p),this._updatePosition(t,a,r,o,u,n,s)}},e.prototype._getNearestPoint=function(t,e,n,i){return"axis"===n||Y(e)?{color:i||("html"===this._renderMode?"#fff":"none")}:Y(e)?void 0:{color:i||e.color||e.borderColor}},e.prototype._updatePosition=function(t,e,n,i,r,o,a){var s=this._api.getWidth(),l=this._api.getHeight();e=e||t.get("position");var u=r.getSize(),h=t.get("align"),c=t.get("verticalAlign"),p=a&&a.getBoundingRect().clone();if(a&&p.applyTransform(a.transform),X(e)&&(e=e([n,i],o,r.el,p,{viewSize:[s,l],contentSize:u.slice()})),Y(e))n=Ur(e[0],s),i=Ur(e[1],l);else if(q(e)){var d=e;d.width=u[0],d.height=u[1];var f=Cp(d,{width:s,height:l});n=f.x,i=f.y,h=null,c=null}else if(U(e)&&a){var g=function(t,e,n,i){var r=n[0],o=n[1],a=Math.ceil(Math.SQRT2*i)+8,s=0,l=0,u=e.width,h=e.height;switch(t){case"inside":s=e.x+u/2-r/2,l=e.y+h/2-o/2;break;case"top":s=e.x+u/2-r/2,l=e.y-o-a;break;case"bottom":s=e.x+u/2-r/2,l=e.y+h+a;break;case"left":s=e.x-r-a,l=e.y+h/2-o/2;break;case"right":s=e.x+u+a,l=e.y+h/2-o/2}return[s,l]}(e,p,u,t.get("borderWidth"));n=g[0],i=g[1]}else{g=function(t,e,n,i,r,o,a){var s=n.getSize(),l=s[0],u=s[1];null!=o&&(t+l+o+2>i?t-=l+o:t+=o);null!=a&&(e+u+a>r?e-=u+a:e+=a);return[t,e]}(n,i,r,s,l,h?null:20,c?null:20);n=g[0],i=g[1]}if(h&&(n-=yV(h)?u[0]/2:"right"===h?u[0]:0),c&&(i-=yV(c)?u[1]/2:"bottom"===c?u[1]:0),$z(t)){g=function(t,e,n,i,r){var o=n.getSize(),a=o[0],s=o[1];return t=Math.min(t+a,i)-a,e=Math.min(e+s,r)-s,t=Math.max(t,0),e=Math.max(e,0),[t,e]}(n,i,r,s,l);n=g[0],i=g[1]}r.moveTo(n,i)},e.prototype._updateContentNotChangedOnAxis=function(t,e){var n=this._lastDataByCoordSys,i=this._cbParamsList,r=!!n&&n.length===t.length;return 
r&&E(n,(function(n,o){var a=n.dataByAxis||[],s=(t[o]||{}).dataByAxis||[];(r=r&&a.length===s.length)&&E(a,(function(t,n){var o=s[n]||{},a=t.seriesDataIndices||[],l=o.seriesDataIndices||[];(r=r&&t.value===o.value&&t.axisType===o.axisType&&t.axisId===o.axisId&&a.length===l.length)&&E(a,(function(t,e){var n=l[e];r=r&&t.seriesIndex===n.seriesIndex&&t.dataIndex===n.dataIndex})),i&&E(t.seriesDataIndices,(function(t){var n=t.seriesIndex,o=e[n],a=i[n];o&&a&&a.data!==o.data&&(r=!1)}))}))})),this._lastDataByCoordSys=t,this._cbParamsList=e,!!r},e.prototype._hide=function(t){this._lastDataByCoordSys=null,t({type:"hideTip",from:this.uid})},e.prototype.dispose=function(t,e){!r.node&&e.getDom()&&(Gg(this,"_updatePosition"),this._tooltipContent.dispose(),_N("itemTooltip",e))},e.type="tooltip",e}(Tg);function fV(t,e,n){var i,r=e.ecModel;n?(i=new Mc(n,r,r),i=new Mc(e.option,i,r)):i=e;for(var o=t.length-1;o>=0;o--){var a=t[o];a&&(a instanceof Mc&&(a=a.get("tooltip",!0)),U(a)&&(a={formatter:a}),a&&(i=new Mc(a,i,r)))}return i}function gV(t,e){return t.dispatchAction||W(e.dispatchAction,e)}function yV(t){return"center"===t||"middle"===t}var vV=["rect","polygon","keep","clear"];function mV(t,e){var n=bo(t?t.brush:[]);if(n.length){var i=[];E(n,(function(t){var e=t.hasOwnProperty("toolbox")?t.toolbox:[];e instanceof Array&&(i=i.concat(e))}));var r=t&&t.toolbox;Y(r)&&(r=r[0]),r||(r={feature:{}},t.toolbox=[r]);var o=r.feature||(r.feature={}),a=o.brush||(o.brush={}),s=a.type||(a.type=[]);s.push.apply(s,i),function(t){var e={};E(t,(function(t){e[t]=1})),t.length=0,E(e,(function(e,n){t.push(n)}))}(s),e&&!s.length&&s.push.apply(s,vV)}}var xV=E;function _V(t){if(t)for(var e in t)if(t.hasOwnProperty(e))return!0}function bV(t,e,n){var i={};return xV(e,(function(e){var r,o=i[e]=((r=function(){}).prototype.__hidden=r.prototype,new r);xV(t[e],(function(t,i){if(_D.isValidType(i)){var r={type:i,visual:t};n&&n(r,e),o[i]=new _D(r),"opacity"===i&&((r=T(r)).type="colorAlpha",o.__hidden.__alphaForOpacity=new 
_D(r))}}))})),i}function wV(t,e,n){var i;E(n,(function(t){e.hasOwnProperty(t)&&_V(e[t])&&(i=!0)})),i&&E(n,(function(n){e.hasOwnProperty(n)&&_V(e[n])?t[n]=T(e[n]):delete t[n]}))}var SV={lineX:MV(0),lineY:MV(1),rect:{point:function(t,e,n){return t&&n.boundingRect.contain(t[0],t[1])},rect:function(t,e,n){return t&&n.boundingRect.intersect(t)}},polygon:{point:function(t,e,n){return t&&n.boundingRect.contain(t[0],t[1])&&A_(n.range,t[0],t[1])},rect:function(t,e,n){var i=n.range;if(!t||i.length<=1)return!1;var r=t.x,o=t.y,a=t.width,s=t.height,l=i[0];return!!(A_(i,r,o)||A_(i,r+a,o)||A_(i,r,o+s)||A_(i,r+a,o+s)||ze.create(t).contain(l[0],l[1])||Yh(r,o,r+a,o,i)||Yh(r,o,r,o+s,i)||Yh(r+a,o,r+a,o+s,i)||Yh(r,o+s,r+a,o+s,i))||void 0}}};function MV(t){var e=["x","y"],n=["width","height"];return{point:function(e,n,i){if(e){var r=i.range;return IV(e[t],r)}},rect:function(i,r,o){if(i){var a=o.range,s=[i[e[t]],i[e[t]]+i[n[t]]];return s[1]e[0][1]&&(e[0][1]=o[0]),o[1]e[1][1]&&(e[1][1]=o[1])}return e&&RV(e)}};function RV(t){return new ze(t[0][0],t[1][0],t[0][1]-t[0][0],t[1][1]-t[1][0])}var NV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.init=function(t,e){this.ecModel=t,this.api=e,this.model,(this._brushController=new Jk(e.getZr())).on("brush",W(this._onBrush,this)).mount()},e.prototype.render=function(t,e,n,i){this.model=t,this._updateController(t,e,n,i)},e.prototype.updateTransform=function(t,e,n,i){AV(e),this._updateController(t,e,n,i)},e.prototype.updateVisual=function(t,e,n,i){this.updateTransform(t,e,n,i)},e.prototype.updateView=function(t,e,n,i){this._updateController(t,e,n,i)},e.prototype._updateController=function(t,e,n,i){(!i||i.$from!==t.id)&&this._brushController.setPanels(t.brushTargetManager.makePanelOpts(n)).enableBrush(t.brushOption).updateCovers(t.areas.slice())},e.prototype.dispose=function(){this._brushController.dispose()},e.prototype._onBrush=function(t){var 
e=this.model.id,n=this.model.brushTargetManager.setOutputRanges(t.areas,this.ecModel);(!t.isEnd||t.removeOnClick)&&this.api.dispatchAction({type:"brush",brushId:e,areas:T(n),$from:e}),t.isEnd&&this.api.dispatchAction({type:"brushEnd",brushId:e,areas:T(n),$from:e})},e.type="brush",e}(Tg),EV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.areas=[],n.brushOption={},n}return n(e,t),e.prototype.optionUpdated=function(t,e){var n=this.option;!e&&wV(n,t,["inBrush","outOfBrush"]);var i=n.inBrush=n.inBrush||{};n.outOfBrush=n.outOfBrush||{color:"#ddd"},i.hasOwnProperty("liftZ")||(i.liftZ=5)},e.prototype.setAreas=function(t){t&&(this.areas=z(t,(function(t){return zV(this.option,t)}),this))},e.prototype.setBrushOption=function(t){this.brushOption=zV(this.option,t),this.brushType=this.brushOption.brushType},e.type="brush",e.dependencies=["geo","grid","xAxis","yAxis","parallel","series"],e.defaultOption={seriesIndex:"all",brushType:"rect",brushMode:"single",transformable:!0,brushStyle:{borderWidth:1,color:"rgba(210,219,238,0.3)",borderColor:"#D2DBEE"},throttleType:"fixRate",throttleDelay:0,removeOnClick:!0,z:1e4},e}(Rp);function zV(t,e){return C({brushType:t.brushType,brushMode:t.brushMode,transformable:t.transformable,brushStyle:new Mc(t.brushStyle).getItemStyle(),removeOnClick:t.removeOnClick,z:t.z},e,!0)}var VV=["rect","polygon","lineX","lineY","keep","clear"],BV=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return n(e,t),e.prototype.render=function(t,e,n){var i,r,o;e.eachComponent({mainType:"brush"},(function(t){i=t.brushType,r=t.brushOption.brushMode||"single",o=o||!!t.areas.length})),this._brushType=i,this._brushMode=r,E(t.get("type",!0),(function(e){t.setIconStatus(e,("keep"===e?"multiple"===r:"clear"===e?o:e===i)?"emphasis":"normal")}))},e.prototype.updateView=function(t,e,n){this.render(t,e,n)},e.prototype.getIcons=function(){var t=this.model,e=t.get("icon",!0),n={};return 
E(t.get("type",!0),(function(t){e[t]&&(n[t]=e[t])})),n},e.prototype.onclick=function(t,e,n){var i=this._brushType,r=this._brushMode;"clear"===n?(e.dispatchAction({type:"axisAreaSelect",intervals:[]}),e.dispatchAction({type:"brush",command:"clear",areas:[]})):e.dispatchAction({type:"takeGlobalCursor",key:"brush",brushOption:{brushType:"keep"===n?i:i!==n&&n,brushMode:"keep"===n?"multiple"===r?"single":"multiple":r}})},e.getDefaultOption=function(t){return{show:!0,type:VV.slice(),icon:{rect:"M7.3,34.7 M0.4,10V-0.2h9.8 M89.6,10V-0.2h-9.8 M0.4,60v10.2h9.8 M89.6,60v10.2h-9.8 M12.3,22.4V10.5h13.1 M33.6,10.5h7.8 M49.1,10.5h7.8 M77.5,22.4V10.5h-13 M12.3,31.1v8.2 M77.7,31.1v8.2 M12.3,47.6v11.9h13.1 M33.6,59.5h7.6 M49.1,59.5 h7.7 M77.5,47.6v11.9h-13",polygon:"M55.2,34.9c1.7,0,3.1,1.4,3.1,3.1s-1.4,3.1-3.1,3.1 s-3.1-1.4-3.1-3.1S53.5,34.9,55.2,34.9z M50.4,51c1.7,0,3.1,1.4,3.1,3.1c0,1.7-1.4,3.1-3.1,3.1c-1.7,0-3.1-1.4-3.1-3.1 C47.3,52.4,48.7,51,50.4,51z M55.6,37.1l1.5-7.8 M60.1,13.5l1.6-8.7l-7.8,4 M59,19l-1,5.3 M24,16.1l6.4,4.9l6.4-3.3 M48.5,11.6 l-5.9,3.1 M19.1,12.8L9.7,5.1l1.1,7.7 M13.4,29.8l1,7.3l6.6,1.6 M11.6,18.4l1,6.1 M32.8,41.9 M26.6,40.4 M27.3,40.2l6.1,1.6 M49.9,52.1l-5.6-7.6l-4.9-1.2",lineX:"M15.2,30 M19.7,15.6V1.9H29 M34.8,1.9H40.4 M55.3,15.6V1.9H45.9 M19.7,44.4V58.1H29 M34.8,58.1H40.4 M55.3,44.4 V58.1H45.9 M12.5,20.3l-9.4,9.6l9.6,9.8 M3.1,29.9h16.5 M62.5,20.3l9.4,9.6L62.3,39.7 M71.9,29.9H55.4",lineY:"M38.8,7.7 M52.7,12h13.2v9 M65.9,26.6V32 M52.7,46.3h13.2v-9 M24.9,12H11.8v9 M11.8,26.6V32 M24.9,46.3H11.8v-9 M48.2,5.1l-9.3-9l-9.4,9.2 M38.9-3.9V12 M48.2,53.3l-9.3,9l-9.4-9.2 M38.9,62.3V46.4",keep:"M4,10.5V1h10.3 M20.7,1h6.1 M33,1h6.1 M55.4,10.5V1H45.2 M4,17.3v6.6 M55.6,17.3v6.6 M4,30.5V40h10.3 M20.7,40 h6.1 M33,40h6.1 M55.4,30.5V40H45.2 M21,18.9h62.9v48.6H21V18.9z",clear:"M22,14.7l30.9,31 M52.9,14.7L22,45.7 M4.7,16.8V4.2h13.1 M26,4.2h7.8 M41.6,4.2h7.8 M70.3,16.8V4.2H57.2 M4.7,25.9v8.6 M70.3,25.9v8.6 M4.7,43.2v12.6h13.1 M26,55.8h7.8 M41.6,55.8h7.8 
M70.3,43.2v12.6H57.2"},title:t.getLocaleModel().get(["toolbox","brush","title"])}},e}(lz);var FV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.layoutMode={type:"box",ignoreSize:!0},n}return n(e,t),e.type="title",e.defaultOption={z:6,show:!0,text:"",target:"blank",subtext:"",subtarget:"blank",left:0,top:0,backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",borderWidth:0,padding:5,itemGap:10,textStyle:{fontSize:18,fontWeight:"bold",color:"#464646"},subtextStyle:{fontSize:12,color:"#6E7079"}},e}(Rp),GV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.render=function(t,e,n){if(this.group.removeAll(),t.get("show")){var i=this.group,r=t.getModel("textStyle"),o=t.getModel("subtextStyle"),a=t.get("textAlign"),s=rt(t.get("textBaseline"),t.get("textVerticalAlign")),l=new Fs({style:nc(r,{text:t.get("text"),fill:r.getTextColor()},{disableBox:!0}),z2:10}),u=l.getBoundingRect(),h=t.get("subtext"),c=new Fs({style:nc(o,{text:h,fill:o.getTextColor(),y:u.height+t.get("itemGap"),verticalAlign:"top"},{disableBox:!0}),z2:10}),p=t.get("link"),d=t.get("sublink"),f=t.get("triggerEvent",!0);l.silent=!p&&!f,c.silent=!d&&!f,p&&l.on("click",(function(){bp(p,"_"+t.get("target"))})),d&&c.on("click",(function(){bp(d,"_"+t.get("subtarget"))})),Qs(l).eventData=Qs(c).eventData=f?{componentType:"title",componentIndex:t.componentIndex}:null,i.add(l),h&&i.add(c);var g=i.getBoundingRect(),y=t.getBoxLayoutParams();y.width=g.width,y.height=g.height;var v=Cp(y,{width:n.getWidth(),height:n.getHeight()},t.get("padding"));a||("middle"===(a=t.get("left")||t.get("right"))&&(a="center"),"right"===a?v.x+=v.width:"center"===a&&(v.x+=v.width/2)),s||("center"===(s=t.get("top")||t.get("bottom"))&&(s="middle"),"bottom"===s?v.y+=v.height:"middle"===s&&(v.y+=v.height/2),s=s||"top"),i.x=v.x,i.y=v.y,i.markRedraw();var m={align:a,verticalAlign:s};l.setStyle(m),c.setStyle(m),g=i.getBoundingRect();var 
x=v.margin,_=t.getItemStyle(["color","opacity"]);_.fill=t.get("backgroundColor");var b=new zs({shape:{x:g.x-x[3],y:g.y-x[0],width:g.width+x[1]+x[3],height:g.height+x[0]+x[2],r:t.get("borderRadius")},style:_,subPixelOptimize:!0,silent:!0});i.add(b)}},e.type="title",e}(Tg);var WV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.layoutMode="box",n}return n(e,t),e.prototype.init=function(t,e,n){this.mergeDefaultAndTheme(t,n),this._initData()},e.prototype.mergeOption=function(e){t.prototype.mergeOption.apply(this,arguments),this._initData()},e.prototype.setCurrentIndex=function(t){null==t&&(t=this.option.currentIndex);var e=this._data.count();this.option.loop?t=(t%e+e)%e:(t>=e&&(t=e-1),t<0&&(t=0)),this.option.currentIndex=t},e.prototype.getCurrentIndex=function(){return this.option.currentIndex},e.prototype.isIndexMax=function(){return this.getCurrentIndex()>=this._data.count()-1},e.prototype.setPlayState=function(t){this.option.autoPlay=!!t},e.prototype.getPlayState=function(){return!!this.option.autoPlay},e.prototype._initData=function(){var t,e=this.option,n=e.data||[],i=e.axisType,r=this._names=[];"category"===i?(t=[],E(n,(function(e,n){var i,o=Ao(Mo(e),"");q(e)?(i=T(e)).value=n:i=n,t.push(i),r.push(o)}))):t=n;var o={category:"ordinal",time:"time",value:"number"}[i]||"number";(this._data=new lx([{name:"value",type:o}],this)).initData(t,r)},e.prototype.getData=function(){return this._data},e.prototype.getCategories=function(){if("category"===this.get("axisType"))return this._names.slice()},e.type="timeline",e.defaultOption={z:4,show:!0,axisType:"time",realtime:!0,left:"20%",top:null,right:"20%",bottom:0,width:null,height:40,padding:5,controlPosition:"left",autoPlay:!1,rewind:!1,loop:!0,playInterval:2e3,currentIndex:0,itemStyle:{},label:{color:"#000"},data:[]},e}(Rp),HV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return 
n(e,t),e.type="timeline.slider",e.defaultOption=Cc(WV.defaultOption,{backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",borderWidth:0,orient:"horizontal",inverse:!1,tooltip:{trigger:"item"},symbol:"circle",symbolSize:12,lineStyle:{show:!0,width:2,color:"#DAE1F5"},label:{position:"auto",show:!0,interval:"auto",rotate:0,color:"#A4B1D7"},itemStyle:{color:"#A4B1D7",borderWidth:1},checkpointStyle:{symbol:"circle",symbolSize:15,color:"#316bf3",borderColor:"#fff",borderWidth:2,shadowBlur:2,shadowOffsetX:1,shadowOffsetY:1,shadowColor:"rgba(0, 0, 0, 0.3)",animation:!0,animationDuration:300,animationEasing:"quinticInOut"},controlStyle:{show:!0,showPlayBtn:!0,showPrevBtn:!0,showNextBtn:!0,itemSize:24,itemGap:12,position:"left",playIcon:"path://M31.6,53C17.5,53,6,41.5,6,27.4S17.5,1.8,31.6,1.8C45.7,1.8,57.2,13.3,57.2,27.4S45.7,53,31.6,53z M31.6,3.3 C18.4,3.3,7.5,14.1,7.5,27.4c0,13.3,10.8,24.1,24.1,24.1C44.9,51.5,55.7,40.7,55.7,27.4C55.7,14.1,44.9,3.3,31.6,3.3z M24.9,21.3 c0-2.2,1.6-3.1,3.5-2l10.5,6.1c1.899,1.1,1.899,2.9,0,4l-10.5,6.1c-1.9,1.1-3.5,0.2-3.5-2V21.3z",stopIcon:"path://M30.9,53.2C16.8,53.2,5.3,41.7,5.3,27.6S16.8,2,30.9,2C45,2,56.4,13.5,56.4,27.6S45,53.2,30.9,53.2z M30.9,3.5C17.6,3.5,6.8,14.4,6.8,27.6c0,13.3,10.8,24.1,24.101,24.1C44.2,51.7,55,40.9,55,27.6C54.9,14.4,44.1,3.5,30.9,3.5z M36.9,35.8c0,0.601-0.4,1-0.9,1h-1.3c-0.5,0-0.9-0.399-0.9-1V19.5c0-0.6,0.4-1,0.9-1H36c0.5,0,0.9,0.4,0.9,1V35.8z M27.8,35.8 
c0,0.601-0.4,1-0.9,1h-1.3c-0.5,0-0.9-0.399-0.9-1V19.5c0-0.6,0.4-1,0.9-1H27c0.5,0,0.9,0.4,0.9,1L27.8,35.8L27.8,35.8z",nextIcon:"M2,18.5A1.52,1.52,0,0,1,.92,18a1.49,1.49,0,0,1,0-2.12L7.81,9.36,1,3.11A1.5,1.5,0,1,1,3,.89l8,7.34a1.48,1.48,0,0,1,.49,1.09,1.51,1.51,0,0,1-.46,1.1L3,18.08A1.5,1.5,0,0,1,2,18.5Z",prevIcon:"M10,.5A1.52,1.52,0,0,1,11.08,1a1.49,1.49,0,0,1,0,2.12L4.19,9.64,11,15.89a1.5,1.5,0,1,1-2,2.22L1,10.77A1.48,1.48,0,0,1,.5,9.68,1.51,1.51,0,0,1,1,8.58L9,.92A1.5,1.5,0,0,1,10,.5Z",prevBtnSize:18,nextBtnSize:18,color:"#A4B1D7",borderColor:"#A4B1D7",borderWidth:1},emphasis:{label:{show:!0,color:"#6f778d"},itemStyle:{color:"#316BF3"},controlStyle:{color:"#316BF3",borderColor:"#316BF3",borderWidth:2}},progress:{lineStyle:{color:"#316BF3"},itemStyle:{color:"#316BF3"},label:{color:"#6f778d"}},data:[]}),e}(WV);R(HV,vf.prototype);var YV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="timeline",e}(Tg),XV=function(t){function e(e,n,i,r){var o=t.call(this,e,n,i)||this;return o.type=r||"value",o}return n(e,t),e.prototype.getLabelModel=function(){return this.model.getModel("label")},e.prototype.isHorizontal=function(){return"horizontal"===this.model.get("orient")},e}(nb),UV=Math.PI,ZV=Oo(),jV=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.init=function(t,e){this.api=e},e.prototype.render=function(t,e,n){if(this.model=t,this.api=n,this.ecModel=e,this.group.removeAll(),t.get("show",!0)){var i=this._layout(t,n),r=this._createGroup("_mainGroup"),o=this._createGroup("_labelGroup"),a=this._axis=this._createAxis(i,t);t.formatTooltip=function(t){return 
ng("nameValue",{noName:!0,value:a.scale.getLabel({value:t})})},E(["AxisLine","AxisTick","Control","CurrentPointer"],(function(e){this["_render"+e](i,r,a,t)}),this),this._renderAxisLabel(i,o,a,t),this._position(i,t)}this._doPlayStop(),this._updateTicksStatus()},e.prototype.remove=function(){this._clearTimer(),this.group.removeAll()},e.prototype.dispose=function(){this._clearTimer()},e.prototype._layout=function(t,e){var n,i,r,o,a=t.get(["label","position"]),s=t.get("orient"),l=function(t,e){return Cp(t.getBoxLayoutParams(),{width:e.getWidth(),height:e.getHeight()},t.get("padding"))}(t,e),u={horizontal:"center",vertical:(n=null==a||"auto"===a?"horizontal"===s?l.y+l.height/2=0||"+"===n?"left":"right"},h={horizontal:n>=0||"+"===n?"top":"bottom",vertical:"middle"},c={horizontal:0,vertical:UV/2},p="vertical"===s?l.height:l.width,d=t.getModel("controlStyle"),f=d.get("show",!0),g=f?d.get("itemSize"):0,y=f?d.get("itemGap"):0,v=g+y,m=t.get(["label","rotate"])||0;m=m*UV/180;var x=d.get("position",!0),_=f&&d.get("showPlayBtn",!0),b=f&&d.get("showPrevBtn",!0),w=f&&d.get("showNextBtn",!0),S=0,M=p;"left"===x||"bottom"===x?(_&&(i=[0,0],S+=v),b&&(r=[S,0],S+=v),w&&(o=[M-g,0],M-=v)):(_&&(i=[M-g,0],M-=v),b&&(r=[0,0],S+=v),w&&(o=[M-g,0],M-=v));var I=[S,M];return t.get("inverse")&&I.reverse(),{viewRect:l,mainLength:p,orient:s,rotation:c[s],labelRotation:m,labelPosOpt:n,labelAlign:t.get(["label","align"])||u[s],labelBaseline:t.get(["label","verticalAlign"])||t.get(["label","baseline"])||h[s],playPosition:i,prevBtnPosition:r,nextBtnPosition:o,axisExtent:I,controlSize:g,controlGap:y}},e.prototype._position=function(t,e){var n=this._mainGroup,i=this._labelGroup,r=t.viewRect;if("vertical"===t.orient){var o=[1,0,0,1,0,0],a=r.x,s=r.y+r.height;we(o,o,[-a,-s]),Se(o,o,-UV/2),we(o,o,[a,s]),(r=r.clone()).applyTransform(o)}var l=y(r),u=y(n.getBoundingRect()),h=y(i.getBoundingRect()),c=[n.x,n.y],p=[i.x,i.y];p[0]=c[0]=l[0][0];var 
d,f=t.labelPosOpt;null==f||U(f)?(v(c,u,l,1,d="+"===f?0:1),v(p,h,l,1,1-d)):(v(c,u,l,1,d=f>=0?0:1),p[1]=c[1]+f);function g(t){t.originX=l[0][0]-t.x,t.originY=l[1][0]-t.y}function y(t){return[[t.x,t.x+t.width],[t.y,t.y+t.height]]}function v(t,e,n,i,r){t[i]+=n[i][r]-e[i][r]}n.setPosition(c),i.setPosition(p),n.rotation=i.rotation=t.rotation,g(n),g(i)},e.prototype._createAxis=function(t,e){var n=e.getData(),i=e.get("axisType"),r=function(t,e){if(e=e||t.get("type"),e)switch(e){case"category":return new Lx({ordinalMeta:t.getCategories(),extent:[1/0,-1/0]});case"time":return new Zx({locale:t.ecModel.getLocaleModel(),useUTC:t.ecModel.get("useUTC")});default:return new Ox}}(e,i);r.getTicks=function(){return n.mapArray(["value"],(function(t){return{value:t}}))};var o=n.getDataExtent("value");r.setExtent(o[0],o[1]),r.calcNiceTicks();var a=new XV("value",r,t.axisExtent,i);return a.model=e,a},e.prototype._createGroup=function(t){var e=this[t]=new zr;return this.group.add(e),e},e.prototype._renderAxisLine=function(t,e,n,i){var r=n.getExtent();if(i.get(["lineStyle","show"])){var o=new Zu({shape:{x1:r[0],y1:0,x2:r[1],y2:0},style:A({lineCap:"round"},i.getModel("lineStyle").getLineStyle()),silent:!0,z2:1});e.add(o);var a=this._progressLine=new Zu({shape:{x1:r[0],x2:this._currentPointer?this._currentPointer.x:r[0],y1:0,y2:0},style:k({lineCap:"round",lineWidth:o.style.lineWidth},i.getModel(["progress","lineStyle"]).getLineStyle()),silent:!0,z2:1});e.add(a)}},e.prototype._renderAxisTick=function(t,e,n,i){var r=this,o=i.getData(),a=n.scale.getTicks();this._tickSymbols=[],E(a,(function(t){var a=n.dataToCoord(t.value),s=o.getItemModel(t.value),l=s.getModel("itemStyle"),u=s.getModel(["emphasis","itemStyle"]),h=s.getModel(["progress","itemStyle"]),c={x:a,y:0,onclick:W(r._changeTimeline,r,t.value)},p=qV(s,l,e,c);p.ensureState("emphasis").style=u.getItemStyle(),p.ensureState("progress").style=h.getItemStyle(),Hl(p);var 
d=Qs(p);s.get("tooltip")?(d.dataIndex=t.value,d.dataModel=i):d.dataIndex=d.dataModel=null,r._tickSymbols.push(p)}))},e.prototype._renderAxisLabel=function(t,e,n,i){var r=this;if(n.getLabelModel().get("show")){var o=i.getData(),a=n.getViewLabels();this._tickLabels=[],E(a,(function(i){var a=i.tickValue,s=o.getItemModel(a),l=s.getModel("label"),u=s.getModel(["emphasis","label"]),h=s.getModel(["progress","label"]),c=n.dataToCoord(i.tickValue),p=new Fs({x:c,y:0,rotation:t.labelRotation-t.rotation,onclick:W(r._changeTimeline,r,a),silent:!1,style:nc(l,{text:i.formattedLabel,align:t.labelAlign,verticalAlign:t.labelBaseline})});p.ensureState("emphasis").style=nc(u),p.ensureState("progress").style=nc(h),e.add(p),Hl(p),ZV(p).dataIndex=a,r._tickLabels.push(p)}))}},e.prototype._renderControl=function(t,e,n,i){var r=t.controlSize,o=t.rotation,a=i.getModel("controlStyle").getItemStyle(),s=i.getModel(["emphasis","controlStyle"]).getItemStyle(),l=i.getPlayState(),u=i.get("inverse",!0);function h(t,n,l,u){if(t){var h=Ir(rt(i.get(["controlStyle",n+"BtnSize"]),r),r),c=function(t,e,n,i){var r=i.style,o=Hh(t.get(["controlStyle",e]),i||{},new ze(n[0],n[1],n[2],n[3]));r&&o.setStyle(r);return o}(i,n+"Icon",[0,-h/2,h,h],{x:t[0],y:t[1],originX:r/2,originY:0,rotation:u?-o:0,rectHover:!0,style:a,onclick:l});c.ensureState("emphasis").style=s,e.add(c),Hl(c)}}h(t.nextBtnPosition,"next",W(this._changeTimeline,this,u?"-":"+")),h(t.prevBtnPosition,"prev",W(this._changeTimeline,this,u?"+":"-")),h(t.playPosition,l?"stop":"play",W(this._handlePlayClick,this,!l),!0)},e.prototype._renderCurrentPointer=function(t,e,n,i){var 
r=i.getData(),o=i.getCurrentIndex(),a=r.getItemModel(o).getModel("checkpointStyle"),s=this,l={onCreate:function(t){t.draggable=!0,t.drift=W(s._handlePointerDrag,s),t.ondragend=W(s._handlePointerDragend,s),KV(t,s._progressLine,o,n,i,!0)},onUpdate:function(t){KV(t,s._progressLine,o,n,i)}};this._currentPointer=qV(a,a,this._mainGroup,{},this._currentPointer,l)},e.prototype._handlePlayClick=function(t){this._clearTimer(),this.api.dispatchAction({type:"timelinePlayChange",playState:t,from:this.uid})},e.prototype._handlePointerDrag=function(t,e,n){this._clearTimer(),this._pointerChangeTimeline([n.offsetX,n.offsetY])},e.prototype._handlePointerDragend=function(t){this._pointerChangeTimeline([t.offsetX,t.offsetY],!0)},e.prototype._pointerChangeTimeline=function(t,e){var n=this._toAxisCoord(t)[0],i=jr(this._axis.getExtent().slice());n>i[1]&&(n=i[1]),n=0&&(a[o]=+a[o].toFixed(c)),[a,h]}var sB={min:H(aB,"min"),max:H(aB,"max"),average:H(aB,"average"),median:H(aB,"median")};function lB(t,e){if(e){var n=t.getData(),i=t.coordinateSystem,r=i&&i.dimensions;if(!function(t){return!isNaN(parseFloat(t.x))&&!isNaN(parseFloat(t.y))}(e)&&!Y(e.coord)&&Y(r)){var o=uB(e,n,i,t);if((e=T(e)).type&&sB[e.type]&&o.baseAxis&&o.valueAxis){var a=P(r,o.baseAxis.dim),s=P(r,o.valueAxis.dim),l=sB[e.type](n,o.baseDataDim,o.valueDataDim,a,s);e.coord=l[0],e.value=l[1]}else e.coord=[null!=e.xAxis?e.xAxis:e.radiusAxis,null!=e.yAxis?e.yAxis:e.angleAxis]}if(null!=e.coord&&Y(r))for(var u=e.coord,h=0;h<2;h++)sB[u[h]]&&(u[h]=pB(n,n.mapDimension(r[h]),u[h]));else e.coord=[];return e}}function uB(t,e,n,i){var r={};return null!=t.valueIndex||null!=t.valueDim?(r.valueDataDim=null!=t.valueIndex?e.getDimension(t.valueIndex):t.valueDim,r.valueAxis=n.getAxis(function(t,e){var n=t.getData().getDimensionInfo(e);return 
n&&n.coordDim}(i,r.valueDataDim)),r.baseAxis=n.getOtherAxis(r.valueAxis),r.baseDataDim=e.mapDimension(r.baseAxis.dim)):(r.baseAxis=i.getBaseAxis(),r.valueAxis=n.getOtherAxis(r.baseAxis),r.baseDataDim=e.mapDimension(r.baseAxis.dim),r.valueDataDim=e.mapDimension(r.valueAxis.dim)),r}function hB(t,e){return!(t&&t.containData&&e.coord&&!oB(e))||t.containData(e.coord)}function cB(t,e){return t?function(t,n,i,r){return wf(r<2?t.coord&&t.coord[r]:t.value,e[r])}:function(t,n,i,r){return wf(t.value,e[r])}}function pB(t,e,n){if("average"===n){var i=0,r=0;return t.each(e,(function(t,e){isNaN(t)||(i+=t,r++)})),i/r}return"median"===n?t.getMedian(e):t.getDataExtent(e)["max"===n?1:0]}var dB=Oo(),fB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.init=function(){this.markerGroupMap=yt()},e.prototype.render=function(t,e,n){var i=this,r=this.markerGroupMap;r.each((function(t){dB(t).keep=!1})),e.eachSeries((function(t){var r=iB.getMarkerModelFromSeries(t,i.type);r&&i.renderSeries(t,r,e,n)})),r.each((function(t){!dB(t).keep&&i.group.remove(t.group)}))},e.prototype.markKeep=function(t){dB(t).keep=!0},e.prototype.toggleBlurSeries=function(t,e){var n=this;E(t,(function(t){var i=iB.getMarkerModelFromSeries(t,n.type);i&&i.getData().eachItemGraphicEl((function(t){t&&(e?Pl(t):Ol(t))}))}))},e.type="marker",e}(Tg);function gB(t,e,n){var i=e.coordinateSystem;t.each((function(r){var o,a=t.getItemModel(r),s=Ur(a.get("x"),n.getWidth()),l=Ur(a.get("y"),n.getHeight());if(isNaN(s)||isNaN(l)){if(e.getMarkerPosition)o=e.getMarkerPosition(t.getValues(t.dimensions,r));else if(i){var u=t.get(i.dimensions[0],r),h=t.get(i.dimensions[1],r);o=i.dataToPoint([u,h])}}else o=[s,l];isNaN(s)||(o[0]=s),isNaN(l)||(o[1]=l),t.setItemLayout(r,o)}))}var yB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.updateTransform=function(t,e,n){e.eachSeries((function(t){var 
e=iB.getMarkerModelFromSeries(t,"markPoint");e&&(gB(e.getData(),t,n),this.markerGroupMap.get(t.id).updateLayout())}),this)},e.prototype.renderSeries=function(t,e,n,i){var r=t.coordinateSystem,o=t.id,a=t.getData(),s=this.markerGroupMap,l=s.get(o)||s.set(o,new hS),u=function(t,e,n){var i;i=t?z(t&&t.dimensions,(function(t){return A(A({},e.getData().getDimensionInfo(e.getData().mapDimension(t))||{}),{name:t,ordinalMeta:null})})):[{name:"value",type:"float"}];var r=new lx(i,n),o=z(n.get("data"),H(lB,e));t&&(o=B(o,H(hB,t)));var a=cB(!!t,i);return r.initData(o,null,a),r}(r,t,e);e.setData(u),gB(e.getData(),t,i),u.each((function(t){var n=u.getItemModel(t),i=n.getShallow("symbol"),r=n.getShallow("symbolSize"),o=n.getShallow("symbolRotate"),s=n.getShallow("symbolOffset"),l=n.getShallow("symbolKeepAspect");if(X(i)||X(r)||X(o)||X(s)){var h=e.getRawValue(t),c=e.getDataParams(t);X(i)&&(i=i(h,c)),X(r)&&(r=r(h,c)),X(o)&&(o=o(h,c)),X(s)&&(s=s(h,c))}var p=n.getModel("itemStyle").getItemStyle(),d=Ty(a,"color");p.fill||(p.fill=d),u.setItemVisual(t,{symbol:i,symbolSize:r,symbolRotate:o,symbolOffset:s,symbolKeepAspect:l,style:p})})),l.updateData(u),this.group.add(l.group),u.eachItemGraphicEl((function(t){t.traverse((function(t){Qs(t).dataModel=e}))})),this.markKeep(l),l.group.silent=e.get("silent")||t.get("silent")},e.type="markPoint",e}(fB);var vB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.createMarkerModelFromSeries=function(t,n,i){return new e(t,n,i)},e.type="markLine",e.defaultOption={z:5,symbol:["circle","arrow"],symbolSize:[8,16],symbolOffset:0,precision:2,tooltip:{trigger:"item"},label:{show:!0,position:"end",distance:5},lineStyle:{type:"dashed"},emphasis:{label:{show:!0},lineStyle:{width:3}},animationEasing:"linear"},e}(iB),mB=Oo(),xB=function(t,e,n,i){var r,o=t.getData();if(Y(i))r=i;else{var a=i.type;if("min"===a||"max"===a||"average"===a||"median"===a||null!=i.xAxis||null!=i.yAxis){var s=void 0,l=void 
0;if(null!=i.yAxis||null!=i.xAxis)s=e.getAxis(null!=i.yAxis?"y":"x"),l=it(i.yAxis,i.xAxis);else{var u=uB(i,o,e,t);s=u.valueAxis,l=pB(o,yx(o,u.valueDataDim),a)}var h="x"===s.dim?0:1,c=1-h,p=T(i),d={coord:[]};p.type=null,p.coord=[],p.coord[c]=-1/0,d.coord[c]=1/0;var f=n.get("precision");f>=0&&j(l)&&(l=+l.toFixed(Math.min(f,20))),p.coord[h]=d.coord[h]=l,r=[p,d,{type:a,valueIndex:i.valueIndex,value:l}]}else r=[]}var g=[lB(t,r[0]),lB(t,r[1]),A({},r[2])];return g[2].type=g[2].type||null,C(g[2],g[0]),C(g[2],g[1]),g};function _B(t){return!isNaN(t)&&!isFinite(t)}function bB(t,e,n,i){var r=1-t,o=i.dimensions[t];return _B(e[r])&&_B(n[r])&&e[t]===n[t]&&i.getAxis(o).containData(e[t])}function wB(t,e){if("cartesian2d"===t.type){var n=e[0].coord,i=e[1].coord;if(n&&i&&(bB(1,n,i,t)||bB(0,n,i,t)))return!0}return hB(t,e[0])&&hB(t,e[1])}function SB(t,e,n,i,r){var o,a=i.coordinateSystem,s=t.getItemModel(e),l=Ur(s.get("x"),r.getWidth()),u=Ur(s.get("y"),r.getHeight());if(isNaN(l)||isNaN(u)){if(i.getMarkerPosition)o=i.getMarkerPosition(t.getValues(t.dimensions,e));else{var h=a.dimensions,c=t.get(h[0],e),p=t.get(h[1],e);o=a.dataToPoint([c,p])}if(MS(a,"cartesian2d")){var d=a.getAxis("x"),f=a.getAxis("y");h=a.dimensions;_B(t.get(h[0],e))?o[0]=d.toGlobalCoord(d.getExtent()[n?0:1]):_B(t.get(h[1],e))&&(o[1]=f.toGlobalCoord(f.getExtent()[n?0:1]))}isNaN(l)||(o[0]=l),isNaN(u)||(o[1]=u)}else o=[l,u];t.setItemLayout(e,o)}var MB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.updateTransform=function(t,e,n){e.eachSeries((function(t){var e=iB.getMarkerModelFromSeries(t,"markLine");if(e){var i=e.getData(),r=mB(e).from,o=mB(e).to;r.each((function(e){SB(r,e,!0,t,n),SB(o,e,!1,t,n)})),i.each((function(t){i.setItemLayout(t,[r.getItemLayout(t),o.getItemLayout(t)])})),this.markerGroupMap.get(t.id).updateLayout()}}),this)},e.prototype.renderSeries=function(t,e,n,i){var 
r=t.coordinateSystem,o=t.id,a=t.getData(),s=this.markerGroupMap,l=s.get(o)||s.set(o,new RA);this.group.add(l.group);var u=function(t,e,n){var i;i=t?z(t&&t.dimensions,(function(t){return A(A({},e.getData().getDimensionInfo(e.getData().mapDimension(t))||{}),{name:t,ordinalMeta:null})})):[{name:"value",type:"float"}];var r=new lx(i,n),o=new lx(i,n),a=new lx([],n),s=z(n.get("data"),H(xB,e,t,n));t&&(s=B(s,H(wB,t)));var l=cB(!!t,i);return r.initData(z(s,(function(t){return t[0]})),null,l),o.initData(z(s,(function(t){return t[1]})),null,l),a.initData(z(s,(function(t){return t[2]}))),a.hasItemOption=!0,{from:r,to:o,line:a}}(r,t,e),h=u.from,c=u.to,p=u.line;mB(e).from=h,mB(e).to=c,e.setData(p);var d=e.get("symbol"),f=e.get("symbolSize"),g=e.get("symbolRotate"),y=e.get("symbolOffset");function v(e,n,r){var o=e.getItemModel(n);SB(e,n,r,t,i);var s=o.getModel("itemStyle").getItemStyle();null==s.fill&&(s.fill=Ty(a,"color")),e.setItemVisual(n,{symbolKeepAspect:o.get("symbolKeepAspect"),symbolOffset:rt(o.get("symbolOffset",!0),y[r?0:1]),symbolRotate:rt(o.get("symbolRotate",!0),g[r?0:1]),symbolSize:rt(o.get("symbolSize"),f[r?0:1]),symbol:rt(o.get("symbol",!0),d[r?0:1]),style:s})}Y(d)||(d=[d,d]),Y(f)||(f=[f,f]),Y(g)||(g=[g,g]),Y(y)||(y=[y,y]),u.from.each((function(t){v(h,t,!0),v(c,t,!1)})),p.each((function(t){var 
e=p.getItemModel(t).getModel("lineStyle").getLineStyle();p.setItemLayout(t,[h.getItemLayout(t),c.getItemLayout(t)]),null==e.stroke&&(e.stroke=h.getItemVisual(t,"style").fill),p.setItemVisual(t,{fromSymbolKeepAspect:h.getItemVisual(t,"symbolKeepAspect"),fromSymbolOffset:h.getItemVisual(t,"symbolOffset"),fromSymbolRotate:h.getItemVisual(t,"symbolRotate"),fromSymbolSize:h.getItemVisual(t,"symbolSize"),fromSymbol:h.getItemVisual(t,"symbol"),toSymbolKeepAspect:c.getItemVisual(t,"symbolKeepAspect"),toSymbolOffset:c.getItemVisual(t,"symbolOffset"),toSymbolRotate:c.getItemVisual(t,"symbolRotate"),toSymbolSize:c.getItemVisual(t,"symbolSize"),toSymbol:c.getItemVisual(t,"symbol"),style:e})})),l.updateData(p),u.line.eachItemGraphicEl((function(t){Qs(t).dataModel=e,t.traverse((function(t){Qs(t).dataModel=e}))})),this.markKeep(l),l.group.silent=e.get("silent")||t.get("silent")},e.type="markLine",e}(fB);var IB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.createMarkerModelFromSeries=function(t,n,i){return new e(t,n,i)},e.type="markArea",e.defaultOption={z:1,tooltip:{trigger:"item"},animation:!1,label:{show:!0,position:"top"},itemStyle:{borderWidth:0},emphasis:{label:{show:!0,position:"top"}}},e}(iB),TB=Oo(),CB=function(t,e,n,i){var r=i[0],o=i[1];if(r&&o){var a=lB(t,r),s=lB(t,o),l=a.coord,u=s.coord;l[0]=it(l[0],-1/0),l[1]=it(l[1],-1/0),u[0]=it(u[0],1/0),u[1]=it(u[1],1/0);var h=D([{},a,s]);return h.coord=[a.coord,s.coord],h.x0=a.x,h.y0=a.y,h.x1=s.x,h.y1=s.y,h}};function DB(t){return!isNaN(t)&&!isFinite(t)}function AB(t,e,n,i){var r=1-t;return DB(e[r])&&DB(n[r])}function kB(t,e){var n=e.coord[0],i=e.coord[1],r={coord:n,x:e.x0,y:e.y0},o={coord:i,x:e.x1,y:e.y1};return MS(t,"cartesian2d")?!(!n||!i||!AB(1,n,i)&&!AB(0,n,i))||function(t,e,n){return!(t&&t.containZone&&e.coord&&n.coord&&!oB(e)&&!oB(n))||t.containZone(e.coord,n.coord)}(t,r,o):hB(t,r)||hB(t,o)}function LB(t,e,n,i,r){var 
o,a=i.coordinateSystem,s=t.getItemModel(e),l=Ur(s.get(n[0]),r.getWidth()),u=Ur(s.get(n[1]),r.getHeight());if(isNaN(l)||isNaN(u)){if(i.getMarkerPosition){var h=t.getValues(["x0","y0"],e),c=t.getValues(["x1","y1"],e),p=a.clampData(h),d=a.clampData(c),f=[];"x0"===n[0]?f[0]=p[0]>d[0]?c[0]:h[0]:f[0]=p[0]>d[0]?h[0]:c[0],"y0"===n[1]?f[1]=p[1]>d[1]?c[1]:h[1]:f[1]=p[1]>d[1]?h[1]:c[1],o=i.getMarkerPosition(f,n,!0)}else{var g=[m=t.get(n[0],e),x=t.get(n[1],e)];a.clampData&&a.clampData(g,g),o=a.dataToPoint(g,!0)}if(MS(a,"cartesian2d")){var y=a.getAxis("x"),v=a.getAxis("y"),m=t.get(n[0],e),x=t.get(n[1],e);DB(m)?o[0]=y.toGlobalCoord(y.getExtent()["x0"===n[0]?0:1]):DB(x)&&(o[1]=v.toGlobalCoord(v.getExtent()["y0"===n[1]?0:1]))}isNaN(l)||(o[0]=l),isNaN(u)||(o[1]=u)}else o=[l,u];return o}var PB=[["x0","y0"],["x1","y0"],["x1","y1"],["x0","y1"]],OB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.updateTransform=function(t,e,n){e.eachSeries((function(t){var e=iB.getMarkerModelFromSeries(t,"markArea");if(e){var i=e.getData();i.each((function(e){var r=z(PB,(function(r){return LB(i,e,r,t,n)}));i.setItemLayout(e,r),i.getItemGraphicEl(e).setShape("points",r)}))}}),this)},e.prototype.renderSeries=function(t,e,n,i){var r=t.coordinateSystem,o=t.id,a=t.getData(),s=this.markerGroupMap,l=s.get(o)||s.set(o,{group:new zr});this.group.add(l.group),this.markKeep(l);var u=function(t,e,n){var i,r,o=["x0","y0","x1","y1"];if(t){var a=z(t&&t.dimensions,(function(t){var n=e.getData();return A(A({},n.getDimensionInfo(n.mapDimension(t))||{}),{name:t,ordinalMeta:null})}));r=z(o,(function(t,e){return{name:t,type:a[e%2].type}})),i=new lx(r,n)}else i=new lx(r=[{name:"value",type:"float"}],n);var s=z(n.get("data"),H(CB,e,t,n));t&&(s=B(s,H(kB,t)));var l=t?function(t,e,n,i){return wf(t.coord[Math.floor(i/2)][i%2],r[i])}:function(t,e,n,i){return wf(t.value,r[i])};return 
i.initData(s,null,l),i.hasItemOption=!0,i}(r,t,e);e.setData(u),u.each((function(e){var n=z(PB,(function(n){return LB(u,e,n,t,i)})),o=r.getAxis("x").scale,s=r.getAxis("y").scale,l=o.getExtent(),h=s.getExtent(),c=[o.parse(u.get("x0",e)),o.parse(u.get("x1",e))],p=[s.parse(u.get("y0",e)),s.parse(u.get("y1",e))];jr(c),jr(p);var d=!!(l[0]>c[1]||l[1]p[1]||h[1]=0},e.prototype.getOrient=function(){return"vertical"===this.get("orient")?{index:1,name:"vertical"}:{index:0,name:"horizontal"}},e.type="legend.plain",e.dependencies=["series"],e.defaultOption={z:4,show:!0,orient:"horizontal",left:"center",top:0,align:"auto",backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",borderRadius:0,borderWidth:0,padding:5,itemGap:10,itemWidth:25,itemHeight:14,symbolRotate:"inherit",symbolKeepAspect:!0,inactiveColor:"#ccc",inactiveBorderColor:"#ccc",inactiveBorderWidth:"auto",itemStyle:{color:"inherit",opacity:"inherit",borderColor:"inherit",borderWidth:"auto",borderCap:"inherit",borderJoin:"inherit",borderDashOffset:"inherit",borderMiterLimit:"inherit"},lineStyle:{width:"auto",color:"inherit",inactiveColor:"#ccc",inactiveWidth:2,opacity:"inherit",type:"inherit",cap:"inherit",join:"inherit",dashOffset:"inherit",miterLimit:"inherit"},textStyle:{color:"#333"},selectedMode:!0,selector:!1,selectorLabel:{show:!0,borderRadius:10,padding:[3,5,3,5],fontSize:12,fontFamily:"sans-serif",color:"#666",borderWidth:1,borderColor:"#666"},emphasis:{selectorLabel:{show:!0,color:"#eee",backgroundColor:"#666"}},selectorPosition:"auto",selectorItemGap:7,selectorButtonGap:10,tooltip:{show:!1}},e}(Rp),NB=H,EB=E,zB=zr,VB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.newlineDisabled=!1,n}return n(e,t),e.prototype.init=function(){this.group.add(this._contentGroup=new zB),this.group.add(this._selectorGroup=new zB),this._isFirstRender=!0},e.prototype.getContentGroup=function(){return this._contentGroup},e.prototype.getSelectorGroup=function(){return 
this._selectorGroup},e.prototype.render=function(t,e,n){var i=this._isFirstRender;if(this._isFirstRender=!1,this.resetInner(),t.get("show",!0)){var r=t.get("align"),o=t.get("orient");r&&"auto"!==r||(r="right"===t.get("left")&&"vertical"===o?"right":"left");var a=t.get("selector",!0),s=t.get("selectorPosition",!0);!a||s&&"auto"!==s||(s="horizontal"===o?"end":"start"),this.renderInner(r,t,e,n,a,o,s);var l=t.getBoxLayoutParams(),u={width:n.getWidth(),height:n.getHeight()},h=t.get("padding"),c=Cp(l,u,h),p=this.layoutInner(t,r,c,i,a,s),d=Cp(k({width:p.width,height:p.height},l),u,h);this.group.x=d.x-p.x,this.group.y=d.y-p.y,this.group.markRedraw(),this.group.add(this._backgroundEl=dz(p,t))}},e.prototype.resetInner=function(){this.getContentGroup().removeAll(),this._backgroundEl&&this.group.remove(this._backgroundEl),this.getSelectorGroup().removeAll()},e.prototype.renderInner=function(t,e,n,i,r,o,a){var s=this.getContentGroup(),l=yt(),u=e.get("selectedMode"),h=[];n.eachRawSeries((function(t){!t.get("legendHoverLink")&&h.push(t.id)})),EB(e.getData(),(function(r,o){var a=r.get("name");if(!this.newlineDisabled&&(""===a||"\n"===a)){var c=new zB;return c.newline=!0,void s.add(c)}var p=n.getSeriesByName(a)[0];if(!l.get(a)){if(p){var d=p.getData(),f=d.getVisual("legendLineStyle")||{},g=d.getVisual("legendIcon"),y=d.getVisual("style");this._createItem(p,a,o,r,e,t,f,y,g,u,i).on("click",NB(BB,a,null,i,h)).on("mouseover",NB(GB,p.name,null,i,h)).on("mouseout",NB(WB,p.name,null,i,h)),l.set(a,!0)}else n.eachRawSeries((function(n){if(!l.get(a)&&n.legendVisualProvider){var s=n.legendVisualProvider;if(!s.containName(a))return;var 
c=s.indexOfName(a),p=s.getItemVisual(c,"style"),d=s.getItemVisual(c,"legendIcon"),f=qn(p.fill);f&&0===f[3]&&(f[3]=.2,p=A(A({},p),{fill:ri(f,"rgba")})),this._createItem(n,a,o,r,e,t,{},p,d,u,i).on("click",NB(BB,null,a,i,h)).on("mouseover",NB(GB,null,a,i,h)).on("mouseout",NB(WB,null,a,i,h)),l.set(a,!0)}}),this);0}}),this),r&&this._createSelector(r,e,i,o,a)},e.prototype._createSelector=function(t,e,n,i,r){var o=this.getSelectorGroup();EB(t,(function(t){var i=t.type,r=new Fs({style:{x:0,y:0,align:"center",verticalAlign:"middle"},onclick:function(){n.dispatchAction({type:"all"===i?"legendAllSelect":"legendInverseSelect"})}});o.add(r),tc(r,{normal:e.getModel("selectorLabel"),emphasis:e.getModel(["emphasis","selectorLabel"])},{defaultText:t.title}),Hl(r)}))},e.prototype._createItem=function(t,e,n,i,r,o,a,s,l,u,h){var c=t.visualDrawType,p=r.get("itemWidth"),d=r.get("itemHeight"),f=r.isSelected(e),g=i.get("symbolRotate"),y=i.get("symbolKeepAspect"),v=i.get("icon"),m=function(t,e,n,i,r,o,a){function s(t,e){"auto"===t.lineWidth&&(t.lineWidth=e.lineWidth>0?2:0),EB(t,(function(n,i){"inherit"===t[i]&&(t[i]=e[i])}))}var l=e.getModel("itemStyle"),u=l.getItemStyle(),h=0===t.lastIndexOf("empty",0)?"fill":"stroke",c=l.getShallow("decal");u.decal=c&&"inherit"!==c?gv(c,a):i.decal,"inherit"===u.fill&&(u.fill=i[r]);"inherit"===u.stroke&&(u.stroke=i[h]);"inherit"===u.opacity&&(u.opacity=("fill"===r?i:n).opacity);s(u,i);var p=e.getModel("lineStyle"),d=p.getLineStyle();if(s(d,n),"auto"===u.fill&&(u.fill=i.fill),"auto"===u.stroke&&(u.stroke=i.fill),"auto"===d.stroke&&(d.stroke=i.fill),!o){var f=e.get("inactiveBorderWidth"),g=u[h];u.lineWidth="auto"===f?i.lineWidth>0&&g?2:0:u.lineWidth,u.fill=e.get("inactiveColor"),u.stroke=e.get("inactiveBorderColor"),d.stroke=p.get("inactiveColor"),d.lineWidth=p.get("inactiveWidth")}return{itemStyle:u,lineStyle:d}}(l=v||l||"roundRect",i,a,s,c,f,h),x=new zB,_=i.getModel("textStyle");if(!X(t.getLegendIcon)||v&&"inherit"!==v){var 
b="inherit"===v&&t.getData().getVisual("symbol")?"inherit"===g?t.getData().getVisual("symbolRotate"):g:0;x.add(function(t){var e=t.icon||"roundRect",n=Wy(e,0,0,t.itemWidth,t.itemHeight,t.itemStyle.fill,t.symbolKeepAspect);n.setStyle(t.itemStyle),n.rotation=(t.iconRotate||0)*Math.PI/180,n.setOrigin([t.itemWidth/2,t.itemHeight/2]),e.indexOf("empty")>-1&&(n.style.stroke=n.style.fill,n.style.fill="#fff",n.style.lineWidth=2);return n}({itemWidth:p,itemHeight:d,icon:l,iconRotate:b,itemStyle:m.itemStyle,lineStyle:m.lineStyle,symbolKeepAspect:y}))}else x.add(t.getLegendIcon({itemWidth:p,itemHeight:d,icon:l,iconRotate:g,itemStyle:m.itemStyle,lineStyle:m.lineStyle,symbolKeepAspect:y}));var w="left"===o?p+5:-5,S=o,M=r.get("formatter"),I=e;U(M)&&M?I=M.replace("{name}",null!=e?e:""):X(M)&&(I=M(e));var T=f?_.getTextColor():i.get("inactiveColor");x.add(new Fs({style:nc(_,{text:I,x:w,y:d/2,fill:T,align:S,verticalAlign:"middle"},{inheritColor:T})}));var C=new zs({shape:x.getBoundingRect(),invisible:!0}),D=i.getModel("tooltip");return D.get("show")&&Zh({el:C,componentModel:r,itemName:e,itemTooltipOption:D.option}),x.add(C),x.eachChild((function(t){t.silent=!0})),C.silent=!u,this.getContentGroup().add(x),Hl(x),x.__legendDataIndex=n,x},e.prototype.layoutInner=function(t,e,n,i,r,o){var a=this.getContentGroup(),s=this.getSelectorGroup();Tp(t.get("orient"),a,t.get("itemGap"),n.width,n.height);var l=a.getBoundingRect(),u=[-l.x,-l.y];if(s.markRedraw(),a.markRedraw(),r){Tp("horizontal",s,t.get("selectorItemGap",!0));var h=s.getBoundingRect(),c=[-h.x,-h.y],p=t.get("selectorButtonGap",!0),d=t.getOrient().index,f=0===d?"width":"height",g=0===d?"height":"width",y=0===d?"y":"x";"end"===o?c[d]+=l[f]+p:u[d]+=h[f]+p,c[1-d]+=l[g]/2-h[g]/2,s.x=c[0],s.y=c[1],a.x=u[0],a.y=u[1];var v={x:0,y:0};return v[f]=l[f]+p+h[f],v[g]=Math.max(l[g],h[g]),v[y]=Math.min(0,h[y]+c[1-d]),v}return 
a.x=u[0],a.y=u[1],this.group.getBoundingRect()},e.prototype.remove=function(){this.getContentGroup().removeAll(),this._isFirstRender=!0},e.type="legend.plain",e}(Tg);function BB(t,e,n,i){WB(t,e,n,i),n.dispatchAction({type:"legendToggleSelect",name:null!=t?t:e}),GB(t,e,n,i)}function FB(t){for(var e,n=t.getZr().storage.getDisplayList(),i=0,r=n.length;in[r],f=[-c.x,-c.y];e||(f[i]=l[s]);var g=[0,0],y=[-p.x,-p.y],v=rt(t.get("pageButtonGap",!0),t.get("itemGap",!0));d&&("end"===t.get("pageButtonPosition",!0)?y[i]+=n[r]-p[r]:g[i]+=p[r]+v);y[1-i]+=c[o]/2-p[o]/2,l.setPosition(f),u.setPosition(g),h.setPosition(y);var m={x:0,y:0};if(m[r]=d?n[r]:c[r],m[o]=Math.max(c[o],p[o]),m[a]=Math.min(0,p[a]+y[1-i]),u.__rectSize=n[r],d){var x={x:0,y:0};x[r]=Math.max(n[r]-p[r]-v,0),x[o]=m[o],u.setClipPath(new zs({shape:x})),u.__rectSize=x[r]}else h.eachChild((function(t){t.attr({invisible:!0,silent:!0})}));var _=this._getPageInfo(t);return null!=_.pageIndex&&fh(l,{x:_.contentPosition[0],y:_.contentPosition[1]},d?t:null),this._updatePageInfoView(t,_),m},e.prototype._pageGo=function(t,e,n){var i=this._getPageInfo(e)[t];null!=i&&n.dispatchAction({type:"legendScroll",scrollDataIndex:i,legendId:e.id})},e.prototype._updatePageInfoView=function(t,e){var n=this._controllerGroup;E(["pagePrev","pageNext"],(function(i){var r=null!=e[i+"DataIndex"],o=n.childOfName(i);o&&(o.setStyle("fill",r?t.get("pageIconColor",!0):t.get("pageIconInactiveColor",!0)),o.cursor=r?"pointer":"default")}));var i=n.childOfName("pageText"),r=t.get("pageFormatter"),o=e.pageIndex,a=null!=o?o+1:0,s=e.pageCount;i&&r&&i.setStyle("text",U(r)?r.replace("{current}",null==a?"":a+"").replace("{total}",null==s?"":s+""):r({current:a,total:s}))},e.prototype._getPageInfo=function(t){var 
e=t.get("scrollDataIndex",!0),n=this.getContentGroup(),i=this._containerGroup.__rectSize,r=t.getOrient().index,o=qB[r],a=KB[r],s=this._findTargetItemIndex(e),l=n.children(),u=l[s],h=l.length,c=h?1:0,p={contentPosition:[n.x,n.y],pageCount:c,pageIndex:c-1,pagePrevDataIndex:null,pageNextDataIndex:null};if(!u)return p;var d=m(u);p.contentPosition[r]=-d.s;for(var f=s+1,g=d,y=d,v=null;f<=h;++f)(!(v=m(l[f]))&&y.e>g.s+i||v&&!x(v,g.s))&&(g=y.i>g.i?y:v)&&(null==p.pageNextDataIndex&&(p.pageNextDataIndex=g.i),++p.pageCount),y=v;for(f=s-1,g=d,y=d,v=null;f>=-1;--f)(v=m(l[f]))&&x(y,v.s)||!(g.i=e&&t.s<=e+i}},e.prototype._findTargetItemIndex=function(t){return this._showController?(this.getContentGroup().eachChild((function(i,r){var o=i.__legendDataIndex;null==n&&null!=o&&(n=r),o===t&&(e=r)})),null!=e?e:n):0;var e,n},e.type="legend.scroll",e}(VB);function JB(t){Nm(XB),t.registerComponentModel(UB),t.registerComponentView($B),function(t){t.registerAction("legendScroll","legendscroll",(function(t,e){var n=t.scrollDataIndex;null!=n&&e.eachComponent({mainType:"legend",subType:"scroll",query:t},(function(t){t.setScrollDataIndex(n)}))}))}(t)}var QB=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.type="dataZoom.inside",e.defaultOption=Cc(KE.defaultOption,{disabled:!1,zoomLock:!1,zoomOnMouseWheel:!0,moveOnMouseMove:!0,moveOnMouseWheel:!1,preventDefaultMouseMove:!0}),e}(KE),tF=Oo();function eF(t,e,n){tF(t).coordSysRecordMap.each((function(t){var i=t.dataZoomInfoMap.get(e.uid);i&&(i.getRange=n)}))}function nF(t,e){if(e){t.removeKey(e.model.uid);var n=e.controller;n&&n.dispose()}}function iF(t,e){t.isDisposed()||t.dispatchAction({type:"dataZoom",animation:{easing:"cubicOut",duration:100},batch:e})}function rF(t,e,n,i){return t.coordinateSystem.containPoint([n,i])}function oF(t){t.registerProcessor(t.PRIORITY.PROCESSOR.FILTER,(function(t,e){var 
n=tF(e),i=n.coordSysRecordMap||(n.coordSysRecordMap=yt());i.each((function(t){t.dataZoomInfoMap=null})),t.eachComponent({mainType:"dataZoom",subType:"inside"},(function(t){E(jE(t).infoList,(function(n){var r=n.model.uid,o=i.get(r)||i.set(r,function(t,e){var n={model:e,containsPoint:H(rF,e),dispatchAction:H(iF,t),dataZoomInfoMap:null,controller:null},i=n.controller=new UI(t.getZr());return E(["pan","zoom","scrollMove"],(function(t){i.on(t,(function(e){var i=[];n.dataZoomInfoMap.each((function(r){if(e.isAvailableBehavior(r.model.option)){var o=(r.getRange||{})[t],a=o&&o(r.dzReferCoordSysInfo,n.model.mainType,n.controller,e);!r.model.get("disabled",!0)&&a&&i.push({dataZoomId:r.model.id,start:a[0],end:a[1]})}})),i.length&&n.dispatchAction(i)}))})),n}(e,n.model));(o.dataZoomInfoMap||(o.dataZoomInfoMap=yt())).set(t.uid,{dzReferCoordSysInfo:n,model:t,getRange:null})}))})),i.each((function(t){var e,n=t.controller,r=t.dataZoomInfoMap;if(r){var o=r.keys()[0];null!=o&&(e=r.get(o))}if(e){var a=function(t){var e,n="type_",i={type_true:2,type_move:1,type_false:0,type_undefined:-1},r=!0;return t.each((function(t){var o=t.model,a=!o.get("disabled",!0)&&(!o.get("zoomLock",!0)||"move");i[n+a]>i[n+e]&&(e=a),r=r&&o.get("preventDefaultMouseMove",!0)})),{controlType:e,opt:{zoomOnMouseWheel:!0,moveOnMouseMove:!0,moveOnMouseWheel:!0,preventDefaultMouseMove:!!r}}}(r);n.enable(a.controlType,a.opt),n.setPointerChecker(t.containsPoint),Fg(t,"dispatchAction",e.model.get("throttle",!0),"fixRate")}else nF(i,t)}))}))}var aF=function(t){function e(){var e=null!==t&&t.apply(this,arguments)||this;return e.type="dataZoom.inside",e}return n(e,t),e.prototype.render=function(e,n,i){t.prototype.render.apply(this,arguments),e.noTarget()?this._clear():(this.range=e.getPercentRange(),eF(i,e,{pan:W(sF.pan,this),zoom:W(sF.zoom,this),scrollMove:W(sF.scrollMove,this)}))},e.prototype.dispose=function(){this._clear(),t.prototype.dispose.apply(this,arguments)},e.prototype._clear=function(){!function(t,e){for(var 
n=tF(t).coordSysRecordMap,i=n.keys(),r=0;r0?s.pixelStart+s.pixelLength-s.pixel:s.pixel-s.pixelStart)/s.pixelLength*(o[1]-o[0])+o[0],u=Math.max(1/i.scale,0);o[0]=(o[0]-l)*u+l,o[1]=(o[1]-l)*u+l;var h=this.dataZoomModel.findRepresentativeAxisProxy().getMinMaxSpan();return Ck(0,o,[0,100],0,h.minSpan,h.maxSpan),this.range=o,r[0]!==o[0]||r[1]!==o[1]?o:void 0}},pan:lF((function(t,e,n,i,r,o){var a=uF[i]([o.oldX,o.oldY],[o.newX,o.newY],e,r,n);return a.signal*(t[1]-t[0])*a.pixel/a.pixelLength})),scrollMove:lF((function(t,e,n,i,r,o){return uF[i]([0,0],[o.scrollDelta,o.scrollDelta],e,r,n).signal*(t[1]-t[0])*o.scrollDelta}))};function lF(t){return function(e,n,i,r){var o=this.range,a=o.slice(),s=e.axisModels[0];if(s)return Ck(t(a,s,e,n,i,r),a,[0,100],"all"),this.range=a,o[0]!==a[0]||o[1]!==a[1]?a:void 0}}var uF={grid:function(t,e,n,i,r){var o=n.axis,a={},s=r.model.coordinateSystem.getRect();return t=t||[0,0],"x"===o.dim?(a.pixel=e[0]-t[0],a.pixelLength=s.width,a.pixelStart=s.x,a.signal=o.inverse?1:-1):(a.pixel=e[1]-t[1],a.pixelLength=s.height,a.pixelStart=s.y,a.signal=o.inverse?-1:1),a},polar:function(t,e,n,i,r){var o=n.axis,a={},s=r.model.coordinateSystem,l=s.getRadiusAxis().getExtent(),u=s.getAngleAxis().getExtent();return t=t?s.pointToCoord(t):[0,0],e=s.pointToCoord(e),"radiusAxis"===n.mainType?(a.pixel=e[0]-t[0],a.pixelLength=l[1]-l[0],a.pixelStart=l[0],a.signal=o.inverse?1:-1):(a.pixel=e[1]-t[1],a.pixelLength=u[1]-u[0],a.pixelStart=u[0],a.signal=o.inverse?-1:1),a},singleAxis:function(t,e,n,i,r){var o=n.axis,a=r.model.coordinateSystem.getRect(),s={};return t=t||[0,0],"horizontal"===o.orient?(s.pixel=e[0]-t[0],s.pixelLength=a.width,s.pixelStart=a.x,s.signal=o.inverse?1:-1):(s.pixel=e[1]-t[1],s.pixelLength=a.height,s.pixelStart=a.y,s.signal=o.inverse?-1:1),s}};function hF(t){az(t),t.registerComponentModel(QB),t.registerComponentView(aF),oF(t)}var cF=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return 
n(e,t),e.type="dataZoom.slider",e.layoutMode="box",e.defaultOption=Cc(KE.defaultOption,{show:!0,right:"ph",top:"ph",width:"ph",height:"ph",left:null,bottom:null,borderColor:"#d2dbee",borderRadius:3,backgroundColor:"rgba(47,69,84,0)",dataBackground:{lineStyle:{color:"#d2dbee",width:.5},areaStyle:{color:"#d2dbee",opacity:.2}},selectedDataBackground:{lineStyle:{color:"#8fb0f7",width:.5},areaStyle:{color:"#8fb0f7",opacity:.2}},fillerColor:"rgba(135,175,274,0.2)",handleIcon:"path://M-9.35,34.56V42m0-40V9.5m-2,0h4a2,2,0,0,1,2,2v21a2,2,0,0,1-2,2h-4a2,2,0,0,1-2-2v-21A2,2,0,0,1-11.35,9.5Z",handleSize:"100%",handleStyle:{color:"#fff",borderColor:"#ACB8D1"},moveHandleSize:7,moveHandleIcon:"path://M-320.9-50L-320.9-50c18.1,0,27.1,9,27.1,27.1V85.7c0,18.1-9,27.1-27.1,27.1l0,0c-18.1,0-27.1-9-27.1-27.1V-22.9C-348-41-339-50-320.9-50z M-212.3-50L-212.3-50c18.1,0,27.1,9,27.1,27.1V85.7c0,18.1-9,27.1-27.1,27.1l0,0c-18.1,0-27.1-9-27.1-27.1V-22.9C-239.4-41-230.4-50-212.3-50z M-103.7-50L-103.7-50c18.1,0,27.1,9,27.1,27.1V85.7c0,18.1-9,27.1-27.1,27.1l0,0c-18.1,0-27.1-9-27.1-27.1V-22.9C-130.9-41-121.8-50-103.7-50z",moveHandleStyle:{color:"#D2DBEE",opacity:.7},showDetail:!0,showDataShadow:"auto",realtime:!0,zoomLock:!1,textStyle:{color:"#6E7079"},brushSelect:!0,brushStyle:{color:"rgba(135,175,274,0.15)"},emphasis:{handleStyle:{borderColor:"#8FB0F7"},moveHandleStyle:{color:"#8FB0F7"}}}),e}(KE),pF=zs,dF="horizontal",fF="vertical",gF=["line","bar","candlestick","scatter"],yF={easing:"cubicOut",duration:100,delay:0},vF=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n._displayables={},n}return n(e,t),e.prototype.init=function(t,e){this.api=e,this._onBrush=W(this._onBrush,this),this._onBrushEnd=W(this._onBrushEnd,this)},e.prototype.render=function(e,n,i,r){if(t.prototype.render.apply(this,arguments),Fg(this,"_dispatchZoomAction",e.get("throttle"),"fixRate"),this._orient=e.getOrient(),!1!==e.get("show")){if(e.noTarget())return this._clear(),void 
this.group.removeAll();r&&"dataZoom"===r.type&&r.from===this.uid||this._buildView(),this._updateView()}else this.group.removeAll()},e.prototype.dispose=function(){this._clear(),t.prototype.dispose.apply(this,arguments)},e.prototype._clear=function(){Gg(this,"_dispatchZoomAction");var t=this.api.getZr();t.off("mousemove",this._onBrush),t.off("mouseup",this._onBrushEnd)},e.prototype._buildView=function(){var t=this.group;t.removeAll(),this._brushing=!1,this._displayables.brushRect=null,this._resetLocation(),this._resetInterval();var e=this._displayables.sliderGroup=new zr;this._renderBackground(),this._renderHandle(),this._renderDataShadow(),t.add(e),this._positionGroup()},e.prototype._resetLocation=function(){var t=this.dataZoomModel,e=this.api,n=t.get("brushSelect")?7:0,i=this._findCoordRect(),r={width:e.getWidth(),height:e.getHeight()},o=this._orient===dF?{right:r.width-i.x-i.width,top:r.height-30-7-n,width:i.width,height:30}:{right:7,top:i.y,width:30,height:i.height},a=Lp(t.option);E(["right","top","width","height"],(function(t){"ph"===a[t]&&(a[t]=o[t])}));var s=Cp(a,r);this._location={x:s.x,y:s.y},this._size=[s.width,s.height],this._orient===fF&&this._size.reverse()},e.prototype._positionGroup=function(){var t=this.group,e=this._location,n=this._orient,i=this.dataZoomModel.getFirstTargetAxisModel(),r=i&&i.get("inverse"),o=this._displayables.sliderGroup,a=(this._dataShadowInfo||{}).otherAxisInverse;o.attr(n!==dF||r?n===dF&&r?{scaleY:a?1:-1,scaleX:-1}:n!==fF||r?{scaleY:a?-1:1,scaleX:-1,rotation:Math.PI/2}:{scaleY:a?-1:1,scaleX:1,rotation:Math.PI/2}:{scaleY:a?1:-1,scaleX:1});var s=t.getBoundingRect([o]);t.x=e.x-s.x,t.y=e.y-s.y,t.markRedraw()},e.prototype._getViewExtent=function(){return[0,this._size[0]]},e.prototype._renderBackground=function(){var t=this.dataZoomModel,e=this._size,n=this._displayables.sliderGroup,i=t.get("brushSelect");n.add(new pF({silent:!0,shape:{x:0,y:0,width:e[0],height:e[1]},style:{fill:t.get("backgroundColor")},z2:-40}));var r=new 
pF({shape:{x:0,y:0,width:e[0],height:e[1]},style:{fill:"transparent"},z2:0,onclick:W(this._onClickPanel,this)}),o=this.api.getZr();i?(r.on("mousedown",this._onBrushStart,this),r.cursor="crosshair",o.on("mousemove",this._onBrush),o.on("mouseup",this._onBrushEnd)):(o.off("mousemove",this._onBrush),o.off("mouseup",this._onBrushEnd)),n.add(r)},e.prototype._renderDataShadow=function(){var t=this._dataShadowInfo=this._prepareDataShadowInfo();if(this._displayables.dataShadowSegs=[],t){var e=this._size,n=this._shadowSize||[],i=t.series,r=i.getRawData(),o=i.getShadowDim&&i.getShadowDim(),a=o&&r.getDimensionInfo(o)?i.getShadowDim():t.otherDim;if(null!=a){var s=this._shadowPolygonPts,l=this._shadowPolylinePts;if(r!==this._shadowData||a!==this._shadowDim||e[0]!==n[0]||e[1]!==n[1]){var u=r.getDataExtent(a),h=.3*(u[1]-u[0]);u=[u[0]-h,u[1]+h];var c,p=[0,e[1]],d=[0,e[0]],f=[[e[0],0],[0,0]],g=[],y=d[1]/(r.count()-1),v=0,m=Math.round(r.count()/e[0]);r.each([a],(function(t,e){if(m>0&&e%m)v+=y;else{var n=null==t||isNaN(t)||""===t,i=n?0:Xr(t,u,p,!0);n&&!c&&e?(f.push([f[f.length-1][0],0]),g.push([g[g.length-1][0],0])):!n&&c&&(f.push([v,0]),g.push([v,0])),f.push([v,i]),g.push([v,i]),v+=y,c=n}})),s=this._shadowPolygonPts=f,l=this._shadowPolylinePts=g}this._shadowData=r,this._shadowDim=a,this._shadowSize=[e[0],e[1]];for(var x=this.dataZoomModel,_=0;_<3;_++){var b=w(1===_);this._displayables.sliderGroup.add(b),this._displayables.dataShadowSegs.push(b)}}}function w(t){var e=x.getModel(t?"selectedDataBackground":"dataBackground"),n=new zr,i=new Wu({shape:{points:s},segmentIgnoreThreshold:1,style:e.getModel("areaStyle").getAreaStyle(),silent:!0,z2:-20}),r=new Yu({shape:{points:l},segmentIgnoreThreshold:1,style:e.getModel("lineStyle").getLineStyle(),silent:!0,z2:-19});return n.add(i),n.add(r),n}},e.prototype._prepareDataShadowInfo=function(){var t=this.dataZoomModel,e=t.get("showDataShadow");if(!1!==e){var n,i=this.ecModel;return 
t.eachTargetAxis((function(r,o){E(t.getAxisProxy(r,o).getTargetSeriesModels(),(function(t){if(!(n||!0!==e&&P(gF,t.get("type"))<0)){var a,s=i.getComponent(UE(r),o).axis,l=function(t){var e={x:"y",y:"x",radius:"angle",angle:"radius"};return e[t]}(r),u=t.coordinateSystem;null!=l&&u.getOtherAxis&&(a=u.getOtherAxis(s).inverse),l=t.getData().mapDimension(l),n={thisAxis:s,series:t,thisDim:r,otherDim:l,otherAxisInverse:a}}}),this)}),this),n}},e.prototype._renderHandle=function(){var t=this.group,e=this._displayables,n=e.handles=[null,null],i=e.handleLabels=[null,null],r=this._displayables.sliderGroup,o=this._size,a=this.dataZoomModel,s=this.api,l=a.get("borderRadius")||0,u=a.get("brushSelect"),h=e.filler=new pF({silent:u,style:{fill:a.get("fillerColor")},textConfig:{position:"inside"}});r.add(h),r.add(new pF({silent:!0,subPixelOptimize:!0,shape:{x:0,y:0,width:o[0],height:o[1],r:l},style:{stroke:a.get("dataBackgroundColor")||a.get("borderColor"),lineWidth:1,fill:"rgba(0,0,0,0)"}})),E([0,1],(function(e){var o=a.get("handleIcon");!By[o]&&o.indexOf("path://")<0&&o.indexOf("image://")<0&&(o="path://"+o);var s=Wy(o,-1,0,2,2,null,!0);s.attr({cursor:mF(this._orient),draggable:!0,drift:W(this._onDragMove,this,e),ondragend:W(this._onDragEnd,this),onmouseover:W(this._showDataInfo,this,!0),onmouseout:W(this._showDataInfo,this,!1),z2:5});var l=s.getBoundingRect(),u=a.get("handleSize");this._handleHeight=Ur(u,this._size[1]),this._handleWidth=l.width/l.height*this._handleHeight,s.setStyle(a.getModel("handleStyle").getItemStyle()),s.style.strokeNoScale=!0,s.rectHover=!0,s.ensureState("emphasis").style=a.getModel(["emphasis","handleStyle"]).getItemStyle(),Hl(s);var h=a.get("handleColor");null!=h&&(s.style.fill=h),r.add(n[e]=s);var c=a.getModel("textStyle");t.add(i[e]=new Fs({silent:!0,invisible:!0,style:nc(c,{x:0,y:0,text:"",verticalAlign:"middle",align:"center",fill:c.getTextColor(),font:c.getFont()}),z2:10}))}),this);var c=h;if(u){var p=Ur(a.get("moveHandleSize"),o[1]),d=e.moveHandle=new 
zs({style:a.getModel("moveHandleStyle").getItemStyle(),silent:!0,shape:{r:[0,0,2,2],y:o[1]-.5,height:p}}),f=.8*p,g=e.moveHandleIcon=Wy(a.get("moveHandleIcon"),-f/2,-f/2,f,f,"#fff",!0);g.silent=!0,g.y=o[1]+p/2-.5,d.ensureState("emphasis").style=a.getModel(["emphasis","moveHandleStyle"]).getItemStyle();var y=Math.min(o[1]/2,Math.max(p,10));(c=e.moveZone=new zs({invisible:!0,shape:{y:o[1]-y,height:p+y}})).on("mouseover",(function(){s.enterEmphasis(d)})).on("mouseout",(function(){s.leaveEmphasis(d)})),r.add(d),r.add(g),r.add(c)}c.attr({draggable:!0,cursor:mF(this._orient),drift:W(this._onDragMove,this,"all"),ondragstart:W(this._showDataInfo,this,!0),ondragend:W(this._onDragEnd,this),onmouseover:W(this._showDataInfo,this,!0),onmouseout:W(this._showDataInfo,this,!1)})},e.prototype._resetInterval=function(){var t=this._range=this.dataZoomModel.getPercentRange(),e=this._getViewExtent();this._handleEnds=[Xr(t[0],[0,100],e,!0),Xr(t[1],[0,100],e,!0)]},e.prototype._updateInterval=function(t,e){var n=this.dataZoomModel,i=this._handleEnds,r=this._getViewExtent(),o=n.findRepresentativeAxisProxy().getMinMaxSpan(),a=[0,100];Ck(e,i,r,n.get("zoomLock")?"all":t,null!=o.minSpan?Xr(o.minSpan,a,r,!0):null,null!=o.maxSpan?Xr(o.maxSpan,a,r,!0):null);var s=this._range,l=this._range=jr([Xr(i[0],r,a,!0),Xr(i[1],r,a,!0)]);return!s||s[0]!==l[0]||s[1]!==l[1]},e.prototype._updateView=function(t){var e=this._displayables,n=this._handleEnds,i=jr(n.slice()),r=this._size;E([0,1],(function(t){var i=e.handles[t],o=this._handleHeight;i.attr({scaleX:o/2,scaleY:o/2,x:n[t]+(t?-1:1),y:r[1]/2-o/2})}),this),e.filler.setShape({x:i[0],y:0,width:i[1]-i[0],height:r[1]});var o={x:i[0],width:i[1]-i[0]};e.moveHandle&&(e.moveHandle.setShape(o),e.moveZone.setShape(o),e.moveZone.getBoundingRect(),e.moveHandleIcon&&e.moveHandleIcon.attr("x",o.x+o.width/2));for(var a=e.dataShadowSegs,s=[0,i[0],i[1],r[0]],l=0;le[0]||n[1]<0||n[1]>e[1])){var 
i=this._handleEnds,r=(i[0]+i[1])/2,o=this._updateInterval("all",n[0]-r);this._updateView(),o&&this._dispatchZoomAction(!1)}},e.prototype._onBrushStart=function(t){var e=t.offsetX,n=t.offsetY;this._brushStart=new De(e,n),this._brushing=!0,this._brushStartTime=+new Date},e.prototype._onBrushEnd=function(t){if(this._brushing){var e=this._displayables.brushRect;if(this._brushing=!1,e){e.attr("ignore",!0);var n=e.shape;if(!(+new Date-this._brushStartTime<200&&Math.abs(n.width)<5)){var i=this._getViewExtent(),r=[0,100];this._range=jr([Xr(n.x,i,r,!0),Xr(n.x+n.width,i,r,!0)]),this._handleEnds=[n.x,n.x+n.width],this._updateView(),this._dispatchZoomAction(!1)}}}},e.prototype._onBrush=function(t){this._brushing&&(de(t.event),this._updateBrushRect(t.offsetX,t.offsetY))},e.prototype._updateBrushRect=function(t,e){var n=this._displayables,i=this.dataZoomModel,r=n.brushRect;r||(r=n.brushRect=new pF({silent:!0,style:i.getModel("brushStyle").getItemStyle()}),n.sliderGroup.add(r)),r.attr("ignore",!1);var o=this._brushStart,a=this._displayables.sliderGroup,s=a.transformCoordToLocal(t,e),l=a.transformCoordToLocal(o.x,o.y),u=this._size;s[0]=Math.max(Math.min(u[0],s[0]),0),r.setShape({x:l[0],y:0,width:s[0]-l[0],height:u[1]})},e.prototype._dispatchZoomAction=function(t){var e=this._range;this.api.dispatchAction({type:"dataZoom",from:this.uid,dataZoomId:this.dataZoomModel.id,animation:t?yF:null,start:e[0],end:e[1]})},e.prototype._findCoordRect=function(){var t,e=jE(this.dataZoomModel).infoList;if(!t&&e.length){var n=e[0].model.coordinateSystem;t=n.getRect&&n.getRect()}if(!t){var i=this.api.getWidth(),r=this.api.getHeight();t={x:.2*i,y:.2*r,width:.6*i,height:.6*r}}return t},e.type="dataZoom.slider",e}(QE);function mF(t){return"vertical"===t?"ns-resize":"ew-resize"}function xF(t){t.registerComponentModel(cF),t.registerComponentView(vF),az(t)}var _F=function(t,e,n){var i=T((bF[t]||{})[e]);return 
n&&Y(i)?i[i.length-1]:i},bF={color:{active:["#006edd","#e0ffff"],inactive:["rgba(0,0,0,0)"]},colorHue:{active:[0,360],inactive:[0,0]},colorSaturation:{active:[.3,1],inactive:[0,0]},colorLightness:{active:[.9,.5],inactive:[0,0]},colorAlpha:{active:[.3,1],inactive:[0,0]},opacity:{active:[.3,1],inactive:[0,0]},symbol:{active:["circle","roundRect","diamond"],inactive:["none"]},symbolSize:{active:[10,50],inactive:[0,0]}},wF=_D.mapVisual,SF=_D.eachVisual,MF=Y,IF=E,TF=jr,CF=Xr,DF=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n.stateList=["inRange","outOfRange"],n.replacableOptionKeys=["inRange","outOfRange","target","controller","color"],n.layoutMode={type:"box",ignoreSize:!0},n.dataBound=[-1/0,1/0],n.targetVisuals={},n.controllerVisuals={},n}return n(e,t),e.prototype.init=function(t,e,n){this.mergeDefaultAndTheme(t,n)},e.prototype.optionUpdated=function(t,e){var n=this.option;!e&&wV(n,t,this.replacableOptionKeys),this.textStyleModel=this.getModel("textStyle"),this.resetItemSize(),this.completeVisualOption()},e.prototype.resetVisual=function(t){var e=this.stateList;t=W(t,this),this.controllerVisuals=bV(this.option.controller,e,t),this.targetVisuals=bV(this.option.target,e,t)},e.prototype.getItemSymbol=function(){return null},e.prototype.getTargetSeriesIndices=function(){var t=this.option.seriesIndex,e=[];return null==t||"all"===t?this.ecModel.eachSeries((function(t,n){e.push(n)})):e=bo(t),e},e.prototype.eachTargetSeries=function(t,e){E(this.getTargetSeriesIndices(),(function(n){var i=this.ecModel.getSeriesByIndex(n);i&&t.call(e,i)}),this)},e.prototype.isTargetSeries=function(t){var e=!1;return this.eachTargetSeries((function(n){n===t&&(e=!0)})),e},e.prototype.formatValueText=function(t,e,n){var i,r=this.option,o=r.precision,a=this.dataBound,s=r.formatter;n=n||["<",">"],Y(t)&&(t=t.slice(),i=!0);var l=e?t:i?[u(t[0]),u(t[1])]:u(t);return 
U(s)?s.replace("{value}",i?l[0]:l).replace("{value2}",i?l[1]:l):X(s)?i?s(t[0],t[1]):s(t):i?t[0]===a[0]?n[0]+" "+l[1]:t[1]===a[1]?n[1]+" "+l[0]:l[0]+" - "+l[1]:l;function u(t){return t===a[0]?"min":t===a[1]?"max":(+t).toFixed(Math.min(o,20))}},e.prototype.resetExtent=function(){var t=this.option,e=TF([t.min,t.max]);this._dataExtent=e},e.prototype.getDataDimensionIndex=function(t){var e=this.option.dimension;if(null!=e)return t.getDimensionIndex(e);for(var n=t.dimensions,i=n.length-1;i>=0;i--){var r=n[i],o=t.getDimensionInfo(r);if(!o.isCalculationCoord)return o.storeDimIndex}},e.prototype.getExtent=function(){return this._dataExtent.slice()},e.prototype.completeVisualOption=function(){var t=this.ecModel,e=this.option,n={inRange:e.inRange,outOfRange:e.outOfRange},i=e.target||(e.target={}),r=e.controller||(e.controller={});C(i,n),C(r,n);var o=this.isCategory();function a(n){MF(e.color)&&!n.inRange&&(n.inRange={color:e.color.slice().reverse()}),n.inRange=n.inRange||{color:t.get("gradientColor")}}a.call(this,i),a.call(this,r),function(t,e,n){var i=t[e],r=t[n];i&&!r&&(r=t[n]={},IF(i,(function(t,e){if(_D.isValidType(e)){var n=_F(e,"inactive",o);null!=n&&(r[e]=n,"color"!==e||r.hasOwnProperty("opacity")||r.hasOwnProperty("colorAlpha")||(r.opacity=[0,0]))}})))}.call(this,i,"inRange","outOfRange"),function(t){var e=(t.inRange||{}).symbol||(t.outOfRange||{}).symbol,n=(t.inRange||{}).symbolSize||(t.outOfRange||{}).symbolSize,i=this.get("inactiveColor"),r=this.getItemSymbol()||"roundRect";IF(this.stateList,(function(a){var s=this.itemSize,l=t[a];l||(l=t[a]={color:o?i:[i]}),null==l.symbol&&(l.symbol=e&&T(e)||(o?r:[r])),null==l.symbolSize&&(l.symbolSize=n&&T(n)||(o?s[0]:[s[0],s[0]])),l.symbol=wF(l.symbol,(function(t){return"none"===t?r:t}));var u=l.symbolSize;if(null!=u){var h=-1/0;SF(u,(function(t){t>h&&(h=t)})),l.symbolSize=wF(u,(function(t){return 
CF(t,[0,h],[0,s[0]],!0)}))}}),this)}.call(this,r)},e.prototype.resetItemSize=function(){this.itemSize=[parseFloat(this.get("itemWidth")),parseFloat(this.get("itemHeight"))]},e.prototype.isCategory=function(){return!!this.option.categories},e.prototype.setSelected=function(t){},e.prototype.getSelected=function(){return null},e.prototype.getValueState=function(t){return null},e.prototype.getVisualMeta=function(t){return null},e.type="visualMap",e.dependencies=["series"],e.defaultOption={show:!0,z:4,seriesIndex:"all",min:0,max:200,left:0,right:null,top:null,bottom:0,itemWidth:null,itemHeight:null,inverse:!1,orient:"vertical",backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",contentColor:"#5793f3",inactiveColor:"#aaa",borderWidth:0,padding:5,textGap:10,precision:0,textStyle:{color:"#333"}},e}(Rp),AF=[20,140],kF=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.optionUpdated=function(e,n){t.prototype.optionUpdated.apply(this,arguments),this.resetExtent(),this.resetVisual((function(t){t.mappingMethod="linear",t.dataExtent=this.getExtent()})),this._resetRange()},e.prototype.resetItemSize=function(){t.prototype.resetItemSize.apply(this,arguments);var e=this.itemSize;(null==e[0]||isNaN(e[0]))&&(e[0]=AF[0]),(null==e[1]||isNaN(e[1]))&&(e[1]=AF[1])},e.prototype._resetRange=function(){var t=this.getExtent(),e=this.option.range;!e||e.auto?(t.auto=1,this.option.range=t):Y(e)&&(e[0]>e[1]&&e.reverse(),e[0]=Math.max(e[0],t[0]),e[1]=Math.min(e[1],t[1]))},e.prototype.completeVisualOption=function(){t.prototype.completeVisualOption.apply(this,arguments),E(this.stateList,(function(t){var e=this.option.controller[t].symbolSize;e&&e[0]!==e[1]&&(e[0]=e[1]/3)}),this)},e.prototype.setSelected=function(t){this.option.range=t.slice(),this._resetRange()},e.prototype.getSelected=function(){var t=this.getExtent(),e=jr((this.get("range")||[]).slice());return 
e[0]>t[1]&&(e[0]=t[1]),e[1]>t[1]&&(e[1]=t[1]),e[0]=n[1]||t<=e[1])?"inRange":"outOfRange"},e.prototype.findTargetDataIndices=function(t){var e=[];return this.eachTargetSeries((function(n){var i=[],r=n.getData();r.each(this.getDataDimensionIndex(r),(function(e,n){t[0]<=e&&e<=t[1]&&i.push(n)}),this),e.push({seriesId:n.id,dataIndex:i})}),this),e},e.prototype.getVisualMeta=function(t){var e=LF(this,"outOfRange",this.getExtent()),n=LF(this,"inRange",this.option.range.slice()),i=[];function r(e,n){i.push({value:e,color:t(e,n)})}for(var o=0,a=0,s=n.length,l=e.length;at[1])break;n.push({color:this.getControllerVisual(o,"color",e),offset:r/100})}return n.push({color:this.getControllerVisual(t[1],"color",e),offset:1}),n},e.prototype._createBarPoints=function(t,e){var n=this.visualMapModel.itemSize;return[[n[0]-e[0],t[0]],[n[0],t[0]],[n[0],t[1]],[n[0]-e[1],t[1]]]},e.prototype._createBarGroup=function(t){var e=this._orient,n=this.visualMapModel.get("inverse");return new zr("horizontal"!==e||n?"horizontal"===e&&n?{scaleX:"bottom"===t?-1:1,rotation:-Math.PI/2}:"vertical"!==e||n?{scaleX:"left"===t?1:-1}:{scaleX:"left"===t?1:-1,scaleY:-1}:{scaleX:"bottom"===t?1:-1,rotation:Math.PI/2})},e.prototype._updateHandle=function(t,e){if(this._useHandle){var n=this._shapes,i=this.visualMapModel,r=n.handleThumbs,o=n.handleLabels,a=i.itemSize,s=i.getExtent();zF([0,1],(function(l){var u=r[l];u.setStyle("fill",e.handlesColor[l]),u.y=t[l];var h=EF(t[l],[0,a[1]],s,!0),c=this.getControllerVisual(h,"symbolSize");u.scaleX=u.scaleY=c/a[0],u.x=a[0]-c/2;var p=zh(n.handleLabelPoints[l],Eh(u,this.group));o[l].setStyle({x:p[0],y:p[1],text:i.formatValueText(this._dataInterval[l]),verticalAlign:"middle",align:"vertical"===this._orient?this._applyTransform("left",n.mainGroup):"center"})}),this)}},e.prototype._showIndicator=function(t,e,n,i){var r=this.visualMapModel,o=r.getExtent(),a=r.itemSize,s=[0,a[1]],l=this._shapes,u=l.indicator;if(u){u.attr("invisible",!1);var 
h=this.getControllerVisual(t,"color",{convertOpacityToAlpha:!0}),c=this.getControllerVisual(t,"symbolSize"),p=EF(t,o,s,!0),d=a[0]-c/2,f={x:u.x,y:u.y};u.y=p,u.x=d;var g=zh(l.indicatorLabelPoint,Eh(u,this.group)),y=l.indicatorLabel;y.attr("invisible",!1);var v=this._applyTransform("left",l.mainGroup),m="horizontal"===this._orient;y.setStyle({text:(n||"")+r.formatValueText(e),verticalAlign:m?v:"middle",align:m?"center":v});var x={x:d,y:p,style:{fill:h}},_={style:{x:g[0],y:g[1]}};if(r.ecModel.isAnimationEnabled()&&!this._firstShowIndicator){var b={duration:100,easing:"cubicInOut",additive:!0};u.x=f.x,u.y=f.y,u.animateTo(x,b),y.animateTo(_,b)}else u.attr(x),y.attr(_);this._firstShowIndicator=!1;var w=this._shapes.handleLabels;if(w)for(var S=0;Sr[1]&&(u[1]=1/0),e&&(u[0]===-1/0?this._showIndicator(l,u[1],"< ",a):u[1]===1/0?this._showIndicator(l,u[0],"> ",a):this._showIndicator(l,l,"≈ ",a));var h=this._hoverLinkDataIndices,c=[];(e||WF(n))&&(c=this._hoverLinkDataIndices=n.findTargetDataIndices(u));var p=function(t,e){var n={},i={};return r(t||[],n),r(e||[],i,n),[o(n),o(i)];function r(t,e,n){for(var i=0,r=t.length;i=0&&(r.dimension=o,i.push(r))}})),t.getData().setVisual("visualMeta",i)}}];function ZF(t,e,n,i){for(var r=e.targetVisuals[i],o=_D.prepareVisualTypes(r),a={color:Ty(t.getData(),"color")},s=0,l=o.length;s0:t.splitNumber>0)&&!t.calculable?"piecewise":"continuous"})),t.registerAction(YF,XF),E(UF,(function(e){t.registerVisual(t.PRIORITY.VISUAL.COMPONENT,e)})),t.registerPreprocessor(qF))}function QF(t){t.registerComponentModel(kF),t.registerComponentView(FF),JF(t)}var tG=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n._pieceList=[],n}return n(e,t),e.prototype.optionUpdated=function(e,n){t.prototype.optionUpdated.apply(this,arguments),this.resetExtent();var i=this._mode=this._determineMode();this._pieceList=[],eG[this._mode].call(this,this._pieceList),this._resetSelected(e,n);var 
r=this.option.categories;this.resetVisual((function(t,e){"categories"===i?(t.mappingMethod="category",t.categories=T(r)):(t.dataExtent=this.getExtent(),t.mappingMethod="piecewise",t.pieceList=z(this._pieceList,(function(t){return t=T(t),"inRange"!==e&&(t.visual=null),t})))}))},e.prototype.completeVisualOption=function(){var e=this.option,n={},i=_D.listVisualTypes(),r=this.isCategory();function o(t,e,n){return t&&t[e]&&t[e].hasOwnProperty(n)}E(e.pieces,(function(t){E(i,(function(e){t.hasOwnProperty(e)&&(n[e]=1)}))})),E(n,(function(t,n){var i=!1;E(this.stateList,(function(t){i=i||o(e,t,n)||o(e.target,t,n)}),this),!i&&E(this.stateList,(function(t){(e[t]||(e[t]={}))[n]=_F(n,"inRange"===t?"active":"inactive",r)}))}),this),t.prototype.completeVisualOption.apply(this,arguments)},e.prototype._resetSelected=function(t,e){var n=this.option,i=this._pieceList,r=(e?n:t).selected||{};if(n.selected=r,E(i,(function(t,e){var n=this.getSelectedMapKey(t);r.hasOwnProperty(n)||(r[n]=!0)}),this),"single"===n.selectedMode){var o=!1;E(i,(function(t,e){var n=this.getSelectedMapKey(t);r[n]&&(o?r[n]=!1:o=!0)}),this)}},e.prototype.getItemSymbol=function(){return this.get("itemSymbol")},e.prototype.getSelectedMapKey=function(t){return"categories"===this._mode?t.value+"":t.index+""},e.prototype.getPieceList=function(){return this._pieceList},e.prototype._determineMode=function(){var t=this.option;return t.pieces&&t.pieces.length>0?"pieces":this.option.categories?"categories":"splitNumber"},e.prototype.setSelected=function(t){this.option.selected=T(t)},e.prototype.getValueState=function(t){var e=_D.findPieceIndex(t,this._pieceList);return null!=e&&this.option.selected[this.getSelectedMapKey(this._pieceList[e])]?"inRange":"outOfRange"},e.prototype.findTargetDataIndices=function(t){var e=[],n=this._pieceList;return this.eachTargetSeries((function(i){var 
r=[],o=i.getData();o.each(this.getDataDimensionIndex(o),(function(e,i){_D.findPieceIndex(e,n)===t&&r.push(i)}),this),e.push({seriesId:i.id,dataIndex:r})}),this),e},e.prototype.getRepresentValue=function(t){var e;if(this.isCategory())e=t.value;else if(null!=t.value)e=t.value;else{var n=t.interval||[];e=n[0]===-1/0&&n[1]===1/0?0:(n[0]+n[1])/2}return e},e.prototype.getVisualMeta=function(t){if(!this.isCategory()){var e=[],n=["",""],i=this,r=this._pieceList.slice();if(r.length){var o=r[0].interval[0];o!==-1/0&&r.unshift({interval:[-1/0,o]}),(o=r[r.length-1].interval[1])!==1/0&&r.push({interval:[o,1/0]})}else r.push({interval:[-1/0,1/0]});var a=-1/0;return E(r,(function(t){var e=t.interval;e&&(e[0]>a&&s([a,e[0]],"outOfRange"),s(e.slice()),a=e[1])}),this),{stops:e,outerColors:n}}function s(r,o){var a=i.getRepresentValue({interval:r});o||(o=i.getValueState(a));var s=t(a,o);r[0]===-1/0?n[0]=s:r[1]===1/0?n[1]=s:e.push({value:r[0],color:s},{value:r[1],color:s})}},e.type="visualMap.piecewise",e.defaultOption=Cc(DF.defaultOption,{selected:null,minOpen:!1,maxOpen:!1,align:"auto",itemWidth:20,itemHeight:14,itemSymbol:"roundRect",pieces:null,categories:null,splitNumber:5,selectedMode:"multiple",itemGap:10,hoverLink:!0}),e}(DF),eG={splitNumber:function(t){var e=this.option,n=Math.min(e.precision,20),i=this.getExtent(),r=e.splitNumber;r=Math.max(parseInt(r,10),1),e.splitNumber=r;for(var o=(i[1]-i[0])/r;+o.toFixed(n)!==o&&n<5;)n++;e.precision=n,o=+o.toFixed(n),e.minOpen&&t.push({interval:[-1/0,i[0]],close:[0,0]});for(var a=0,s=i[0];a","≥"][e[0]]];t.text=t.text||this.formatValueText(null!=t.value?t.value:t.interval,!1,n)}),this)}};function nG(t,e){var n=t.inverse;("vertical"===t.orient?!n:n)&&e.reverse()}var iG=function(t){function e(){var n=null!==t&&t.apply(this,arguments)||this;return n.type=e.type,n}return n(e,t),e.prototype.doRender=function(){var t=this.group;t.removeAll();var 
e=this.visualMapModel,n=e.get("textGap"),i=e.textStyleModel,r=i.getFont(),o=i.getTextColor(),a=this._getItemAlign(),s=e.itemSize,l=this._getViewData(),u=l.endsText,h=it(e.get("showLabel",!0),!u);u&&this._renderEndsText(t,u[0],s,h,a),E(l.viewPieceList,(function(i){var l=i.piece,u=new zr;u.onclick=W(this._onItemClick,this,l),this._enableHoverLink(u,i.indexInModelPieceList);var c=e.getRepresentValue(l);if(this._createItemSymbol(u,c,[0,0,s[0],s[1]]),h){var p=this.visualMapModel.getValueState(c);u.add(new Fs({style:{x:"right"===a?-n:s[0]+n,y:s[1]/2,text:l.text,verticalAlign:"middle",align:a,font:r,fill:o,opacity:"outOfRange"===p?.5:1}}))}t.add(u)}),this),u&&this._renderEndsText(t,u[1],s,h,a),Tp(e.get("orient"),t,e.get("itemGap")),this.renderBackground(t),this.positionGroup(t)},e.prototype._enableHoverLink=function(t,e){var n=this;t.on("mouseover",(function(){return i("highlight")})).on("mouseout",(function(){return i("downplay")}));var i=function(t){var i=n.visualMapModel;i.option.hoverLink&&n.api.dispatchAction({type:t,batch:NF(i.findTargetDataIndices(e),i)})}},e.prototype._getItemAlign=function(){var t=this.visualMapModel,e=t.option;if("vertical"===e.orient)return RF(t,this.api,t.itemSize);var n=e.align;return n&&"auto"!==n||(n="left"),n},e.prototype._renderEndsText=function(t,e,n,i,r){if(e){var o=new zr,a=this.visualMapModel.textStyleModel;o.add(new Fs({style:nc(a,{x:i?"right"===r?n[0]:0:n[0]/2,y:n[1]/2,verticalAlign:"middle",align:i?r:"center",text:e})})),t.add(o)}},e.prototype._getViewData=function(){var t=this.visualMapModel,e=z(t.getPieceList(),(function(t,e){return{piece:t,indexInModelPieceList:e}})),n=t.get("text"),i=t.get("orient"),r=t.get("inverse");return("horizontal"===i?r:!r)?e.reverse():n&&(n=n.slice().reverse()),{viewPieceList:e,endsText:n}},e.prototype._createItemSymbol=function(t,e,n){t.add(Wy(this.getControllerVisual(e,"symbol"),n[0],n[1],n[2],n[3],this.getControllerVisual(e,"color")))},e.prototype._onItemClick=function(t){var 
e=this.visualMapModel,n=e.option,i=n.selectedMode;if(i){var r=T(n.selected),o=e.getSelectedMapKey(t);"single"===i||!0===i?(r[o]=!0,E(r,(function(t,e){r[e]=e===o}))):r[o]=!r[o],this.api.dispatchAction({type:"selectDataRange",from:this.uid,visualMapId:this.visualMapModel.id,selected:r})}},e.type="visualMap.piecewise",e}(PF);function rG(t){t.registerComponentModel(tG),t.registerComponentView(iG),JF(t)}var oG={label:{enabled:!0},decal:{show:!1}},aG=Oo(),sG={};function lG(t,e){var n=t.getModel("aria");if(n.get("enabled")){var i=T(oG);C(i.label,t.getLocaleModel().get("aria"),!1),C(n.option,i,!1),function(){if(n.getModel("decal").get("show")){var e=yt();t.eachSeries((function(t){if(!t.isColorBySeries()){var n=e.get(t.type);n||(n={},e.set(t.type,n)),aG(t).scope=n}})),t.eachRawSeries((function(e){if(!t.isSeriesFiltered(e))if(X(e.enableAriaDecal))e.enableAriaDecal();else{var n=e.getData();if(e.isColorBySeries()){var i=ud(e.ecModel,e.name,sG,t.getSeriesCount()),r=n.getVisual("decal");n.setVisual("decal",u(r,i))}else{var o=e.getRawData(),a={},s=aG(e).scope;n.each((function(t){var e=n.getRawIndex(t);a[e]=t}));var l=o.count();o.each((function(t){var i=a[t],r=o.getName(t)||t+"",h=ud(e.ecModel,r,s,l),c=n.getItemVisual(i,"decal");n.setItemVisual(i,"decal",u(c,h))}))}}function u(t,e){var n=t?A(A({},e),t):e;return n.dirty=!0,n}}))}}(),function(){var i=t.getLocaleModel().get("aria"),o=n.getModel("label");if(o.option=k(o.option,i),!o.get("enabled"))return;var a=e.getZr().dom;if(o.get("description"))return void a.setAttribute("aria-label",o.get("description"));var s,l=t.getSeriesCount(),u=o.get(["data","maxCount"])||10,h=o.get(["series","maxCount"])||10,c=Math.min(l,h);if(l<1)return;var p=function(){var e=t.get("title");e&&e.length&&(e=e[0]);return e&&e.text}();s=p?r(o.get(["general","withTitle"]),{title:p}):o.get(["general","withoutTitle"]);var 
d=[];s+=r(l>1?o.get(["series","multiple","prefix"]):o.get(["series","single","prefix"]),{seriesCount:l}),t.eachSeries((function(e,n){if(n1?o.get(["series","multiple",a]):o.get(["series","single",a]),{seriesId:e.seriesIndex,seriesName:e.get("name"),seriesType:(x=e.subType,t.getLocaleModel().get(["series","typeNames"])[x]||"自定义图")});var s=e.getData();if(s.count()>u)i+=r(o.get(["data","partialData"]),{displayCnt:u});else i+=o.get(["data","allData"]);for(var h=o.get(["data","separator","middle"]),p=o.get(["data","separator","end"]),f=[],g=0;g":"gt",">=":"gte","=":"eq","!=":"ne","<>":"ne"},cG=function(){function t(t){if(null==(this._condVal=U(t)?new RegExp(t):et(t)?t:null)){var e="";0,vo(e)}}return t.prototype.evaluate=function(t){var e=typeof t;return U(e)?this._condVal.test(t):!!j(e)&&this._condVal.test(t+"")},t}(),pG=function(){function t(){}return t.prototype.evaluate=function(){return this.value},t}(),dG=function(){function t(){}return t.prototype.evaluate=function(){for(var t=this.children,e=0;e2&&l.push(e),e=[t,n]}function f(t,n,i,r){TG(t,i)&&TG(n,r)||e.push(t,n,i,r,i,r)}function g(t,n,i,r,o,a){var s=Math.abs(n-t),l=4*Math.tan(s/4)/3,u=nM:C2&&l.push(e),l}function DG(t,e,n,i,r,o,a,s,l,u){if(TG(t,n)&&TG(e,i)&&TG(r,a)&&TG(o,s))l.push(a,s);else{var h=2/u,c=h*h,p=a-t,d=s-e,f=Math.sqrt(p*p+d*d);p/=f,d/=f;var g=n-t,y=i-e,v=r-a,m=o-s,x=g*g+y*y,_=v*v+m*m;if(x=0&&_-w*w=0)l.push(a,s);else{var S=[],M=[];wn(t,n,r,a,.5,S),wn(e,i,o,s,.5,M),DG(S[0],M[0],S[1],M[1],S[2],M[2],S[3],M[3],l,u),DG(S[4],M[4],S[5],M[5],S[6],M[6],S[7],M[7],l,u)}}}}function AG(t,e,n){var i=t[e],r=t[1-e],o=Math.abs(i/r),a=Math.ceil(Math.sqrt(o*n)),s=Math.floor(n/a);0===s&&(s=1,a=n);for(var l=[],u=0;u0)for(u=0;uMath.abs(u),c=AG([l,u],h?0:1,e),p=(h?s:u)/c.length,d=0;d1?null:new De(d*l+t,d*u+e)}function OG(t,e,n){var i=new De;De.sub(i,n,e),i.normalize();var r=new De;return De.sub(r,t,e),r.dot(i)}function RG(t,e){var n=t[t.length-1];n&&n[0]===e[0]&&n[1]===e[1]||t.push(e)}function NG(t){var 
e=t.points,n=[],i=[];Ra(e,n,i);var r=new ze(n[0],n[1],i[0]-n[0],i[1]-n[1]),o=r.width,a=r.height,s=r.x,l=r.y,u=new De,h=new De;return o>a?(u.x=h.x=s+o/2,u.y=l,h.y=l+a):(u.y=h.y=l+a/2,u.x=s,h.x=s+o),function(t,e,n){for(var i=t.length,r=[],o=0;or,a=AG([i,r],o?0:1,e),s=o?"width":"height",l=o?"height":"width",u=o?"x":"y",h=o?"y":"x",c=t[s]/a.length,p=0;p0)for(var b=i/n,w=-i/2;w<=i/2;w+=b){var S=Math.sin(w),M=Math.cos(w),I=0;for(x=0;x0;l/=2){var u=0,h=0;(t&l)>0&&(u=1),(e&l)>0&&(h=1),s+=l*l*(3*u^h),0===h&&(1===u&&(t=l-1-t,e=l-1-e),a=t,t=e,e=a)}return s}function JG(t){var e=1/0,n=1/0,i=-1/0,r=-1/0,o=z(t,(function(t){var o=t.getBoundingRect(),a=t.getComputedTransform(),s=o.x+o.width/2+(a?a[4]:0),l=o.y+o.height/2+(a?a[5]:0);return e=Math.min(s,e),n=Math.min(l,n),i=Math.max(s,i),r=Math.max(l,r),[s,l]}));return z(o,(function(o,a){return{cp:o,z:$G(o[0],o[1],e,n,i,r),path:t[a]}})).sort((function(t,e){return t.z-e.z})).map((function(t){return t.path}))}function QG(t){return VG(t.path,t.count)}function tW(t){return Y(t[0])}function eW(t,e){for(var n=[],i=t.length,r=0;r=0;r--)if(!n[r].many.length){var l=n[s].many;if(l.length<=1){if(!s)return n;s=0}o=l.length;var u=Math.ceil(o/2);n[r].many=l.slice(u,o),n[s].many=l.slice(0,u),s++}return n}var nW={clone:function(t){for(var e=[],n=1-Math.pow(1-t.path.style.opacity,1/t.count),i=0;i0){var s,l,u=i.getModel("universalTransition").get("delay"),h=Object.assign({setToFinal:!0},a);tW(t)&&(s=t,l=e),tW(e)&&(s=e,l=t);for(var c=s?s===t:t.length>e.length,p=s?eW(l,s):eW(c?e:t,[c?t:e]),d=0,f=0;f1e4))for(var i=n.getIndices(),r=function(t){for(var e=t.dimensions,n=0;n0&&i.group.traverse((function(t){t instanceof Is&&!t.animators.length&&t.animateFrom({style:{opacity:0}},r)}))}))}function pW(t){var e=t.getModel("universalTransition").get("seriesKey");return e||t.id}function dW(t){return Y(t)?t.sort().join(","):t}function fW(t){if(t.hostModel)return t.hostModel.getModel("universalTransition").get("divideShape")}function gW(t,e){for(var 
n=0;n=0&&r.push({dataGroupId:e.oldDataGroupIds[n],data:e.oldData[n],divide:fW(e.oldData[n]),dim:t.dimension})})),E(bo(t.to),(function(t){var i=gW(n.updatedSeries,t);if(i>=0){var r=n.updatedSeries[i].getData();o.push({dataGroupId:e.oldDataGroupIds[i],data:r,divide:fW(r),dim:t.dimension})}})),r.length>0&&o.length>0&&cW(r,o,i)}(t,i,n,e)}));else{var o=function(t,e){var n=yt(),i=yt(),r=yt();return E(t.oldSeries,(function(e,n){var o=t.oldDataGroupIds[n],a=t.oldData[n],s=pW(e),l=dW(s);i.set(l,{dataGroupId:o,data:a}),Y(s)&&E(s,(function(t){r.set(t,{key:l,dataGroupId:o,data:a})}))})),E(e.updatedSeries,(function(t){if(t.isUniversalTransitionEnabled()&&t.isAnimationEnabled()){var e=t.get("dataGroupId"),o=t.getData(),a=pW(t),s=dW(a),l=i.get(s);if(l)n.set(s,{oldSeries:[{dataGroupId:l.dataGroupId,divide:fW(l.data),data:l.data}],newSeries:[{dataGroupId:e,divide:fW(o),data:o}]});else if(Y(a)){var u=[];E(a,(function(t){var e=i.get(t);e.data&&u.push({dataGroupId:e.dataGroupId,divide:fW(e.data),data:e.data})})),u.length&&n.set(s,{oldSeries:u,newSeries:[{dataGroupId:e,data:o,divide:fW(o)}]})}else{var h=r.get(a);if(h){var c=n.get(h.key);c||(c={oldSeries:[{dataGroupId:h.dataGroupId,data:h.data,divide:fW(h.data)}],newSeries:[]},n.set(h.key,c)),c.newSeries.push({dataGroupId:e,data:o,divide:fW(o)})}}}})),n}(i,n);E(o.keys(),(function(t){var n=o.get(t);cW(n.oldSeries,n.newSeries,e)}))}E(n.updatedSeries,(function(t){t[vg]&&(t[vg]=!1)}))}for(var a=t.getSeries(),s=i.oldSeries=[],l=i.oldDataGroupIds=[],u=i.oldData=[],h=0;h= 500) { + return attempt < maxRetries; + } + // 4xx errors are NOT retryable + if (response && response.status >= 400 && response.status < 500) { + return false; + } + return attempt < maxRetries; + } + + function parseRetryAfterSeconds(response, errorData) { + const headerValue = response?.headers?.get?.('Retry-After'); + if (headerValue) { + const parsed = Number(headerValue); + if (!Number.isNaN(parsed) && parsed > 0) { + return parsed; + } + } + const metaRetry = 
errorData?.meta?.retry_after_seconds; + const parsedMeta = Number(metaRetry); + if (!Number.isNaN(parsedMeta) && parsedMeta > 0) { + return parsedMeta; + } + return null; + } + + function getErrorCode(errorData) { + return errorData?.error?.code || errorData?.code || null; + } + + function getErrorMessage(errorData, fallbackStatus) { + if (errorData?.error?.message) return errorData.error.message; + if (errorData?.error && typeof errorData.error === 'string') return errorData.error; + if (errorData?.message) return errorData.message; + return `HTTP ${fallbackStatus}`; + } + + function getRetryDelayMs(error, attempt) { + const baseDelay = RETRY_DELAYS[attempt] || RETRY_DELAYS[RETRY_DELAYS.length - 1]; + const retryAfterMs = error?.retryAfterSeconds ? error.retryAfterSeconds * 1000 : 0; + + if (error?.errorCode && DEGRADED_CODES.has(error.errorCode)) { + return Math.max(baseDelay, retryAfterMs, MIN_DEGRADED_DELAY_MS); + } + if (retryAfterMs > 0) { + return Math.max(baseDelay, retryAfterMs); + } + return baseDelay; + } + + /** + * Sleep for a given duration + */ + function sleep(ms) { + return new Promise(resolve => setTimeout(resolve, ms)); + } + + /** + * Execute fetch with timeout + */ + async function fetchWithTimeout(url, fetchOptions, timeout, externalSignal) { + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), timeout); + + // Link external signal if provided + if (externalSignal) { + if (externalSignal.aborted) { + controller.abort(); + } else { + externalSignal.addEventListener('abort', () => controller.abort()); + } + } + + try { + const response = await fetch(url, { + ...fetchOptions, + signal: controller.signal + }); + clearTimeout(timeoutId); + return response; + } catch (error) { + clearTimeout(timeoutId); + // Distinguish between timeout and user abort + if (error.name === 'AbortError') { + if (externalSignal && externalSignal.aborted) { + error.isUserAbort = true; + } else { + // Timeout + const 
timeoutError = new Error('Request timeout'); + timeoutError.name = 'TimeoutError'; + throw timeoutError; + } + } + throw error; + } + } + + /** + * Core request function with retry logic + */ + async function request(method, url, options = {}) { + const reqId = generateRequestId(); + const timeout = options.timeout || DEFAULT_TIMEOUT; + const maxRetries = options.retries !== undefined ? options.retries : DEFAULT_RETRIES; + const silent = options.silent || false; + const signal = options.signal; + + const fullUrl = buildUrl(url, options.params); + const startTime = Date.now(); + + console.log(`[MesApi] ${reqId} ${method} ${fullUrl}`); + + const fetchOptions = { + method: method, + headers: { + 'Content-Type': 'application/json' + } + }; + + if (options.body) { + fetchOptions.body = JSON.stringify(options.body); + } + + let lastError = null; + let lastResponse = null; + let loadingToastId = null; + + for (let attempt = 0; attempt <= maxRetries; attempt++) { + try { + // Check if already aborted + if (signal && signal.aborted) { + console.log(`[MesApi] ${reqId} ⊘ Aborted`); + const abortError = new Error('Request aborted'); + abortError.name = 'AbortError'; + abortError.isUserAbort = true; + throw abortError; + } + + const response = await fetchWithTimeout(fullUrl, fetchOptions, timeout, signal); + lastResponse = response; + + if (response.ok) { + const elapsed = Date.now() - startTime; + console.log(`[MesApi] ${reqId} ✓ ${response.status} (${elapsed}ms)`); + + // Dismiss loading toast if showing retry status + if (loadingToastId) { + Toast.dismiss(loadingToastId); + } + + try { + const data = await response.json(); + return data; + } catch (parseError) { + // JSON parse error on successful response - don't retry + console.error(`[MesApi] ${reqId} ✗ JSON parse failed:`, parseError.message); + if (!silent) { + Toast.error('回應資料解析失敗,資料量可能過大'); + } + parseError.isParseError = true; + throw parseError; + } + } + + // Non-OK response + const errorData = await 
response.json().catch(() => ({})); + const error = new Error(getErrorMessage(errorData, response.status)); + error.status = response.status; + error.data = errorData; + error.errorCode = getErrorCode(errorData); + error.retryAfterSeconds = parseRetryAfterSeconds(response, errorData); + + // 4xx errors - don't retry + if (response.status >= 400 && response.status < 500) { + console.log(`[MesApi] ${reqId} ✗ ${response.status} (no retry)`); + if (!silent) { + Toast.error(`請求錯誤: ${error.message}`); + } + throw error; + } + + // 5xx errors - will retry + lastError = error; + + } catch (error) { + // User abort - don't retry, no toast + if (error.isUserAbort) { + console.log(`[MesApi] ${reqId} ⊘ Aborted`); + if (loadingToastId) { + Toast.dismiss(loadingToastId); + } + throw error; + } + + // JSON parse error on successful response - don't retry + if (error.isParseError) { + if (loadingToastId) { + Toast.dismiss(loadingToastId); + } + throw error; + } + + lastError = error; + } + + // Check if we should retry + if (attempt < maxRetries && isRetryable(lastError, lastResponse, attempt, maxRetries)) { + const delay = getRetryDelayMs(lastError, attempt); + console.log(`[MesApi] ${reqId} ✗ Retry ${attempt + 1}/${maxRetries} in ${delay}ms`); + + if (!silent) { + const retryMsg = `正在重試 (${attempt + 1}/${maxRetries})...`; + if (loadingToastId) { + Toast.update(loadingToastId, { message: retryMsg }); + } else { + loadingToastId = Toast.loading(retryMsg); + } + } + + await sleep(delay); + } + } + + // All retries exhausted + const elapsed = Date.now() - startTime; + console.log(`[MesApi] ${reqId} ✗ Failed after ${maxRetries} retries (${elapsed}ms)`); + + // Update or dismiss loading toast, show error with retry button + if (loadingToastId) { + Toast.dismiss(loadingToastId); + } + + if (!silent) { + const errorMsg = lastError.message || '請求失敗'; + Toast.error(`${errorMsg}`, { + retry: () => request(method, url, options) + }); + } + + throw lastError; + } + + // Public API + return { 
+ /** + * Send a GET request + * @param {string} url - The URL to request + * @param {Object} options - Request options + * @param {Object} options.params - URL query parameters + * @param {number} options.timeout - Timeout in ms (default: 30000) + * @param {number} options.retries - Max retries (default: 3) + * @param {AbortSignal} options.signal - AbortController signal + * @param {boolean} options.silent - Suppress toast notifications + * @returns {Promise} Response data + */ + get: function(url, options = {}) { + return request('GET', url, options); + }, + + /** + * Send a POST request + * @param {string} url - The URL to request + * @param {Object} data - Request body data + * @param {Object} options - Request options (same as get) + * @returns {Promise} Response data + */ + post: function(url, data, options = {}) { + return request('POST', url, { ...options, body: data }); + } + }; +})(); diff --git a/src/mes_dashboard/static/js/toast.js b/src/mes_dashboard/static/js/toast.js new file mode 100644 index 0000000..01134e5 --- /dev/null +++ b/src/mes_dashboard/static/js/toast.js @@ -0,0 +1,240 @@ +/** + * Toast Notification System + * + * Usage: + * Toast.info('訊息內容'); + * Toast.success('操作成功'); + * Toast.warning('請注意'); + * Toast.error('發生錯誤'); + * Toast.error('連線失敗', { retry: () => loadData() }); + * + * const id = Toast.loading('載入中...'); + * Toast.update(id, { type: 'success', message: '完成!' 
}); + * Toast.dismiss(id); + */ +const Toast = (function() { + 'use strict'; + + const MAX_TOASTS = 5; + const AUTO_DISMISS = { + info: 3000, + success: 2000, + warning: 5000, + error: null, // no auto dismiss + loading: null // no auto dismiss + }; + + const ICONS = { + info: 'ℹ', + success: '✓', + warning: '⚠', + error: '✗', + loading: '⟳' + }; + + let toastId = 0; + const activeToasts = new Map(); + + /** + * Get or create the toast container + */ + function getContainer() { + let container = document.getElementById('mes-toast-container'); + if (!container) { + container = document.createElement('div'); + container.id = 'mes-toast-container'; + container.className = 'mes-toast-container'; + document.body.appendChild(container); + } + return container; + } + + /** + * Create a toast element + */ + function createToastElement(id, type, message, options) { + const toast = document.createElement('div'); + toast.id = `mes-toast-${id}`; + toast.className = `mes-toast mes-toast-${type}`; + toast.setAttribute('role', 'alert'); + + // Icon + const icon = document.createElement('span'); + icon.className = 'mes-toast-icon'; + icon.textContent = ICONS[type]; + toast.appendChild(icon); + + // Message + const msg = document.createElement('span'); + msg.className = 'mes-toast-message'; + msg.textContent = message; + toast.appendChild(msg); + + // Retry button (for error type with retry callback) + if (type === 'error' && options && typeof options.retry === 'function') { + const retryBtn = document.createElement('button'); + retryBtn.className = 'mes-toast-retry'; + retryBtn.textContent = '重試'; + retryBtn.onclick = function(e) { + e.stopPropagation(); + dismiss(id); + options.retry(); + }; + toast.appendChild(retryBtn); + } + + // Close button + const closeBtn = document.createElement('button'); + closeBtn.className = 'mes-toast-close'; + closeBtn.innerHTML = '×'; + closeBtn.onclick = function(e) { + e.stopPropagation(); + dismiss(id); + }; + toast.appendChild(closeBtn); + + 
return toast; + } + + /** + * Enforce max toasts limit - remove oldest if exceeded + */ + function enforceMaxToasts() { + while (activeToasts.size >= MAX_TOASTS) { + const oldestId = activeToasts.keys().next().value; + dismiss(oldestId); + } + } + + /** + * Show a toast notification + */ + function show(type, message, options) { + enforceMaxToasts(); + + const id = ++toastId; + const container = getContainer(); + const toast = createToastElement(id, type, message, options); + + // Insert at the top (newest first) + container.insertBefore(toast, container.firstChild); + + // Track active toast + const toastData = { element: toast, type, message, options, timerId: null }; + activeToasts.set(id, toastData); + + // Auto dismiss if applicable + const dismissTime = AUTO_DISMISS[type]; + if (dismissTime) { + toastData.timerId = setTimeout(() => dismiss(id), dismissTime); + } + + return id; + } + + /** + * Update an existing toast + */ + function update(id, updates) { + const toastData = activeToasts.get(id); + if (!toastData) { + return false; + } + + const { element, timerId } = toastData; + + // Clear existing auto-dismiss timer + if (timerId) { + clearTimeout(timerId); + toastData.timerId = null; + } + + // Update type if provided + if (updates.type && updates.type !== toastData.type) { + element.className = `mes-toast mes-toast-${updates.type}`; + const icon = element.querySelector('.mes-toast-icon'); + if (icon) { + icon.textContent = ICONS[updates.type]; + } + toastData.type = updates.type; + + // Set auto-dismiss for new type + const dismissTime = AUTO_DISMISS[updates.type]; + if (dismissTime) { + toastData.timerId = setTimeout(() => dismiss(id), dismissTime); + } + } + + // Update message if provided + if (updates.message !== undefined) { + const msg = element.querySelector('.mes-toast-message'); + if (msg) { + msg.textContent = updates.message; + } + toastData.message = updates.message; + } + + return true; + } + + /** + * Dismiss a toast + */ + function 
dismiss(id) { + const toastData = activeToasts.get(id); + if (!toastData) { + return false; + } + + const { element, timerId } = toastData; + + // Clear timer + if (timerId) { + clearTimeout(timerId); + } + + // Add exit animation + element.classList.add('mes-toast-exit'); + + // Remove after animation + setTimeout(() => { + if (element.parentNode) { + element.parentNode.removeChild(element); + } + }, 300); + + activeToasts.delete(id); + return true; + } + + /** + * Dismiss all toasts + */ + function dismissAll() { + for (const id of activeToasts.keys()) { + dismiss(id); + } + } + + // Public API + return { + info: function(message, options) { + return show('info', message, options); + }, + success: function(message, options) { + return show('success', message, options); + }, + warning: function(message, options) { + return show('warning', message, options); + }, + error: function(message, options) { + return show('error', message, options); + }, + loading: function(message, options) { + return show('loading', message, options); + }, + update: update, + dismiss: dismiss, + dismissAll: dismissAll + }; +})(); diff --git a/src/mes_dashboard/templates/403.html b/src/mes_dashboard/templates/403.html new file mode 100644 index 0000000..ebf25e1 --- /dev/null +++ b/src/mes_dashboard/templates/403.html @@ -0,0 +1,87 @@ +{% extends "_base.html" %} + +{% block title %}頁面開發中 - MES Dashboard{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+
🚧
+

頁面開發中

+

+ 此頁面尚未發布,目前僅供管理員存取。
+ 如需查看,請聯繫系統管理員。 +

+ 返回首頁 + 管理員登入 +
+{% endblock %} diff --git a/src/mes_dashboard/templates/404.html b/src/mes_dashboard/templates/404.html new file mode 100644 index 0000000..63d8f3d --- /dev/null +++ b/src/mes_dashboard/templates/404.html @@ -0,0 +1,81 @@ +{% extends "_base.html" %} + +{% block title %}頁面不存在 - MES Dashboard{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+
404
+

頁面不存在

+

+ 您要找的頁面不存在或已被移除。
+ 請檢查網址是否正確,或返回首頁。 +

+ 返回首頁 +
+{% endblock %} diff --git a/src/mes_dashboard/templates/500.html b/src/mes_dashboard/templates/500.html new file mode 100644 index 0000000..c4193e7 --- /dev/null +++ b/src/mes_dashboard/templates/500.html @@ -0,0 +1,101 @@ +{% extends "_base.html" %} + +{% block title %}系統錯誤 - MES Dashboard{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+
500
+

系統發生錯誤

+

+ 很抱歉,系統發生了內部錯誤。
+ 我們的技術團隊已收到通知,請稍後再試。 +

+ 返回首頁 + +
+{% endblock %} diff --git a/src/mes_dashboard/templates/_base.html b/src/mes_dashboard/templates/_base.html new file mode 100644 index 0000000..86583c6 --- /dev/null +++ b/src/mes_dashboard/templates/_base.html @@ -0,0 +1,122 @@ + + + + + + {% block title %}MES Dashboard{% endblock %} + + + + + {% block head_extra %}{% endblock %} + + + +
+ + {% block content %}{% endblock %} + + + + + + {% block scripts %}{% endblock %} + + diff --git a/src/mes_dashboard/templates/admin/pages.html b/src/mes_dashboard/templates/admin/pages.html new file mode 100644 index 0000000..1893c83 --- /dev/null +++ b/src/mes_dashboard/templates/admin/pages.html @@ -0,0 +1,296 @@ +{% extends "_base.html" %} + +{% block title %}頁面管理 - MES Dashboard{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+
+
+

頁面管理

+

設定頁面存取權限:Released(所有人可見)/ Dev(僅管理員可見)

+
+
+ {% if admin_user %} +
+
{{ admin_user.displayName }}
+
{{ admin_user.mail }}
+
+ {% endif %} + 登出 +
+
+ +
+
+

所有頁面

+
+
+ + + + + + + + + + + + + +
路由名稱狀態
載入中...
+
+
+ + 返回首頁 +
+{% endblock %} + +{% block scripts %} + +{% endblock %} diff --git a/src/mes_dashboard/templates/admin/performance.html b/src/mes_dashboard/templates/admin/performance.html new file mode 100644 index 0000000..917178c --- /dev/null +++ b/src/mes_dashboard/templates/admin/performance.html @@ -0,0 +1,1207 @@ +{% extends "_base.html" %} + +{% block title %}效能監控 - MES Dashboard{% endblock %} + +{% block head_extra %} + + +{% endblock %} + +{% block content %} +
+
+
+

效能監控儀表板

+

系統狀態、查詢效能與日誌記錄

+
+
+
+ -- +
+
+ + +
+ ← 返回首頁 +
+
+ + +
+
+
+ Database + +
+
--
+
連線延遲
+
+ +
+
+ Redis + +
+
--
+
快取狀態
+
+ +
+
+ Circuit Breaker + +
+
--
+
--
+
+ +
+
+ Worker + +
+
--
+
Process ID
+
+
+ + +
+
+
+ 查詢效能 +
+
+ P50 延遲 + -- ms +
+
+ P95 延遲 + -- ms +
+
+ P99 延遲 + -- ms +
+
+ 總查詢數 + -- +
+
+ 慢查詢數 (>1s) + -- +
+
+ 慢查詢率 + --% +
+
+ +
+
+ 延遲分布 (最近 100 筆) +
+
+ +
+
+
+ + +
+
+

Worker 控制

+
+
+
+
+
+ Worker PID + -- +
+
+ 啟動時間 + -- +
+
+ 冷卻狀態 + -- +
+
+
+
+ 上次重啟 + -- +
+
+ 重啟者 + -- +
+
+ 重啟狀態 + -- +
+
+
+
+ +

+ 這將優雅地重新載入所有 Worker 程序 (不會中斷現有請求) +

+
+
+
+ + +
+
+

確認重新啟動 Workers?

+

+ 這將發送信號給 Gunicorn master 程序,優雅地重新載入所有 worker。 + 現有請求會完成處理後才關閉。 +

+
+ + +
+
+
+ + +
+
+

確認清理日誌?

+

+ 這將刪除超過保留期限的舊日誌,以及超過上限筆數的記錄。此操作無法復原。 +

+
+ + +
+
+
+ + +
+
+

系統日誌

+
+ -- + +
+
+
+ +
+
+
+ 總筆數 +
--
+
+
+ 檔案大小 +
--
+
+
+ 最舊記錄 +
--
+
+
+ 保留天數 +
--
+
+
+ 上限筆數 +
--
+
+
+
+ +
+ + +
+
+ + + + + + + + + + + + +
時間等級來源訊息
載入中...
+
+ + +
+
+
+{% endblock %} + +{% block scripts %} + +{% endblock %} diff --git a/src/mes_dashboard/templates/excel_query.html b/src/mes_dashboard/templates/excel_query.html new file mode 100644 index 0000000..b2d2b3a --- /dev/null +++ b/src/mes_dashboard/templates/excel_query.html @@ -0,0 +1,1181 @@ +{% extends "_base.html" %} + +{% block title %}Excel 批次查詢工具{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+
+

Excel 批次查詢工具

+

上傳 Excel 檔案,批次查詢資料庫並匯出結果

+
+ +
+ +
+
+ 1 + 上傳 Excel 檔案 +
+
+ + +
+
+
+
+ + +
+
+ 2 + 選擇 Excel 欄位(作為查詢值) +
+
+
+ + +
+
+
+
+ + +
+
+ 3 + 選擇目標資料表 +
+
+
+ + +
+
+
+
+ + +
+
+ 4 + 選擇查詢欄位與回傳欄位 +
+
+
+ + +
+
+
+ + +
+
+ 進階查詢條件 + +
+
+ +
+
+ + +
+
+ + + + + + +
+
+ +
+ +
+ + +
+
+
+
+ + +
+
+ 5 + 執行查詢 +
+
+ + +
+
+
+ + +
+
+

查詢結果

+
+
+
+
+
+
+
+
+{% endblock %} + +{% block scripts %} + {% set excel_query_js = frontend_asset('excel-query.js') %} + {% if excel_query_js %} + + {% else %} + + {% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/hold_detail.html b/src/mes_dashboard/templates/hold_detail.html new file mode 100644 index 0000000..0b979b2 --- /dev/null +++ b/src/mes_dashboard/templates/hold_detail.html @@ -0,0 +1,1013 @@ +{% extends "_base.html" %} + +{% block title %}Hold Detail - {{ reason }}{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+ +
+
+ ← WIP Overview +

Hold Detail: {{ reason }}

+ {% if hold_type == 'quality' %}品質異常{% else %}非品質異常{% endif %} +
+
+ + + + + +
+
+ + +
+
+
Total Lots
+
-
+
+
+
Total QTY
+
-
+
+
+
平均當站滯留
+
-
+
+
+
最久當站滯留
+
-
+
+
+
影響站群
+
-
+
+
+ + +
當站滯留天數分佈 (Age at Current Station)
+
+
+
0-1天
+
+
Lots-
+
QTY-
+
+
-
+
+
+
1-3天
+
+
Lots-
+
QTY-
+
+
-
+
+
+
3-7天
+
+
Lots-
+
QTY-
+
+
-
+
+
+
7+天
+
+
Lots-
+
QTY-
+
+
-
+
+
+ + +
+
+
+
By Workcenter
+
+
+ + + + + + + + + + + + +
WorkcenterLotsQTY%
Loading...
+
+
+
+
+
By Package
+
+
+ + + + + + + + + + + + +
PackageLotsQTY%
Loading...
+
+
+
+ + +
+
+
Lot Details
+ +
Loading...
+
+
+ + + + + + + + + + + + + + + + + + +
LOTIDWORKORDERQTYPackageWorkcenterSpecAgeHold ByDeptHold Comment
Loading...
+
+ +
+
+ + +
+ + Loading... +
+{% endblock %} + +{% block scripts %} +{% set hold_detail_js = frontend_asset('hold-detail.js') %} +{% if hold_detail_js %} + +{% else %} + +{% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/index.html b/src/mes_dashboard/templates/index.html new file mode 100644 index 0000000..f7665fb --- /dev/null +++ b/src/mes_dashboard/templates/index.html @@ -0,0 +1,589 @@ +{% extends "_base.html" %} + +{% block title %}MES 數據表查詢工具{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+
+

MES 數據表查詢工具

+

點擊表名載入欄位 | 輸入篩選條件後查詢 | 套用篩選後取最後 1000 筆

+
+ +
+ {% for category, tables in tables_config.items() %} +
+
{{ category }}
+
+ {% for table in tables %} +
+
+ {{ table.display_name }} + {% if table.row_count > 10000000 %} + 大表 + {% endif %} +
+
數據量: {{ "{:,}".format(table.row_count) }} 行
+ {% if table.time_field %} +
時間欄位: {{ table.time_field }}
+ {% endif %} +
{{ table.description }}
+
+ {% endfor %} +
+
+ {% endfor %} + +
+
+

數據查看器

+ +
+
+
+
+
+
+
+
+{% endblock %} + +{% block scripts %} + {% set tables_js = frontend_asset('tables.js') %} + {% if tables_js %} + + {% else %} + + {% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/job_query.html b/src/mes_dashboard/templates/job_query.html new file mode 100644 index 0000000..0e43193 --- /dev/null +++ b/src/mes_dashboard/templates/job_query.html @@ -0,0 +1,923 @@ +{% extends "_base.html" %} + +{% block title %}設備維修查詢工具{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+
+

設備維修查詢工具

+

查詢設備維修工單及交易歷史,支援匯出完整資料

+
+ +
+ +
+ +
+ +
+
+ 點擊選擇設備... +
+
+ +
+
+
+
載入設備中... +
+
+
+
+
+
+ + +
+ +
+ + ~ + + +
+
+ + +
+ + +
+
+ + +
+
+

請選擇設備和日期範圍後,點擊「查詢工單」

+
+
+
+
+{% endblock %} + +{% block scripts %} + {% set job_query_js = frontend_asset('job-query.js') %} + {% if job_query_js %} + + {% else %} + + {% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/login.html b/src/mes_dashboard/templates/login.html new file mode 100644 index 0000000..c600e8d --- /dev/null +++ b/src/mes_dashboard/templates/login.html @@ -0,0 +1,150 @@ +{% extends "_base.html" %} + +{% block title %}管理員登入 - MES Dashboard{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} + +{% endblock %} diff --git a/src/mes_dashboard/templates/portal.html b/src/mes_dashboard/templates/portal.html new file mode 100644 index 0000000..2ae25f1 --- /dev/null +++ b/src/mes_dashboard/templates/portal.html @@ -0,0 +1,629 @@ +{% extends "_base.html" %} + +{% block title %}MES 報表入口{% endblock %} + +{% block head_extra %} + {% set portal_css = frontend_asset('portal.css') %} + {% if portal_css %} + + {% endif %} + +{% endblock %} + +{% block content %} +
+
+
+

MES 報表入口

+

統一入口:WIP 即時看板、設備即時概況與數據表查詢工具

+
+
+ +
+ + 檢查中... +
+
+

系統連線狀態

+
+ 資料庫 (Oracle) + -- +
+
+ 快取 (Redis) + -- +
+
+
WIP 快取
+
狀態:--
+
資料更新時間:--
+
最後同步:--
+
+
+
設備主檔快取
+
狀態:--
+
資料筆數:--
+
最後同步:--
+
+
+
路由快取 (L1/L2)
+
模式:--
+
L1/L2 命中:--
+
降級模式:--
+
+
+
+ {% if is_admin %} + {{ admin_user.displayName }} + 頁面管理 + 效能監控 + 登出 + {% else %} + 管理員登入 + {% endif %} +
+
+
+ +
+
+ 報表類 +
+ {% if can_view_page('/wip-overview') %} + + {% endif %} + {% if can_view_page('/resource') %} + + {% endif %} + {% if can_view_page('/resource-history') %} + + {% endif %} +
+
+ +
+ 查詢類 +
+ {% if can_view_page('/tables') %} + + {% endif %} + {% if can_view_page('/excel-query') %} + + {% endif %} + {% if can_view_page('/job-query') %} + + {% endif %} +
+
+ +
+ 開發工具 +
+ {% if is_admin %} + 頁面管理 + 效能監控 + {% else %} + 管理員登入 + {% endif %} +
+
+
+ +
+ + {% if can_view_page('/wip-overview') %} + + {% endif %} + {% if can_view_page('/resource') %} + + {% endif %} + {% if can_view_page('/tables') %} + + {% endif %} + {% if can_view_page('/excel-query') %} + + {% endif %} + {% if can_view_page('/resource-history') %} + + {% endif %} + {% if can_view_page('/job-query') %} + + {% endif %} + {% if is_admin %} + + {% endif %} +
+
+{% endblock %} + +{% block scripts %} + {% set portal_js = frontend_asset('portal.js') %} + {% if portal_js %} + + {% else %} + + {% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/resource_history.html b/src/mes_dashboard/templates/resource_history.html new file mode 100644 index 0000000..e0e1777 --- /dev/null +++ b/src/mes_dashboard/templates/resource_history.html @@ -0,0 +1,1531 @@ +{% extends "_base.html" %} + +{% block title %}設備歷史績效{% endblock %} + +{% block head_extra %} + + + +{% endblock %} + +{% block content %} +
+ +
+

設備歷史績效

+
+ + +
+
+
+ + +
+
+ + +
+
+ +
+ + + + +
+
+
+ +
+
+ 全部站點 + +
+
+
+
+
+ + +
+
+
+
+
+ +
+
+ 全部型號 + +
+
+
+
+
+ + +
+
+
+
+
+ + + +
+ +
+
+ + +
+
+
OU%
+
--
+
稼動率
+
+
+
AVAIL%
+
--
+
可用率
+
+
+
PRD
+
--
+
生產
+
+
+
SBY
+
--
+
待機
+
+
+
UDT
+
--
+
非計畫停機
+
+
+
SDT
+
--
+
計畫停機
+
+
+
EGT
+
--
+
工程
+
+
+
NST
+
--
+
未排程
+
+
+
機台數
+
--
+
設備總數
+
+
+ + +
+
+
OU% / AVAIL% 趨勢
+
+
+
+
E10 狀態分布
+
+
+
+ + +
+
+
工站 OU% 對比
+
+
+
+
設備狀態熱力圖
+
+
+
+ + +
+
+
明細資料
+
+ + + +
+
+
+ + + + + + + + + + + + + + + + + + + + +
站點 / 型號 / 機台OU%Availability%PRDSBYUDTSDTEGTNST機台數
+
+
🔍
+
請設定查詢條件後點擊「查詢」
+
+
+
+
+
+ + + +{% endblock %} + +{% block scripts %} +{% set resource_history_js = frontend_asset('resource-history.js') %} +{% if resource_history_js %} + +{% else %} + +{% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/resource_status.html b/src/mes_dashboard/templates/resource_status.html new file mode 100644 index 0000000..cb52874 --- /dev/null +++ b/src/mes_dashboard/templates/resource_status.html @@ -0,0 +1,1669 @@ +{% extends "_base.html" %} + +{% block title %}設備即時概況{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+ +
+

設備即時概況

+
+
+ + 檢查中... +
+ -- +
+
+ + +
+
+ + +
+ + + + + + + + +
+ + +
+
+
OU%
+
--
+
稼動率
+
+
+
AVAIL%
+
--
+
可用率
+
+
+
PRD
+
--
+
生產
+
+
+
SBY
+
--
+
待機
+
+
+
UDT
+
--
+
非計畫停機
+
+
+
SDT
+
--
+
計畫停機
+
+
+
EGT
+
--
+
工程
+
+
+
NST
+
--
+
未排程
+
+
+
JOB
+
--
+
有維修單
+
+
+
機台數
+
--
+
設備總數
+
+
+ + +
+
+ 工站狀態矩陣 +
+ + +
+
+
+
+ 載入中... +
+
+
+ 篩選中: + + +
+
+ + +
+
設備清單 (0 台)
+
+
+ 載入中... +
+
+
+ + +
+
+ + +
+
+
+
+{% endblock %} + +{% block scripts %} +{% set resource_status_js = frontend_asset('resource-status.js') %} +{% if resource_status_js %} + +{% else %} + +{% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/wip_detail.html b/src/mes_dashboard/templates/wip_detail.html new file mode 100644 index 0000000..e84cc11 --- /dev/null +++ b/src/mes_dashboard/templates/wip_detail.html @@ -0,0 +1,1794 @@ +{% extends "_base.html" %} + +{% block title %}WIP Detail Dashboard{% endblock %} + +{% block head_extra %} + +{% endblock %} + +{% block content %} +
+ +
+
+ ← Overview +

WIP Detail

+
+
+ + + + + + + +
+
+ + +
+
+ +
+ +
+
+
+
+ +
+ +
+
+
+
+ +
+ +
+
+
+
+ +
+ +
+
+
+ + +
+ + +
+
+
Total Lots
+
-
+
+
+
RUN
+
-
+
+
+
QUEUE
+
-
+
+
+
品質異常
+
-
+
+
+
非品質異常
+
-
+
+
+ + +
+
+
Lot Details
+
Loading...
+
+
+
Loading...
+
+ +
+ + +
+
+
+ Lot Detail - +
+ +
+
+
+ Loading... +
+
+
+
+ + +
+ + Loading... +
+{% endblock %} + +{% block scripts %} +{% set wip_detail_js = frontend_asset('wip-detail.js') %} +{% if wip_detail_js %} + +{% else %} + +{% endif %} +{% endblock %} diff --git a/src/mes_dashboard/templates/wip_overview.html b/src/mes_dashboard/templates/wip_overview.html new file mode 100644 index 0000000..8dc05d4 --- /dev/null +++ b/src/mes_dashboard/templates/wip_overview.html @@ -0,0 +1,1825 @@ +{% extends "_base.html" %} + +{% block title %}WIP Overview Dashboard{% endblock %} + +{% block head_extra %} + + +{% endblock %} + +{% block content %} +
+ +
+

WIP Overview Dashboard

+
+ + + + + + + +
+
+ + +
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + +
+ + +
+
+
Total Lots
+
-
+
+
+
Total QTY
+
-
+
+
+ + +
+
+
RUN
+
+ - + - +
+
+
+
QUEUE
+
+ - + - +
+
+
+
品質異常
+
+ - + - +
+
+
+
非品質異常
+
+ - + - +
+
+
+ + +
+ +
+
+
Workcenter x Package Matrix (QTY)
+
+
+
+
Loading...
+
+
+
+ + +
+ +
+
+
+ 品質異常 Hold + 0 項 +
+
+
+
+ +
+
+
+ +
+
+
+ 非品質異常 Hold + 0 項 +
+
+
+
+ +
+
+
+
+
+
+ + +
+ + Loading... +
+{% endblock %} + +{% block scripts %} +{% set wip_overview_js = frontend_asset('wip-overview.js') %} +{% if wip_overview_js %} + +{% else %} + +{% endif %} +{% endblock %} diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..407dfc1 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +"""Pytest configuration and fixtures for MES Dashboard tests.""" + +import pytest +import sys +import os + +# Add the src directory to Python path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) + +import mes_dashboard.core.database as db +from mes_dashboard.app import create_app + + +@pytest.fixture +def app(): + """Create application for testing.""" + db._ENGINE = None + app = create_app('testing') + app.config['TESTING'] = True + return app + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +@pytest.fixture +def runner(app): + """Create test CLI runner.""" + return app.test_cli_runner() + + +def pytest_configure(config): + """Add custom markers.""" + config.addinivalue_line( + "markers", "integration: mark test as integration test (requires database)" + ) + config.addinivalue_line( + "markers", "e2e: mark test as end-to-end test (requires running server)" + ) + config.addinivalue_line( + "markers", "redis: mark test as requiring Redis connection" + ) + + +def pytest_addoption(parser): + """Add custom command line options.""" + parser.addoption( + "--run-integration", + action="store_true", + default=False, + help="Run integration tests that require database connection" + ) + parser.addoption( + "--run-e2e", + action="store_true", + default=False, + help="Run end-to-end tests that require running server" + ) + + +def pytest_collection_modifyitems(config, items): + """Skip integration/e2e tests unless explicitly enabled.""" + run_integration = config.getoption("--run-integration") + run_e2e = config.getoption("--run-e2e") + + skip_integration = 
pytest.mark.skip(reason="need --run-integration option to run") + skip_e2e = pytest.mark.skip(reason="need --run-e2e option to run") + + for item in items: + if "integration" in item.keywords and not run_integration: + item.add_marker(skip_integration) + if "e2e" in item.keywords and not run_e2e: + item.add_marker(skip_e2e) diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py new file mode 100644 index 0000000..c24ff87 --- /dev/null +++ b/tests/e2e/conftest.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +"""Pytest configuration for Playwright E2E tests.""" + +import pytest +import os +import sys + +# Add src to path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src')) + + +@pytest.fixture(scope="session") +def app_server() -> str: + """Get the base URL for E2E testing. + + Uses environment variable E2E_BASE_URL or defaults to production server. + """ + return os.environ.get('E2E_BASE_URL', 'http://127.0.0.1:8080') + + +@pytest.fixture(scope="session") +def browser_context_args(browser_context_args): + """Configure browser context for tests.""" + return { + **browser_context_args, + "viewport": {"width": 1280, "height": 720}, + "locale": "zh-TW", + } + + +def pytest_configure(config): + """Add custom markers for E2E tests.""" + config.addinivalue_line( + "markers", "e2e: mark test as end-to-end test (requires running server)" + ) + config.addinivalue_line( + "markers", "redis: mark test as requiring Redis connection" + ) + + +@pytest.fixture(scope="session") +def api_base_url(app_server): + """Get the API base URL.""" + return f"{app_server}/api" + + +@pytest.fixture(scope="session") +def health_url(app_server): + """Get the health check URL.""" + return f"{app_server}/health" diff --git a/tests/e2e/test_admin_auth_e2e.py b/tests/e2e/test_admin_auth_e2e.py new file mode 100644 index 0000000..65ea28d --- /dev/null +++ b/tests/e2e/test_admin_auth_e2e.py @@ -0,0 +1,350 @@ +# -*- coding: utf-8 -*- +"""End-to-end tests for admin 
authentication flow. + +These tests simulate real user workflows through the admin authentication system. +Run with: pytest tests/e2e/test_admin_auth_e2e.py -v --run-integration +""" + +import json +import pytest +from unittest.mock import patch, MagicMock +import tempfile +from pathlib import Path + +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src')) + +import mes_dashboard.core.database as db +from mes_dashboard.app import create_app +from mes_dashboard.services import page_registry + + +@pytest.fixture +def temp_page_status(tmp_path): + """Create temporary page status file.""" + data_file = tmp_path / "page_status.json" + initial_data = { + "pages": [ + {"route": "/", "name": "首頁", "status": "released"}, + {"route": "/wip-overview", "name": "WIP 即時概況", "status": "released"}, + {"route": "/wip-detail", "name": "WIP 明細", "status": "released"}, + {"route": "/tables", "name": "表格總覽", "status": "dev"}, + {"route": "/resource", "name": "機台狀態", "status": "dev"}, + ], + "api_public": True + } + data_file.write_text(json.dumps(initial_data, ensure_ascii=False), encoding="utf-8") + return data_file + + +@pytest.fixture +def app(temp_page_status): + """Create application for testing.""" + db._ENGINE = None + + # Mock page registry + original_data_file = page_registry.DATA_FILE + original_cache = page_registry._cache + page_registry.DATA_FILE = temp_page_status + page_registry._cache = None + + app = create_app('testing') + app.config['TESTING'] = True + app.config['WTF_CSRF_ENABLED'] = False + + yield app + + page_registry.DATA_FILE = original_data_file + page_registry._cache = original_cache + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +def mock_ldap_success(mail="ymirliu@panjit.com.tw"): + """Helper to create mock for successful LDAP auth.""" + mock_response = MagicMock() + mock_response.json.return_value = { + "success": True, + "user": { + "username": "92367", + 
"displayName": "Test Admin", + "mail": mail, + "department": "Test Department" + } + } + return mock_response + + +class TestFullLoginLogoutFlow: + """E2E tests for complete login/logout flow.""" + + @patch('mes_dashboard.services.auth_service.requests.post') + def test_complete_admin_login_workflow(self, mock_post, client): + """Test complete admin login workflow.""" + mock_post.return_value = mock_ldap_success() + + # 1. Access portal - should see login link + response = client.get("/") + assert response.status_code == 200 + content = response.data.decode("utf-8") + assert "管理員登入" in content + + # 2. Go to login page + response = client.get("/admin/login") + assert response.status_code == 200 + + # 3. Submit login form + response = client.post("/admin/login", data={ + "username": "92367", + "password": "password123" + }, follow_redirects=True) + + assert response.status_code == 200 + content = response.data.decode("utf-8") + # Should see admin name and logout option + assert "Test Admin" in content or "登出" in content + + # 4. Verify session has admin + with client.session_transaction() as sess: + assert "admin" in sess + assert sess["admin"]["mail"] == "ymirliu@panjit.com.tw" + + # 5. Access admin pages + response = client.get("/admin/pages") + assert response.status_code == 200 + + # 6. Logout + response = client.get("/admin/logout", follow_redirects=True) + assert response.status_code == 200 + + # 7. Verify logged out + with client.session_transaction() as sess: + assert "admin" not in sess + + # 8. Admin pages should redirect now + response = client.get("/admin/pages", follow_redirects=False) + assert response.status_code == 302 + + +class TestPageAccessControlFlow: + """E2E tests for page access control flow.""" + + def test_non_admin_cannot_access_dev_pages(self, client, temp_page_status): + """Test non-admin users cannot access dev pages.""" + # 1. 
Access released page - should work + response = client.get("/wip-overview") + assert response.status_code != 403 + + # 2. Access dev page - should get 403 + response = client.get("/tables") + assert response.status_code == 403 + content = response.data.decode("utf-8") + assert "開發中" in content or "403" in content + + @patch('mes_dashboard.services.auth_service.requests.post') + def test_admin_can_access_all_pages(self, mock_post, client, temp_page_status): + """Test admin users can access all pages.""" + mock_post.return_value = mock_ldap_success() + + # 1. Login as admin + client.post("/admin/login", data={ + "username": "92367", + "password": "password123" + }) + + # 2. Access released page - should work + response = client.get("/wip-overview") + assert response.status_code != 403 + + # 3. Access dev page - should work for admin + response = client.get("/tables") + assert response.status_code != 403 + + +class TestPageManagementFlow: + """E2E tests for page management flow.""" + + @patch('mes_dashboard.services.auth_service.requests.post') + def test_admin_can_change_page_status(self, mock_post, client, temp_page_status): + """Test admin can change page status via management interface.""" + mock_post.return_value = mock_ldap_success() + + # 1. Login as admin + client.post("/admin/login", data={ + "username": "92367", + "password": "password123" + }) + + # 2. Get current pages list + response = client.get("/admin/api/pages") + assert response.status_code == 200 + data = json.loads(response.data) + assert data["success"] is True + + # 3. Change /wip-overview from released to dev + response = client.put( + "/admin/api/pages/wip-overview", + data=json.dumps({"status": "dev"}), + content_type="application/json" + ) + assert response.status_code == 200 + + # 4. Verify change persisted + page_registry._cache = None + status = page_registry.get_page_status("/wip-overview") + assert status == "dev" + + # 5. Logout + client.get("/admin/logout") + + # 6. 
Now non-admin should get 403 on /wip-overview + response = client.get("/wip-overview") + assert response.status_code == 403 + + @patch('mes_dashboard.services.auth_service.requests.post') + def test_release_dev_page_makes_it_public(self, mock_post, client, temp_page_status): + """Test releasing a dev page makes it publicly accessible.""" + mock_post.return_value = mock_ldap_success() + + # 1. Verify /tables is currently dev (403 for non-admin) + response = client.get("/tables") + assert response.status_code == 403 + + # 2. Login as admin + client.post("/admin/login", data={ + "username": "92367", + "password": "password123" + }) + + # 3. Release the page + response = client.put( + "/admin/api/pages/tables", + data=json.dumps({"status": "released"}), + content_type="application/json" + ) + assert response.status_code == 200 + + # 4. Logout + client.get("/admin/logout") + + # 5. Clear cache and verify non-admin can access + page_registry._cache = None + response = client.get("/tables") + assert response.status_code != 403 + + +class TestPortalDynamicTabs: + """E2E tests for dynamic portal tabs based on page status.""" + + def test_portal_hides_dev_tabs_for_non_admin(self, client, temp_page_status): + """Test portal hides dev page tabs for non-admin users.""" + response = client.get("/") + assert response.status_code == 200 + content = response.data.decode("utf-8") + + # Released pages should show + assert "WIP 即時概況" in content + + # Dev pages should NOT show (tables and resource are dev) + # Note: This depends on the can_view_page implementation in portal.html + + @patch('mes_dashboard.services.auth_service.requests.post') + def test_portal_shows_all_tabs_for_admin(self, mock_post, client, temp_page_status): + """Test portal shows all tabs for admin users.""" + mock_post.return_value = mock_ldap_success() + + # Login as admin + client.post("/admin/login", data={ + "username": "92367", + "password": "password123" + }) + + response = client.get("/") + assert 
response.status_code == 200 + content = response.data.decode("utf-8") + + # Admin should see all pages + assert "WIP 即時概況" in content + + +class TestSessionPersistence: + """E2E tests for session persistence.""" + + @patch('mes_dashboard.services.auth_service.requests.post') + def test_session_persists_across_requests(self, mock_post, client): + """Test admin session persists across multiple requests.""" + mock_post.return_value = mock_ldap_success() + + # Login + client.post("/admin/login", data={ + "username": "92367", + "password": "password123" + }) + + # Make multiple requests + for _ in range(5): + response = client.get("/admin/pages") + assert response.status_code == 200 + + # Session should still be valid + with client.session_transaction() as sess: + assert "admin" in sess + + +class TestSecurityScenarios: + """E2E tests for security scenarios.""" + + def test_cannot_access_admin_api_without_login(self, client): + """Test admin APIs are protected.""" + # Try to get pages without login + response = client.get("/admin/api/pages", follow_redirects=False) + assert response.status_code == 302 + + # Try to update page without login + response = client.put( + "/admin/api/pages/wip-overview", + data=json.dumps({"status": "dev"}), + content_type="application/json", + follow_redirects=False + ) + assert response.status_code == 302 + + @patch('mes_dashboard.services.auth_service.requests.post') + def test_non_admin_user_cannot_login(self, mock_post, client): + """Test non-admin user cannot access admin features.""" + # Mock LDAP success but with non-admin email + mock_response = MagicMock() + mock_response.json.return_value = { + "success": True, + "user": { + "username": "99999", + "displayName": "Regular User", + "mail": "regular@panjit.com.tw", + "department": "Test" + } + } + mock_post.return_value = mock_response + + # Try to login + response = client.post("/admin/login", data={ + "username": "99999", + "password": "password123" + }) + + # Should fail (show 
error, not redirect) + assert response.status_code == 200 + content = response.data.decode("utf-8") + assert "管理員" in content or "error" in content.lower() + + # Should NOT have admin session + with client.session_transaction() as sess: + assert "admin" not in sess + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/e2e/test_cache_e2e.py b/tests/e2e/test_cache_e2e.py new file mode 100644 index 0000000..9462c49 --- /dev/null +++ b/tests/e2e/test_cache_e2e.py @@ -0,0 +1,281 @@ +# -*- coding: utf-8 -*- +"""End-to-end tests for Redis cache functionality. + +These tests require a running server with Redis enabled. +Run with: pytest tests/e2e/test_cache_e2e.py -v +""" + +import pytest +import requests +import time + + +@pytest.mark.e2e +class TestHealthEndpointE2E: + """E2E tests for /health endpoint.""" + + def test_health_endpoint_accessible(self, health_url): + """Test health endpoint is accessible.""" + response = requests.get(health_url, timeout=10) + + assert response.status_code in [200, 503] + data = response.json() + assert 'status' in data + assert 'services' in data + assert 'cache' in data + + def test_health_shows_database_status(self, health_url): + """Test health endpoint shows database status.""" + response = requests.get(health_url, timeout=10) + data = response.json() + + assert 'database' in data['services'] + assert data['services']['database'] in ['ok', 'error'] + + def test_health_shows_redis_status(self, health_url): + """Test health endpoint shows Redis status.""" + response = requests.get(health_url, timeout=10) + data = response.json() + + assert 'redis' in data['services'] + assert data['services']['redis'] in ['ok', 'error', 'disabled'] + + def test_health_shows_cache_info(self, health_url): + """Test health endpoint shows cache information.""" + response = requests.get(health_url, timeout=10) + data = response.json() + + assert 'cache' in data + assert 'enabled' in data['cache'] + assert 'sys_date' in 
data['cache'] + assert 'updated_at' in data['cache'] + + +@pytest.mark.e2e +@pytest.mark.redis +class TestCachedWipApiE2E: + """E2E tests for cached WIP API endpoints.""" + + def _unwrap(self, resp_json): + """Unwrap API response to get data.""" + if isinstance(resp_json, dict) and 'data' in resp_json: + return resp_json['data'] + return resp_json + + def test_wip_summary_returns_data(self, api_base_url): + """Test WIP summary endpoint returns valid data.""" + response = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert 'totalLots' in data + assert 'totalQtyPcs' in data + assert 'byWipStatus' in data + assert 'dataUpdateDate' in data + + def test_wip_summary_status_breakdown(self, api_base_url): + """Test WIP summary contains correct status breakdown.""" + response = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30) + data = self._unwrap(response.json()) + + by_status = data['byWipStatus'] + assert 'run' in by_status + assert 'queue' in by_status + assert 'hold' in by_status + assert 'qualityHold' in by_status + assert 'nonQualityHold' in by_status + + # Each status should have lots and qtyPcs + for status in ['run', 'queue', 'hold']: + assert 'lots' in by_status[status] + assert 'qtyPcs' in by_status[status] + + def test_wip_matrix_returns_data(self, api_base_url): + """Test WIP matrix endpoint returns valid data.""" + response = requests.get(f"{api_base_url}/wip/overview/matrix", timeout=30) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert 'workcenters' in data + assert 'packages' in data + assert 'matrix' in data + assert 'workcenter_totals' in data + assert 'package_totals' in data + assert 'grand_total' in data + + def test_wip_workcenters_returns_list(self, api_base_url): + """Test workcenters endpoint returns list.""" + response = requests.get(f"{api_base_url}/wip/meta/workcenters", timeout=30) + + assert 
response.status_code == 200 + data = self._unwrap(response.json()) + assert isinstance(data, list) + + if len(data) > 0: + assert 'name' in data[0] + assert 'lot_count' in data[0] + + def test_wip_packages_returns_list(self, api_base_url): + """Test packages endpoint returns list.""" + response = requests.get(f"{api_base_url}/wip/meta/packages", timeout=30) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert isinstance(data, list) + + if len(data) > 0: + assert 'name' in data[0] + assert 'lot_count' in data[0] + + def test_wip_hold_summary_returns_data(self, api_base_url): + """Test hold summary endpoint returns valid data.""" + response = requests.get(f"{api_base_url}/wip/overview/hold", timeout=30) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert 'items' in data + assert isinstance(data['items'], list) + + +@pytest.mark.e2e +@pytest.mark.redis +class TestCachePerformanceE2E: + """E2E tests for cache performance.""" + + def _unwrap(self, resp_json): + """Unwrap API response to get data.""" + if isinstance(resp_json, dict) and 'data' in resp_json: + return resp_json['data'] + return resp_json + + def test_cached_response_is_fast(self, api_base_url): + """Test cached responses are faster than 2 seconds.""" + # First request may load cache + requests.get(f"{api_base_url}/wip/overview/summary", timeout=30) + + # Second request should be from cache + start = time.time() + response = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30) + elapsed = time.time() - start + + assert response.status_code == 200 + # Cached response should be fast (< 2 seconds) + assert elapsed < 2.0, f"Response took {elapsed:.2f}s, expected < 2s" + + def test_multiple_endpoints_consistent(self, api_base_url): + """Test multiple endpoints return consistent data.""" + # Get summary + summary_resp = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30) + summary = self._unwrap(summary_resp.json()) + + 
# Get matrix + matrix_resp = requests.get(f"{api_base_url}/wip/overview/matrix", timeout=30) + matrix = self._unwrap(matrix_resp.json()) + + # Grand total from matrix should match total from summary (approximately) + # There may be slight differences due to filtering + if summary['totalLots'] > 0 and matrix['grand_total'] > 0: + assert summary['totalQtyPcs'] > 0 or matrix['grand_total'] > 0 + + +@pytest.mark.e2e +@pytest.mark.redis +class TestSearchEndpointsE2E: + """E2E tests for search endpoints with cache.""" + + def _unwrap(self, resp_json): + """Unwrap API response to get data.""" + if isinstance(resp_json, dict) and 'data' in resp_json: + data = resp_json['data'] + # Search returns {'items': [...]} + if isinstance(data, dict) and 'items' in data: + return data['items'] + return data + return resp_json + + def test_search_workorders(self, api_base_url): + """Test workorder search returns results.""" + # Use a common pattern that should exist + response = requests.get( + f"{api_base_url}/wip/meta/search", + params={'type': 'workorder', 'q': 'WO', 'limit': 10}, + timeout=30 + ) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert isinstance(data, list) + + def test_search_lotids(self, api_base_url): + """Test lot ID search returns results.""" + response = requests.get( + f"{api_base_url}/wip/meta/search", + params={'type': 'lotid', 'q': 'LOT', 'limit': 10}, + timeout=30 + ) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert isinstance(data, list) + + def test_search_with_short_query_returns_empty(self, api_base_url): + """Test search with short query returns empty list.""" + response = requests.get( + f"{api_base_url}/wip/meta/search", + params={'type': 'workorder', 'q': 'W'}, # Too short + timeout=30 + ) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert data == [] + + +@pytest.mark.e2e +@pytest.mark.redis +class TestWipDetailE2E: + """E2E tests for WIP 
detail endpoint with cache.""" + + def _unwrap(self, resp_json): + """Unwrap API response to get data.""" + if isinstance(resp_json, dict) and 'data' in resp_json: + return resp_json['data'] + return resp_json + + def test_wip_detail_with_workcenter(self, api_base_url): + """Test WIP detail endpoint for a workcenter.""" + # First get list of workcenters + wc_resp = requests.get(f"{api_base_url}/wip/meta/workcenters", timeout=30) + workcenters = self._unwrap(wc_resp.json()) + + if len(workcenters) > 0: + wc_name = workcenters[0]['name'] + response = requests.get( + f"{api_base_url}/wip/detail/{wc_name}", + timeout=30 + ) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert 'workcenter' in data + assert 'summary' in data + assert 'lots' in data + assert 'pagination' in data + + def test_wip_detail_pagination(self, api_base_url): + """Test WIP detail pagination.""" + wc_resp = requests.get(f"{api_base_url}/wip/meta/workcenters", timeout=30) + workcenters = self._unwrap(wc_resp.json()) + + if len(workcenters) > 0: + wc_name = workcenters[0]['name'] + response = requests.get( + f"{api_base_url}/wip/detail/{wc_name}", + params={'page': 1, 'page_size': 10}, + timeout=30 + ) + + assert response.status_code == 200 + data = self._unwrap(response.json()) + assert data['pagination']['page'] == 1 + assert data['pagination']['page_size'] == 10 diff --git a/tests/e2e/test_global_connection.py b/tests/e2e/test_global_connection.py new file mode 100644 index 0000000..7e87b44 --- /dev/null +++ b/tests/e2e/test_global_connection.py @@ -0,0 +1,362 @@ +# -*- coding: utf-8 -*- +"""E2E tests for global connection management features. + +Tests the MesApi client, Toast notifications, and page functionality +using Playwright. 
+ +Run with: pytest tests/e2e/ --headed (to see browser) +""" + +import pytest +import re +from playwright.sync_api import Page, expect + + +@pytest.mark.e2e +class TestPortalPage: + """E2E tests for the Portal page.""" + + def test_portal_loads_successfully(self, page: Page, app_server: str): + """Portal page should load without errors.""" + page.goto(app_server) + + # Wait for page to load + expect(page.locator('h1')).to_contain_text('MES 報表入口') + + def test_portal_has_all_tabs(self, page: Page, app_server: str): + """Portal should have all navigation tabs.""" + page.goto(app_server) + + # Check all tabs exist + expect(page.locator('.tab:has-text("WIP 即時概況")')).to_be_visible() + expect(page.locator('.tab:has-text("機台狀態報表")')).to_be_visible() + expect(page.locator('.tab:has-text("數據表查詢工具")')).to_be_visible() + expect(page.locator('.tab:has-text("Excel 批次查詢")')).to_be_visible() + + def test_portal_tab_switching(self, page: Page, app_server: str): + """Portal tabs should switch iframe content.""" + page.goto(app_server) + + # Click on a different tab + page.locator('.tab:has-text("機台狀態報表")').click() + + # Verify the tab is active + expect(page.locator('.tab:has-text("機台狀態報表")')).to_have_class(re.compile(r'active')) + + +@pytest.mark.e2e +class TestToastNotifications: + """E2E tests for Toast notification system.""" + + def test_toast_container_exists(self, page: Page, app_server: str): + """Toast container should be present in the DOM.""" + page.goto(f"{app_server}/wip-overview") + + # Toast container should exist in DOM (hidden when empty, which is expected) + page.wait_for_selector('#mes-toast-container', state='attached', timeout=5000) + + def test_toast_info_display(self, page: Page, app_server: str): + """Toast.info() should display info notification.""" + page.goto(f"{app_server}/wip-overview") + + # Execute Toast.info() in browser context + page.evaluate("Toast.info('Test info message')") + + # Verify toast appears + toast = page.locator('.mes-toast-info') + 
expect(toast).to_be_visible() + expect(toast).to_contain_text('Test info message') + + def test_toast_success_display(self, page: Page, app_server: str): + """Toast.success() should display success notification.""" + page.goto(f"{app_server}/wip-overview") + + page.evaluate("Toast.success('Operation successful')") + + toast = page.locator('.mes-toast-success') + expect(toast).to_be_visible() + expect(toast).to_contain_text('Operation successful') + + def test_toast_error_display(self, page: Page, app_server: str): + """Toast.error() should display error notification.""" + page.goto(f"{app_server}/wip-overview") + + page.evaluate("Toast.error('An error occurred')") + + toast = page.locator('.mes-toast-error') + expect(toast).to_be_visible() + expect(toast).to_contain_text('An error occurred') + + def test_toast_error_with_retry(self, page: Page, app_server: str): + """Toast.error() with retry callback should show retry button.""" + page.goto(f"{app_server}/wip-overview") + + page.evaluate("Toast.error('Connection failed', { retry: () => console.log('retry clicked') })") + + # Verify retry button exists + retry_btn = page.locator('.mes-toast-retry') + expect(retry_btn).to_be_visible() + expect(retry_btn).to_contain_text('重試') + + def test_toast_loading_display(self, page: Page, app_server: str): + """Toast.loading() should display loading notification.""" + page.goto(f"{app_server}/wip-overview") + + page.evaluate("Toast.loading('Loading data...')") + + toast = page.locator('.mes-toast-loading') + expect(toast).to_be_visible() + + def test_toast_dismiss(self, page: Page, app_server: str): + """Toast.dismiss() should remove toast.""" + page.goto(f"{app_server}/wip-overview") + + # Create and dismiss a toast + toast_id = page.evaluate("Toast.info('Will be dismissed')") + page.evaluate(f"Toast.dismiss({toast_id})") + + # Wait for animation + page.wait_for_timeout(500) + + # Toast should be gone + expect(page.locator('.mes-toast-info')).not_to_be_visible() + + def 
test_toast_max_limit(self, page: Page, app_server: str): + """Toast system should enforce max 5 toasts.""" + page.goto(f"{app_server}/wip-overview") + + # Create 7 toasts + for i in range(7): + page.evaluate(f"Toast.info('Toast {i}')") + + # Should only have 5 toasts visible + toasts = page.locator('.mes-toast') + expect(toasts).to_have_count(5) + + +@pytest.mark.e2e +class TestMesApiClient: + """E2E tests for MesApi client.""" + + def test_mesapi_exists_on_page(self, page: Page, app_server: str): + """MesApi should be available in window scope.""" + page.goto(f"{app_server}/wip-overview") + + has_mesapi = page.evaluate("typeof MesApi !== 'undefined'") + assert has_mesapi, "MesApi should be defined" + + def test_mesapi_has_get_method(self, page: Page, app_server: str): + """MesApi should have get() method.""" + page.goto(f"{app_server}/wip-overview") + + has_get = page.evaluate("typeof MesApi.get === 'function'") + assert has_get, "MesApi.get should be a function" + + def test_mesapi_has_post_method(self, page: Page, app_server: str): + """MesApi should have post() method.""" + page.goto(f"{app_server}/wip-overview") + + has_post = page.evaluate("typeof MesApi.post === 'function'") + assert has_post, "MesApi.post should be a function" + + def test_mesapi_request_logging(self, page: Page, app_server: str): + """MesApi should log requests to console.""" + page.goto(f"{app_server}/wip-overview") + + # Capture console messages + console_messages = [] + page.on("console", lambda msg: console_messages.append(msg.text)) + + # Make a request (will fail but should log) + page.evaluate(""" + (async () => { + try { + await MesApi.get('/api/test-endpoint'); + } catch (e) { + // Expected to fail + } + })() + """) + + page.wait_for_timeout(1000) + + # Check for MesApi log pattern + mesapi_logs = [m for m in console_messages if '[MesApi]' in m] + assert len(mesapi_logs) > 0, "MesApi should log requests with [MesApi] prefix" + + +@pytest.mark.e2e +class TestWIPOverviewPage: + 
"""E2E tests for WIP Overview page.""" + + def test_wip_overview_loads(self, page: Page, app_server: str): + """WIP Overview page should load.""" + page.goto(f"{app_server}/wip-overview") + + # Page should have the header + expect(page.locator('body')).to_be_visible() + + def test_wip_overview_has_toast_system(self, page: Page, app_server: str): + """WIP Overview should have Toast system loaded.""" + page.goto(f"{app_server}/wip-overview") + + has_toast = page.evaluate("typeof Toast !== 'undefined'") + assert has_toast, "Toast should be defined on WIP Overview page" + + def test_wip_overview_has_mesapi(self, page: Page, app_server: str): + """WIP Overview should have MesApi loaded.""" + page.goto(f"{app_server}/wip-overview") + + has_mesapi = page.evaluate("typeof MesApi !== 'undefined'") + assert has_mesapi, "MesApi should be defined on WIP Overview page" + + +@pytest.mark.e2e +class TestWIPDetailPage: + """E2E tests for WIP Detail page.""" + + def test_wip_detail_loads(self, page: Page, app_server: str): + """WIP Detail page should load.""" + page.goto(f"{app_server}/wip-detail") + + expect(page.locator('body')).to_be_visible() + + def test_wip_detail_has_toast_system(self, page: Page, app_server: str): + """WIP Detail should have Toast system loaded.""" + page.goto(f"{app_server}/wip-detail") + + has_toast = page.evaluate("typeof Toast !== 'undefined'") + assert has_toast, "Toast should be defined on WIP Detail page" + + def test_wip_detail_has_mesapi(self, page: Page, app_server: str): + """WIP Detail should have MesApi loaded.""" + page.goto(f"{app_server}/wip-detail") + + has_mesapi = page.evaluate("typeof MesApi !== 'undefined'") + assert has_mesapi, "MesApi should be defined on WIP Detail page" + + +@pytest.mark.e2e +class TestTablesPage: + """E2E tests for Tables page.""" + + def test_tables_page_loads(self, page: Page, app_server: str): + """Tables page should load.""" + page.goto(f"{app_server}/tables") + + expect(page.locator('h1')).to_contain_text('MES 
數據表查詢工具') + + def test_tables_has_toast_system(self, page: Page, app_server: str): + """Tables page should have Toast system loaded.""" + page.goto(f"{app_server}/tables") + + has_toast = page.evaluate("typeof Toast !== 'undefined'") + assert has_toast, "Toast should be defined on Tables page" + + def test_tables_has_mesapi(self, page: Page, app_server: str): + """Tables page should have MesApi loaded.""" + page.goto(f"{app_server}/tables") + + has_mesapi = page.evaluate("typeof MesApi !== 'undefined'") + assert has_mesapi, "MesApi should be defined on Tables page" + + +@pytest.mark.e2e +class TestResourcePage: + """E2E tests for Resource Status page.""" + + def test_resource_page_loads(self, page: Page, app_server: str): + """Resource page should load.""" + page.goto(f"{app_server}/resource") + + expect(page.locator('body')).to_be_visible() + + def test_resource_has_toast_system(self, page: Page, app_server: str): + """Resource page should have Toast system loaded.""" + page.goto(f"{app_server}/resource") + + has_toast = page.evaluate("typeof Toast !== 'undefined'") + assert has_toast, "Toast should be defined on Resource page" + + def test_resource_has_mesapi(self, page: Page, app_server: str): + """Resource page should have MesApi loaded.""" + page.goto(f"{app_server}/resource") + + has_mesapi = page.evaluate("typeof MesApi !== 'undefined'") + assert has_mesapi, "MesApi should be defined on Resource page" + + +@pytest.mark.e2e +class TestExcelQueryPage: + """E2E tests for Excel Query page.""" + + def test_excel_query_page_loads(self, page: Page, app_server: str): + """Excel Query page should load.""" + page.goto(f"{app_server}/excel-query") + + expect(page.locator('body')).to_be_visible() + + def test_excel_query_has_toast_system(self, page: Page, app_server: str): + """Excel Query page should have Toast system loaded.""" + page.goto(f"{app_server}/excel-query") + + has_toast = page.evaluate("typeof Toast !== 'undefined'") + assert has_toast, "Toast should be 
defined on Excel Query page" + + def test_excel_query_has_mesapi(self, page: Page, app_server: str): + """Excel Query page should have MesApi loaded.""" + page.goto(f"{app_server}/excel-query") + + has_mesapi = page.evaluate("typeof MesApi !== 'undefined'") + assert has_mesapi, "MesApi should be defined on Excel Query page" + + +@pytest.mark.e2e +class TestConsoleLogVerification: + """E2E tests for console log verification (Phase 4.2 tasks).""" + + def test_request_has_request_id(self, page: Page, app_server: str): + """API requests should log with req_xxx ID format.""" + page.goto(f"{app_server}/wip-overview") + + console_messages = [] + page.on("console", lambda msg: console_messages.append(msg.text)) + + # Trigger an API request + page.evaluate(""" + (async () => { + try { + await MesApi.get('/api/wip/overview/summary'); + } catch (e) {} + })() + """) + + page.wait_for_timeout(2000) + + # Check for request ID pattern + req_id_pattern = re.compile(r'req_\d{4}') + has_req_id = any(req_id_pattern.search(m) for m in console_messages) + assert has_req_id, "Console should show request ID like req_0001" + + def test_successful_request_shows_checkmark(self, page: Page, app_server: str): + """Successful requests should show checkmark in console.""" + page.goto(f"{app_server}/wip-overview") + + console_messages = [] + page.on("console", lambda msg: console_messages.append(msg.text)) + + # Make request to a working endpoint + page.evaluate(""" + (async () => { + try { + await MesApi.get('/api/wip/overview/summary'); + } catch (e) {} + })() + """) + + page.wait_for_timeout(3000) + + # Filter for MesApi logs + mesapi_logs = [m for m in console_messages if '[MesApi]' in m] + # The exact checkmark depends on implementation (✓ or similar) + assert len(mesapi_logs) > 0, "Should have MesApi console logs" diff --git a/tests/e2e/test_realtime_equipment_e2e.py b/tests/e2e/test_realtime_equipment_e2e.py new file mode 100644 index 0000000..500698c --- /dev/null +++ 
b/tests/e2e/test_realtime_equipment_e2e.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +"""End-to-end tests for realtime equipment status cache. + +Tests the full flow from cache sync to API response. +Requires a running server with --run-e2e flag. +""" + +import pytest +import requests + + +@pytest.mark.e2e +class TestEquipmentStatusCacheSync: + """Test equipment status cache synchronization.""" + + def test_health_check_includes_equipment_status_cache(self, health_url): + """Test health check includes equipment_status_cache status.""" + response = requests.get(health_url) + + assert response.status_code == 200 + data = response.json() + + # Should have equipment_status_cache in response + assert 'equipment_status_cache' in data + cache_status = data['equipment_status_cache'] + + # Should have expected fields + assert 'enabled' in cache_status + assert 'loaded' in cache_status + assert 'count' in cache_status + assert 'updated_at' in cache_status + + def test_health_check_includes_workcenter_mapping(self, health_url): + """Test health check includes workcenter_mapping status.""" + response = requests.get(health_url) + + assert response.status_code == 200 + data = response.json() + + # Should have workcenter_mapping in response + assert 'workcenter_mapping' in data + wc_status = data['workcenter_mapping'] + + # Should have expected fields + assert 'loaded' in wc_status + assert 'workcenter_count' in wc_status + assert 'group_count' in wc_status + + +@pytest.mark.e2e +class TestMergedQueryApi: + """Test merged resource status API endpoints.""" + + def test_resource_status_endpoint(self, api_base_url): + """Test /api/resource/status endpoint.""" + url = f"{api_base_url}/resource/status" + response = requests.get(url) + + assert response.status_code == 200 + data = response.json() + + assert data['success'] is True + assert 'data' in data + assert 'count' in data + + # If data exists, verify structure + if data['data']: + record = data['data'][0] + # Should have merged 
fields + assert 'RESOURCEID' in record + assert 'RESOURCENAME' in record + # Should have workcenter mapping fields + assert 'WORKCENTER_GROUP' in record + assert 'WORKCENTER_SHORT' in record + # Should have realtime status fields + assert 'STATUS_CATEGORY' in record + + def test_resource_status_with_workcenter_filter(self, api_base_url): + """Test /api/resource/status with workcenter_groups filter.""" + url = f"{api_base_url}/resource/status" + response = requests.get(url, params={'workcenter_groups': '焊接'}) + + assert response.status_code == 200 + data = response.json() + + assert data['success'] is True + + # All results should be in the specified group + for record in data['data']: + # May be None if mapping not found + if record.get('WORKCENTER_GROUP'): + assert record['WORKCENTER_GROUP'] == '焊接' + + def test_resource_status_with_production_filter(self, api_base_url): + """Test /api/resource/status with is_production filter.""" + url = f"{api_base_url}/resource/status" + response = requests.get(url, params={'is_production': 'true'}) + + assert response.status_code == 200 + data = response.json() + + assert data['success'] is True + + def test_resource_status_with_status_category_filter(self, api_base_url): + """Test /api/resource/status with status_categories filter.""" + url = f"{api_base_url}/resource/status" + response = requests.get(url, params={'status_categories': 'PRODUCTIVE,DOWN'}) + + assert response.status_code == 200 + data = response.json() + + assert data['success'] is True + + # All results should be in specified categories + for record in data['data']: + if record.get('STATUS_CATEGORY'): + assert record['STATUS_CATEGORY'] in ['PRODUCTIVE', 'DOWN'] + + def test_resource_status_summary_endpoint(self, api_base_url): + """Test /api/resource/status/summary endpoint.""" + url = f"{api_base_url}/resource/status/summary" + response = requests.get(url) + + assert response.status_code == 200 + data = response.json() + + assert data['success'] is True + 
assert 'data' in data + + summary = data['data'] + assert 'total_count' in summary + assert 'by_status_category' in summary + assert 'by_workcenter_group' in summary + assert 'with_active_job' in summary + assert 'with_wip' in summary + + def test_resource_status_matrix_endpoint(self, api_base_url): + """Test /api/resource/status/matrix endpoint.""" + url = f"{api_base_url}/resource/status/matrix" + response = requests.get(url) + + assert response.status_code == 200 + data = response.json() + + assert data['success'] is True + assert 'data' in data + + # If data exists, verify structure + if data['data']: + row = data['data'][0] + assert 'workcenter_group' in row + assert 'workcenter_sequence' in row + assert 'total' in row + # Should have standard status columns + assert 'PRD' in row + assert 'SBY' in row + assert 'UDT' in row + assert 'SDT' in row + assert 'EGT' in row + assert 'NST' in row + assert 'OTHER' in row + + +@pytest.mark.e2e +class TestFilterOptionsIncludeNewFields: + """Test filter options API includes new fields.""" + + def test_status_options_endpoint(self, api_base_url): + """Test /api/resource/status/options endpoint.""" + url = f"{api_base_url}/resource/status/options" + response = requests.get(url) + + assert response.status_code == 200 + data = response.json() + + assert data['success'] is True + assert 'data' in data + + options = data['data'] + # Should have workcenter_groups + assert 'workcenter_groups' in options + assert isinstance(options['workcenter_groups'], list) + + # Should have status_categories + assert 'status_categories' in options + assert isinstance(options['status_categories'], list) + + +@pytest.mark.e2e +@pytest.mark.redis +class TestCacheIntegration: + """Test cache integration (requires Redis).""" + + def test_cache_data_consistency(self, api_base_url, health_url): + """Test cache data is consistent between health and API.""" + # Get health status + health_resp = requests.get(health_url) + health_data = health_resp.json() 
+ + cache_status = health_data.get('equipment_status_cache', {}) + + if not cache_status.get('enabled') or not cache_status.get('loaded'): + pytest.skip("Equipment status cache not enabled or loaded") + + cache_count = cache_status.get('count', 0) + + # Get all equipment status via API + api_resp = requests.get(f"{api_base_url}/resource/status") + api_data = api_resp.json() + + # Count should be consistent (within reasonable margin for filtering) + api_count = api_data.get('count', 0) + + # API may have filters applied from resource-cache, so it could be less + # but should never exceed cache count + assert api_count <= cache_count or cache_count == 0 diff --git a/tests/e2e/test_resource_cache_e2e.py b/tests/e2e/test_resource_cache_e2e.py new file mode 100644 index 0000000..ce84019 --- /dev/null +++ b/tests/e2e/test_resource_cache_e2e.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +"""End-to-end tests for Resource Cache functionality. + +These tests require a running server with Redis enabled. 
+Run with: pytest tests/e2e/test_resource_cache_e2e.py -v --run-e2e +""" + +import pytest +import requests + + +@pytest.mark.e2e +class TestHealthEndpointResourceCacheE2E: + """E2E tests for /health endpoint resource cache status.""" + + def test_health_includes_resource_cache(self, health_url): + """Test health endpoint includes resource_cache field.""" + response = requests.get(health_url, timeout=10) + + assert response.status_code in [200, 503] + data = response.json() + assert 'resource_cache' in data + + def test_resource_cache_has_required_fields(self, health_url): + """Test resource_cache has all required fields.""" + response = requests.get(health_url, timeout=10) + data = response.json() + + rc = data['resource_cache'] + assert 'enabled' in rc + + if rc['enabled']: + assert 'loaded' in rc + assert 'count' in rc + assert 'version' in rc + assert 'updated_at' in rc + + def test_resource_cache_loaded_has_positive_count(self, health_url): + """Test resource cache has positive count when loaded.""" + response = requests.get(health_url, timeout=10) + data = response.json() + + rc = data['resource_cache'] + if rc.get('enabled') and rc.get('loaded'): + assert rc['count'] > 0, "Resource cache should have data when loaded" + + +@pytest.mark.e2e +@pytest.mark.redis +class TestResourceHistoryOptionsE2E: + """E2E tests for resource history filter options endpoint.""" + + def test_options_endpoint_accessible(self, api_base_url): + """Test resource history options endpoint is accessible.""" + response = requests.get( + f"{api_base_url}/resource/history/options", + timeout=30 + ) + + assert response.status_code == 200 + + def test_options_returns_families(self, api_base_url): + """Test options endpoint returns families list.""" + response = requests.get( + f"{api_base_url}/resource/history/options", + timeout=30 + ) + + assert response.status_code == 200 + data = response.json() + + if data.get('success'): + options = data.get('data', {}) + assert 'families' in options + 
assert isinstance(options['families'], list) + + def test_options_returns_workcenter_groups(self, api_base_url): + """Test options endpoint returns workcenter groups.""" + response = requests.get( + f"{api_base_url}/resource/history/options", + timeout=30 + ) + + assert response.status_code == 200 + data = response.json() + + if data.get('success'): + options = data.get('data', {}) + assert 'workcenter_groups' in options + assert isinstance(options['workcenter_groups'], list) + + +@pytest.mark.e2e +@pytest.mark.redis +class TestResourceFilterOptionsE2E: + """E2E tests for resource filter options endpoint.""" + + def test_filter_options_endpoint_accessible(self, api_base_url): + """Test resource filter options endpoint is accessible.""" + response = requests.get( + f"{api_base_url}/resource/filter_options", + timeout=30 + ) + + assert response.status_code == 200 + + def test_filter_options_returns_workcenters(self, api_base_url): + """Test filter options returns workcenters list.""" + response = requests.get( + f"{api_base_url}/resource/filter_options", + timeout=30 + ) + + assert response.status_code == 200 + data = response.json() + + if data.get('success'): + options = data.get('data', {}) + assert 'workcenters' in options + assert isinstance(options['workcenters'], list) + + def test_filter_options_returns_families(self, api_base_url): + """Test filter options returns families list.""" + response = requests.get( + f"{api_base_url}/resource/filter_options", + timeout=30 + ) + + assert response.status_code == 200 + data = response.json() + + if data.get('success'): + options = data.get('data', {}) + assert 'families' in options + assert isinstance(options['families'], list) + + def test_filter_options_returns_departments(self, api_base_url): + """Test filter options returns departments list.""" + response = requests.get( + f"{api_base_url}/resource/filter_options", + timeout=30 + ) + + assert response.status_code == 200 + data = response.json() + + if 
data.get('success'): + options = data.get('data', {}) + assert 'departments' in options + assert isinstance(options['departments'], list) + + def test_filter_options_returns_statuses(self, api_base_url): + """Test filter options returns statuses list (from Oracle).""" + response = requests.get( + f"{api_base_url}/resource/filter_options", + timeout=30 + ) + + assert response.status_code == 200 + data = response.json() + + if data.get('success'): + options = data.get('data', {}) + assert 'statuses' in options + assert isinstance(options['statuses'], list) + + +@pytest.mark.e2e +@pytest.mark.redis +class TestResourceCachePerformanceE2E: + """E2E tests for resource cache performance.""" + + def test_filter_options_response_time(self, api_base_url): + """Test filter options responds within acceptable time.""" + import time + + # First request may trigger cache load + requests.get(f"{api_base_url}/resource/filter_options", timeout=30) + + # Second request should be from cache + start = time.time() + response = requests.get(f"{api_base_url}/resource/filter_options", timeout=30) + elapsed = time.time() - start + + assert response.status_code == 200 + # Note: statuses still queries Oracle, so allow more time + # Other fields (workcenters, families, departments) come from Redis cache + assert elapsed < 30.0, f"Response took {elapsed:.2f}s, expected < 30s" + + def test_history_options_response_time(self, api_base_url): + """Test history options responds within acceptable time.""" + import time + + # First request + requests.get(f"{api_base_url}/resource/history/options", timeout=30) + + # Second request should be from cache + start = time.time() + response = requests.get(f"{api_base_url}/resource/history/options", timeout=30) + elapsed = time.time() - start + + assert response.status_code == 200 + # Should be fast (< 2 seconds) + assert elapsed < 2.0, f"Response took {elapsed:.2f}s, expected < 2s" + + +@pytest.mark.e2e +@pytest.mark.redis +class 
TestResourceCacheDataConsistencyE2E: + """E2E tests for resource cache data consistency.""" + + def test_cache_count_matches_health_report(self, health_url, api_base_url): + """Test cache count in health matches actual data count.""" + # Get health status + health_resp = requests.get(health_url, timeout=10) + health_data = health_resp.json() + + rc = health_data.get('resource_cache', {}) + if not rc.get('enabled') or not rc.get('loaded'): + pytest.skip("Resource cache not enabled or loaded") + + reported_count = rc.get('count', 0) + + # Get filter options which uses cached data + options_resp = requests.get(f"{api_base_url}/resource/filter_options", timeout=30) + options_data = options_resp.json() + + # The workcenters list should be derived from the same cache + if options_data.get('success'): + workcenters = options_data.get('data', {}).get('workcenters', []) + # Just verify we got data - exact count comparison is complex + assert len(workcenters) > 0 or reported_count == 0 + + def test_families_consistent_across_endpoints(self, api_base_url): + """Test families list is consistent across endpoints.""" + # Get from resource filter options + filter_resp = requests.get(f"{api_base_url}/resource/filter_options", timeout=30) + filter_data = filter_resp.json() + + # Get from resource history options + history_resp = requests.get(f"{api_base_url}/resource/history/options", timeout=30) + history_data = history_resp.json() + + if filter_data.get('success') and history_data.get('success'): + filter_families = set(filter_data.get('data', {}).get('families', [])) + history_families = set(history_data.get('data', {}).get('families', [])) + + # Both should return the same families (from same cache) + assert filter_families == history_families, \ + f"Families mismatch: filter has {len(filter_families)}, history has {len(history_families)}" diff --git a/tests/e2e/test_resource_history_e2e.py b/tests/e2e/test_resource_history_e2e.py new file mode 100644 index 0000000..729d7cf --- 
/dev/null +++ b/tests/e2e/test_resource_history_e2e.py @@ -0,0 +1,319 @@ +# -*- coding: utf-8 -*- +"""End-to-end tests for resource history analysis page. + +These tests simulate real user workflows through the resource history analysis feature. +Run with: pytest tests/e2e/test_resource_history_e2e.py -v --run-integration +""" + +import json +import pytest +from unittest.mock import patch, MagicMock +import pandas as pd +from datetime import datetime, timedelta + +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src')) + +import mes_dashboard.core.database as db +from mes_dashboard.app import create_app + + +@pytest.fixture +def app(): + """Create application for testing.""" + db._ENGINE = None + app = create_app('testing') + app.config['TESTING'] = True + return app + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +class TestResourceHistoryPageAccess: + """E2E tests for page access and navigation.""" + + def test_page_loads_successfully(self, client): + """Resource history page should load without errors.""" + response = client.get('/resource-history') + + assert response.status_code == 200 + content = response.data.decode('utf-8') + assert '設備歷史績效' in content + + def test_page_contains_filter_elements(self, client): + """Page should contain all filter elements.""" + response = client.get('/resource-history') + content = response.data.decode('utf-8') + + # Check for filter elements + assert 'startDate' in content + assert 'endDate' in content + # Multi-select dropdowns + assert 'workcenterGroupsDropdown' in content + assert 'familiesDropdown' in content + assert 'isProduction' in content + assert 'isKey' in content + assert 'isMonitor' in content + + def test_page_contains_kpi_cards(self, client): + """Page should contain KPI card elements.""" + response = client.get('/resource-history') + content = response.data.decode('utf-8') + + assert 'kpiOuPct' in content + assert 
'kpiAvailabilityPct' in content + assert 'kpiPrdHours' in content + assert 'kpiUdtHours' in content + assert 'kpiSdtHours' in content + assert 'kpiEgtHours' in content + assert 'kpiMachineCount' in content + + def test_page_contains_chart_containers(self, client): + """Page should contain chart container elements.""" + response = client.get('/resource-history') + content = response.data.decode('utf-8') + + assert 'trendChart' in content + assert 'stackedChart' in content + assert 'comparisonChart' in content + assert 'heatmapChart' in content + + def test_page_contains_table_elements(self, client): + """Page should contain table elements.""" + response = client.get('/resource-history') + content = response.data.decode('utf-8') + + assert 'detailTableBody' in content + assert 'expandAllBtn' in content + assert 'collapseAllBtn' in content + assert 'exportBtn' in content + + +class TestResourceHistoryAPIWorkflow: + """E2E tests for API workflows.""" + + @patch('mes_dashboard.services.filter_cache.get_workcenter_groups') + @patch('mes_dashboard.services.filter_cache.get_resource_families') + def test_filter_options_workflow(self, mock_families, mock_groups, client): + """Filter options should be loadable.""" + mock_groups.return_value = [ + {'name': '焊接_DB', 'sequence': 1}, + {'name': '焊接_WB', 'sequence': 2}, + {'name': '成型', 'sequence': 4}, + ] + mock_families.return_value = ['FAM001', 'FAM002'] + + response = client.get('/api/resource/history/options') + + assert response.status_code == 200 + data = json.loads(response.data) + assert data['success'] is True + assert 'workcenter_groups' in data['data'] + assert 'families' in data['data'] + + @patch('mes_dashboard.services.resource_history_service.read_sql_df') + def test_complete_query_workflow(self, mock_read_sql, client): + """Complete query workflow should return all data sections.""" + # Mock responses for the 4 queries in query_summary + kpi_df = pd.DataFrame([{ + 'PRD_HOURS': 8000, 'SBY_HOURS': 1000, 
'UDT_HOURS': 500, + 'SDT_HOURS': 300, 'EGT_HOURS': 200, 'NST_HOURS': 1000, + 'MACHINE_COUNT': 100 + }]) + + trend_df = pd.DataFrame([ + {'DATA_DATE': datetime(2024, 1, 1), 'PRD_HOURS': 1000, 'SBY_HOURS': 100, + 'UDT_HOURS': 50, 'SDT_HOURS': 30, 'EGT_HOURS': 20, 'NST_HOURS': 100, 'MACHINE_COUNT': 100}, + {'DATA_DATE': datetime(2024, 1, 2), 'PRD_HOURS': 1100, 'SBY_HOURS': 90, + 'UDT_HOURS': 40, 'SDT_HOURS': 25, 'EGT_HOURS': 15, 'NST_HOURS': 100, 'MACHINE_COUNT': 100}, + ]) + + heatmap_df = pd.DataFrame([ + {'WORKCENTERNAME': '焊接_DB', 'DATA_DATE': datetime(2024, 1, 1), + 'PRD_HOURS': 400, 'SBY_HOURS': 50, 'UDT_HOURS': 25, 'SDT_HOURS': 15, 'EGT_HOURS': 10}, + {'WORKCENTERNAME': '成型', 'DATA_DATE': datetime(2024, 1, 1), + 'PRD_HOURS': 600, 'SBY_HOURS': 50, 'UDT_HOURS': 25, 'SDT_HOURS': 15, 'EGT_HOURS': 10}, + ]) + + comparison_df = pd.DataFrame([ + {'WORKCENTERNAME': '焊接_DB', 'PRD_HOURS': 4000, 'SBY_HOURS': 500, + 'UDT_HOURS': 250, 'SDT_HOURS': 150, 'EGT_HOURS': 100, 'MACHINE_COUNT': 50}, + {'WORKCENTERNAME': '成型', 'PRD_HOURS': 4000, 'SBY_HOURS': 500, + 'UDT_HOURS': 250, 'SDT_HOURS': 150, 'EGT_HOURS': 100, 'MACHINE_COUNT': 50}, + ]) + + # Use function-based side_effect for ThreadPoolExecutor parallel queries + def mock_sql(sql): + sql_upper = sql.upper() + if 'DATA_DATE' in sql_upper and 'WORKCENTERNAME' in sql_upper: + return heatmap_df + elif 'DATA_DATE' in sql_upper: + return trend_df + elif 'WORKCENTERNAME' in sql_upper: + return comparison_df + else: + return kpi_df + + mock_read_sql.side_effect = mock_sql + + response = client.get( + '/api/resource/history/summary' + '?start_date=2024-01-01' + '&end_date=2024-01-07' + '&granularity=day' + ) + + assert response.status_code == 200 + data = json.loads(response.data) + assert data['success'] is True + + # Verify KPI + assert data['data']['kpi']['ou_pct'] == 80.0 + # Availability% = (8000+1000+200) / (8000+1000+200+300+500+1000) * 100 = 9200/11000 = 83.6% + assert data['data']['kpi']['availability_pct'] == 83.6 + assert 
data['data']['kpi']['machine_count'] == 100 + + # Verify trend + assert len(data['data']['trend']) == 2 + # Trend should also have availability_pct + assert 'availability_pct' in data['data']['trend'][0] + + # Verify heatmap + assert len(data['data']['heatmap']) == 2 + + # Verify comparison + assert len(data['data']['workcenter_comparison']) == 2 + + @patch('mes_dashboard.services.resource_history_service.read_sql_df') + def test_detail_query_workflow(self, mock_read_sql, client): + """Detail query workflow should return hierarchical data.""" + detail_df = pd.DataFrame([ + {'WORKCENTERNAME': '焊接_DB', 'RESOURCEFAMILYNAME': 'FAM001', 'RESOURCENAME': 'RES001', + 'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2, + 'NST_HOURS': 10, 'TOTAL_HOURS': 110}, + {'WORKCENTERNAME': '焊接_DB', 'RESOURCEFAMILYNAME': 'FAM001', 'RESOURCENAME': 'RES002', + 'PRD_HOURS': 75, 'SBY_HOURS': 15, 'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2, + 'NST_HOURS': 10, 'TOTAL_HOURS': 110}, + ]) + + mock_read_sql.return_value = detail_df + + response = client.get( + '/api/resource/history/detail' + '?start_date=2024-01-01' + '&end_date=2024-01-07' + ) + + assert response.status_code == 200 + data = json.loads(response.data) + assert data['success'] is True + assert data['total'] == 2 + assert len(data['data']) == 2 + assert data['truncated'] is False + + # Verify data structure + first_row = data['data'][0] + assert 'workcenter' in first_row + assert 'family' in first_row + assert 'resource' in first_row + assert 'ou_pct' in first_row + assert 'availability_pct' in first_row + assert 'prd_hours' in first_row + assert 'prd_pct' in first_row + + @patch('mes_dashboard.services.resource_history_service.read_sql_df') + def test_export_workflow(self, mock_read_sql, client): + """Export workflow should return valid CSV.""" + mock_read_sql.return_value = pd.DataFrame([ + {'WORKCENTERNAME': '焊接_DB', 'RESOURCEFAMILYNAME': 'FAM001', 'RESOURCENAME': 'RES001', + 'PRD_HOURS': 80, 
'SBY_HOURS': 10, 'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2, + 'NST_HOURS': 10, 'TOTAL_HOURS': 110}, + ]) + + response = client.get( + '/api/resource/history/export' + '?start_date=2024-01-01' + '&end_date=2024-01-07' + ) + + assert response.status_code == 200 + assert 'text/csv' in response.content_type + + content = response.data.decode('utf-8-sig') + lines = content.strip().split('\n') + + # Should have header + data rows + assert len(lines) >= 2 + + # Verify header + header = lines[0] + assert '站點' in header + assert 'OU%' in header + assert 'Availability%' in header + + +class TestResourceHistoryValidation: + """E2E tests for input validation.""" + + def test_date_range_validation(self, client): + """Date range exceeding 730 days should be rejected.""" + response = client.get( + '/api/resource/history/summary' + '?start_date=2024-01-01' + '&end_date=2026-01-02' + ) + + assert response.status_code == 400 + data = json.loads(response.data) + assert data['success'] is False + assert '730' in data['error'] + + def test_missing_required_params(self, client): + """Missing required parameters should return error.""" + response = client.get('/api/resource/history/summary') + + assert response.status_code == 400 + data = json.loads(response.data) + assert data['success'] is False + + @patch('mes_dashboard.services.resource_history_service.read_sql_df') + def test_granularity_options(self, mock_read_sql, client): + """Different granularity options should work.""" + mock_df = pd.DataFrame([{ + 'PRD_HOURS': 100, 'SBY_HOURS': 10, 'UDT_HOURS': 5, + 'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10, 'MACHINE_COUNT': 5 + }]) + mock_read_sql.return_value = mock_df + + for granularity in ['day', 'week', 'month', 'year']: + mock_read_sql.side_effect = [mock_df, pd.DataFrame(), pd.DataFrame(), pd.DataFrame()] + + response = client.get( + f'/api/resource/history/summary' + f'?start_date=2024-01-01' + f'&end_date=2024-01-31' + f'&granularity={granularity}' + ) + + assert 
response.status_code == 200, f"Failed for granularity={granularity}" + + +class TestResourceHistoryNavigation: + """E2E tests for navigation integration.""" + + def test_portal_includes_history_tab(self, client): + """Portal should include resource history tab.""" + response = client.get('/') + content = response.data.decode('utf-8') + + assert '設備歷史績效' in content + assert 'resourceHistoryFrame' in content + + +if __name__ == '__main__': + pytest.main([__file__, '-v']) diff --git a/tests/fixtures/frontend_compute_parity.json b/tests/fixtures/frontend_compute_parity.json new file mode 100644 index 0000000..945d123 --- /dev/null +++ b/tests/fixtures/frontend_compute_parity.json @@ -0,0 +1,46 @@ +{ + "metric_tolerance": { + "ou_pct": 0.1, + "availability_pct": 0.1, + "prd_pct": 0.1, + "sby_pct": 0.1, + "udt_pct": 0.1, + "sdt_pct": 0.1, + "egt_pct": 0.1, + "nst_pct": 0.1 + }, + "cases": [ + { + "prd_hours": 10, + "sby_hours": 2, + "udt_hours": 1, + "sdt_hours": 1, + "egt_hours": 1, + "nst_hours": 1 + }, + { + "prd_hours": 0, + "sby_hours": 0, + "udt_hours": 0, + "sdt_hours": 0, + "egt_hours": 0, + "nst_hours": 0 + }, + { + "prd_hours": 85.5, + "sby_hours": 10.2, + "udt_hours": 1.1, + "sdt_hours": 0.8, + "egt_hours": 2.4, + "nst_hours": 3.0 + }, + { + "prd_hours": 5, + "sby_hours": 3, + "udt_hours": 4, + "sdt_hours": 2, + "egt_hours": 1, + "nst_hours": 5 + } + ] +} diff --git a/tests/stress/__init__.py b/tests/stress/__init__.py new file mode 100644 index 0000000..d4c21e5 --- /dev/null +++ b/tests/stress/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +"""Stress tests for MES Dashboard.""" diff --git a/tests/stress/conftest.py b/tests/stress/conftest.py new file mode 100644 index 0000000..32e7b83 --- /dev/null +++ b/tests/stress/conftest.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +"""Pytest configuration for stress tests.""" + +import pytest +import os +import sys +import time +from dataclasses import dataclass, field +from typing import List, Dict, Any + +# Add 
src to path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src')) + + +@dataclass +class StressTestResult: + """Container for stress test results.""" + test_name: str + total_requests: int = 0 + successful_requests: int = 0 + failed_requests: int = 0 + total_duration: float = 0.0 + min_response_time: float = float('inf') + max_response_time: float = 0.0 + response_times: List[float] = field(default_factory=list) + errors: List[str] = field(default_factory=list) + + @property + def avg_response_time(self) -> float: + if not self.response_times: + return 0.0 + return sum(self.response_times) / len(self.response_times) + + @property + def success_rate(self) -> float: + if self.total_requests == 0: + return 0.0 + return (self.successful_requests / self.total_requests) * 100 + + @property + def requests_per_second(self) -> float: + if self.total_duration == 0: + return 0.0 + return self.total_requests / self.total_duration + + def add_success(self, response_time: float): + self.total_requests += 1 + self.successful_requests += 1 + self.response_times.append(response_time) + self.min_response_time = min(self.min_response_time, response_time) + self.max_response_time = max(self.max_response_time, response_time) + + def add_failure(self, error: str, response_time: float = 0): + self.total_requests += 1 + self.failed_requests += 1 + self.errors.append(error) + if response_time > 0: + self.response_times.append(response_time) + + def report(self) -> str: + """Generate human-readable report.""" + lines = [ + f"\n{'='*60}", + f"Stress Test Report: {self.test_name}", + f"{'='*60}", + f"Total Requests: {self.total_requests}", + f"Successful: {self.successful_requests}", + f"Failed: {self.failed_requests}", + f"Success Rate: {self.success_rate:.2f}%", + f"{'─'*60}", + f"Total Duration: {self.total_duration:.2f}s", + f"Requests/Second: {self.requests_per_second:.2f}", + f"{'─'*60}", + f"Min Response Time: {self.min_response_time*1000:.2f}ms" if 
self.min_response_time != float('inf') else "Min Response Time: N/A", + f"Max Response Time: {self.max_response_time*1000:.2f}ms", + f"Avg Response Time: {self.avg_response_time*1000:.2f}ms", + f"{'='*60}", + ] + if self.errors: + lines.append(f"Errors (first 5):") + for err in self.errors[:5]: + lines.append(f" - {err[:100]}") + return "\n".join(lines) + + +@pytest.fixture(scope="session") +def base_url() -> str: + """Get the base URL for stress testing.""" + return os.environ.get('STRESS_TEST_URL', 'http://127.0.0.1:8080') + + +@pytest.fixture(scope="session") +def stress_config() -> Dict[str, Any]: + """Get stress test configuration.""" + return { + 'concurrent_users': int(os.environ.get('STRESS_CONCURRENT_USERS', '10')), + 'requests_per_user': int(os.environ.get('STRESS_REQUESTS_PER_USER', '20')), + 'ramp_up_time': float(os.environ.get('STRESS_RAMP_UP_TIME', '2.0')), + 'timeout': float(os.environ.get('STRESS_TIMEOUT', '30.0')), + } + + +@pytest.fixture +def stress_result(): + """Factory fixture to create stress test results.""" + def _create_result(test_name: str) -> StressTestResult: + return StressTestResult(test_name=test_name) + return _create_result + + +def pytest_configure(config): + """Add custom markers for stress tests.""" + config.addinivalue_line( + "markers", "stress: mark test as stress test (may take longer)" + ) + config.addinivalue_line( + "markers", "load: mark test as load test (concurrent requests)" + ) diff --git a/tests/stress/test_api_load.py b/tests/stress/test_api_load.py new file mode 100644 index 0000000..604d3e6 --- /dev/null +++ b/tests/stress/test_api_load.py @@ -0,0 +1,327 @@ +# -*- coding: utf-8 -*- +"""Backend API load tests. 
+ +Tests API endpoints under concurrent load to verify: +- Connection pool stability +- Timeout handling +- Response consistency under pressure + +Run with: pytest tests/stress/test_api_load.py -v -s +""" + +import pytest +import time +import requests +import concurrent.futures +from typing import List, Tuple + +# Import from local conftest via pytest fixtures + + +@pytest.mark.stress +@pytest.mark.load +class TestAPILoadConcurrent: + """Load tests with concurrent requests.""" + + def _make_request(self, url: str, timeout: float) -> Tuple[bool, float, str]: + """Make a single request and return (success, duration, error).""" + start = time.time() + try: + response = requests.get(url, timeout=timeout) + duration = time.time() - start + if response.status_code == 200: + data = response.json() + if data.get('success'): + return (True, duration, '') + return (False, duration, f"API returned success=false: {data.get('error', 'unknown')}") + return (False, duration, f"HTTP {response.status_code}") + except requests.exceptions.Timeout: + duration = time.time() - start + return (False, duration, "Request timeout") + except requests.exceptions.ConnectionError as e: + duration = time.time() - start + return (False, duration, f"Connection error: {str(e)[:50]}") + except Exception as e: + duration = time.time() - start + return (False, duration, f"Error: {str(e)[:50]}") + + def test_wip_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result): + """Test WIP summary API under concurrent load.""" + result = stress_result("WIP Summary Concurrent Load") + url = f"{base_url}/api/wip/overview/summary" + concurrent_users = stress_config['concurrent_users'] + requests_per_user = stress_config['requests_per_user'] + timeout = stress_config['timeout'] + + total_requests = concurrent_users * requests_per_user + + start_time = time.time() + with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor: + futures = [ + 
executor.submit(self._make_request, url, timeout) + for _ in range(total_requests) + ] + + for future in concurrent.futures.as_completed(futures): + success, duration, error = future.result() + if success: + result.add_success(duration) + else: + result.add_failure(error, duration) + + result.total_duration = time.time() - start_time + + print(result.report()) + + # Assertions + assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%" + assert result.avg_response_time < 10.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 10s" + + def test_wip_matrix_concurrent_load(self, base_url: str, stress_config: dict, stress_result): + """Test WIP matrix API under concurrent load.""" + result = stress_result("WIP Matrix Concurrent Load") + url = f"{base_url}/api/wip/overview/matrix" + concurrent_users = stress_config['concurrent_users'] + requests_per_user = stress_config['requests_per_user'] + timeout = stress_config['timeout'] + + total_requests = concurrent_users * requests_per_user + + start_time = time.time() + with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor: + futures = [ + executor.submit(self._make_request, url, timeout) + for _ in range(total_requests) + ] + + for future in concurrent.futures.as_completed(futures): + success, duration, error = future.result() + if success: + result.add_success(duration) + else: + result.add_failure(error, duration) + + result.total_duration = time.time() - start_time + + print(result.report()) + + assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%" + assert result.avg_response_time < 15.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 15s" + + def test_resource_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result): + """Test resource status summary API under concurrent load.""" + result = stress_result("Resource Status Summary Concurrent Load") + url = 
f"{base_url}/api/resource/status/summary" + concurrent_users = stress_config['concurrent_users'] + requests_per_user = stress_config['requests_per_user'] + timeout = stress_config['timeout'] + + total_requests = concurrent_users * requests_per_user + + start_time = time.time() + with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor: + futures = [ + executor.submit(self._make_request, url, timeout) + for _ in range(total_requests) + ] + + for future in concurrent.futures.as_completed(futures): + success, duration, error = future.result() + if success: + result.add_success(duration) + else: + result.add_failure(error, duration) + + result.total_duration = time.time() - start_time + + print(result.report()) + + assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%" + + def test_mixed_endpoints_concurrent_load(self, base_url: str, stress_config: dict, stress_result): + """Test multiple API endpoints simultaneously.""" + result = stress_result("Mixed Endpoints Concurrent Load") + endpoints = [ + f"{base_url}/api/wip/overview/summary", + f"{base_url}/api/wip/overview/matrix", + f"{base_url}/api/wip/overview/hold", + f"{base_url}/api/wip/meta/workcenters", + f"{base_url}/api/resource/status/summary", + ] + concurrent_users = stress_config['concurrent_users'] + timeout = stress_config['timeout'] + + # 5 requests per endpoint per user + requests_per_endpoint = 5 + total_requests = concurrent_users * len(endpoints) * requests_per_endpoint + + start_time = time.time() + with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor: + futures = [] + for _ in range(concurrent_users): + for endpoint in endpoints: + for _ in range(requests_per_endpoint): + futures.append(executor.submit(self._make_request, endpoint, timeout)) + + for future in concurrent.futures.as_completed(futures): + success, duration, error = future.result() + if success: + result.add_success(duration) + else: + 
result.add_failure(error, duration) + + result.total_duration = time.time() - start_time + + print(result.report()) + + assert result.success_rate >= 85.0, f"Success rate {result.success_rate:.1f}% is below 85%" + + +@pytest.mark.stress +@pytest.mark.load +class TestAPILoadRampUp: + """Load tests with gradual ramp-up.""" + + def _make_request(self, url: str, timeout: float) -> Tuple[bool, float, str]: + """Make a single request and return (success, duration, error).""" + start = time.time() + try: + response = requests.get(url, timeout=timeout) + duration = time.time() - start + if response.status_code == 200: + data = response.json() + if data.get('success'): + return (True, duration, '') + return (False, duration, f"API error: {data.get('error', 'unknown')}") + return (False, duration, f"HTTP {response.status_code}") + except Exception as e: + duration = time.time() - start + return (False, duration, str(e)[:50]) + + def test_gradual_load_increase(self, base_url: str, stress_result): + """Test API stability as load gradually increases.""" + result = stress_result("Gradual Load Increase") + url = f"{base_url}/api/wip/overview/summary" + + # Start with 2 concurrent users, increase to 20 + load_levels = [2, 5, 10, 15, 20] + requests_per_level = 10 + timeout = 30.0 + + start_time = time.time() + + for concurrent_users in load_levels: + print(f"\n Testing with {concurrent_users} concurrent users...") + with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor: + futures = [ + executor.submit(self._make_request, url, timeout) + for _ in range(requests_per_level) + ] + + for future in concurrent.futures.as_completed(futures): + success, duration, error = future.result() + if success: + result.add_success(duration) + else: + result.add_failure(error, duration) + + time.sleep(0.5) # Brief pause between levels + + result.total_duration = time.time() - start_time + + print(result.report()) + + assert result.success_rate >= 80.0, f"Success rate 
{result.success_rate:.1f}% is below 80%" + + +@pytest.mark.stress +class TestAPITimeoutHandling: + """Tests for timeout handling under load.""" + + def test_connection_recovery_after_timeout(self, base_url: str, stress_result): + """Test that API recovers after timeout scenarios.""" + result = stress_result("Connection Recovery After Timeout") + + # First, make requests with very short timeout to trigger timeouts + short_timeout_url = f"{base_url}/api/wip/overview/matrix" + + print("\n Phase 1: Triggering timeouts with 0.1s timeout...") + for _ in range(5): + start = time.time() + try: + requests.get(short_timeout_url, timeout=0.1) + result.add_success(time.time() - start) + except requests.exceptions.Timeout: + result.add_failure("Expected timeout", time.time() - start) + except Exception as e: + result.add_failure(str(e)[:50], time.time() - start) + + # Now verify system recovers with normal timeout + print(" Phase 2: Verifying recovery with 30s timeout...") + recovery_url = f"{base_url}/api/wip/overview/summary" + recovered = False + for i in range(10): + start = time.time() + try: + response = requests.get(recovery_url, timeout=30.0) + duration = time.time() - start + if response.status_code == 200 and response.json().get('success'): + result.add_success(duration) + recovered = True + print(f" Recovered on attempt {i+1}") + break + except Exception as e: + result.add_failure(str(e)[:50], time.time() - start) + time.sleep(0.5) + + result.total_duration = sum(result.response_times) + + print(result.report()) + + assert recovered, "System did not recover after timeout scenarios" + + +@pytest.mark.stress +class TestAPIResponseConsistency: + """Tests for response consistency under load.""" + + def test_response_data_consistency(self, base_url: str, stress_config: dict): + """Verify API returns consistent data structure under load.""" + url = f"{base_url}/api/wip/overview/summary" + concurrent_users = 5 + requests_per_user = 10 + timeout = 30.0 + + responses = [] + + 
def make_request(): + try: + response = requests.get(url, timeout=timeout) + if response.status_code == 200: + return response.json() + except Exception: + pass + return None + + with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor: + futures = [ + executor.submit(make_request) + for _ in range(concurrent_users * requests_per_user) + ] + + for future in concurrent.futures.as_completed(futures): + result = future.result() + if result: + responses.append(result) + + # Verify all successful responses have consistent structure + assert len(responses) > 0, "No successful responses received" + + first_response = responses[0] + required_fields = {'success'} + + for i, response in enumerate(responses): + for field in required_fields: + assert field in response, f"Response {i} missing field '{field}'" + + print(f"\n Received {len(responses)} consistent responses") diff --git a/tests/stress/test_frontend_stress.py b/tests/stress/test_frontend_stress.py new file mode 100644 index 0000000..4b8a285 --- /dev/null +++ b/tests/stress/test_frontend_stress.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 -*- +"""Frontend stress tests using Playwright. 
+ +Tests frontend stability under high-frequency operations: +- Toast notification system under rapid fire +- MesApi client under rapid requests +- AbortController behavior +- Page navigation stress + +Run with: pytest tests/stress/test_frontend_stress.py -v -s +""" + +import pytest +import time +import re +from playwright.sync_api import Page, expect + + +@pytest.fixture(scope="session") +def app_server() -> str: + """Get the base URL for stress testing.""" + import os + return os.environ.get('STRESS_TEST_URL', 'http://127.0.0.1:8080') + + +@pytest.fixture(scope="session") +def browser_context_args(browser_context_args): + """Configure browser context for stress tests.""" + return { + **browser_context_args, + "viewport": {"width": 1280, "height": 720}, + "locale": "zh-TW", + } + + +def load_page_with_js(page: Page, url: str, timeout: int = 60000): + """Load page and wait for JS to initialize.""" + page.goto(url, wait_until='domcontentloaded', timeout=timeout) + page.wait_for_timeout(1000) # Allow JS initialization + + +@pytest.mark.stress +class TestToastStress: + """Stress tests for Toast notification system.""" + + def test_rapid_toast_creation(self, page: Page, app_server: str): + """Test Toast system under rapid creation - should enforce max limit.""" + load_page_with_js(page, f"{app_server}/tables") + + # Create 50 toasts rapidly + start_time = time.time() + for i in range(50): + page.evaluate(f"Toast.info('Rapid toast {i}')") + + creation_time = time.time() - start_time + print(f"\n Created 50 toasts in {creation_time:.3f}s") + + page.wait_for_timeout(500) + + # Should only have max 5 toasts visible + toast_count = page.locator('.mes-toast').count() + assert toast_count <= 5, f"Toast count {toast_count} exceeds max limit of 5" + print(f" Toast count enforced: {toast_count} (max 5)") + + def test_toast_type_cycling(self, page: Page, app_server: str): + """Test rapid cycling through all toast types - system remains stable.""" + load_page_with_js(page, 
f"{app_server}/tables") + + toast_types = ['info', 'success', 'warning', 'error'] + + start_time = time.time() + for i in range(100): + toast_type = toast_types[i % len(toast_types)] + page.evaluate(f"Toast.{toast_type}('Type cycle {i}')") + + cycle_time = time.time() - start_time + print(f"\n Cycled 100 toasts in {cycle_time:.3f}s") + + # Wait for animations to complete + page.wait_for_timeout(1000) + + # Dismiss all and verify system can recover + page.evaluate("Toast.dismissAll()") + page.wait_for_timeout(500) + + toast_count = page.locator('.mes-toast').count() + assert toast_count <= 5, f"Toast overflow after dismissAll: {toast_count}" + print(f" System stable after cleanup, toast count: {toast_count}") + + def test_toast_dismiss_stress(self, page: Page, app_server: str): + """Test rapid toast creation and dismissal.""" + load_page_with_js(page, f"{app_server}/tables") + + start_time = time.time() + + # Create and immediately dismiss + for i in range(30): + toast_id = page.evaluate(f"Toast.info('Dismiss test {i}')") + page.evaluate(f"Toast.dismiss({toast_id})") + + dismiss_time = time.time() - start_time + print(f"\n Created and dismissed 30 toasts in {dismiss_time:.3f}s") + + page.wait_for_timeout(500) + + # Should have no or few toasts + toast_count = page.locator('.mes-toast').count() + assert toast_count <= 2, f"Undismissed toasts remain: {toast_count}" + print(f" Remaining toasts: {toast_count}") + + def test_loading_toast_stress(self, page: Page, app_server: str): + """Test loading toasts can be created and properly dismissed.""" + load_page_with_js(page, f"{app_server}/tables") + + toast_ids = [] + + # Create 10 loading toasts + for i in range(10): + toast_id = page.evaluate(f"Toast.loading('Loading {i}...')") + toast_ids.append(toast_id) + + page.wait_for_timeout(200) + + # Loading toasts are created + loading_count = page.locator('.mes-toast-loading').count() + print(f"\n Created {len(toast_ids)} loading toasts, visible: {loading_count}") + + # 
Dismiss all using dismissAll + page.evaluate("Toast.dismissAll()") + page.wait_for_timeout(500) + + # All should be gone after dismissAll + loading_count = page.locator('.mes-toast-loading').count() + assert loading_count == 0, f"Loading toasts not dismissed: {loading_count}" + print(f" Loading toast dismiss test passed") + + +@pytest.mark.stress +class TestMesApiStress: + """Stress tests for MesApi client.""" + + def test_rapid_api_requests(self, page: Page, app_server: str): + """Test MesApi under rapid sequential requests.""" + load_page_with_js(page, f"{app_server}/tables") + + # Make 20 rapid API requests + results = page.evaluate(""" + async () => { + const results = []; + const startTime = Date.now(); + + for (let i = 0; i < 20; i++) { + try { + const response = await MesApi.get('/api/wip/meta/workcenters'); + results.push({ success: true, status: response?.status || 'ok' }); + } catch (e) { + results.push({ success: false, error: e.message }); + } + } + + return { + results, + duration: Date.now() - startTime, + successCount: results.filter(r => r.success).length + }; + } + """) + + print(f"\n 20 requests in {results['duration']}ms") + print(f" Success: {results['successCount']}/20") + + assert results['successCount'] >= 15, f"Too many failures: {20 - results['successCount']}" + + def test_concurrent_api_requests(self, page: Page, app_server: str): + """Test MesApi with concurrent requests using Promise.all.""" + load_page_with_js(page, f"{app_server}/tables") + + # Make 10 concurrent requests + results = page.evaluate(""" + async () => { + const endpoints = [ + '/api/wip/overview/summary', + '/api/wip/overview/matrix', + '/api/wip/meta/workcenters', + '/api/wip/meta/packages', + ]; + + const startTime = Date.now(); + const promises = []; + + // 2 requests per endpoint = 8 total concurrent + for (const endpoint of endpoints) { + promises.push(MesApi.get(endpoint).catch(e => ({ error: e.message }))); + promises.push(MesApi.get(endpoint).catch(e => ({ error: 
e.message }))); + } + + const results = await Promise.all(promises); + const successCount = results.filter(r => !r.error).length; + + return { + duration: Date.now() - startTime, + total: results.length, + successCount + }; + } + """) + + print(f"\n {results['total']} concurrent requests in {results['duration']}ms") + print(f" Success: {results['successCount']}/{results['total']}") + + assert results['successCount'] >= 6, f"Too many concurrent failures" + + def test_abort_controller_stress(self, page: Page, app_server: str): + """Test AbortController under rapid request cancellation.""" + load_page_with_js(page, f"{app_server}/tables") + + # Start requests and cancel them rapidly + results = page.evaluate(""" + async () => { + const results = { started: 0, aborted: 0, completed: 0, errors: 0 }; + + for (let i = 0; i < 10; i++) { + results.started++; + + const controller = new AbortController(); + + const request = fetch('/api/wip/overview/summary', { + signal: controller.signal + }).then(() => { + results.completed++; + }).catch(e => { + if (e.name === 'AbortError') { + results.aborted++; + } else { + results.errors++; + } + }); + + // Cancel after 50ms + setTimeout(() => controller.abort(), 50); + + await new Promise(resolve => setTimeout(resolve, 100)); + } + + return results; + } + """) + + print(f"\n Started: {results['started']}") + print(f" Aborted: {results['aborted']}") + print(f" Completed: {results['completed']}") + print(f" Errors: {results['errors']}") + + # Most should either abort or complete + total_resolved = results['aborted'] + results['completed'] + assert total_resolved >= 5, f"Too many unresolved requests" + + +@pytest.mark.stress +class TestPageNavigationStress: + """Stress tests for rapid page navigation.""" + + def test_rapid_tab_switching(self, page: Page, app_server: str): + """Test rapid tab switching in portal.""" + page.goto(app_server, wait_until='domcontentloaded', timeout=30000) + page.wait_for_timeout(500) + + # Only use released 
pages that are visible without admin login + tabs = [ + '.tab:has-text("WIP 即時概況")', + '.tab:has-text("設備即時概況")', + '.tab:has-text("設備歷史績效")', + '.tab:has-text("設備維修查詢")', + ] + + start_time = time.time() + + # Rapidly switch tabs 20 times + for i in range(20): + tab = tabs[i % len(tabs)] + page.locator(tab).click() + page.wait_for_timeout(50) + + switch_time = time.time() - start_time + print(f"\n 20 tab switches in {switch_time:.3f}s") + + # Page should still be responsive + expect(page.locator('h1')).to_contain_text('MES 報表入口') + print(" Portal remained stable") + + def test_portal_iframe_stress(self, page: Page, app_server: str): + """Test portal remains responsive with iframe loading.""" + page.goto(app_server, wait_until='domcontentloaded', timeout=30000) + page.wait_for_timeout(500) + + # Switch through released tabs (dev tabs hidden without admin login) + tabs = [ + 'WIP 即時概況', + '設備即時概況', + '設備歷史績效', + '設備維修查詢', + ] + + for tab_name in tabs: + page.locator(f'.tab:has-text("{tab_name}")').click() + page.wait_for_timeout(200) + + # Verify tab is active + tab = page.locator(f'.tab:has-text("{tab_name}")') + expect(tab).to_have_class(re.compile(r'active')) + + print(f"\n All {len(tabs)} tabs clickable and responsive") + + +@pytest.mark.stress +class TestMemoryStress: + """Tests for memory leak detection.""" + + def test_toast_memory_cleanup(self, page: Page, app_server: str): + """Check Toast system cleans up properly.""" + load_page_with_js(page, f"{app_server}/tables") + + # Create and dismiss many toasts + for batch in range(5): + for i in range(20): + page.evaluate(f"Toast.info('Memory test {batch}-{i}')") + page.evaluate("Toast.dismissAll()") + page.wait_for_timeout(100) + + page.wait_for_timeout(500) + + # Check DOM is clean + toast_count = page.locator('.mes-toast').count() + assert toast_count <= 5, f"Toast elements not cleaned up: {toast_count}" + print(f"\n Toast memory cleanup test passed (remaining: {toast_count})") + + +@pytest.mark.stress +class 
TestConsoleErrorMonitoring: + """Monitor for JavaScript errors under stress.""" + + def test_no_js_errors_under_stress(self, page: Page, app_server: str): + """Verify no JavaScript errors occur under stress conditions.""" + js_errors = [] + + page.on("pageerror", lambda error: js_errors.append(str(error))) + + load_page_with_js(page, f"{app_server}/tables") + + # Perform stress operations + for i in range(30): + page.evaluate(f"Toast.info('Error check {i}')") + + for i in range(10): + page.evaluate(""" + MesApi.get('/api/wip/overview/summary').catch(() => {}) + """) + + page.wait_for_timeout(2000) + + if js_errors: + print(f"\n JavaScript errors detected:") + for err in js_errors[:5]: + print(f" - {err[:100]}") + + assert len(js_errors) == 0, f"Found {len(js_errors)} JavaScript errors" + print("\n No JavaScript errors under stress") diff --git a/tests/test_api_integration.py b/tests/test_api_integration.py new file mode 100644 index 0000000..d148168 --- /dev/null +++ b/tests/test_api_integration.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +"""Integration tests for API endpoints. + +Tests API endpoints for proper response format, error handling, +and timeout behavior compatible with the MesApi client. 
+""" + +import unittest +from unittest.mock import patch, MagicMock +import json + +from mes_dashboard.app import create_app +import mes_dashboard.core.database as db + + +class TestTableQueryAPIIntegration(unittest.TestCase): + """Integration tests for table query APIs.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + @patch('mes_dashboard.app.get_table_columns') + def test_get_table_columns_success(self, mock_get_columns): + """GET table columns should return JSON with columns array.""" + mock_get_columns.return_value = ['ID', 'NAME', 'STATUS', 'CREATED_AT'] + + response = self.client.post( + '/api/get_table_columns', + json={'table_name': 'TEST_TABLE'}, + content_type='application/json' + ) + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + self.assertIn('columns', data) + self.assertEqual(len(data['columns']), 4) + + def test_get_table_columns_missing_table_name(self): + """GET table columns without table_name should return 400.""" + response = self.client.post( + '/api/get_table_columns', + json={}, + content_type='application/json' + ) + + self.assertEqual(response.status_code, 400) + data = json.loads(response.data) + self.assertIn('error', data) + + @patch('mes_dashboard.app.get_table_data') + def test_query_table_success(self, mock_get_data): + """Query table should return JSON with data array.""" + mock_get_data.return_value = { + 'data': [{'ID': 1, 'NAME': 'Test'}], + 'row_count': 1 + } + + response = self.client.post( + '/api/query_table', + json={'table_name': 'TEST_TABLE', 'limit': 100}, + content_type='application/json' + ) + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + self.assertIn('data', data) + self.assertEqual(data['row_count'], 1) + + def test_query_table_missing_table_name(self): + """Query table without table_name should return 400.""" 
+ response = self.client.post( + '/api/query_table', + json={'limit': 100}, + content_type='application/json' + ) + + self.assertEqual(response.status_code, 400) + data = json.loads(response.data) + self.assertIn('error', data) + + @patch('mes_dashboard.app.get_table_data') + def test_query_table_with_filters(self, mock_get_data): + """Query table should pass filters to the service.""" + mock_get_data.return_value = { + 'data': [], + 'row_count': 0 + } + + response = self.client.post( + '/api/query_table', + json={ + 'table_name': 'TEST_TABLE', + 'limit': 100, + 'filters': {'STATUS': 'ACTIVE'} + }, + content_type='application/json' + ) + + self.assertEqual(response.status_code, 200) + mock_get_data.assert_called_once() + call_args = mock_get_data.call_args + self.assertEqual(call_args[0][3], {'STATUS': 'ACTIVE'}) + + +class TestWIPAPIIntegration(unittest.TestCase): + """Integration tests for WIP API endpoints.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + @patch('mes_dashboard.routes.wip_routes.get_wip_summary') + def test_wip_summary_response_format(self, mock_summary): + """WIP summary should return consistent JSON structure.""" + mock_summary.return_value = { + 'totalLots': 1000, + 'totalQtyPcs': 100000, + 'byWipStatus': { + 'run': {'lots': 800, 'qtyPcs': 80000}, + 'queue': {'lots': 150, 'qtyPcs': 15000}, + 'hold': {'lots': 50, 'qtyPcs': 5000} + }, + 'dataUpdateDate': '2026-01-28 10:00:00' + } + + response = self.client.get('/api/wip/overview/summary') + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + + # Verify response structure for MesApi compatibility + self.assertIn('success', data) + self.assertTrue(data['success']) + self.assertIn('data', data) + + @patch('mes_dashboard.routes.wip_routes.get_wip_summary') + def test_wip_summary_error_response(self, mock_summary): + """WIP summary 
error should return proper error structure.""" + mock_summary.return_value = None + + response = self.client.get('/api/wip/overview/summary') + + self.assertEqual(response.status_code, 500) + data = json.loads(response.data) + + # Verify error response structure + self.assertIn('success', data) + self.assertFalse(data['success']) + self.assertIn('error', data) + + @patch('mes_dashboard.routes.wip_routes.get_wip_matrix') + def test_wip_matrix_response_format(self, mock_matrix): + """WIP matrix should return consistent JSON structure.""" + mock_matrix.return_value = { + 'workcenters': ['WC1', 'WC2'], + 'packages': ['PKG1'], + 'matrix': {'WC1': {'PKG1': 100}}, + 'workcenter_totals': {'WC1': 100}, + 'package_totals': {'PKG1': 100}, + 'grand_total': 100 + } + + response = self.client.get('/api/wip/overview/matrix') + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + + self.assertIn('success', data) + self.assertTrue(data['success']) + self.assertIn('data', data) + self.assertIn('workcenters', data['data']) + self.assertIn('matrix', data['data']) + + @patch('mes_dashboard.routes.wip_routes.get_wip_detail') + def test_wip_detail_response_format(self, mock_detail): + """WIP detail should return consistent JSON structure.""" + mock_detail.return_value = { + 'workcenter': 'TestWC', + 'summary': { + 'total_lots': 100, + 'on_equipment_lots': 50, + 'waiting_lots': 40, + 'hold_lots': 10 + }, + 'specs': ['Spec1'], + 'lots': [{'lot_id': 'LOT001', 'status': 'ACTIVE'}], + 'pagination': { + 'page': 1, + 'page_size': 100, + 'total_count': 100, + 'total_pages': 1 + }, + 'sys_date': '2026-01-28 10:00:00' + } + + response = self.client.get('/api/wip/detail/TestWC') + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + + self.assertIn('success', data) + self.assertTrue(data['success']) + self.assertIn('data', data) + self.assertIn('pagination', data['data']) + + +class TestResourceAPIIntegration(unittest.TestCase): + 
"""Integration tests for Resource API endpoints.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + @patch('mes_dashboard.routes.resource_routes.get_resource_status_summary') + def test_resource_status_summary_response_format(self, mock_summary): + """Resource status summary should return consistent JSON structure.""" + mock_summary.return_value = { + 'total_count': 100, + 'by_status_category': {'PRODUCTIVE': 60, 'STANDBY': 30, 'DOWN': 10}, + 'by_status': {'PRD': 60, 'SBY': 30, 'UDT': 5, 'SDT': 5, 'EGT': 0, 'NST': 0, 'OTHER': 0}, + 'by_workcenter_group': {'焊接': 50, '成型': 50}, + 'with_active_job': 40, + 'with_wip': 35, + 'ou_pct': 63.2, + 'availability_pct': 90.0, + } + + response = self.client.get('/api/resource/status/summary') + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + + # Verify response structure + self.assertIn('success', data) + self.assertTrue(data['success']) + self.assertIn('data', data) + self.assertIn('total_count', data['data']) + + +class TestAPIContentType(unittest.TestCase): + """Test that APIs return proper content types.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + @patch('mes_dashboard.routes.wip_routes.get_wip_summary') + def test_api_returns_json_content_type(self, mock_summary): + """API endpoints should return application/json content type.""" + mock_summary.return_value = { + 'totalLots': 0, 'totalQtyPcs': 0, + 'byWipStatus': {'run': {}, 'queue': {}, 'hold': {}}, + 'dataUpdateDate': None + } + + response = self.client.get('/api/wip/overview/summary') + + self.assertIn('application/json', response.content_type) + + @patch('mes_dashboard.app.get_table_columns') + def test_table_api_returns_json_content_type(self, mock_columns): + 
"""Table API should return application/json content type.""" + mock_columns.return_value = ['COL1', 'COL2'] + + response = self.client.post( + '/api/get_table_columns', + json={'table_name': 'TEST'}, + content_type='application/json' + ) + + self.assertIn('application/json', response.content_type) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_app_factory.py b/tests/test_app_factory.py new file mode 100644 index 0000000..e81a94c --- /dev/null +++ b/tests/test_app_factory.py @@ -0,0 +1,56 @@ +import unittest + +from mes_dashboard.app import create_app +import mes_dashboard.core.database as db + + +class AppFactoryTests(unittest.TestCase): + def setUp(self): + db._ENGINE = None + + def test_create_app_default_config(self): + app = create_app() + self.assertTrue(app.config.get("DEBUG")) + self.assertEqual(app.config.get("ENV"), "development") + cache = app.extensions.get("cache") + self.assertIsNotNone(cache) + cache.set("app_factory_probe", {"ok": True}, 30) + self.assertEqual(cache.get("app_factory_probe"), {"ok": True}) + + def test_create_app_production_config(self): + app = create_app("production") + self.assertFalse(app.config.get("DEBUG")) + self.assertEqual(app.config.get("ENV"), "production") + + def test_create_app_independent_instances(self): + app1 = create_app() + db._ENGINE = None + app2 = create_app() + self.assertIsNot(app1, app2) + + def test_routes_registered(self): + app = create_app() + rules = {rule.rule for rule in app.url_map.iter_rules()} + expected = { + "/", + "/tables", + "/resource", + "/wip-overview", + "/wip-detail", + "/excel-query", + "/api/wip/overview/summary", + "/api/wip/overview/matrix", + "/api/wip/overview/hold", + "/api/wip/detail/", + "/api/wip/meta/workcenters", + "/api/wip/meta/packages", + "/api/resource/status/summary", + "/api/dashboard/kpi", + "/api/excel-query/upload", + } + missing = expected - rules + self.assertFalse(missing, f"Missing routes: {sorted(missing)}") + + +if __name__ == 
"__main__": + unittest.main() diff --git a/tests/test_auth_integration.py b/tests/test_auth_integration.py new file mode 100644 index 0000000..bc76056 --- /dev/null +++ b/tests/test_auth_integration.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +"""Integration tests for authentication routes and permission middleware.""" + +import json +import pytest +from unittest.mock import patch, MagicMock +import tempfile +from pathlib import Path + +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) + +import mes_dashboard.core.database as db +from mes_dashboard.app import create_app +from mes_dashboard.services import page_registry + + +@pytest.fixture +def temp_page_status(tmp_path): + """Create temporary page status file.""" + data_file = tmp_path / "page_status.json" + initial_data = { + "pages": [ + {"route": "/", "name": "Portal", "status": "released"}, + {"route": "/wip-overview", "name": "WIP Overview", "status": "released"}, + {"route": "/dev-feature", "name": "Dev Feature", "status": "dev"}, + ], + "api_public": True + } + data_file.write_text(json.dumps(initial_data), encoding="utf-8") + return data_file + + +@pytest.fixture +def app(temp_page_status): + """Create application for testing.""" + db._ENGINE = None + + # Mock page registry to use temp file + original_data_file = page_registry.DATA_FILE + original_cache = page_registry._cache + page_registry.DATA_FILE = temp_page_status + page_registry._cache = None + + app = create_app('testing') + app.config['TESTING'] = True + app.config['WTF_CSRF_ENABLED'] = False + + yield app + + # Restore + page_registry.DATA_FILE = original_data_file + page_registry._cache = original_cache + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +class TestLoginRoute: + """Tests for login route.""" + + def test_login_page_renders(self, client): + """Test login page is accessible.""" + response = client.get("/admin/login") + assert 
response.status_code == 200 + assert "管理員登入" in response.data.decode("utf-8") or "login" in response.data.decode("utf-8").lower() + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_login_success(self, mock_post, client): + """Test successful login via LDAP.""" + # Mock LDAP response + mock_response = MagicMock() + mock_response.json.return_value = { + "success": True, + "user": { + "username": "92367", + "displayName": "Admin User", + "mail": "ymirliu@panjit.com.tw", + "department": "Test Dept" + } + } + mock_post.return_value = mock_response + + response = client.post("/admin/login", data={ + "username": "92367", + "password": "password123" + }, follow_redirects=False) + + # Should redirect after successful login + assert response.status_code == 302 + + # Check session contains admin + with client.session_transaction() as sess: + assert "admin" in sess + assert sess["admin"]["username"] == "92367" + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_login_invalid_credentials(self, mock_post, client): + """Test login with invalid credentials via LDAP.""" + mock_response = MagicMock() + mock_response.json.return_value = {"success": False} + mock_post.return_value = mock_response + + response = client.post("/admin/login", data={ + "username": "wrong", + "password": "wrong" + }) + + assert response.status_code == 200 + # Should show error message + assert "錯誤" in response.data.decode("utf-8") or "error" in response.data.decode("utf-8").lower() + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_login_non_admin_user(self, mock_post, client): + """Test login with non-admin user via LDAP.""" + mock_response = MagicMock() + mock_response.json.return_value = { + "success": True, + 
"user": { + "username": "99999", + "displayName": "Regular User", + "mail": "regular@panjit.com.tw", + "department": "Test Dept" + } + } + mock_post.return_value = mock_response + + response = client.post("/admin/login", data={ + "username": "99999", + "password": "password123" + }) + + assert response.status_code == 200 + # Should show non-admin error + content = response.data.decode("utf-8") + assert "管理員" in content or "admin" in content.lower() + + def test_login_empty_credentials(self, client): + """Test login with empty credentials.""" + response = client.post("/admin/login", data={ + "username": "", + "password": "" + }) + + assert response.status_code == 200 + + +class TestLogoutRoute: + """Tests for logout route.""" + + def test_logout(self, client): + """Test logout clears session.""" + # Login first + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin"} + + response = client.get("/admin/logout", follow_redirects=False) + + assert response.status_code == 302 + + with client.session_transaction() as sess: + assert "admin" not in sess + + +class TestPermissionMiddleware: + """Tests for permission middleware.""" + + def test_released_page_accessible_without_login(self, client): + """Test released pages are accessible without login.""" + response = client.get("/wip-overview") + # Should not be 403 (might be 200 or redirect) + assert response.status_code != 403 + + def test_dev_page_returns_403_without_login(self, client, temp_page_status): + """Test dev pages return 403 for non-admin.""" + # Add a dev route that exists in the app + # First update page status to have an existing route as dev + data = json.loads(temp_page_status.read_text()) + data["pages"].append({"route": "/tables", "name": "Tables", "status": "dev"}) + temp_page_status.write_text(json.dumps(data)) + page_registry._cache = None + + response = client.get("/tables") + assert response.status_code == 403 + + def test_dev_page_accessible_with_admin_login(self, client, 
temp_page_status): + """Test dev pages are accessible for admin.""" + # Update tables to dev + data = json.loads(temp_page_status.read_text()) + data["pages"].append({"route": "/tables", "name": "Tables", "status": "dev"}) + temp_page_status.write_text(json.dumps(data)) + page_registry._cache = None + + # Login as admin + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin", "mail": "admin@test.com"} + + response = client.get("/tables") + assert response.status_code != 403 + + def test_admin_pages_redirect_without_login(self, client): + """Test admin pages redirect to login without authentication.""" + response = client.get("/admin/pages", follow_redirects=False) + assert response.status_code == 302 + assert "/admin/login" in response.location + + def test_admin_pages_accessible_with_login(self, client): + """Test admin pages are accessible with login.""" + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin", "mail": "admin@test.com"} + + response = client.get("/admin/pages") + assert response.status_code == 200 + + +class TestAdminAPI: + """Tests for admin API endpoints.""" + + def test_get_pages_without_login(self, client): + """Test get pages API requires login.""" + response = client.get("/admin/api/pages") + # Should redirect + assert response.status_code == 302 + + def test_get_pages_with_login(self, client): + """Test get pages API with login.""" + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin"} + + response = client.get("/admin/api/pages") + assert response.status_code == 200 + + data = json.loads(response.data) + assert data["success"] is True + assert "pages" in data + + def test_update_page_status(self, client, temp_page_status): + """Test updating page status via API.""" + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin"} + + response = client.put( + "/admin/api/pages/wip-overview", + data=json.dumps({"status": "dev"}), + 
content_type="application/json" + ) + + assert response.status_code == 200 + data = json.loads(response.data) + assert data["success"] is True + + # Verify status changed + page_registry._cache = None + assert page_registry.get_page_status("/wip-overview") == "dev" + + def test_update_page_invalid_status(self, client): + """Test updating page with invalid status.""" + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin"} + + response = client.put( + "/admin/api/pages/wip-overview", + data=json.dumps({"status": "invalid"}), + content_type="application/json" + ) + + assert response.status_code == 400 + + +class TestContextProcessor: + """Tests for template context processor.""" + + def test_is_admin_in_context_when_logged_in(self, client): + """Test is_admin is True in context when logged in.""" + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin", "displayName": "Admin"} + + response = client.get("/") + content = response.data.decode("utf-8") + + # Should show admin-related content (logout link, etc.) 
+ assert "登出" in content or "logout" in content.lower() or "Admin" in content + + def test_is_admin_in_context_when_not_logged_in(self, client): + """Test is_admin is False in context when not logged in.""" + response = client.get("/") + content = response.data.decode("utf-8") + + # Should show login link, not logout + assert "管理員登入" in content or "login" in content.lower() + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/test_auth_service.py b/tests/test_auth_service.py new file mode 100644 index 0000000..fcc2777 --- /dev/null +++ b/tests/test_auth_service.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +"""Unit tests for auth_service module.""" + +import pytest +from unittest.mock import patch, MagicMock + +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) + +from mes_dashboard.services import auth_service + + +class TestAuthenticate: + """Tests for authenticate function via LDAP.""" + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_authenticate_success(self, mock_post): + """Test successful authentication via LDAP.""" + mock_response = MagicMock() + mock_response.json.return_value = { + "success": True, + "user": { + "username": "92367", + "displayName": "Test User", + "mail": "test@panjit.com.tw", + "department": "Test Dept" + } + } + mock_post.return_value = mock_response + + result = auth_service.authenticate("92367", "password123") + + assert result is not None + assert result["username"] == "92367" + assert result["mail"] == "test@panjit.com.tw" + mock_post.assert_called_once() + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_authenticate_invalid_credentials(self, mock_post): + """Test authentication with invalid credentials via LDAP.""" + mock_response = MagicMock() + 
mock_response.json.return_value = {"success": False} + mock_post.return_value = mock_response + + result = auth_service.authenticate("wrong", "wrong") + + assert result is None + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_authenticate_timeout(self, mock_post): + """Test authentication timeout handling.""" + import requests + mock_post.side_effect = requests.Timeout() + + result = auth_service.authenticate("user", "pass") + + assert result is None + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_authenticate_connection_error(self, mock_post): + """Test authentication connection error handling.""" + import requests + mock_post.side_effect = requests.ConnectionError() + + result = auth_service.authenticate("user", "pass") + + assert result is None + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False) + @patch('mes_dashboard.services.auth_service.requests.post') + def test_authenticate_invalid_json(self, mock_post): + """Test authentication with invalid JSON response.""" + mock_response = MagicMock() + mock_response.json.side_effect = ValueError("Invalid JSON") + mock_post.return_value = mock_response + + result = auth_service.authenticate("user", "pass") + + assert result is None + + +class TestLocalAuthenticate: + """Tests for local authentication.""" + + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', True) + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_USERNAME', 'testuser') + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_PASSWORD', 'testpass') + def test_local_auth_success(self): + """Test successful local authentication.""" + result = auth_service.authenticate("testuser", "testpass") + + assert result is not None + assert result["username"] == "testuser" + + 
@patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', True) + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_USERNAME', 'testuser') + @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_PASSWORD', 'testpass') + def test_local_auth_wrong_password(self): + """Test local authentication with wrong password.""" + result = auth_service.authenticate("testuser", "wrongpass") + + assert result is None + + +class TestIsAdmin: + """Tests for is_admin function.""" + + def test_is_admin_with_admin_email(self): + """Test admin check with admin email.""" + # Save original ADMIN_EMAILS + original = auth_service.ADMIN_EMAILS + + try: + auth_service.ADMIN_EMAILS = ["admin@panjit.com.tw"] + user = {"mail": "admin@panjit.com.tw"} + assert auth_service.is_admin(user) is True + finally: + auth_service.ADMIN_EMAILS = original + + def test_is_admin_with_non_admin_email(self): + """Test admin check with non-admin email.""" + original = auth_service.ADMIN_EMAILS + + try: + auth_service.ADMIN_EMAILS = ["admin@panjit.com.tw"] + user = {"mail": "user@panjit.com.tw"} + assert auth_service.is_admin(user) is False + finally: + auth_service.ADMIN_EMAILS = original + + def test_is_admin_case_insensitive(self): + """Test admin check is case insensitive.""" + original = auth_service.ADMIN_EMAILS + + try: + auth_service.ADMIN_EMAILS = ["admin@panjit.com.tw"] + user = {"mail": "ADMIN@PANJIT.COM.TW"} + assert auth_service.is_admin(user) is True + finally: + auth_service.ADMIN_EMAILS = original + + def test_is_admin_with_missing_mail(self): + """Test admin check with missing mail field.""" + user = {} + assert auth_service.is_admin(user) is False + + def test_is_admin_with_empty_mail(self): + """Test admin check with empty mail field.""" + user = {"mail": ""} + assert auth_service.is_admin(user) is False + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/test_cache.py b/tests/test_cache.py new file mode 100644 index 0000000..0b59853 --- /dev/null +++ 
b/tests/test_cache.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +"""Unit tests for cache module. + +Tests cache read/write functionality and fallback mechanism. +""" + +import pytest +from unittest.mock import patch, MagicMock +import pandas as pd +import json + + +class TestGetCachedWipData: + """Test get_cached_wip_data function.""" + + @pytest.fixture(autouse=True) + def reset_redis(self): + """Reset Redis client state and process-level cache.""" + import mes_dashboard.core.redis_client as rc + import mes_dashboard.core.cache as cache + rc._REDIS_CLIENT = None + # Clear process-level cache to avoid test interference + cache._wip_df_cache.clear() + yield + rc._REDIS_CLIENT = None + cache._wip_df_cache.clear() + + def test_returns_none_when_redis_disabled(self): + """Test returns None when Redis is disabled.""" + import mes_dashboard.core.cache as cache + + with patch.object(cache, 'REDIS_ENABLED', False): + result = cache.get_cached_wip_data() + assert result is None + + def test_returns_none_when_client_unavailable(self): + """Test returns None when Redis client is unavailable.""" + import mes_dashboard.core.cache as cache + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=None): + result = cache.get_cached_wip_data() + assert result is None + + def test_returns_none_when_cache_miss(self, reset_redis): + """Test returns None when cache key doesn't exist.""" + import mes_dashboard.core.cache as cache + + mock_client = MagicMock() + mock_client.get.return_value = None + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=mock_client): + result = cache.get_cached_wip_data() + assert result is None + + def test_returns_dataframe_from_cache(self, reset_redis): + """Test returns DataFrame when cache hit.""" + import mes_dashboard.core.cache as cache + + # Create test data as JSON string (what Redis returns with decode_responses=True) + test_data = 
[ + {'LOTID': 'LOT001', 'QTY': 100, 'WORKORDER': 'WO001'}, + {'LOTID': 'LOT002', 'QTY': 200, 'WORKORDER': 'WO002'} + ] + cached_json = json.dumps(test_data) + + mock_client = MagicMock() + mock_client.get.return_value = cached_json # String, not bytes + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=mock_client): + with patch.object(cache, 'get_key', return_value='mes_wip:data'): + result = cache.get_cached_wip_data() + + assert result is not None + assert isinstance(result, pd.DataFrame) + assert len(result) == 2 + assert 'LOTID' in result.columns + + def test_handles_invalid_json(self, reset_redis): + """Test handles invalid JSON gracefully.""" + import mes_dashboard.core.cache as cache + + mock_client = MagicMock() + mock_client.get.return_value = 'invalid json {' + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=mock_client): + with patch.object(cache, 'get_key', return_value='mes_wip:data'): + result = cache.get_cached_wip_data() + assert result is None + + +class TestGetCachedSysDate: + """Test get_cached_sys_date function.""" + + def test_returns_none_when_redis_disabled(self): + """Test returns None when Redis is disabled.""" + import mes_dashboard.core.cache as cache + + with patch.object(cache, 'REDIS_ENABLED', False): + result = cache.get_cached_sys_date() + assert result is None + + def test_returns_sys_date_from_cache(self): + """Test returns SYS_DATE when cache hit.""" + import mes_dashboard.core.cache as cache + + mock_client = MagicMock() + mock_client.get.return_value = '2024-01-15 10:30:00' # String, not bytes + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=mock_client): + with patch.object(cache, 'get_key', return_value='mes_wip:meta:sys_date'): + result = cache.get_cached_sys_date() + assert result == '2024-01-15 10:30:00' + + +class TestGetCacheUpdatedAt: 
+ """Test get_cache_updated_at function.""" + + def test_returns_none_when_redis_disabled(self): + """Test returns None when Redis is disabled.""" + import mes_dashboard.core.cache as cache + + with patch.object(cache, 'REDIS_ENABLED', False): + result = cache.get_cache_updated_at() + assert result is None + + def test_returns_updated_at_from_cache(self): + """Test returns updated_at timestamp when cache hit.""" + import mes_dashboard.core.cache as cache + + mock_client = MagicMock() + mock_client.get.return_value = '2024-01-15T10:30:00' # String, not bytes + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=mock_client): + with patch.object(cache, 'get_key', return_value='mes_wip:meta:updated_at'): + result = cache.get_cache_updated_at() + assert result == '2024-01-15T10:30:00' + + +class TestWipDataWithFallback: + """Test get_wip_data_with_fallback function.""" + + def test_uses_cache_when_available(self): + """Test uses cache when data is available.""" + import mes_dashboard.core.cache as cache + + cached_df = pd.DataFrame({ + 'LOTID': ['LOT001'], + 'QTY': [100] + }) + + mock_fallback = MagicMock() + + with patch.object(cache, 'get_cached_wip_data', return_value=cached_df): + result = cache.get_wip_data_with_fallback(mock_fallback) + + assert result is not None + assert len(result) == 1 + # Fallback should NOT be called + mock_fallback.assert_not_called() + + def test_fallback_when_cache_unavailable(self): + """Test falls back when cache is unavailable.""" + import mes_dashboard.core.cache as cache + + oracle_df = pd.DataFrame({ + 'LOTID': ['LOT001', 'LOT002'], + 'QTY': [100, 200] + }) + + mock_fallback = MagicMock(return_value=oracle_df) + + with patch.object(cache, 'get_cached_wip_data', return_value=None): + result = cache.get_wip_data_with_fallback(mock_fallback) + + assert result is not None + assert len(result) == 2 + mock_fallback.assert_called_once() + + +class TestNoOpCache: + """Test 
NoOpCache fallback class.""" + + def test_noop_cache_get(self): + """Test NoOpCache.get returns None.""" + from mes_dashboard.core.cache import NoOpCache + cache = NoOpCache() + result = cache.get('any_key') + assert result is None + + def test_noop_cache_set(self): + """Test NoOpCache.set returns None.""" + from mes_dashboard.core.cache import NoOpCache + cache = NoOpCache() + result = cache.set('any_key', 'any_value', 300) + assert result is None + + +class TestMemoryTTLCache: + """Test in-memory TTL cache backend.""" + + def test_set_and_get_value(self): + from mes_dashboard.core.cache import MemoryTTLCache + + cache = MemoryTTLCache() + cache.set('k1', {'v': 1}, 10) + assert cache.get('k1') == {'v': 1} + + def test_expired_value_returns_none(self): + from mes_dashboard.core.cache import MemoryTTLCache + + cache = MemoryTTLCache() + cache.set('k2', {'v': 2}, 1) + + with patch('mes_dashboard.core.cache.time.time', return_value=10_000): + cache._store['k2'] = ({'v': 2}, 9_999) + assert cache.get('k2') is None + + +class TestCreateDefaultCacheBackend: + """Test default cache backend factory.""" + + def test_returns_layered_cache_without_redis(self): + import mes_dashboard.core.cache as cache + + with patch.object(cache, 'redis_available', return_value=False): + backend = cache.create_default_cache_backend() + backend.set('factory-key', {'x': 1}, 30) + assert backend.get('factory-key') == {'x': 1} + + +class TestLayeredCacheTelemetry: + """Telemetry behavior for layered route cache.""" + + def test_l1_only_degraded_mode_visibility(self): + from mes_dashboard.core.cache import MemoryTTLCache, LayeredCache + + backend = LayeredCache(l1=MemoryTTLCache(), l2=None, redis_expected=True) + backend.set('k1', {'v': 1}, 30) + assert backend.get('k1') == {'v': 1} # L1 hit + assert backend.get('missing') is None # miss + + telemetry = backend.telemetry() + assert telemetry['mode'] == 'l1-only' + assert telemetry['degraded'] is True + assert telemetry['l1_hits'] >= 1 + assert 
telemetry['misses'] >= 1 + + def test_l1_l2_hit_and_rates(self): + from mes_dashboard.core.cache import MemoryTTLCache, LayeredCache + + class FakeL2: + def __init__(self): + self.store = {'cold': {'from': 'l2'}} + + def get(self, key): + return self.store.get(key) + + def set(self, key, value, ttl): + self.store[key] = value + + def telemetry(self): + return {'error_count': 0} + + backend = LayeredCache(l1=MemoryTTLCache(), l2=FakeL2(), redis_expected=True) + assert backend.get('cold') == {'from': 'l2'} # L2 hit then warm L1 + assert backend.get('cold') == {'from': 'l2'} # L1 hit + + telemetry = backend.telemetry() + assert telemetry['mode'] == 'l1+l2' + assert telemetry['degraded'] is False + assert telemetry['l2_hits'] >= 1 + assert telemetry['l1_hits'] >= 1 + assert telemetry['reads_total'] >= 2 + + +class TestIsCacheAvailable: + """Test is_cache_available function.""" + + def test_returns_false_when_disabled(self): + """Test returns False when Redis is disabled.""" + import mes_dashboard.core.cache as cache + + with patch.object(cache, 'REDIS_ENABLED', False): + result = cache.is_cache_available() + assert result is False + + def test_returns_false_when_no_client(self): + """Test returns False when no Redis client.""" + import mes_dashboard.core.cache as cache + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=None): + result = cache.is_cache_available() + assert result is False + + def test_returns_true_when_data_exists(self): + """Test returns True when data exists in Redis.""" + import mes_dashboard.core.cache as cache + + mock_client = MagicMock() + mock_client.exists.return_value = 1 + + with patch.object(cache, 'REDIS_ENABLED', True): + with patch.object(cache, 'get_redis_client', return_value=mock_client): + with patch.object(cache, 'get_key', return_value='mes_wip:data'): + result = cache.is_cache_available() + assert result is True diff --git a/tests/test_cache_integration.py 
b/tests/test_cache_integration.py new file mode 100644 index 0000000..20eaac7 --- /dev/null +++ b/tests/test_cache_integration.py @@ -0,0 +1,400 @@ +# -*- coding: utf-8 -*- +"""Integration tests for cache functionality. + +Tests API endpoints with cache enabled/disabled scenarios. +""" + +import pytest +from unittest.mock import patch, MagicMock +import pandas as pd +import json + + +@pytest.fixture +def app_with_mock_cache(): + """Create app with mocked cache.""" + import mes_dashboard.core.database as db + db._ENGINE = None + + from mes_dashboard.app import create_app + app = create_app('testing') + app.config['TESTING'] = True + return app + + +class TestHealthEndpoint: + """Test /health endpoint.""" + + @patch('mes_dashboard.routes.health_routes.check_database') + @patch('mes_dashboard.routes.health_routes.check_redis') + @patch('mes_dashboard.routes.health_routes.get_cache_status') + def test_health_all_ok(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache): + """Test health endpoint returns 200 when all services are healthy.""" + mock_check_db.return_value = ('ok', None) + mock_check_redis.return_value = ('ok', None) + mock_cache_status.return_value = { + 'enabled': True, + 'sys_date': '2024-01-15 10:30:00', + 'updated_at': '2024-01-15T10:30:00' + } + + with app_with_mock_cache.test_client() as client: + response = client.get('/health') + + assert response.status_code == 200 + data = response.get_json() + assert data['status'] == 'healthy' + assert data['services']['database'] == 'ok' + assert data['services']['redis'] == 'ok' + + @patch('mes_dashboard.routes.health_routes.check_database') + @patch('mes_dashboard.routes.health_routes.check_redis') + @patch('mes_dashboard.routes.health_routes.get_cache_status') + def test_health_redis_down_degraded(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache): + """Test health endpoint returns 200 degraded when Redis is down.""" + mock_check_db.return_value = ('ok', 
None) + mock_check_redis.return_value = ('error', 'Connection refused') + mock_cache_status.return_value = {'enabled': True, 'sys_date': None, 'updated_at': None} + + with app_with_mock_cache.test_client() as client: + response = client.get('/health') + + assert response.status_code == 200 + data = response.get_json() + assert data['status'] == 'degraded' + assert 'warnings' in data + + @patch('mes_dashboard.routes.health_routes.check_database') + @patch('mes_dashboard.routes.health_routes.check_redis') + @patch('mes_dashboard.routes.health_routes.get_cache_status') + def test_health_db_down_unhealthy(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache): + """Test health endpoint returns 503 when database is down.""" + mock_check_db.return_value = ('error', 'Connection refused') + mock_check_redis.return_value = ('ok', None) + mock_cache_status.return_value = {'enabled': True, 'sys_date': None, 'updated_at': None} + + with app_with_mock_cache.test_client() as client: + response = client.get('/health') + + assert response.status_code == 503 + data = response.get_json() + assert data['status'] == 'unhealthy' + assert 'errors' in data + + @patch('mes_dashboard.routes.health_routes.check_database') + @patch('mes_dashboard.routes.health_routes.check_redis') + @patch('mes_dashboard.routes.health_routes.get_cache_status') + def test_health_redis_disabled(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache): + """Test health endpoint shows Redis disabled status.""" + mock_check_db.return_value = ('ok', None) + mock_check_redis.return_value = ('disabled', None) + mock_cache_status.return_value = {'enabled': False, 'sys_date': None, 'updated_at': None} + + with app_with_mock_cache.test_client() as client: + response = client.get('/health') + + assert response.status_code == 200 + data = response.get_json() + assert data['status'] == 'healthy' + assert data['services']['redis'] == 'disabled' + + +class TestWipApiWithCache: + 
"""Test WIP API endpoints with cache.""" + + @pytest.fixture + def mock_wip_cache_data(self): + """Create mock WIP data for cache.""" + return pd.DataFrame({ + 'LOTID': ['LOT001', 'LOT002', 'LOT003'], + 'QTY': [100, 200, 150], + 'WORKORDER': ['WO001', 'WO002', 'WO003'], + 'WORKCENTER_GROUP': ['WC1', 'WC1', 'WC2'], + 'WORKCENTERSEQUENCE_GROUP': [1, 1, 2], + 'PRODUCTLINENAME': ['PKG1', 'PKG2', 'PKG1'], + 'EQUIPMENTCOUNT': [1, 0, 0], + 'CURRENTHOLDCOUNT': [0, 1, 0], + 'HOLDREASONNAME': [None, 'Quality Issue', None], + 'STATUS': ['ACTIVE', 'HOLD', 'ACTIVE'], + 'SPECNAME': ['SPEC1', 'SPEC1', 'SPEC2'], + 'SPECSEQUENCE': [1, 1, 2], + 'AGEBYDAYS': [1.5, 3.2, 0.5], + 'EQUIPMENTS': ['EQ001', None, None], + 'SYS_DATE': ['2024-01-15 10:30:00'] * 3 + }) + + @patch('mes_dashboard.services.wip_service._get_wip_dataframe') + @patch('mes_dashboard.services.wip_service.get_cached_sys_date') + def test_wip_summary_uses_cache(self, mock_sys_date, mock_get_df, app_with_mock_cache, mock_wip_cache_data): + """Test /api/wip/overview/summary uses cache when available.""" + mock_get_df.return_value = mock_wip_cache_data + mock_sys_date.return_value = '2024-01-15 10:30:00' + + with app_with_mock_cache.test_client() as client: + response = client.get('/api/wip/overview/summary') + + assert response.status_code == 200 + resp = response.get_json() + # API returns wrapped response: {success: true, data: {...}} + data = resp.get('data', resp) # Handle both wrapped and unwrapped + assert data['totalLots'] == 3 + assert data['dataUpdateDate'] == '2024-01-15 10:30:00' + + @patch('mes_dashboard.services.wip_service._get_wip_dataframe') + @patch('mes_dashboard.services.wip_service.get_cached_sys_date') + def test_wip_matrix_uses_cache(self, mock_sys_date, mock_get_df, app_with_mock_cache, mock_wip_cache_data): + """Test /api/wip/overview/matrix uses cache when available.""" + mock_get_df.return_value = mock_wip_cache_data + mock_sys_date.return_value = '2024-01-15 10:30:00' + + with 
app_with_mock_cache.test_client() as client: + response = client.get('/api/wip/overview/matrix') + + assert response.status_code == 200 + resp = response.get_json() + # API returns wrapped response: {success: true, data: {...}} + data = resp.get('data', resp) + assert 'workcenters' in data + assert 'packages' in data + assert 'matrix' in data + + @patch('mes_dashboard.services.wip_service._get_wip_dataframe') + def test_workcenters_uses_cache(self, mock_get_df, app_with_mock_cache, mock_wip_cache_data): + """Test /api/wip/meta/workcenters uses cache when available.""" + mock_get_df.return_value = mock_wip_cache_data + + with app_with_mock_cache.test_client() as client: + response = client.get('/api/wip/meta/workcenters') + + assert response.status_code == 200 + resp = response.get_json() + # API returns wrapped response: {success: true, data: [...]} + data = resp.get('data', resp) if isinstance(resp, dict) and 'data' in resp else resp + assert isinstance(data, list) + assert len(data) == 2 # WC1 and WC2 + + @patch('mes_dashboard.services.wip_service._get_wip_dataframe') + def test_packages_uses_cache(self, mock_get_df, app_with_mock_cache, mock_wip_cache_data): + """Test /api/wip/meta/packages uses cache when available.""" + mock_get_df.return_value = mock_wip_cache_data + + with app_with_mock_cache.test_client() as client: + response = client.get('/api/wip/meta/packages') + + assert response.status_code == 200 + resp = response.get_json() + # API returns wrapped response: {success: true, data: [...]} + data = resp.get('data', resp) if isinstance(resp, dict) and 'data' in resp else resp + assert isinstance(data, list) + assert len(data) == 2 # PKG1 and PKG2 + + +class TestHealthEndpointResourceCache: + """Test /health endpoint resource cache status.""" + + @patch('mes_dashboard.routes.health_routes.check_database') + @patch('mes_dashboard.routes.health_routes.check_redis') + @patch('mes_dashboard.routes.health_routes.get_cache_status') + 
@patch('mes_dashboard.routes.health_routes.get_resource_cache_status') + def test_health_includes_resource_cache( + self, mock_res_cache_status, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache + ): + """Test health endpoint includes resource_cache field.""" + mock_check_db.return_value = ('ok', None) + mock_check_redis.return_value = ('ok', None) + mock_cache_status.return_value = { + 'enabled': True, + 'sys_date': '2024-01-15 10:30:00', + 'updated_at': '2024-01-15T10:30:00' + } + mock_res_cache_status.return_value = { + 'enabled': True, + 'loaded': True, + 'count': 1500, + 'version': '2024-01-15T10:00:00', + 'updated_at': '2024-01-15T10:30:00' + } + + with app_with_mock_cache.test_client() as client: + response = client.get('/health') + + assert response.status_code == 200 + data = response.get_json() + assert 'resource_cache' in data + assert data['resource_cache']['enabled'] is True + assert data['resource_cache']['loaded'] is True + assert data['resource_cache']['count'] == 1500 + + @patch('mes_dashboard.routes.health_routes.check_database') + @patch('mes_dashboard.routes.health_routes.check_redis') + @patch('mes_dashboard.routes.health_routes.get_cache_status') + @patch('mes_dashboard.routes.health_routes.get_resource_cache_status') + def test_health_warning_when_resource_cache_not_loaded( + self, mock_res_cache_status, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache + ): + """Test health endpoint shows warning when resource cache enabled but not loaded.""" + mock_check_db.return_value = ('ok', None) + mock_check_redis.return_value = ('ok', None) + mock_cache_status.return_value = { + 'enabled': True, + 'sys_date': '2024-01-15 10:30:00', + 'updated_at': '2024-01-15T10:30:00' + } + mock_res_cache_status.return_value = { + 'enabled': True, + 'loaded': False, + 'count': 0, + 'version': None, + 'updated_at': None + } + + with app_with_mock_cache.test_client() as client: + response = client.get('/health') + + assert 
response.status_code == 200 + data = response.get_json() + assert 'warnings' in data + assert any('Resource cache not loaded' in w for w in data['warnings']) + + @patch('mes_dashboard.routes.health_routes.check_database') + @patch('mes_dashboard.routes.health_routes.check_redis') + @patch('mes_dashboard.routes.health_routes.get_cache_status') + @patch('mes_dashboard.routes.health_routes.get_resource_cache_status') + def test_health_no_warning_when_resource_cache_disabled( + self, mock_res_cache_status, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache + ): + """Test health endpoint no warning when resource cache is disabled.""" + mock_check_db.return_value = ('ok', None) + mock_check_redis.return_value = ('ok', None) + mock_cache_status.return_value = { + 'enabled': True, + 'sys_date': '2024-01-15 10:30:00', + 'updated_at': '2024-01-15T10:30:00' + } + mock_res_cache_status.return_value = {'enabled': False} + + with app_with_mock_cache.test_client() as client: + response = client.get('/health') + + assert response.status_code == 200 + data = response.get_json() + # No warnings about resource cache + warnings = data.get('warnings', []) + assert not any('Resource cache' in w for w in warnings) + + +class TestResourceFilterOptionsWithCache: + """Test resource filter options with cache.""" + + @patch('mes_dashboard.services.resource_cache.get_all_resources') + @patch('mes_dashboard.services.resource_service.read_sql_df') + def test_filter_options_uses_resource_cache( + self, mock_read_sql, mock_get_all, app_with_mock_cache + ): + """Test resource filter options uses resource_cache for static data.""" + # Mock resource cache data + mock_get_all.return_value = [ + {'WORKCENTERNAME': 'WC1', 'RESOURCEFAMILYNAME': 'F1', 'PJ_DEPARTMENT': 'Dept1', + 'LOCATIONNAME': 'Loc1', 'PJ_ASSETSSTATUS': 'Active'}, + {'WORKCENTERNAME': 'WC2', 'RESOURCEFAMILYNAME': 'F2', 'PJ_DEPARTMENT': 'Dept1', + 'LOCATIONNAME': 'Loc1', 'PJ_ASSETSSTATUS': 'Active'}, + ] + 
mock_read_sql.return_value = pd.DataFrame({'NEWSTATUSNAME': ['PRD', 'SBY']}) + + with app_with_mock_cache.test_client() as client: + response = client.get('/api/resource/filter_options') + + assert response.status_code == 200 + data = response.get_json() + + if data.get('success'): + options = data.get('data', {}) + assert 'WC1' in options['workcenters'] + assert 'WC2' in options['workcenters'] + assert 'F1' in options['families'] + assert 'F2' in options['families'] + + +class TestResourceHistoryOptionsWithCache: + """Test resource history filter options with cache.""" + + @patch('mes_dashboard.services.filter_cache.get_workcenter_groups') + @patch('mes_dashboard.services.resource_cache.get_all_resources') + def test_history_options_uses_resource_cache( + self, mock_get_all, mock_groups, app_with_mock_cache + ): + """Test resource history options uses resource_cache for families.""" + mock_groups.return_value = [ + {'name': 'Group1', 'sequence': 1}, + {'name': 'Group2', 'sequence': 2} + ] + # Mock resource cache data for families + mock_get_all.return_value = [ + {'RESOURCEFAMILYNAME': 'Family1'}, + {'RESOURCEFAMILYNAME': 'Family2'}, + {'RESOURCEFAMILYNAME': 'Family1'}, # duplicate + ] + + with app_with_mock_cache.test_client() as client: + response = client.get('/api/resource/history/options') + + assert response.status_code == 200 + data = response.get_json() + + if data.get('success'): + options = data.get('data', {}) + assert 'families' in options + assert 'Family1' in options['families'] + assert 'Family2' in options['families'] + + +class TestFallbackToOracle: + """Test fallback to Oracle when cache is unavailable.""" + + @patch('mes_dashboard.services.wip_service._get_wip_dataframe') + @patch('mes_dashboard.services.wip_service._get_wip_summary_from_oracle') + def test_summary_falls_back_to_oracle(self, mock_oracle, mock_get_df, app_with_mock_cache): + """Test summary falls back to Oracle when cache unavailable.""" + mock_get_df.return_value = None # Cache 
miss + mock_oracle.return_value = { + 'totalLots': 100, + 'totalQtyPcs': 10000, + 'byWipStatus': { + 'run': {'lots': 30, 'qtyPcs': 3000}, + 'queue': {'lots': 50, 'qtyPcs': 5000}, + 'hold': {'lots': 20, 'qtyPcs': 2000}, + 'qualityHold': {'lots': 15, 'qtyPcs': 1500}, + 'nonQualityHold': {'lots': 5, 'qtyPcs': 500} + }, + 'dataUpdateDate': '2024-01-15 10:30:00' + } + + with app_with_mock_cache.test_client() as client: + response = client.get('/api/wip/overview/summary') + + assert response.status_code == 200 + resp = response.get_json() + # API returns wrapped response: {success: true, data: {...}} + data = resp.get('data', resp) + assert data['totalLots'] == 100 + mock_oracle.assert_called_once() + + @patch('mes_dashboard.services.wip_service._get_wip_dataframe') + @patch('mes_dashboard.services.wip_service._get_workcenters_from_oracle') + def test_workcenters_falls_back_to_oracle(self, mock_oracle, mock_get_df, app_with_mock_cache): + """Test workcenters falls back to Oracle when cache unavailable.""" + mock_get_df.return_value = None # Cache miss + mock_oracle.return_value = [ + {'name': 'WC1', 'lot_count': 50}, + {'name': 'WC2', 'lot_count': 30} + ] + + with app_with_mock_cache.test_client() as client: + response = client.get('/api/wip/meta/workcenters') + + assert response.status_code == 200 + resp = response.get_json() + # API returns wrapped response: {success: true, data: [...]} + data = resp.get('data', resp) if isinstance(resp, dict) and 'data' in resp else resp + assert len(data) == 2 + mock_oracle.assert_called_once() diff --git a/tests/test_cache_updater.py b/tests/test_cache_updater.py new file mode 100644 index 0000000..7211747 --- /dev/null +++ b/tests/test_cache_updater.py @@ -0,0 +1,222 @@ +# -*- coding: utf-8 -*- +"""Unit tests for cache updater module. + +Tests background cache update logic. 
+""" + +import pytest +from unittest.mock import patch, MagicMock +import pandas as pd +import time + + +class TestCacheUpdater: + """Test CacheUpdater class.""" + + @pytest.fixture(autouse=True) + def reset_state(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_updater_starts_when_redis_enabled(self, reset_state): + """Test updater starts when Redis is enabled.""" + import mes_dashboard.core.cache_updater as cu + + mock_client = MagicMock() + mock_client.ping.return_value = True + + with patch.object(cu, 'REDIS_ENABLED', True): + with patch.object(cu, 'redis_available', return_value=True): + with patch.object(cu, 'read_sql_df', return_value=None): + updater = cu.CacheUpdater(interval=1) + try: + updater.start() + assert updater._is_running is True + assert updater._thread is not None + finally: + updater.stop() + time.sleep(0.2) + + def test_updater_does_not_start_when_redis_disabled(self, reset_state): + """Test updater does not start when Redis is disabled.""" + import mes_dashboard.core.cache_updater as cu + + with patch.object(cu, 'REDIS_ENABLED', False): + updater = cu.CacheUpdater(interval=1) + updater.start() + assert updater._is_running is False + + def test_updater_stops_gracefully(self, reset_state): + """Test updater stops gracefully.""" + import mes_dashboard.core.cache_updater as cu + + mock_client = MagicMock() + mock_client.ping.return_value = True + + with patch.object(cu, 'REDIS_ENABLED', True): + with patch.object(cu, 'redis_available', return_value=True): + with patch.object(cu, 'read_sql_df', return_value=None): + updater = cu.CacheUpdater(interval=1) + updater.start() + assert updater._is_running is True + + updater.stop() + time.sleep(0.2) # Give thread time to stop + assert updater._is_running is False + + +class TestCheckSysDate: + """Test SYS_DATE checking logic.""" + + def test_check_sys_date_returns_value(self): + 
"""Test _check_sys_date returns correct value.""" + import mes_dashboard.core.cache_updater as cu + + mock_df = pd.DataFrame({'SYS_DATE': ['2024-01-15 10:30:00']}) + + with patch.object(cu, 'read_sql_df', return_value=mock_df): + updater = cu.CacheUpdater() + result = updater._check_sys_date() + assert result == '2024-01-15 10:30:00' + + def test_check_sys_date_handles_empty_result(self): + """Test _check_sys_date handles empty result.""" + import mes_dashboard.core.cache_updater as cu + + with patch.object(cu, 'read_sql_df', return_value=pd.DataFrame()): + updater = cu.CacheUpdater() + result = updater._check_sys_date() + assert result is None + + def test_check_sys_date_handles_none_result(self): + """Test _check_sys_date handles None result.""" + import mes_dashboard.core.cache_updater as cu + + with patch.object(cu, 'read_sql_df', return_value=None): + updater = cu.CacheUpdater() + result = updater._check_sys_date() + assert result is None + + def test_check_sys_date_handles_exception(self): + """Test _check_sys_date handles database exception.""" + import mes_dashboard.core.cache_updater as cu + + with patch.object(cu, 'read_sql_df', side_effect=Exception("Database error")): + updater = cu.CacheUpdater() + result = updater._check_sys_date() + assert result is None + + +class TestLoadFullTable: + """Test full table loading logic.""" + + def test_load_full_table_success(self): + """Test _load_full_table loads data correctly.""" + import mes_dashboard.core.cache_updater as cu + + test_df = pd.DataFrame({ + 'LOTID': ['LOT001', 'LOT002'], + 'QTY': [100, 200], + 'WORKORDER': ['WO001', 'WO002'] + }) + + with patch.object(cu, 'read_sql_df', return_value=test_df): + updater = cu.CacheUpdater() + result = updater._load_full_table() + + assert result is not None + assert len(result) == 2 + + def test_load_full_table_handles_none(self): + """Test _load_full_table handles None result.""" + import mes_dashboard.core.cache_updater as cu + + with patch.object(cu, 
'read_sql_df', return_value=None): + updater = cu.CacheUpdater() + result = updater._load_full_table() + assert result is None + + def test_load_full_table_handles_exception(self): + """Test _load_full_table handles exception.""" + import mes_dashboard.core.cache_updater as cu + + with patch.object(cu, 'read_sql_df', side_effect=Exception("Database error")): + updater = cu.CacheUpdater() + result = updater._load_full_table() + assert result is None + + +class TestUpdateRedisCache: + """Test Redis cache update logic.""" + + def test_update_redis_cache_success(self): + """Test _update_redis_cache updates cache correctly.""" + import mes_dashboard.core.cache_updater as cu + + mock_client = MagicMock() + mock_pipeline = MagicMock() + mock_client.pipeline.return_value = mock_pipeline + + test_df = pd.DataFrame({ + 'LOTID': ['LOT001'], + 'QTY': [100] + }) + + with patch.object(cu, 'get_redis_client', return_value=mock_client): + with patch.object(cu, 'get_key', side_effect=lambda k: f'mes_wip:{k}'): + updater = cu.CacheUpdater() + result = updater._update_redis_cache(test_df, '2024-01-15 10:30:00') + + assert result is True + mock_pipeline.execute.assert_called_once() + + def test_update_redis_cache_no_client(self): + """Test _update_redis_cache handles no client.""" + import mes_dashboard.core.cache_updater as cu + + test_df = pd.DataFrame({'LOTID': ['LOT001']}) + + with patch.object(cu, 'get_redis_client', return_value=None): + updater = cu.CacheUpdater() + result = updater._update_redis_cache(test_df, '2024-01-15') + assert result is False + + +class TestCacheUpdateFlow: + """Test complete cache update flow.""" + + def test_no_update_when_sys_date_unchanged(self): + """Test cache doesn't update when SYS_DATE unchanged.""" + import mes_dashboard.core.cache_updater as cu + + mock_df = pd.DataFrame({'SYS_DATE': ['2024-01-15 10:30:00']}) + mock_client = MagicMock() + mock_client.get.return_value = '2024-01-15 10:30:00' + + with patch.object(cu, 'read_sql_df', 
return_value=mock_df): + with patch.object(cu, 'redis_available', return_value=True): + with patch.object(cu, 'get_redis_client', return_value=mock_client): + with patch.object(cu, 'get_key', side_effect=lambda k: f'mes_wip:{k}'): + updater = cu.CacheUpdater() + # Simulate already having cached the same date + result = updater._check_and_update(force=False) + # No update because dates match + assert result is False + + def test_update_when_sys_date_changes(self): + """Test cache updates when SYS_DATE changes.""" + import mes_dashboard.core.cache_updater as cu + + updater = cu.CacheUpdater() + + mock_df = pd.DataFrame({'SYS_DATE': ['2024-01-15 11:00:00']}) + + with patch.object(cu, 'read_sql_df', return_value=mock_df): + current_date = updater._check_sys_date() + old_date = '2024-01-15 10:30:00' + needs_update = current_date != old_date + + assert needs_update is True diff --git a/tests/test_circuit_breaker.py b/tests/test_circuit_breaker.py new file mode 100644 index 0000000..40a663b --- /dev/null +++ b/tests/test_circuit_breaker.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +"""Unit tests for circuit breaker module.""" + +import os +import pytest +import time +from unittest.mock import patch + +# Set circuit breaker enabled for tests +os.environ['CIRCUIT_BREAKER_ENABLED'] = 'true' + +from mes_dashboard.core.circuit_breaker import ( + CircuitBreaker, + CircuitState, + get_database_circuit_breaker, + get_circuit_breaker_status, + CIRCUIT_BREAKER_ENABLED +) + + +class TestCircuitBreakerStates: + """Test circuit breaker state transitions.""" + + def test_initial_state_is_closed(self): + """Circuit breaker starts in CLOSED state.""" + cb = CircuitBreaker("test") + assert cb.state == CircuitState.CLOSED + + def test_allow_request_when_closed(self): + """Requests are allowed when circuit is CLOSED.""" + cb = CircuitBreaker("test") + assert cb.allow_request() is True + + def test_record_success_keeps_closed(self): + """Recording success keeps circuit CLOSED.""" + cb = 
CircuitBreaker("test") + cb.record_success() + assert cb.state == CircuitState.CLOSED + + def test_opens_after_failure_threshold(self): + """Circuit opens after reaching failure threshold.""" + cb = CircuitBreaker( + "test", + failure_threshold=3, + failure_rate_threshold=0.5, + window_size=5 + ) + + # Record enough failures to open + for _ in range(5): + cb.record_failure() + + assert cb.state == CircuitState.OPEN + + def test_deny_request_when_open(self): + """Requests are denied when circuit is OPEN.""" + cb = CircuitBreaker( + "test", + failure_threshold=2, + failure_rate_threshold=0.5, + window_size=4 + ) + + # Force open + for _ in range(4): + cb.record_failure() + + assert cb.allow_request() is False + + def test_transition_to_half_open_after_timeout(self): + """Circuit transitions to HALF_OPEN after recovery timeout.""" + cb = CircuitBreaker( + "test", + failure_threshold=2, + failure_rate_threshold=0.5, + window_size=4, + recovery_timeout=1 # 1 second for fast test + ) + + # Force open + for _ in range(4): + cb.record_failure() + + assert cb.state == CircuitState.OPEN + + # Wait for recovery timeout + time.sleep(1.1) + + # Accessing state should transition to HALF_OPEN + assert cb.state == CircuitState.HALF_OPEN + + def test_half_open_allows_request(self): + """Requests are allowed in HALF_OPEN state for testing.""" + cb = CircuitBreaker( + "test", + failure_threshold=2, + failure_rate_threshold=0.5, + window_size=4, + recovery_timeout=1 + ) + + # Force open + for _ in range(4): + cb.record_failure() + + # Wait for recovery timeout + time.sleep(1.1) + + assert cb.allow_request() is True + + def test_success_in_half_open_closes(self): + """Success in HALF_OPEN state closes the circuit.""" + cb = CircuitBreaker( + "test", + failure_threshold=2, + failure_rate_threshold=0.5, + window_size=4, + recovery_timeout=1 + ) + + # Force open + for _ in range(4): + cb.record_failure() + + # Wait for recovery timeout + time.sleep(1.1) + + # Force HALF_OPEN check + _ = 
cb.state + + # Record success + cb.record_success() + + assert cb.state == CircuitState.CLOSED + + def test_failure_in_half_open_reopens(self): + """Failure in HALF_OPEN state reopens the circuit.""" + cb = CircuitBreaker( + "test", + failure_threshold=2, + failure_rate_threshold=0.5, + window_size=4, + recovery_timeout=1 + ) + + # Force open + for _ in range(4): + cb.record_failure() + + # Wait for recovery timeout + time.sleep(1.1) + + # Force HALF_OPEN check + _ = cb.state + + # Record failure + cb.record_failure() + + assert cb.state == CircuitState.OPEN + + def test_reset_clears_state(self): + """Reset returns circuit to initial state.""" + cb = CircuitBreaker( + "test", + failure_threshold=2, + failure_rate_threshold=0.5, + window_size=4 + ) + + # Force open + for _ in range(4): + cb.record_failure() + + cb.reset() + + assert cb.state == CircuitState.CLOSED + status = cb.get_status() + assert status.total_count == 0 + + +class TestCircuitBreakerStatus: + """Test circuit breaker status reporting.""" + + def test_get_status_returns_correct_info(self): + """Status includes all expected fields.""" + cb = CircuitBreaker("test") + + cb.record_success() + cb.record_success() + cb.record_failure() + + status = cb.get_status() + + assert status.state == "CLOSED" + assert status.success_count == 2 + assert status.failure_count == 1 + assert status.total_count == 3 + assert 0.3 <= status.failure_rate <= 0.34 + + def test_get_circuit_breaker_status_dict(self): + """Global function returns status as dictionary.""" + status = get_circuit_breaker_status() + + assert "state" in status + assert "failure_count" in status + assert "success_count" in status + assert "enabled" in status + + +class TestCircuitBreakerDisabled: + """Test circuit breaker when disabled.""" + + def test_allow_request_when_disabled(self): + """Requests always allowed when circuit breaker is disabled.""" + with patch('mes_dashboard.core.circuit_breaker.CIRCUIT_BREAKER_ENABLED', False): + cb = 
CircuitBreaker("test", failure_threshold=1, window_size=1) + + # Record failures + cb.record_failure() + cb.record_failure() + + # Should still allow (disabled) + assert cb.allow_request() is True diff --git a/tests/test_common_filters.py b/tests/test_common_filters.py new file mode 100644 index 0000000..c46841c --- /dev/null +++ b/tests/test_common_filters.py @@ -0,0 +1,186 @@ +"""Tests for Common Filters.""" + +import pytest +from unittest.mock import patch + +from mes_dashboard.sql.builder import QueryBuilder +from mes_dashboard.sql.filters import CommonFilters, NON_QUALITY_HOLD_REASONS + + +class TestCommonFilters: + """Test CommonFilters class.""" + + def test_add_location_exclusion(self): + """Test location exclusion filter.""" + builder = QueryBuilder() + + with patch( + "mes_dashboard.sql.filters.EXCLUDED_LOCATIONS", ["ATEC", "F區"] + ): + CommonFilters.add_location_exclusion(builder) + + assert len(builder.conditions) == 1 + assert "LOCATIONNAME IS NULL OR LOCATIONNAME NOT IN" in builder.conditions[0] + assert builder.params["p0"] == "ATEC" + assert builder.params["p1"] == "F區" + + def test_add_location_exclusion_empty(self): + """Test location exclusion with empty list.""" + builder = QueryBuilder() + + with patch("mes_dashboard.sql.filters.EXCLUDED_LOCATIONS", []): + CommonFilters.add_location_exclusion(builder) + + assert len(builder.conditions) == 0 + + def test_add_location_exclusion_custom_column(self): + """Test location exclusion with custom column name.""" + builder = QueryBuilder() + + with patch( + "mes_dashboard.sql.filters.EXCLUDED_LOCATIONS", ["TEST"] + ): + CommonFilters.add_location_exclusion(builder, column="LOC_NAME") + + assert "LOC_NAME IS NULL OR LOC_NAME NOT IN" in builder.conditions[0] + + def test_add_asset_status_exclusion(self): + """Test asset status exclusion filter.""" + builder = QueryBuilder() + + with patch( + "mes_dashboard.sql.filters.EXCLUDED_ASSET_STATUSES", ["報廢", "閒置"] + ): + 
CommonFilters.add_asset_status_exclusion(builder) + + assert len(builder.conditions) == 1 + assert "PJ_ASSETSSTATUS IS NULL OR PJ_ASSETSSTATUS NOT IN" in builder.conditions[0] + + def test_add_asset_status_exclusion_empty(self): + """Test asset status exclusion with empty list.""" + builder = QueryBuilder() + + with patch("mes_dashboard.sql.filters.EXCLUDED_ASSET_STATUSES", []): + CommonFilters.add_asset_status_exclusion(builder) + + assert len(builder.conditions) == 0 + + def test_add_wip_base_filters_workorder(self): + """Test WIP base filter for workorder.""" + builder = QueryBuilder() + CommonFilters.add_wip_base_filters(builder, workorder="WO123") + + assert len(builder.conditions) == 1 + assert "WORKORDER LIKE" in builder.conditions[0] + assert "%WO123%" in builder.params["p0"] + + def test_add_wip_base_filters_lotid(self): + """Test WIP base filter for lot ID.""" + builder = QueryBuilder() + CommonFilters.add_wip_base_filters(builder, lotid="LOT001") + + assert len(builder.conditions) == 1 + assert "LOTID LIKE" in builder.conditions[0] + + def test_add_wip_base_filters_multiple(self): + """Test WIP base filter with multiple parameters.""" + builder = QueryBuilder() + CommonFilters.add_wip_base_filters( + builder, workorder="WO", package="PKG", pj_type="TYPE" + ) + + assert len(builder.conditions) == 3 + assert any("WORKORDER LIKE" in c for c in builder.conditions) + assert any("PACKAGE_LEF LIKE" in c for c in builder.conditions) + assert any("PJ_TYPE LIKE" in c for c in builder.conditions) + + def test_add_status_filter_single(self): + """Test status filter with single status.""" + builder = QueryBuilder() + CommonFilters.add_status_filter(builder, status="HOLD") + + assert len(builder.conditions) == 1 + assert "STATUS = :p0" in builder.conditions[0] + assert builder.params["p0"] == "HOLD" + + def test_add_status_filter_multiple(self): + """Test status filter with multiple statuses.""" + builder = QueryBuilder() + CommonFilters.add_status_filter(builder, 
statuses=["RUN", "QUEUE"]) + + assert len(builder.conditions) == 1 + assert "STATUS IN (:p0, :p1)" in builder.conditions[0] + assert builder.params["p0"] == "RUN" + assert builder.params["p1"] == "QUEUE" + + def test_add_hold_type_filter_quality(self): + """Test hold type filter for quality holds.""" + builder = QueryBuilder() + CommonFilters.add_hold_type_filter(builder, hold_type="quality") + + assert len(builder.conditions) == 1 + assert "HOLDREASONNAME NOT IN" in builder.conditions[0] + + def test_add_hold_type_filter_non_quality(self): + """Test hold type filter for non-quality holds.""" + builder = QueryBuilder() + CommonFilters.add_hold_type_filter(builder, hold_type="non_quality") + + assert len(builder.conditions) == 1 + assert "HOLDREASONNAME IN" in builder.conditions[0] + + def test_is_quality_hold(self): + """Test is_quality_hold helper function.""" + # Quality hold (not in non-quality list) + assert CommonFilters.is_quality_hold("品質異常") is True + + # Non-quality hold (in list) + non_quality_reason = list(NON_QUALITY_HOLD_REASONS)[0] + assert CommonFilters.is_quality_hold(non_quality_reason) is False + + def test_add_equipment_filter_resource_ids(self): + """Test equipment filter with resource IDs.""" + builder = QueryBuilder() + CommonFilters.add_equipment_filter(builder, resource_ids=["R001", "R002"]) + + assert len(builder.conditions) == 1 + assert "RESOURCEID IN" in builder.conditions[0] + + def test_add_equipment_filter_workcenters(self): + """Test equipment filter with workcenters.""" + builder = QueryBuilder() + CommonFilters.add_equipment_filter(builder, workcenters=["WC1", "WC2"]) + + assert len(builder.conditions) == 1 + assert "WORKCENTERNAME IN" in builder.conditions[0] + + def test_build_location_filter_legacy(self): + """Test legacy location filter builder.""" + result = CommonFilters.build_location_filter_legacy( + locations=["LOC1", "LOC2"], + excluded_locations=["EXC1"], + ) + + assert "LOCATIONNAME IN ('LOC1', 'LOC2')" in result + 
assert "LOCATIONNAME NOT IN ('EXC1')" in result + + def test_build_asset_status_filter_legacy(self): + """Test legacy asset status filter builder.""" + result = CommonFilters.build_asset_status_filter_legacy( + excluded_statuses=["報廢", "閒置"] + ) + + assert "PJ_ASSETSSTATUS NOT IN" in result + assert "'報廢'" in result + assert "'閒置'" in result + + def test_build_asset_status_filter_legacy_empty(self): + """Test legacy asset status filter with empty list.""" + result = CommonFilters.build_asset_status_filter_legacy(excluded_statuses=[]) + + assert result == "" + + def test_non_quality_hold_reasons_exists(self): + """Test that NON_QUALITY_HOLD_REASONS is defined and has content.""" + assert len(NON_QUALITY_HOLD_REASONS) > 0 + assert isinstance(NON_QUALITY_HOLD_REASONS, set) diff --git a/tests/test_degraded_responses.py b/tests/test_degraded_responses.py new file mode 100644 index 0000000..c56b3a8 --- /dev/null +++ b/tests/test_degraded_responses.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +"""Degraded response contract tests.""" + +from __future__ import annotations + +from unittest.mock import patch + +import mes_dashboard.core.database as db +from mes_dashboard.app import create_app +from mes_dashboard.core.database import ( + DatabasePoolExhaustedError, + DatabaseCircuitOpenError, +) + + +def _client(): + db._ENGINE = None + app = create_app("testing") + app.config["TESTING"] = True + + @app.route("/api/__test__/pool") + def _pool_error(): + raise DatabasePoolExhaustedError("pool exhausted", retry_after_seconds=7) + + @app.route("/api/__test__/circuit") + def _circuit_error(): + raise DatabaseCircuitOpenError("circuit open", retry_after_seconds=11) + + return app.test_client() + + +def test_pool_exhausted_error_handler_contract(): + response = _client().get("/api/__test__/pool") + assert response.status_code == 503 + assert response.headers.get("Retry-After") == "7" + + payload = response.get_json() + assert payload["success"] is False + assert 
payload["error"]["code"] == "DB_POOL_EXHAUSTED" + assert payload["meta"]["retry_after_seconds"] == 7 + + +def test_circuit_open_error_handler_contract(): + response = _client().get("/api/__test__/circuit") + assert response.status_code == 503 + assert response.headers.get("Retry-After") == "11" + + payload = response.get_json() + assert payload["success"] is False + assert payload["error"]["code"] == "CIRCUIT_BREAKER_OPEN" + assert payload["meta"]["retry_after_seconds"] == 11 + + +@patch( + "mes_dashboard.routes.wip_routes.get_wip_summary", + side_effect=DatabasePoolExhaustedError("pool exhausted", retry_after_seconds=5), +) +def test_wip_route_propagates_degraded_contract(_mock_summary): + response = _client().get("/api/wip/overview/summary") + assert response.status_code == 503 + payload = response.get_json() + assert payload["error"]["code"] == "DB_POOL_EXHAUSTED" + + +@patch( + "mes_dashboard.routes.resource_routes.get_resource_status_summary", + side_effect=DatabasePoolExhaustedError("pool exhausted", retry_after_seconds=9), +) +def test_resource_route_propagates_degraded_contract(_mock_summary): + response = _client().get("/api/resource/status/summary") + assert response.status_code == 503 + payload = response.get_json() + assert payload["error"]["code"] == "DB_POOL_EXHAUSTED" + assert payload["meta"]["retry_after_seconds"] == 9 + + +@patch( + "mes_dashboard.routes.dashboard_routes.query_dashboard_kpi", + side_effect=DatabaseCircuitOpenError("circuit open", retry_after_seconds=13), +) +def test_dashboard_route_propagates_degraded_contract(_mock_kpi): + response = _client().post("/api/dashboard/kpi", json={}) + assert response.status_code == 503 + payload = response.get_json() + assert payload["error"]["code"] == "CIRCUIT_BREAKER_OPEN" + assert payload["meta"]["retry_after_seconds"] == 13 diff --git a/tests/test_excel_query_e2e.py b/tests/test_excel_query_e2e.py new file mode 100644 index 0000000..e67b564 --- /dev/null +++ b/tests/test_excel_query_e2e.py @@ 
-0,0 +1,506 @@ +# -*- coding: utf-8 -*- +"""End-to-end tests for Excel query workflow. + +Tests the complete workflow from Excel upload to query execution and export. +""" + +import pytest +import json +import io +from unittest.mock import patch, MagicMock + +from mes_dashboard import create_app + + +@pytest.fixture +def app(): + """Create test Flask application.""" + app = create_app() + app.config['TESTING'] = True + return app + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +def create_test_excel(data): + """Create a test Excel file with given data. + + Args: + data: List of lists where first list is headers. + e.g. [['COL1', 'COL2'], ['val1', 'val2'], ...] + """ + import openpyxl + wb = openpyxl.Workbook() + ws = wb.active + + for row_idx, row in enumerate(data, 1): + for col_idx, value in enumerate(row, 1): + ws.cell(row=row_idx, column=col_idx, value=value) + + buffer = io.BytesIO() + wb.save(buffer) + buffer.seek(0) + return buffer + + +class TestBasicQueryWorkflow: + """E2E tests for basic query workflow.""" + + @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query') + def test_complete_basic_workflow(self, mock_execute, client): + """Test complete workflow: upload → get values → execute → export.""" + # Step 1: Upload Excel file + excel_data = [ + ['LOT_ID', 'PRODUCT', 'QTY'], + ['LOT001', 'PROD_A', 100], + ['LOT002', 'PROD_B', 200], + ['LOT003', 'PROD_A', 150], + ] + excel_file = create_test_excel(excel_data) + + upload_response = client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'batch_query.xlsx')}, + content_type='multipart/form-data' + ) + assert upload_response.status_code == 200 + upload_data = json.loads(upload_response.data) + assert 'columns' in upload_data + assert 'LOT_ID' in upload_data['columns'] + assert 'preview' in upload_data + + # Step 2: Get column values + values_response = client.post( + '/api/excel-query/column-values', + json={'column_name': 
'LOT_ID'} + ) + assert values_response.status_code == 200 + values_data = json.loads(values_response.data) + assert 'values' in values_data + assert set(values_data['values']) == {'LOT001', 'LOT002', 'LOT003'} + + # Step 3: Execute query + mock_execute.return_value = { + 'columns': ['LOT_ID', 'SPEC', 'STATUS'], + 'data': [ + ['LOT001', 'SPEC_001', 'ACTIVE'], + ['LOT002', 'SPEC_002', 'HOLD'], + ['LOT003', 'SPEC_001', 'ACTIVE'], + ], + 'total': 3 + } + + execute_response = client.post( + '/api/excel-query/execute', + json={ + 'table_name': 'DWH.DW_MES_WIP', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'SPEC', 'STATUS'], + 'search_values': ['LOT001', 'LOT002', 'LOT003'] + } + ) + assert execute_response.status_code == 200 + execute_data = json.loads(execute_response.data) + assert execute_data['total'] == 3 + + +class TestAdvancedQueryWorkflow: + """E2E tests for advanced query workflow with date range and LIKE.""" + + @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query') + def test_like_contains_workflow(self, mock_execute, client): + """Test workflow with LIKE contains query.""" + # Upload Excel with search patterns + excel_data = [ + ['SEARCH_PATTERN'], + ['LOT'], + ['WIP'], + ] + excel_file = create_test_excel(excel_data) + + upload_response = client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'patterns.xlsx')}, + content_type='multipart/form-data' + ) + assert upload_response.status_code == 200 + + # Get search values + values_response = client.post( + '/api/excel-query/column-values', + json={'column_name': 'SEARCH_PATTERN'} + ) + assert values_response.status_code == 200 + search_values = json.loads(values_response.data)['values'] + + # Execute LIKE contains query + mock_execute.return_value = { + 'columns': ['LOT_ID', 'STATUS'], + 'data': [ + ['LOT001', 'ACTIVE'], + ['LOT002', 'ACTIVE'], + ['WIP001', 'HOLD'], + ['WIP002', 'ACTIVE'], + ], + 'total': 4 + } + + response = client.post( + 
'/api/excel-query/execute-advanced', + json={ + 'table_name': 'DWH.DW_MES_WIP', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'STATUS'], + 'search_values': search_values, + 'query_type': 'like_contains' + } + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['total'] == 4 + + @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query') + def test_date_range_workflow(self, mock_execute, client): + """Test workflow with date range filter.""" + excel_data = [ + ['LOT_ID'], + ['LOT001'], + ['LOT002'], + ] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'lots.xlsx')}, + content_type='multipart/form-data' + ) + + # Execute with date range + mock_execute.return_value = { + 'columns': ['LOT_ID', 'TXNDATE'], + 'data': [['LOT001', '2024-01-15']], + 'total': 1 + } + + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'DWH.DW_MES_WIP', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'TXNDATE'], + 'search_values': ['LOT001', 'LOT002'], + 'query_type': 'in', + 'date_column': 'TXNDATE', + 'date_from': '2024-01-01', + 'date_to': '2024-01-31' + } + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['total'] == 1 + + @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query') + def test_combined_like_and_date_workflow(self, mock_execute, client): + """Test workflow combining LIKE and date range.""" + excel_data = [ + ['PREFIX'], + ['LOT'], + ] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'prefixes.xlsx')}, + content_type='multipart/form-data' + ) + + # Execute with both LIKE prefix and date range + mock_execute.return_value = { + 'columns': ['LOT_ID', 'TXNDATE', 'STATUS'], + 'data': [ + ['LOT001', '2024-01-15', 'ACTIVE'], + ['LOT002', '2024-01-20', 'ACTIVE'], + ], + 
'total': 2 + } + + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'DWH.DW_MES_WIP', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'TXNDATE', 'STATUS'], + 'search_values': ['LOT'], + 'query_type': 'like_prefix', + 'date_column': 'TXNDATE', + 'date_from': '2024-01-01', + 'date_to': '2024-01-31' + } + ) + assert response.status_code == 200 + + +class TestColumnTypeDetection: + """E2E tests for column type detection workflow.""" + + def test_detect_date_column(self, client): + """Test detecting date type from Excel column.""" + excel_data = [ + ['DATE_COL'], + ['2024-01-01'], + ['2024-01-02'], + ['2024-01-03'], + ['2024-01-04'], + ] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'dates.xlsx')}, + content_type='multipart/form-data' + ) + + response = client.post( + '/api/excel-query/column-type', + json={'column_name': 'DATE_COL'} + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['detected_type'] == 'date' + + def test_detect_number_column(self, client): + """Test detecting numeric type from Excel column.""" + excel_data = [ + ['QTY'], + ['100'], + ['200'], + ['350.5'], + ['-50'], + ] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'numbers.xlsx')}, + content_type='multipart/form-data' + ) + + response = client.post( + '/api/excel-query/column-type', + json={'column_name': 'QTY'} + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['detected_type'] == 'number' + + def test_detect_id_column(self, client): + """Test detecting ID type from Excel column.""" + excel_data = [ + ['LOT_ID'], + ['LOT001'], + ['LOT002'], + ['WIP-2024-001'], + ['PROD_ABC'], + ] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'ids.xlsx')}, + 
content_type='multipart/form-data' + ) + + response = client.post( + '/api/excel-query/column-type', + json={'column_name': 'LOT_ID'} + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['detected_type'] == 'id' + + +class TestTableMetadataWorkflow: + """E2E tests for table metadata retrieval workflow.""" + + @patch('mes_dashboard.routes.excel_query_routes.get_table_column_metadata') + def test_metadata_with_type_matching(self, mock_metadata, client): + """Test workflow checking column type compatibility.""" + # Step 1: Upload Excel with ID column + excel_data = [ + ['LOT_ID'], + ['LOT001'], + ['LOT002'], + ] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'lots.xlsx')}, + content_type='multipart/form-data' + ) + + # Step 2: Get Excel column type + excel_type_response = client.post( + '/api/excel-query/column-type', + json={'column_name': 'LOT_ID'} + ) + excel_type = json.loads(excel_type_response.data)['detected_type'] + + # Step 3: Get table metadata + mock_metadata.return_value = { + 'columns': [ + {'name': 'LOT_ID', 'data_type': 'VARCHAR2', 'is_date': False, 'is_number': False}, + {'name': 'QTY', 'data_type': 'NUMBER', 'is_date': False, 'is_number': True}, + {'name': 'TXNDATE', 'data_type': 'DATE', 'is_date': True, 'is_number': False}, + ] + } + + metadata_response = client.post( + '/api/excel-query/table-metadata', + json={'table_name': 'DWH.DW_MES_WIP'} + ) + assert metadata_response.status_code == 200 + metadata = json.loads(metadata_response.data) + + # Verify column types are returned + assert len(metadata['columns']) == 3 + lot_col = next(c for c in metadata['columns'] if c['name'] == 'LOT_ID') + assert lot_col['data_type'] == 'VARCHAR2' + + +class TestValidationWorkflow: + """E2E tests for input validation throughout workflow.""" + + def test_like_keyword_limit_enforcement(self, client): + """Test that LIKE queries enforce keyword limit.""" + from 
mes_dashboard.services.excel_query_service import LIKE_KEYWORD_LIMIT + + # Create Excel with many values + excel_data = [['VALUE']] + [[f'VAL{i}'] for i in range(LIKE_KEYWORD_LIMIT + 10)] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'many_values.xlsx')}, + content_type='multipart/form-data' + ) + + # Get all values + values_response = client.post( + '/api/excel-query/column-values', + json={'column_name': 'VALUE'} + ) + all_values = json.loads(values_response.data)['values'] + + # Attempt LIKE query with too many values + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'COL', + 'return_columns': ['COL'], + 'search_values': all_values, + 'query_type': 'like_contains' + } + ) + # Depending on where the keyword limit is enforced, this either + # fails route validation (400) or is handled at the service layer (200). + # Accept both until the enforcement point is pinned down. + assert response.status_code in [200, 400] + + def test_date_range_boundary_validation(self, client): + """Test date range validation at boundaries.""" + excel_data = [ + ['LOT_ID'], + ['LOT001'], + ] + excel_file = create_test_excel(excel_data) + + client.post( + '/api/excel-query/upload', + data={'file': (excel_file, 'lots.xlsx')}, + content_type='multipart/form-data' + ) + + # Request the full calendar year 2024 (inclusive range, just over the limit) + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'], + 'date_from': '2024-01-01', + 'date_to': '2024-12-31' # 2024 is a leap year, so this inclusive range spans 366 days + } + ) + # 366 days exceeds the 365-day limit, so the request must be rejected + assert response.status_code == 400 + + def test_empty_search_values_rejected(self, client): + """Test that empty search values are rejected.""" + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 
'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': [], + 'query_type': 'in' + } + ) + assert response.status_code == 400 + + +class TestBackwardCompatibility: + """E2E tests ensuring backward compatibility with original API.""" + + @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query') + def test_original_execute_endpoint_works(self, mock_execute, client): + """Test that original /execute endpoint still works.""" + mock_execute.return_value = { + 'columns': ['LOT_ID'], + 'data': [['LOT001']], + 'total': 1 + } + + # Use original endpoint without advanced features + response = client.post( + '/api/excel-query/execute', + json={ + 'table_name': 'DWH.DW_MES_WIP', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'] + } + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['total'] == 1 + + @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query') + @patch('mes_dashboard.routes.excel_query_routes.generate_csv_content') + def test_csv_export_still_works(self, mock_csv, mock_execute, client): + """Test that CSV export still works with basic query.""" + mock_execute.return_value = { + 'columns': ['LOT_ID', 'STATUS'], + 'data': [['LOT001', 'ACTIVE']], + 'total': 1 + } + mock_csv.return_value = 'LOT_ID,STATUS\nLOT001,ACTIVE\n' + + response = client.post( + '/api/excel-query/export-csv', + json={ + 'table_name': 'DWH.DW_MES_WIP', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'STATUS'], + 'search_values': ['LOT001'] + } + ) + assert response.status_code == 200 + assert response.content_type.startswith('text/csv') diff --git a/tests/test_excel_query_routes.py b/tests/test_excel_query_routes.py new file mode 100644 index 0000000..6b7f01c --- /dev/null +++ b/tests/test_excel_query_routes.py @@ -0,0 +1,474 @@ +# -*- coding: utf-8 -*- +"""Integration tests for Excel query API routes. 
+ +Tests the API endpoints with mocked database dependencies. +""" + +import pytest +import json +import io +from unittest.mock import patch, MagicMock + +from mes_dashboard import create_app + + +@pytest.fixture +def app(): + """Create test Flask application.""" + app = create_app() + app.config['TESTING'] = True + return app + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +@pytest.fixture +def mock_excel_file(): + """Create a mock Excel file content.""" + import openpyxl + wb = openpyxl.Workbook() + ws = wb.active + ws['A1'] = 'LOT_ID' + ws['B1'] = 'PRODUCT' + ws['C1'] = 'DATE' + ws['A2'] = 'LOT001' + ws['B2'] = 'PROD_A' + ws['C2'] = '2024-01-15' + ws['A3'] = 'LOT002' + ws['B3'] = 'PROD_B' + ws['C3'] = '2024-01-16' + ws['A4'] = 'LOT003' + ws['B4'] = 'PROD_A' + ws['C4'] = '2024-01-17' + + buffer = io.BytesIO() + wb.save(buffer) + buffer.seek(0) + return buffer + + +class TestUploadExcel: + """Tests for /api/excel-query/upload endpoint.""" + + def test_upload_no_file(self, client): + """Should return error when no file provided.""" + response = client.post('/api/excel-query/upload') + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + def test_upload_empty_filename(self, client): + """Should return error for empty filename.""" + response = client.post( + '/api/excel-query/upload', + data={'file': (io.BytesIO(b''), '')}, + content_type='multipart/form-data' + ) + assert response.status_code == 400 + + def test_upload_invalid_extension(self, client): + """Should reject non-Excel files.""" + response = client.post( + '/api/excel-query/upload', + data={'file': (io.BytesIO(b'test'), 'test.txt')}, + content_type='multipart/form-data' + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert '.xlsx' in data['error'] or '.xls' in data['error'] + + def test_upload_valid_excel(self, client, mock_excel_file): + """Should successfully parse valid Excel 
file.""" + response = client.post( + '/api/excel-query/upload', + data={'file': (mock_excel_file, 'test.xlsx')}, + content_type='multipart/form-data' + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert 'columns' in data + assert 'LOT_ID' in data['columns'] + assert 'preview' in data + + +class TestGetColumnValues: + """Tests for /api/excel-query/column-values endpoint.""" + + def test_no_column_name(self, client): + """Should return error without column name.""" + response = client.post( + '/api/excel-query/column-values', + json={} + ) + assert response.status_code == 400 + + def test_no_excel_uploaded(self, client): + """Should return error if no Excel uploaded.""" + # Clear cache first + from mes_dashboard.routes.excel_query_routes import _uploaded_excel_cache + _uploaded_excel_cache.clear() + + response = client.post( + '/api/excel-query/column-values', + json={'column_name': 'LOT_ID'} + ) + assert response.status_code == 400 + + def test_get_values_after_upload(self, client, mock_excel_file): + """Should return column values after upload.""" + # First upload + client.post( + '/api/excel-query/upload', + data={'file': (mock_excel_file, 'test.xlsx')}, + content_type='multipart/form-data' + ) + + # Then get values + response = client.post( + '/api/excel-query/column-values', + json={'column_name': 'LOT_ID'} + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert 'values' in data + assert 'LOT001' in data['values'] + + +class TestGetTables: + """Tests for /api/excel-query/tables endpoint.""" + + def test_get_tables(self, client): + """Should return available tables.""" + response = client.get('/api/excel-query/tables') + assert response.status_code == 200 + data = json.loads(response.data) + assert 'tables' in data + assert isinstance(data['tables'], list) + + +class TestTableMetadata: + """Tests for /api/excel-query/table-metadata endpoint.""" + + def test_no_table_name(self, client): + """Should 
return error without table name.""" + response = client.post( + '/api/excel-query/table-metadata', + json={} + ) + assert response.status_code == 400 + + @patch('mes_dashboard.routes.excel_query_routes.get_table_column_metadata') + def test_get_metadata_success(self, mock_metadata, client): + """Should return enriched metadata.""" + mock_metadata.return_value = { + 'columns': [ + {'name': 'LOT_ID', 'data_type': 'VARCHAR2', 'is_date': False, 'is_number': False}, + {'name': 'TXNDATE', 'data_type': 'DATE', 'is_date': True, 'is_number': False}, + ] + } + + response = client.post( + '/api/excel-query/table-metadata', + json={'table_name': 'TEST_TABLE'} + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert 'columns' in data + assert len(data['columns']) == 2 + + @patch('mes_dashboard.routes.excel_query_routes.get_table_column_metadata') + def test_metadata_not_found(self, mock_metadata, client): + """Should handle table not found.""" + mock_metadata.return_value = {'error': 'Table not found', 'columns': []} + + response = client.post( + '/api/excel-query/table-metadata', + json={'table_name': 'NONEXISTENT'} + ) + assert response.status_code == 400 + + +class TestExecuteAdvancedQuery: + """Tests for /api/excel-query/execute-advanced endpoint.""" + + def test_missing_table_name(self, client): + """Should return error without table name.""" + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'] + } + ) + assert response.status_code == 400 + + def test_missing_search_column(self, client): + """Should return error without search column.""" + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'] + } + ) + assert response.status_code == 400 + + def test_invalid_query_type(self, client): + """Should reject invalid query type.""" + 
response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'], + 'query_type': 'invalid_type' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'invalid' in data['error'].lower() or '無效' in data['error'] + + def test_invalid_date_format(self, client): + """Should reject invalid date format.""" + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'], + 'date_from': '01-01-2024', + 'date_to': '12-31-2024' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert '格式' in data['error'] or 'format' in data['error'].lower() + + def test_date_range_reversed(self, client): + """Should reject if start date > end date.""" + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'], + 'date_from': '2024-12-31', + 'date_to': '2024-01-01' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert '起始' in data['error'] or 'start' in data['error'].lower() + + def test_date_range_exceeds_limit(self, client): + """Should reject date range > 365 days.""" + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'], + 'date_from': '2023-01-01', + 'date_to': '2024-12-31' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert '365' in data['error'] + + @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query') + def test_execute_in_query(self, mock_execute, client): + """Should execute IN query 
successfully.""" + mock_execute.return_value = { + 'columns': ['LOT_ID', 'PRODUCT'], + 'data': [['LOT001', 'PROD_A']], + 'total': 1 + } + + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'PRODUCT'], + 'search_values': ['LOT001'], + 'query_type': 'in' + } + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['total'] == 1 + + @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query') + def test_execute_like_contains(self, mock_execute, client): + """Should execute LIKE contains query.""" + mock_execute.return_value = { + 'columns': ['LOT_ID'], + 'data': [['LOT001'], ['LOT002']], + 'total': 2 + } + + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT'], + 'query_type': 'like_contains' + } + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['total'] == 2 + + @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query') + def test_execute_with_date_range(self, mock_execute, client): + """Should execute query with date range.""" + mock_execute.return_value = { + 'columns': ['LOT_ID', 'TXNDATE'], + 'data': [['LOT001', '2024-01-15']], + 'total': 1 + } + + response = client.post( + '/api/excel-query/execute-advanced', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'TXNDATE'], + 'search_values': ['LOT001'], + 'query_type': 'in', + 'date_column': 'TXNDATE', + 'date_from': '2024-01-01', + 'date_to': '2024-01-31' + } + ) + assert response.status_code == 200 + mock_execute.assert_called_once() + call_kwargs = mock_execute.call_args[1] + assert call_kwargs['date_column'] == 'TXNDATE' + assert call_kwargs['date_from'] == '2024-01-01' + assert call_kwargs['date_to'] == 
'2024-01-31' + + +class TestExecuteQuery: + """Tests for /api/excel-query/execute endpoint (backward compatibility).""" + + def test_missing_parameters(self, client): + """Should return error for missing parameters.""" + response = client.post( + '/api/excel-query/execute', + json={'table_name': 'TEST'} + ) + assert response.status_code == 400 + + @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query') + def test_execute_success(self, mock_execute, client): + """Should execute basic query successfully.""" + mock_execute.return_value = { + 'columns': ['LOT_ID'], + 'data': [['LOT001']], + 'total': 1 + } + + response = client.post( + '/api/excel-query/execute', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID'], + 'search_values': ['LOT001'] + } + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert data['total'] == 1 + + +class TestExportCSV: + """Tests for /api/excel-query/export-csv endpoint.""" + + def test_missing_parameters(self, client): + """Should return error for missing parameters.""" + response = client.post( + '/api/excel-query/export-csv', + json={} + ) + assert response.status_code == 400 + + @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query') + @patch('mes_dashboard.routes.excel_query_routes.generate_csv_content') + def test_export_success(self, mock_csv, mock_execute, client): + """Should export CSV successfully.""" + mock_execute.return_value = { + 'columns': ['LOT_ID', 'PRODUCT'], + 'data': [['LOT001', 'PROD_A']], + 'total': 1 + } + mock_csv.return_value = 'LOT_ID,PRODUCT\nLOT001,PROD_A\n' + + response = client.post( + '/api/excel-query/export-csv', + json={ + 'table_name': 'TEST_TABLE', + 'search_column': 'LOT_ID', + 'return_columns': ['LOT_ID', 'PRODUCT'], + 'search_values': ['LOT001'] + } + ) + assert response.status_code == 200 + assert response.content_type.startswith('text/csv') + assert b'LOT_ID' in response.data + + +class 
TestGetExcelColumnType: + """Tests for /api/excel-query/column-type endpoint.""" + + def test_no_column_name(self, client): + """Should return error without column name.""" + response = client.post( + '/api/excel-query/column-type', + json={} + ) + assert response.status_code == 400 + + def test_no_excel_uploaded(self, client): + """Should return error if no Excel uploaded.""" + from mes_dashboard.routes.excel_query_routes import _uploaded_excel_cache + _uploaded_excel_cache.clear() + + response = client.post( + '/api/excel-query/column-type', + json={'column_name': 'LOT_ID'} + ) + assert response.status_code == 400 + + def test_detect_type_after_upload(self, client, mock_excel_file): + """Should detect column type after upload.""" + # Upload first + client.post( + '/api/excel-query/upload', + data={'file': (mock_excel_file, 'test.xlsx')}, + content_type='multipart/form-data' + ) + + # Then detect type + response = client.post( + '/api/excel-query/column-type', + json={'column_name': 'LOT_ID'} + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert 'detected_type' in data + assert 'type_label' in data diff --git a/tests/test_excel_query_service.py b/tests/test_excel_query_service.py new file mode 100644 index 0000000..0afe7de --- /dev/null +++ b/tests/test_excel_query_service.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +"""Unit tests for Excel query service functions. + +Tests the core service functions without database dependencies. 
+""" + +import pytest +from mes_dashboard.services.excel_query_service import ( + detect_excel_column_type, + escape_like_pattern, + build_like_condition, + build_date_range_condition, + validate_like_keywords, + sanitize_column_name, + validate_table_name, + LIKE_KEYWORD_LIMIT, +) + + +class TestDetectExcelColumnType: + """Tests for detect_excel_column_type function.""" + + def test_empty_values_returns_text(self): + """Empty list should return text type.""" + result = detect_excel_column_type([]) + assert result['detected_type'] == 'text' + assert result['type_label'] == '文字' + + def test_detect_date_type(self): + """Should detect date format YYYY-MM-DD.""" + values = ['2024-01-15', '2024-02-20', '2024-03-25', '2024-04-30'] + result = detect_excel_column_type(values) + assert result['detected_type'] == 'date' + assert result['type_label'] == '日期' + + def test_detect_date_with_slash(self): + """Should detect date format YYYY/MM/DD.""" + values = ['2024/01/15', '2024/02/20', '2024/03/25', '2024/04/30'] + result = detect_excel_column_type(values) + assert result['detected_type'] == 'date' + assert result['type_label'] == '日期' + + def test_detect_datetime_type(self): + """Should detect datetime format.""" + values = [ + '2024-01-15 10:30:00', + '2024-02-20 14:45:30', + '2024-03-25T08:00:00', + '2024-04-30 23:59:59' + ] + result = detect_excel_column_type(values) + assert result['detected_type'] == 'datetime' + assert result['type_label'] == '日期時間' + + def test_detect_number_type(self): + """Should detect numeric values.""" + values = ['123', '456.78', '-99', '0', '1000000'] + result = detect_excel_column_type(values) + assert result['detected_type'] == 'number' + assert result['type_label'] == '數值' + + def test_detect_id_type(self): + """Should detect ID pattern (uppercase alphanumeric).""" + values = ['LOT001', 'WIP-2024-001', 'ABC_123', 'PROD001', 'TEST_ID'] + result = detect_excel_column_type(values) + assert result['detected_type'] == 'id' + assert 
result['type_label'] == '識別碼' + + def test_mixed_values_returns_text(self): + """Mixed values should return text type.""" + values = ['abc', '123', '2024-01-01', 'xyz', 'test'] + result = detect_excel_column_type(values) + assert result['detected_type'] == 'text' + assert result['type_label'] == '文字' + + def test_sample_values_included(self): + """Should include sample values in result.""" + values = ['A', 'B', 'C', 'D', 'E', 'F'] + result = detect_excel_column_type(values) + assert 'sample_values' in result + assert len(result['sample_values']) <= 5 + + +class TestEscapeLikePattern: + """Tests for escape_like_pattern function.""" + + def test_escape_percent(self): + """Should escape percent sign.""" + assert escape_like_pattern('100%') == '100\\%' + + def test_escape_underscore(self): + """Should escape underscore.""" + assert escape_like_pattern('test_value') == 'test\\_value' + + def test_escape_backslash(self): + """Should escape backslash.""" + assert escape_like_pattern('path\\file') == 'path\\\\file' + + def test_escape_multiple_specials(self): + """Should escape multiple special characters.""" + assert escape_like_pattern('50%_off') == '50\\%\\_off' + + def test_no_escape_needed(self): + """Should return unchanged if no special chars.""" + assert escape_like_pattern('normalvalue') == 'normalvalue' + + +class TestBuildLikeCondition: + """Tests for build_like_condition function.""" + + def test_contains_mode(self): + """Should build LIKE %...% pattern.""" + condition, params = build_like_condition('COL', ['abc'], 'contains') + assert 'LIKE :like_0' in condition + assert params['like_0'] == '%abc%' + + def test_prefix_mode(self): + """Should build LIKE ...% pattern.""" + condition, params = build_like_condition('COL', ['abc'], 'prefix') + assert 'LIKE :like_0' in condition + assert params['like_0'] == 'abc%' + + def test_suffix_mode(self): + """Should build LIKE %... 
pattern.""" + condition, params = build_like_condition('COL', ['abc'], 'suffix') + assert 'LIKE :like_0' in condition + assert params['like_0'] == '%abc' + + def test_multiple_values(self): + """Should build OR conditions for multiple values.""" + condition, params = build_like_condition('COL', ['a', 'b', 'c'], 'contains') + assert 'OR' in condition + assert len(params) == 3 + assert params['like_0'] == '%a%' + assert params['like_1'] == '%b%' + assert params['like_2'] == '%c%' + + def test_empty_values(self): + """Should return empty for empty values.""" + condition, params = build_like_condition('COL', [], 'contains') + assert condition == '' + assert params == {} + + def test_escape_clause_included(self): + """Should include ESCAPE clause.""" + condition, params = build_like_condition('COL', ['test'], 'contains') + assert "ESCAPE '\\')" in condition + + +class TestBuildDateRangeCondition: + """Tests for build_date_range_condition function.""" + + def test_both_dates(self): + """Should build condition with both dates.""" + condition, params = build_date_range_condition( + 'TXNDATE', '2024-01-01', '2024-12-31' + ) + assert 'TO_DATE(:date_from' in condition + assert 'TO_DATE(:date_to' in condition + assert params['date_from'] == '2024-01-01' + assert params['date_to'] == '2024-12-31' + + def test_only_from_date(self): + """Should build condition with only start date.""" + condition, params = build_date_range_condition( + 'TXNDATE', date_from='2024-01-01' + ) + assert '>=' in condition + assert 'date_from' in params + assert 'date_to' not in params + + def test_only_to_date(self): + """Should build condition with only end date.""" + condition, params = build_date_range_condition( + 'TXNDATE', date_to='2024-12-31' + ) + assert '<' in condition + assert 'date_to' in params + assert 'date_from' not in params + + def test_no_dates(self): + """Should return empty for no dates.""" + condition, params = build_date_range_condition('TXNDATE') + assert condition == '' + 
assert params == {} + + def test_end_date_includes_full_day(self): + """End date condition should include +1 for full day.""" + condition, params = build_date_range_condition( + 'TXNDATE', date_to='2024-12-31' + ) + assert '+ 1' in condition + + +class TestValidateLikeKeywords: + """Tests for validate_like_keywords function.""" + + def test_within_limit(self): + """Should pass validation for values within limit.""" + values = ['a'] * 50 + result = validate_like_keywords(values) + assert result['valid'] is True + + def test_at_limit(self): + """Should pass validation at exact limit.""" + values = ['a'] * LIKE_KEYWORD_LIMIT + result = validate_like_keywords(values) + assert result['valid'] is True + + def test_exceeds_limit(self): + """Should fail validation when exceeding limit.""" + values = ['a'] * (LIKE_KEYWORD_LIMIT + 1) + result = validate_like_keywords(values) + assert result['valid'] is False + assert 'error' in result + + +class TestSanitizeColumnName: + """Tests for sanitize_column_name function.""" + + def test_valid_name(self): + """Should keep valid column name.""" + assert sanitize_column_name('LOT_ID') == 'LOT_ID' + + def test_removes_special_chars(self): + """Should remove special characters.""" + assert sanitize_column_name('LOT-ID') == 'LOTID' + assert sanitize_column_name('LOT ID') == 'LOTID' + + def test_allows_underscore(self): + """Should allow underscore.""" + assert sanitize_column_name('MY_COLUMN_NAME') == 'MY_COLUMN_NAME' + + def test_prevents_sql_injection(self): + """Should prevent SQL injection attempts.""" + assert sanitize_column_name("COL; DROP TABLE--") == 'COLDROPTABLE' + + +class TestValidateTableName: + """Tests for validate_table_name function.""" + + def test_simple_name(self): + """Should validate simple table name.""" + assert validate_table_name('MY_TABLE') is True + + def test_schema_qualified(self): + """Should validate schema.table format.""" + assert validate_table_name('DWH.DW_MES_WIP') is True + + def 
test_invalid_starts_with_number(self): + """Should reject names starting with number.""" + assert validate_table_name('123TABLE') is False + + def test_invalid_special_chars(self): + """Should reject names with special characters.""" + assert validate_table_name('TABLE-NAME') is False + assert validate_table_name('TABLE NAME') is False + + def test_sql_injection_prevention(self): + """Should reject SQL injection attempts.""" + assert validate_table_name('TABLE; DROP--') is False diff --git a/tests/test_field_contracts.py b/tests/test_field_contracts.py new file mode 100644 index 0000000..a93577c --- /dev/null +++ b/tests/test_field_contracts.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +"""Field contract governance tests.""" + +from __future__ import annotations + +import csv +import io +from unittest.mock import patch + +import pandas as pd + +from mes_dashboard.config.field_contracts import ( + get_page_contract, + get_export_api_keys, + get_export_headers, +) +from mes_dashboard.services.job_query_service import export_jobs_with_history +from mes_dashboard.services.resource_history_service import export_csv as export_resource_history_csv + + +def test_contract_sections_exist_for_primary_pages(): + for page, section in [ + ('job_query', 'jobs_table'), + ('job_query', 'txn_table'), + ('job_query', 'export'), + ('resource_history', 'detail_table'), + ('resource_history', 'export'), + ('tables', 'result_table'), + ('excel_query', 'result_table'), + ('resource_status', 'matrix_summary'), + ]: + contract = get_page_contract(page, section) + assert contract, f"missing contract for {page}:{section}" + + +def test_export_contracts_have_no_duplicate_api_keys(): + for page in ['job_query', 'resource_history']: + keys = [field.get('api_key') for field in get_page_contract(page, 'export')] + assert len(keys) == len(set(keys)) + + +def test_export_headers_and_keys_have_same_length(): + for page in ['job_query', 'resource_history']: + headers = get_export_headers(page) + keys 
= get_export_api_keys(page) + assert headers + assert keys + assert len(headers) == len(keys) + + +def test_all_contract_fields_define_semantic_type(): + pages_and_sections = [ + ('job_query', 'jobs_table'), + ('job_query', 'txn_table'), + ('job_query', 'export'), + ('resource_history', 'detail_table'), + ('resource_history', 'kpi'), + ('resource_history', 'export'), + ('tables', 'result_table'), + ('excel_query', 'result_table'), + ('resource_status', 'matrix_summary'), + ] + for page, section in pages_and_sections: + for field in get_page_contract(page, section): + assert field.get('semantic_type'), f"missing semantic_type in {page}:{section}:{field}" + + +@patch('mes_dashboard.services.job_query_service.SQLLoader.load', return_value='SELECT 1') +def test_job_query_export_uses_contract_headers(_mock_sql): + export_keys = get_export_api_keys('job_query') + export_headers = get_export_headers('job_query') + + row = {key: f'v_{idx}' for idx, key in enumerate(export_keys)} + row['JOB_CREATEDATE'] = pd.Timestamp('2024-01-01 10:00:00') + row['JOB_COMPLETEDATE'] = pd.Timestamp('2024-01-02 10:00:00') + row['TXNDATE'] = pd.Timestamp('2024-01-02 11:00:00') + df = pd.DataFrame([row], columns=export_keys) + + with patch('mes_dashboard.services.job_query_service.read_sql_df', return_value=df): + chunks = list(export_jobs_with_history(['R1'], '2024-01-01', '2024-01-10')) + + assert chunks + header_chunk = chunks[0].lstrip('\ufeff') + header_row = next(csv.reader(io.StringIO(header_chunk))) + assert header_row == export_headers + + +@patch('mes_dashboard.services.resource_history_service.SQLLoader.load', return_value='SELECT 1') +@patch('mes_dashboard.services.resource_history_service.read_sql_df') +@patch('mes_dashboard.services.filter_cache.get_workcenter_mapping') +def test_resource_history_export_uses_contract_headers( + mock_wc_mapping, + mock_read_sql, + _mock_sql_loader, +): + export_headers = get_export_headers('resource_history') + + mock_wc_mapping.return_value = { + 
'WC-A': {'group': '站點-A', 'sequence': 1} + } + + mock_read_sql.return_value = pd.DataFrame([ + { + 'HISTORYID': 'RES-A', + 'PRD_HOURS': 10, + 'SBY_HOURS': 2, + 'UDT_HOURS': 1, + 'SDT_HOURS': 1, + 'EGT_HOURS': 1, + 'NST_HOURS': 1, + 'TOTAL_HOURS': 16, + } + ]) + + with patch('mes_dashboard.services.resource_history_service._get_filtered_resources', return_value=[ + { + 'RESOURCEID': 'RES-A', + 'WORKCENTERNAME': 'WC-A', + 'RESOURCEFAMILYNAME': 'FAM-A', + 'RESOURCENAME': 'EQ-A', + } + ]): + chunks = list(export_resource_history_csv('2024-01-01', '2024-01-10')) + + assert chunks + header_row = next(csv.reader(io.StringIO(chunks[0]))) + assert header_row == export_headers diff --git a/tests/test_frontend_compute_parity.py b/tests/test_frontend_compute_parity.py new file mode 100644 index 0000000..9075626 --- /dev/null +++ b/tests/test_frontend_compute_parity.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +"""Parity checks between backend formulas and frontend compute helpers.""" + +from __future__ import annotations + +import json +import subprocess +from pathlib import Path + +from mes_dashboard.services.resource_history_service import ( + _calc_ou_pct, + _calc_availability_pct, + _calc_status_pct, +) + + +def _load_fixture() -> dict: + repo_root = Path(__file__).resolve().parents[1] + fixture_path = repo_root / "tests" / "fixtures" / "frontend_compute_parity.json" + return json.loads(fixture_path.read_text(encoding="utf-8")) + + +def _backend_expected(case: dict[str, float]) -> dict[str, float]: + prd = case['prd_hours'] + sby = case['sby_hours'] + udt = case['udt_hours'] + sdt = case['sdt_hours'] + egt = case['egt_hours'] + nst = case['nst_hours'] + total = prd + sby + udt + sdt + egt + nst + + return { + 'ou_pct': _calc_ou_pct(prd, sby, udt, sdt, egt), + 'availability_pct': _calc_availability_pct(prd, sby, udt, sdt, egt, nst), + 'prd_pct': _calc_status_pct(prd, total), + 'sby_pct': _calc_status_pct(sby, total), + 'udt_pct': _calc_status_pct(udt, total), + 'sdt_pct': 
_calc_status_pct(sdt, total), + 'egt_pct': _calc_status_pct(egt, total), + 'nst_pct': _calc_status_pct(nst, total), + } + + +def test_frontend_compute_matches_backend_formulas(): + repo_root = Path(__file__).resolve().parents[1] + compute_module = repo_root / 'frontend' / 'src' / 'core' / 'compute.js' + fixture = _load_fixture() + cases = fixture["cases"] + tolerance = fixture["metric_tolerance"] + + node_code = ( + "import { buildResourceKpiFromHours } from '" + compute_module.as_posix() + "';" + "const cases = JSON.parse(process.argv[1]);" + "const result = cases.map((c) => buildResourceKpiFromHours(c));" + "console.log(JSON.stringify(result));" + ) + + completed = subprocess.run( + ['node', '--input-type=module', '-e', node_code, json.dumps(cases)], + cwd=repo_root, + check=True, + capture_output=True, + text=True, + ) + + frontend_values = json.loads(completed.stdout) + assert len(frontend_values) == len(cases) + + for idx, case in enumerate(cases): + expected = _backend_expected(case) + actual = frontend_values[idx] + for key, value in expected.items(): + delta = abs(float(actual[key]) - float(value)) + assert delta <= float(tolerance.get(key, 0.0)) diff --git a/tests/test_health_routes.py b/tests/test_health_routes.py new file mode 100644 index 0000000..a950dfb --- /dev/null +++ b/tests/test_health_routes.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +"""Health route telemetry tests.""" + +from __future__ import annotations + +from unittest.mock import patch + +import mes_dashboard.core.database as db +from mes_dashboard.app import create_app + + +def _client(): + db._ENGINE = None + app = create_app('testing') + app.config['TESTING'] = True + return app.test_client() + + +@patch('mes_dashboard.routes.health_routes.check_database', return_value=('ok', None)) +@patch('mes_dashboard.routes.health_routes.check_redis', return_value=('error', 'redis-down')) +@patch('mes_dashboard.routes.health_routes.get_route_cache_status') +def 
test_health_includes_route_cache_and_degraded_warning( + mock_route_cache, + _mock_redis, + _mock_db, +): + mock_route_cache.return_value = { + 'mode': 'l1-only', + 'degraded': True, + 'reads_total': 10, + 'l1_hits': 9, + 'misses': 1, + } + + response = _client().get('/health') + assert response.status_code == 200 + payload = response.get_json() + + assert payload['status'] == 'degraded' + assert payload['route_cache']['mode'] == 'l1-only' + assert payload['route_cache']['degraded'] is True + assert 'resilience' in payload + assert payload['resilience']['thresholds']['restart_churn_threshold'] >= 1 + assert payload['resilience']['recovery_recommendation']['action'] == 'continue_degraded_mode' + assert any('degraded' in warning.lower() for warning in payload.get('warnings', [])) + + +@patch('mes_dashboard.core.permissions.is_admin_logged_in', return_value=True) +@patch('mes_dashboard.core.metrics.get_metrics_summary', return_value={'p50_ms': 1, 'p95_ms': 2, 'p99_ms': 3, 'count': 10, 'slow_count': 0, 'slow_rate': 0.0, 'worker_pid': 123}) +@patch('mes_dashboard.core.circuit_breaker.get_circuit_breaker_status', return_value={'state': 'CLOSED'}) +@patch('mes_dashboard.routes.health_routes.check_database', return_value=('ok', None)) +@patch('mes_dashboard.routes.health_routes.check_redis', return_value=('ok', None)) +@patch('mes_dashboard.routes.health_routes.get_route_cache_status') +def test_deep_health_exposes_route_cache_telemetry( + mock_route_cache, + _mock_redis, + _mock_db, + _mock_cb, + _mock_metrics, + _mock_admin, +): + mock_route_cache.return_value = { + 'mode': 'l1+l2', + 'degraded': False, + 'reads_total': 20, + 'l1_hits': 8, + 'l2_hits': 11, + 'misses': 1, + } + + response = _client().get('/health/deep') + assert response.status_code == 200 + payload = response.get_json() + + route_cache = payload['checks']['route_cache'] + assert route_cache['mode'] == 'l1+l2' + assert route_cache['reads_total'] == 20 + assert route_cache['degraded'] is False + assert 
payload['resilience']['recovery_recommendation']['action'] == 'none' + assert payload['resilience']['thresholds']['pool_saturation_warning'] >= 0.5 diff --git a/tests/test_hold_routes.py b/tests/test_hold_routes.py new file mode 100644 index 0000000..1e6a3b7 --- /dev/null +++ b/tests/test_hold_routes.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +"""Unit tests for Hold Detail API routes. + +Tests the Hold Detail API endpoints in hold_routes.py. +""" + +import unittest +from unittest.mock import patch +import json + +from mes_dashboard.app import create_app +import mes_dashboard.core.database as db + + +class TestHoldRoutesBase(unittest.TestCase): + """Base class for Hold routes tests.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + +class TestHoldDetailPageRoute(TestHoldRoutesBase): + """Test GET /hold-detail page route.""" + + def test_hold_detail_page_requires_reason(self): + """GET /hold-detail without reason should redirect to wip-overview.""" + response = self.client.get('/hold-detail') + self.assertEqual(response.status_code, 302) + self.assertIn('/wip-overview', response.location) + + def test_hold_detail_page_with_reason(self): + """GET /hold-detail?reason=xxx should return 200.""" + response = self.client.get('/hold-detail?reason=YieldLimit') + self.assertEqual(response.status_code, 200) + + def test_hold_detail_page_contains_reason_in_html(self): + """Page should display the hold reason in the HTML.""" + response = self.client.get('/hold-detail?reason=YieldLimit') + self.assertIn(b'YieldLimit', response.data) + + +class TestHoldDetailSummaryRoute(TestHoldRoutesBase): + """Test GET /api/wip/hold-detail/summary endpoint.""" + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary') + def test_returns_success_with_data(self, mock_get_summary): + """Should return success=True with summary data.""" + 
mock_get_summary.return_value = { + 'totalLots': 128, + 'totalQty': 25600, + 'avgAge': 2.3, + 'maxAge': 15.0, + 'workcenterCount': 8 + } + + response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertEqual(data['data']['totalLots'], 128) + self.assertEqual(data['data']['totalQty'], 25600) + self.assertEqual(data['data']['avgAge'], 2.3) + self.assertEqual(data['data']['maxAge'], 15.0) + self.assertEqual(data['data']['workcenterCount'], 8) + + def test_returns_error_without_reason(self): + """Should return 400 when reason is missing.""" + response = self.client.get('/api/wip/hold-detail/summary') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 400) + self.assertFalse(data['success']) + self.assertIn('reason', data['error']) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary') + def test_returns_error_on_failure(self, mock_get_summary): + """Should return success=False and 500 on failure.""" + mock_get_summary.return_value = None + + response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + self.assertIn('error', data) + + +class TestHoldDetailDistributionRoute(TestHoldRoutesBase): + """Test GET /api/wip/hold-detail/distribution endpoint.""" + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution') + def test_returns_success_with_distribution(self, mock_get_dist): + """Should return success=True with distribution data.""" + mock_get_dist.return_value = { + 'byWorkcenter': [ + {'name': 'DA', 'lots': 45, 'qty': 9000, 'percentage': 35.2}, + {'name': 'WB', 'lots': 38, 'qty': 7600, 'percentage': 29.7} + ], + 'byPackage': [ + {'name': 'DIP-B', 'lots': 50, 'qty': 10000, 'percentage': 39.1}, + {'name': 'QFN', 'lots': 35, 'qty': 
7000, 'percentage': 27.3} + ], + 'byAge': [ + {'range': '0-1', 'label': '0-1天', 'lots': 45, 'qty': 9000, 'percentage': 35.2}, + {'range': '1-3', 'label': '1-3天', 'lots': 38, 'qty': 7600, 'percentage': 29.7}, + {'range': '3-7', 'label': '3-7天', 'lots': 30, 'qty': 6000, 'percentage': 23.4}, + {'range': '7+', 'label': '7+天', 'lots': 15, 'qty': 3000, 'percentage': 11.7} + ] + } + + response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertIn('byWorkcenter', data['data']) + self.assertIn('byPackage', data['data']) + self.assertIn('byAge', data['data']) + self.assertEqual(len(data['data']['byWorkcenter']), 2) + self.assertEqual(len(data['data']['byAge']), 4) + + def test_returns_error_without_reason(self): + """Should return 400 when reason is missing.""" + response = self.client.get('/api/wip/hold-detail/distribution') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 400) + self.assertFalse(data['success']) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution') + def test_returns_error_on_failure(self, mock_get_dist): + """Should return success=False and 500 on failure.""" + mock_get_dist.return_value = None + + response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + + +class TestHoldDetailLotsRoute(TestHoldRoutesBase): + """Test GET /api/wip/hold-detail/lots endpoint.""" + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_returns_success_with_lots(self, mock_get_lots): + """Should return success=True with lots data.""" + mock_get_lots.return_value = { + 'lots': [ + { + 'lotId': 'L001', + 'workorder': 'WO123', + 'qty': 200, + 'package': 'DIP-B', + 'workcenter': 'DA', + 'spec': 'S01', + 'age': 2.3, 
+ 'holdBy': 'EMP01', + 'dept': 'QC', + 'holdComment': 'Yield below threshold' + } + ], + 'pagination': { + 'page': 1, + 'perPage': 50, + 'total': 128, + 'totalPages': 3 + }, + 'filters': { + 'workcenter': None, + 'package': None, + 'ageRange': None + } + } + + response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertIn('lots', data['data']) + self.assertIn('pagination', data['data']) + self.assertIn('filters', data['data']) + self.assertEqual(len(data['data']['lots']), 1) + self.assertEqual(data['data']['pagination']['total'], 128) + + def test_returns_error_without_reason(self): + """Should return 400 when reason is missing.""" + response = self.client.get('/api/wip/hold-detail/lots') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 400) + self.assertFalse(data['success']) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_passes_filter_parameters(self, mock_get_lots): + """Should pass filter parameters to service function.""" + mock_get_lots.return_value = { + 'lots': [], + 'pagination': {'page': 2, 'perPage': 50, 'total': 0, 'totalPages': 1}, + 'filters': {'workcenter': 'DA', 'package': 'DIP-B', 'ageRange': '1-3'} + } + + response = self.client.get( + '/api/wip/hold-detail/lots?reason=YieldLimit&workcenter=DA&package=DIP-B&age_range=1-3&page=2' + ) + + mock_get_lots.assert_called_once_with( + reason='YieldLimit', + workcenter='DA', + package='DIP-B', + age_range='1-3', + include_dummy=False, + page=2, + page_size=50 + ) + + def test_validates_age_range_parameter(self): + """Should return 400 for invalid age_range.""" + response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&age_range=invalid') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 400) + self.assertFalse(data['success']) + self.assertIn('age_range', 
data['error']) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_limits_per_page_to_200(self, mock_get_lots): + """Per page should be capped at 200.""" + mock_get_lots.return_value = { + 'lots': [], + 'pagination': {'page': 1, 'perPage': 200, 'total': 0, 'totalPages': 1}, + 'filters': {'workcenter': None, 'package': None, 'ageRange': None} + } + + response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&per_page=500') + + call_args = mock_get_lots.call_args + self.assertEqual(call_args.kwargs['page_size'], 200) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_handles_page_less_than_one(self, mock_get_lots): + """Page number less than 1 should be set to 1.""" + mock_get_lots.return_value = { + 'lots': [], + 'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1}, + 'filters': {'workcenter': None, 'package': None, 'ageRange': None} + } + + response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=0') + + call_args = mock_get_lots.call_args + self.assertEqual(call_args.kwargs['page'], 1) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_returns_error_on_failure(self, mock_get_lots): + """Should return success=False and 500 on failure.""" + mock_get_lots.return_value = None + + response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + + +class TestHoldDetailAgeRangeFilters(TestHoldRoutesBase): + """Test age range filter validation.""" + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_valid_age_range_0_1(self, mock_get_lots): + """Should accept 0-1 as valid age_range.""" + mock_get_lots.return_value = { + 'lots': [], 'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1}, + 'filters': {'workcenter': None, 'package': None, 'ageRange': '0-1'} + } + response = 
self.client.get('/api/wip/hold-detail/lots?reason=Test&age_range=0-1') + self.assertEqual(response.status_code, 200) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_valid_age_range_1_3(self, mock_get_lots): + """Should accept 1-3 as valid age_range.""" + mock_get_lots.return_value = { + 'lots': [], 'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1}, + 'filters': {'workcenter': None, 'package': None, 'ageRange': '1-3'} + } + response = self.client.get('/api/wip/hold-detail/lots?reason=Test&age_range=1-3') + self.assertEqual(response.status_code, 200) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_valid_age_range_3_7(self, mock_get_lots): + """Should accept 3-7 as valid age_range.""" + mock_get_lots.return_value = { + 'lots': [], 'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1}, + 'filters': {'workcenter': None, 'package': None, 'ageRange': '3-7'} + } + response = self.client.get('/api/wip/hold-detail/lots?reason=Test&age_range=3-7') + self.assertEqual(response.status_code, 200) + + @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots') + def test_valid_age_range_7_plus(self, mock_get_lots): + """Should accept 7+ as valid age_range.""" + mock_get_lots.return_value = { + 'lots': [], 'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1}, + 'filters': {'workcenter': None, 'package': None, 'ageRange': '7+'} + } + response = self.client.get('/api/wip/hold-detail/lots?reason=Test&age_range=7%2B') + self.assertEqual(response.status_code, 200) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_job_query_routes.py b/tests/test_job_query_routes.py new file mode 100644 index 0000000..06eca53 --- /dev/null +++ b/tests/test_job_query_routes.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +"""Integration tests for Job Query API routes. + +Tests the API endpoints with mocked service dependencies. 
+""" + +import pytest +import json +from unittest.mock import patch, MagicMock + +from mes_dashboard import create_app + + +@pytest.fixture +def app(): + """Create test Flask application.""" + app = create_app() + app.config['TESTING'] = True + return app + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +class TestJobQueryPage: + """Tests for /job-query page route.""" + + def test_page_returns_html(self, client): + """Should return the job query page.""" + response = client.get('/job-query') + assert response.status_code == 200 + assert b'html' in response.data.lower() + + +class TestGetResources: + """Tests for /api/job-query/resources endpoint.""" + + @patch('mes_dashboard.services.resource_cache.get_all_resources') + def test_get_resources_success(self, mock_get_resources, client): + """Should return resources list.""" + mock_get_resources.return_value = [ + { + 'RESOURCEID': 'RES001', + 'RESOURCENAME': 'Machine-01', + 'WORKCENTERNAME': 'WC-A', + 'RESOURCEFAMILYNAME': 'FAM-01' + }, + { + 'RESOURCEID': 'RES002', + 'RESOURCENAME': 'Machine-02', + 'WORKCENTERNAME': 'WC-B', + 'RESOURCEFAMILYNAME': 'FAM-02' + } + ] + + response = client.get('/api/job-query/resources') + assert response.status_code == 200 + data = json.loads(response.data) + assert 'data' in data + assert 'total' in data + assert data['total'] == 2 + assert data['data'][0]['RESOURCEID'] in ['RES001', 'RES002'] + + @patch('mes_dashboard.services.resource_cache.get_all_resources') + def test_get_resources_empty(self, mock_get_resources, client): + """Should return error when no resources available.""" + mock_get_resources.return_value = [] + + response = client.get('/api/job-query/resources') + assert response.status_code == 500 + data = json.loads(response.data) + assert 'error' in data + + @patch('mes_dashboard.services.resource_cache.get_all_resources') + def test_get_resources_exception(self, mock_get_resources, client): + """Should handle exception 
gracefully.""" + mock_get_resources.side_effect = Exception('Database error') + + response = client.get('/api/job-query/resources') + assert response.status_code == 500 + data = json.loads(response.data) + assert 'error' in data + + +class TestQueryJobs: + """Tests for /api/job-query/jobs endpoint.""" + + def test_missing_resource_ids(self, client): + """Should return error without resource_ids.""" + response = client.post( + '/api/job-query/jobs', + json={ + 'start_date': '2024-01-01', + 'end_date': '2024-01-31' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + assert '設備' in data['error'] + + def test_empty_resource_ids(self, client): + """Should return error for empty resource_ids.""" + response = client.post( + '/api/job-query/jobs', + json={ + 'resource_ids': [], + 'start_date': '2024-01-01', + 'end_date': '2024-01-31' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + def test_missing_start_date(self, client): + """Should return error without start_date.""" + response = client.post( + '/api/job-query/jobs', + json={ + 'resource_ids': ['RES001'], + 'end_date': '2024-01-31' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + assert '日期' in data['error'] + + def test_missing_end_date(self, client): + """Should return error without end_date.""" + response = client.post( + '/api/job-query/jobs', + json={ + 'resource_ids': ['RES001'], + 'start_date': '2024-01-01' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + def test_invalid_date_range(self, client): + """Should return error for invalid date range.""" + response = client.post( + '/api/job-query/jobs', + json={ + 'resource_ids': ['RES001'], + 'start_date': '2024-12-31', + 'end_date': '2024-01-01' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 
'error' in data + assert '結束日期' in data['error'] or '早於' in data['error'] + + def test_date_range_exceeds_limit(self, client): + """Should reject date range > 365 days.""" + response = client.post( + '/api/job-query/jobs', + json={ + 'resource_ids': ['RES001'], + 'start_date': '2023-01-01', + 'end_date': '2024-12-31' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + assert '365' in data['error'] + + @patch('mes_dashboard.routes.job_query_routes.get_jobs_by_resources') + def test_query_jobs_success(self, mock_query, client): + """Should return jobs list on success.""" + mock_query.return_value = { + 'data': [ + {'JOBID': 'JOB001', 'RESOURCENAME': 'Machine-01', 'JOBSTATUS': 'Complete'} + ], + 'total': 1, + 'resource_count': 1 + } + + response = client.post( + '/api/job-query/jobs', + json={ + 'resource_ids': ['RES001'], + 'start_date': '2024-01-01', + 'end_date': '2024-01-31' + } + ) + assert response.status_code == 200 + data = json.loads(response.data) + assert 'data' in data + assert data['total'] == 1 + assert data['data'][0]['JOBID'] == 'JOB001' + + @patch('mes_dashboard.routes.job_query_routes.get_jobs_by_resources') + def test_query_jobs_service_error(self, mock_query, client): + """Should return error from service.""" + mock_query.return_value = {'error': '查詢失敗: Database error'} + + response = client.post( + '/api/job-query/jobs', + json={ + 'resource_ids': ['RES001'], + 'start_date': '2024-01-01', + 'end_date': '2024-01-31' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + +class TestQueryJobTxnHistory: + """Tests for /api/job-query/txn/ endpoint.""" + + @patch('mes_dashboard.routes.job_query_routes.get_job_txn_history') + def test_get_txn_history_success(self, mock_query, client): + """Should return transaction history.""" + mock_query.return_value = { + 'data': [ + { + 'JOBTXNHISTORYID': 'TXN001', + 'JOBID': 'JOB001', + 'TXNDATE': '2024-01-15 
10:30:00', + 'FROMJOBSTATUS': 'Open', + 'JOBSTATUS': 'In Progress' + } + ], + 'total': 1, + 'job_id': 'JOB001' + } + + response = client.get('/api/job-query/txn/JOB001') + assert response.status_code == 200 + data = json.loads(response.data) + assert 'data' in data + assert data['total'] == 1 + assert data['job_id'] == 'JOB001' + + @patch('mes_dashboard.routes.job_query_routes.get_job_txn_history') + def test_get_txn_history_service_error(self, mock_query, client): + """Should return error from service.""" + mock_query.return_value = {'error': '查詢失敗: Job not found'} + + response = client.get('/api/job-query/txn/INVALID_JOB') + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + +class TestExportJobs: + """Tests for /api/job-query/export endpoint.""" + + def test_missing_resource_ids(self, client): + """Should return error without resource_ids.""" + response = client.post( + '/api/job-query/export', + json={ + 'start_date': '2024-01-01', + 'end_date': '2024-01-31' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + def test_missing_dates(self, client): + """Should return error without dates.""" + response = client.post( + '/api/job-query/export', + json={ + 'resource_ids': ['RES001'] + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + def test_invalid_date_range(self, client): + """Should return error for invalid date range.""" + response = client.post( + '/api/job-query/export', + json={ + 'resource_ids': ['RES001'], + 'start_date': '2024-12-31', + 'end_date': '2024-01-01' + } + ) + assert response.status_code == 400 + data = json.loads(response.data) + assert 'error' in data + + @patch('mes_dashboard.routes.job_query_routes.export_jobs_with_history') + def test_export_success(self, mock_export, client): + """Should return CSV streaming response.""" + # Mock generator that yields CSV content + def 
class TestValidateDateRange:
    """Tests for validate_date_range function."""

    @staticmethod
    def _assert_format_error(msg):
        """Shared check: the validator must report a date-format problem."""
        assert msg is not None
        assert '格式' in msg or 'format' in msg.lower()

    def test_valid_range(self):
        """Should return None for valid date range."""
        assert validate_date_range('2024-01-01', '2024-01-31') is None

    def test_same_day(self):
        """Should allow same day as start and end."""
        assert validate_date_range('2024-01-01', '2024-01-01') is None

    def test_end_before_start(self):
        """Should reject end date before start date."""
        msg = validate_date_range('2024-12-31', '2024-01-01')
        assert msg is not None
        assert '結束日期' in msg or '早於' in msg

    def test_exceeds_max_range(self):
        """Should reject date range exceeding limit."""
        msg = validate_date_range('2023-01-01', '2024-12-31')
        assert msg is not None
        assert str(MAX_DATE_RANGE_DAYS) in msg

    def test_exactly_max_range(self):
        """Should allow exactly max range days."""
        # 365 days from 2024-01-01 is 2024-12-31
        assert validate_date_range('2024-01-01', '2024-12-31') is None

    def test_one_day_over_max_range(self):
        """Should reject one day over max range."""
        # 366 days
        msg = validate_date_range('2024-01-01', '2025-01-01')
        assert msg is not None
        assert str(MAX_DATE_RANGE_DAYS) in msg

    def test_invalid_date_format(self):
        """Should reject invalid date format."""
        self._assert_format_error(validate_date_range('01-01-2024', '12-31-2024'))

    def test_invalid_start_date(self):
        """Should reject invalid start date."""
        self._assert_format_error(validate_date_range('2024-13-01', '2024-12-31'))

    def test_invalid_end_date(self):
        """Should reject invalid end date (Feb 30 does not exist)."""
        self._assert_format_error(validate_date_range('2024-01-01', '2024-02-30'))

    def test_non_date_string(self):
        """Should reject non-date strings."""
        self._assert_format_error(validate_date_range('abc', 'def'))
class TestBuildResourceFilterSql:
    """Tests for _build_resource_filter_sql function."""

    def test_empty_list(self):
        """Should return 1=0 for empty input (no results)."""
        assert _build_resource_filter_sql([]) == "1=0"

    def test_single_id(self):
        """Should build simple IN clause for single ID."""
        clause = _build_resource_filter_sql(['RES001'])
        assert "j.RESOURCEID IN" in clause
        assert "'RES001'" in clause

    def test_multiple_ids(self):
        """Should build IN clause with multiple IDs."""
        clause = _build_resource_filter_sql(['RES001', 'RES002'])
        assert "j.RESOURCEID IN" in clause
        assert "'RES001'" in clause
        assert "'RES002'" in clause

    def test_custom_column(self):
        """Should use custom column name."""
        assert "r.ID IN" in _build_resource_filter_sql(['RES001'], column='r.ID')

    def test_large_list_uses_or(self):
        """Should use OR for chunked results."""
        # More than BATCH_SIZE IDs forces chunking into OR-joined IN clauses.
        many_ids = ['RES{:05d}'.format(i) for i in range(BATCH_SIZE + 10)]
        clause = _build_resource_filter_sql(many_ids)
        assert " OR " in clause
        # The OR-joined conditions must be wrapped in parentheses.
        assert clause.startswith("(")
        assert clause.endswith(")")


class TestServiceConstants:
    """Tests for service constants."""

    def test_batch_size_is_reasonable(self):
        """Batch size should be <= 1000 (Oracle limit)."""
        assert BATCH_SIZE <= 1000

    def test_max_date_range_is_year(self):
        """Max date range should be 365 days."""
        assert MAX_DATE_RANGE_DAYS == 365
logs[0]["logger_name"] == "test.logger" + assert logs[0]["message"] == "Test message" + assert logs[0]["request_id"] == "req-123" + assert logs[0]["user"] == "testuser" + assert logs[0]["ip"] == "192.168.1.1" + + def test_query_logs_by_level(self, log_store): + """Query logs filtered by level.""" + log_store.write_log(level="INFO", logger_name="test", message="Info msg") + log_store.write_log(level="ERROR", logger_name="test", message="Error msg") + log_store.write_log(level="WARNING", logger_name="test", message="Warning msg") + + error_logs = log_store.query_logs(level="ERROR", limit=10) + assert len(error_logs) == 1 + assert error_logs[0]["level"] == "ERROR" + + def test_query_logs_by_keyword(self, log_store): + """Query logs filtered by keyword search.""" + log_store.write_log(level="INFO", logger_name="test", message="User logged in") + log_store.write_log(level="INFO", logger_name="test", message="Data processed") + log_store.write_log(level="INFO", logger_name="test", message="User logged out") + + user_logs = log_store.query_logs(q="User", limit=10) + assert len(user_logs) == 2 + + def test_query_logs_limit(self, log_store): + """Query logs respects limit parameter.""" + for i in range(20): + log_store.write_log(level="INFO", logger_name="test", message=f"Msg {i}") + + logs = log_store.query_logs(limit=5) + assert len(logs) == 5 + + def test_query_logs_since(self, log_store): + """Query logs filtered by timestamp.""" + # Write some old logs + log_store.write_log(level="INFO", logger_name="test", message="Old msg") + + # Record time after first log + time.sleep(0.1) + since_time = datetime.now().isoformat() + + # Write some new logs + time.sleep(0.1) + log_store.write_log(level="INFO", logger_name="test", message="New msg 1") + log_store.write_log(level="INFO", logger_name="test", message="New msg 2") + + logs = log_store.query_logs(since=since_time, limit=10) + assert len(logs) == 2 + + def test_query_logs_order(self, log_store): + """Query logs returns 
class TestLogStoreRetention:
    """Test log store retention policies (max-row and age-based cleanup)."""

    @pytest.fixture
    def temp_db_path(self):
        """Create a temporary database file, removed after the test."""
        fd, path = tempfile.mkstemp(suffix='.db')
        os.close(fd)
        yield path
        try:
            os.unlink(path)
        except OSError:
            pass

    def test_cleanup_by_max_rows(self, temp_db_path):
        """Cleanup removes old logs when max rows exceeded."""
        # Patch the max rows config to a small value
        with patch('mes_dashboard.core.log_store.LOG_SQLITE_MAX_ROWS', 5):
            store = LogStore(db_path=temp_db_path)
            store.initialize()

            # Write more than max_rows
            for i in range(10):
                store.write_log(level="INFO", logger_name="test", message=f"Msg {i}")

            # Force cleanup - need to reimport for patched value
            from mes_dashboard.core import log_store as ls_module
            with patch.object(ls_module, 'LOG_SQLITE_MAX_ROWS', 5):
                deleted = store.cleanup_old_logs()

            logs = store.query_logs(limit=100)
            # Remaining rows plus deleted rows must account for everything
            # written; the previous `len(logs) <= 10` was a tautology since
            # exactly 10 rows were inserted into a fresh database.
            assert deleted >= 0
            assert len(logs) == 10 - deleted

    def test_cleanup_by_retention_days(self, temp_db_path):
        """Cleanup removes logs older than retention period."""
        # Patch the retention days config
        with patch('mes_dashboard.core.log_store.LOG_SQLITE_RETENTION_DAYS', 1):
            store = LogStore(db_path=temp_db_path)
            store.initialize()

            # Insert an old log directly into the database
            conn = sqlite3.connect(temp_db_path)
            cursor = conn.cursor()
            old_time = (datetime.now() - timedelta(days=2)).isoformat()
            cursor.execute("""
                INSERT INTO logs (timestamp, level, logger_name, message)
                VALUES (?, 'INFO', 'test', 'Old message')
            """, (old_time,))
            conn.commit()
            conn.close()

            # Write a new log
            store.write_log(level="INFO", logger_name="test", message="New message")

            # Force cleanup with patched retention
            from mes_dashboard.core import log_store as ls_module
            with patch.object(ls_module, 'LOG_SQLITE_RETENTION_DAYS', 1):
                deleted = store.cleanup_old_logs()

            logs = store.query_logs(limit=100)
            # The stale row must actually be gone (the original test never
            # asserted this) while the fresh row survives the cleanup.
            assert deleted >= 1
            assert all(entry["message"] != "Old message" for entry in logs)
            new_logs = [entry for entry in logs if entry["message"] == "New message"]
            assert len(new_logs) >= 1
+ """Log handler respects level filtering.""" + import logging + + store = LogStore(db_path=temp_db_path) + handler = SQLiteLogHandler(store) + handler.setLevel(logging.WARNING) + + logger = logging.getLogger("test_handler_level") + logger.addHandler(handler) + logger.setLevel(logging.DEBUG) + + logger.debug("Debug message") + logger.info("Info message") + logger.warning("Warning message") + + time.sleep(0.1) + + logs = store.query_logs(limit=10) + # Only warning should be written (handler level is WARNING) + warning_logs = [l for l in logs if l["logger_name"] == "test_handler_level"] + assert len(warning_logs) == 1 + assert warning_logs[0]["level"] == "WARNING" + + # Cleanup + logger.removeHandler(handler) diff --git a/tests/test_metrics.py b/tests/test_metrics.py new file mode 100644 index 0000000..a19de20 --- /dev/null +++ b/tests/test_metrics.py @@ -0,0 +1,203 @@ +# -*- coding: utf-8 -*- +"""Unit tests for performance metrics module.""" + +import pytest +from mes_dashboard.core.metrics import ( + QueryMetrics, + MetricsSummary, + get_query_metrics, + get_metrics_summary, + record_query_latency, + SLOW_QUERY_THRESHOLD +) + + +class TestQueryMetrics: + """Test QueryMetrics class.""" + + def test_initial_state_empty(self): + """New metrics instance has no data.""" + metrics = QueryMetrics(window_size=100) + percentiles = metrics.get_percentiles() + + assert percentiles["count"] == 0 + assert percentiles["p50"] == 0.0 + assert percentiles["p95"] == 0.0 + assert percentiles["p99"] == 0.0 + + def test_record_latency(self): + """Latencies are recorded correctly.""" + metrics = QueryMetrics(window_size=100) + + metrics.record_latency(0.1) + metrics.record_latency(0.2) + metrics.record_latency(0.3) + + latencies = metrics.get_latencies() + assert len(latencies) == 3 + assert latencies == [0.1, 0.2, 0.3] + + def test_window_size_limit(self): + """Window size limits number of samples.""" + metrics = QueryMetrics(window_size=5) + + for i in range(10): + 
metrics.record_latency(float(i)) + + latencies = metrics.get_latencies() + assert len(latencies) == 5 + # Should have last 5 values (5, 6, 7, 8, 9) + assert latencies == [5.0, 6.0, 7.0, 8.0, 9.0] + + def test_percentile_calculation_p50(self): + """P50 (median) is calculated correctly.""" + metrics = QueryMetrics(window_size=100) + + # Record 100 values: 1, 2, 3, ..., 100 + for i in range(1, 101): + metrics.record_latency(float(i)) + + percentiles = metrics.get_percentiles() + # P50 of 1-100 should be around 50 + assert 49 <= percentiles["p50"] <= 51 + + def test_percentile_calculation_p95(self): + """P95 is calculated correctly.""" + metrics = QueryMetrics(window_size=100) + + # Record 100 values: 1, 2, 3, ..., 100 + for i in range(1, 101): + metrics.record_latency(float(i)) + + percentiles = metrics.get_percentiles() + # P95 of 1-100 should be around 95 + assert 94 <= percentiles["p95"] <= 96 + + def test_percentile_calculation_p99(self): + """P99 is calculated correctly.""" + metrics = QueryMetrics(window_size=100) + + # Record 100 values: 1, 2, 3, ..., 100 + for i in range(1, 101): + metrics.record_latency(float(i)) + + percentiles = metrics.get_percentiles() + # P99 of 1-100 should be around 99 + assert 98 <= percentiles["p99"] <= 100 + + def test_slow_query_count(self): + """Slow queries (> threshold) are counted.""" + metrics = QueryMetrics(window_size=100) + + # Record some fast and slow queries + metrics.record_latency(0.1) # Fast + metrics.record_latency(0.5) # Fast + metrics.record_latency(1.5) # Slow + metrics.record_latency(2.0) # Slow + metrics.record_latency(0.8) # Fast + + percentiles = metrics.get_percentiles() + assert percentiles["slow_count"] == 2 + + def test_get_summary(self): + """Summary includes all required fields.""" + metrics = QueryMetrics(window_size=100) + + metrics.record_latency(0.1) + metrics.record_latency(0.5) + metrics.record_latency(1.5) + + summary = metrics.get_summary() + + assert isinstance(summary, MetricsSummary) + assert 
class TestGlobalMetrics:
    """Test global metrics functions."""

    def test_get_query_metrics_returns_singleton(self):
        """Repeated calls hand back the same shared instance."""
        assert get_query_metrics() is get_query_metrics()

    def test_record_query_latency_uses_global(self):
        """record_query_latency feeds the global metrics instance."""
        shared = get_query_metrics()
        before = shared.get_percentiles()["count"]

        record_query_latency(0.1)

        assert shared.get_percentiles()["count"] == before + 1

    def test_get_metrics_summary_returns_dict(self):
        """get_metrics_summary returns dictionary format with all fields."""
        summary = get_metrics_summary()

        assert isinstance(summary, dict)
        for key in (
            "p50_ms", "p95_ms", "p99_ms", "count",
            "slow_count", "slow_rate", "worker_pid", "collected_at",
        ):
            assert key in summary
@pytest.fixture
def mock_registry(temp_data_file):
    """Point page_registry at the temp file for the test's duration."""
    saved_file = page_registry.DATA_FILE
    saved_cache = page_registry._cache

    page_registry.DATA_FILE = temp_data_file
    page_registry._cache = None  # force a reload from the temp file

    yield temp_data_file

    # Put the module back the way we found it.
    page_registry.DATA_FILE = saved_file
    page_registry._cache = saved_cache
class TestSetPageStatus:
    """Tests for set_page_status function."""

    def test_update_existing_page(self, mock_registry):
        """Updating an already-registered route changes its status."""
        page_registry.set_page_status("/", "dev")
        assert page_registry.get_page_status("/") == "dev"

    def test_add_new_page(self, mock_registry):
        """A previously unknown route is registered on first set."""
        page_registry.set_page_status("/new-page", "released", "New Page")
        assert page_registry.get_page_status("/new-page") == "released"

    def test_invalid_status_raises_error(self, mock_registry):
        """An unknown status value raises ValueError."""
        with pytest.raises(ValueError, match="Invalid status"):
            page_registry.set_page_status("/", "invalid")

    def test_update_page_name(self, mock_registry):
        """Setting a status with a name also updates the page name."""
        page_registry.set_page_status("/", "released", "New Name")
        home = next(p for p in page_registry.get_all_pages() if p["route"] == "/")
        assert home["name"] == "New Name"
class TestReloadCache:
    """Tests for reload_cache function."""

    def test_reload_cache(self, mock_registry, temp_data_file):
        """reload_cache picks up changes written to disk behind the cache."""
        # Prime the cache with the initial status.
        assert page_registry.get_page_status("/") == "released"

        # Flip the status on disk without going through the registry.
        on_disk = json.loads(temp_data_file.read_text())
        on_disk["pages"][0]["status"] = "dev"
        temp_data_file.write_text(json.dumps(on_disk))

        # The cached value is still served until an explicit reload.
        assert page_registry.get_page_status("/") == "released"

        page_registry.reload_cache()
        assert page_registry.get_page_status("/") == "dev"
class TestAPIResponseFormat:
    """Test standardized API response format."""

    def test_success_response_format(self, admin_client):
        """Success responses carry success=True and a data envelope."""
        response = admin_client.get('/admin/api/system-status')

        assert response.status_code == 200
        body = json.loads(response.data)
        assert body["success"] is True
        assert "data" in body

    def test_unauthenticated_redirect(self, client):
        """Unauthenticated requests redirect to login."""
        # Should redirect (302) to the login page.
        assert client.get('/admin/performance').status_code == 302
class TestSystemStatusAPI:
    """Test system status API endpoint."""

    def test_system_status_returns_all_components(self, admin_client):
        """System status includes all component statuses."""
        response = admin_client.get('/admin/api/system-status')

        assert response.status_code == 200
        body = json.loads(response.data)
        assert body["success"] is True

        status = body["data"]
        for component in (
            "database", "redis", "circuit_breaker",
            "runtime_resilience", "worker_pid",
        ):
            assert component in status

        resilience = status["runtime_resilience"]
        for key in ("thresholds", "restart_churn", "recovery_recommendation"):
            assert key in resilience
data["data"] + assert "slow_rate" in data["data"] + + def test_metrics_includes_latencies(self, admin_client): + """Metrics API includes latency distribution.""" + response = admin_client.get('/admin/api/metrics') + + assert response.status_code == 200 + data = json.loads(response.data) + assert "latencies" in data["data"] + assert isinstance(data["data"]["latencies"], list) + + +class TestLogsAPI: + """Test logs API endpoint.""" + + def test_logs_api_returns_logs(self, admin_client): + """Logs API returns log entries.""" + response = admin_client.get('/admin/api/logs') + + assert response.status_code == 200 + data = json.loads(response.data) + assert data["success"] is True + assert "logs" in data["data"] + assert "enabled" in data["data"] + + def test_logs_api_filter_by_level(self, admin_client): + """Logs API filters by level.""" + response = admin_client.get('/admin/api/logs?level=ERROR') + + assert response.status_code == 200 + data = json.loads(response.data) + assert data["success"] is True + + def test_logs_api_filter_by_search(self, admin_client): + """Logs API filters by search term.""" + response = admin_client.get('/admin/api/logs?q=database') + + assert response.status_code == 200 + data = json.loads(response.data) + assert data["success"] is True + + def test_logs_api_pagination(self, admin_client): + """Logs API supports pagination with limit and offset.""" + # Test with limit=10 + response = admin_client.get('/admin/api/logs?limit=10&offset=0') + + assert response.status_code == 200 + data = json.loads(response.data) + assert data["success"] is True + assert "total" in data["data"] + assert "logs" in data["data"] + assert len(data["data"]["logs"]) <= 10 + + def test_logs_api_pagination_offset(self, admin_client): + """Logs API offset skips entries correctly.""" + # Get first page + response1 = admin_client.get('/admin/api/logs?limit=5&offset=0') + data1 = json.loads(response1.data) + + # Get second page + response2 = 
class TestLogsCleanupAPI:
    """Test log cleanup API endpoint."""

    def test_logs_cleanup_requires_auth(self, client):
        """Log cleanup requires admin authentication."""
        # Unauthenticated callers are redirected to the login page.
        assert client.post('/admin/api/logs/cleanup').status_code == 302

    def test_logs_cleanup_success(self, admin_client):
        """Log cleanup returns success with before/after stats."""
        response = admin_client.post('/admin/api/logs/cleanup')

        assert response.status_code == 200
        body = json.loads(response.data)
        assert body["success"] is True

        payload = body["data"]
        assert "deleted" in payload
        assert "before" in payload
        assert "after" in payload
        assert "count" in payload["before"]
        assert "size_bytes" in payload["before"]
class TestCircuitBreakerIntegration:
    """Test circuit breaker integration with database layer."""

    def test_circuit_breaker_status_in_system_status(self, admin_client):
        """Circuit breaker status is included in system status."""
        response = admin_client.get('/admin/api/system-status')
        assert response.status_code == 200

        breaker = json.loads(response.data)["data"]["circuit_breaker"]
        assert "state" in breaker
        assert "enabled" in breaker
== 302 + + def test_performance_page_loads(self, admin_client): + """Performance page loads for admin users.""" + response = admin_client.get('/admin/performance') + + # Should be 200 for authenticated admin + assert response.status_code == 200 + # Check for performance-related content + data_str = response.data.decode('utf-8', errors='ignore').lower() + assert 'performance' in data_str or '效能' in data_str diff --git a/tests/test_permissions.py b/tests/test_permissions.py new file mode 100644 index 0000000..bd6ce72 --- /dev/null +++ b/tests/test_permissions.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +"""Unit tests for permissions module.""" + +import pytest +from flask import Flask + +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) + +from mes_dashboard.core.permissions import is_admin_logged_in, get_current_admin, admin_required + + +@pytest.fixture +def app(): + """Create a test Flask app.""" + app = Flask(__name__) + app.secret_key = "test-secret-key" + app.config["TESTING"] = True + return app + + +class TestIsAdminLoggedIn: + """Tests for is_admin_logged_in function.""" + + def test_admin_logged_in(self, app): + """Test when admin is logged in.""" + with app.test_request_context(): + from flask import session + session["admin"] = {"username": "admin", "mail": "admin@test.com"} + assert is_admin_logged_in() is True + + def test_admin_not_logged_in(self, app): + """Test when admin is not logged in.""" + with app.test_request_context(): + assert is_admin_logged_in() is False + + +class TestGetCurrentAdmin: + """Tests for get_current_admin function.""" + + def test_get_admin_when_logged_in(self, app): + """Test getting admin info when logged in.""" + with app.test_request_context(): + from flask import session + admin_data = {"username": "admin", "mail": "admin@test.com"} + session["admin"] = admin_data + result = get_current_admin() + assert result == admin_data + + def test_get_admin_when_not_logged_in(self, 
app): + """Test getting admin info when not logged in.""" + with app.test_request_context(): + result = get_current_admin() + assert result is None + + +class TestAdminRequired: + """Tests for admin_required decorator.""" + + def test_admin_required_when_logged_in(self, app): + """Test decorator allows access when admin is logged in.""" + @app.route("/test") + @admin_required + def test_route(): + return "success" + + with app.test_client() as client: + with client.session_transaction() as sess: + sess["admin"] = {"username": "admin"} + + response = client.get("/test") + assert response.status_code == 200 + assert response.data == b"success" + + def test_admin_required_when_not_logged_in(self, app): + """Test decorator redirects when admin is not logged in.""" + from flask import Blueprint + + # Register auth blueprint first with correct endpoint name + auth_bp = Blueprint("auth", __name__, url_prefix="/admin") + + @auth_bp.route("/login", endpoint="login") + def login_view(): + return "login" + + app.register_blueprint(auth_bp) + + # Now add the protected route + @app.route("/test") + @admin_required + def test_route(): + return "success" + + with app.test_client() as client: + response = client.get("/test") + assert response.status_code == 302 + assert "/admin/login" in response.location + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/test_realtime_equipment_cache.py b/tests/test_realtime_equipment_cache.py new file mode 100644 index 0000000..8028e05 --- /dev/null +++ b/tests/test_realtime_equipment_cache.py @@ -0,0 +1,494 @@ +# -*- coding: utf-8 -*- +"""Unit tests for realtime_equipment_cache module. + +Tests aggregation, status classification, and cache query functionality. 
+""" + +import pytest +from unittest.mock import patch, MagicMock +import json + + +class TestClassifyStatus: + """Test _classify_status function.""" + + def test_classifies_prd_as_productive(self): + """Test PRD status is classified as PRODUCTIVE.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('PRD') + assert result == 'PRODUCTIVE' + + def test_classifies_sby_as_standby(self): + """Test SBY status is classified as STANDBY.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('SBY') + assert result == 'STANDBY' + + def test_classifies_udt_as_down(self): + """Test UDT status is classified as DOWN.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('UDT') + assert result == 'DOWN' + + def test_classifies_sdt_as_down(self): + """Test SDT status is classified as DOWN.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('SDT') + assert result == 'DOWN' + + def test_classifies_egt_as_engineering(self): + """Test EGT status is classified as ENGINEERING.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('EGT') + assert result == 'ENGINEERING' + + def test_classifies_nst_as_not_scheduled(self): + """Test NST status is classified as NOT_SCHEDULED.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('NST') + assert result == 'NOT_SCHEDULED' + + def test_classifies_scrap_as_inactive(self): + """Test SCRAP status is classified as INACTIVE.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('SCRAP') + assert result == 'INACTIVE' + + def test_classifies_unknown_as_other(self): + """Test unknown status is classified as OTHER.""" + from 
mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('UNKNOWN_STATUS') + assert result == 'OTHER' + + def test_handles_none_status(self): + """Test None status is classified as OTHER.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status(None) + assert result == 'OTHER' + + def test_handles_empty_status(self): + """Test empty string status is classified as OTHER.""" + from mes_dashboard.services.realtime_equipment_cache import _classify_status + + result = _classify_status('') + assert result == 'OTHER' + + +class TestAggregateByResourceid: + """Test _aggregate_by_resourceid function.""" + + def test_aggregates_single_record(self): + """Test aggregation with single record per resource.""" + from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid + + records = [ + { + 'RESOURCEID': 'R001', + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': 'JO001', + 'JOBSTATUS': 'RUN', + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': 100, + 'LOTTRACKINTIME': '2024-01-15T10:00:00', + } + ] + + result = _aggregate_by_resourceid(records) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + assert result[0]['LOT_COUNT'] == 1 + assert result[0]['TOTAL_TRACKIN_QTY'] == 100 + assert result[0]['STATUS_CATEGORY'] == 'PRODUCTIVE' + + def test_aggregates_multiple_lots(self): + """Test aggregation with multiple LOTs per resource (e.g., oven).""" + from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid + + records = [ + { + 'RESOURCEID': 'R001', + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': 'JO001', + 'JOBSTATUS': 'RUN', + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, 
+ 'LOTTRACKINQTY_PCS': 100, + 'LOTTRACKINTIME': '2024-01-15T10:00:00', + }, + { + 'RESOURCEID': 'R001', + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': 'JO002', + 'JOBSTATUS': 'RUN', + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': 150, + 'LOTTRACKINTIME': '2024-01-15T11:00:00', + }, + { + 'RESOURCEID': 'R001', + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': 'JO003', + 'JOBSTATUS': 'RUN', + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': 50, + 'LOTTRACKINTIME': '2024-01-15T09:00:00', + }, + ] + + result = _aggregate_by_resourceid(records) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + assert result[0]['LOT_COUNT'] == 3 + assert result[0]['TOTAL_TRACKIN_QTY'] == 300 # 100 + 150 + 50 + assert result[0]['LATEST_TRACKIN_TIME'] == '2024-01-15T11:00:00' + + def test_aggregates_multiple_resources(self): + """Test aggregation with multiple different resources.""" + from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid + + records = [ + { + 'RESOURCEID': 'R001', + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': 'JO001', + 'JOBSTATUS': 'RUN', + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': 100, + 'LOTTRACKINTIME': '2024-01-15T10:00:00', + }, + { + 'RESOURCEID': 'R002', + 'EQUIPMENTID': 'E002', + 'OBJECTCATEGORY': 'WAFERSORT', + 'EQUIPMENTASSETSSTATUS': 'SBY', + 'EQUIPMENTASSETSSTATUSREASON': 'Waiting', + 'JOBORDER': None, + 'JOBSTATUS': None, + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': None, + 'LOTTRACKINTIME': None, + }, + ] + + result = _aggregate_by_resourceid(records) + + 
assert len(result) == 2 + r1 = next(r for r in result if r['RESOURCEID'] == 'R001') + r2 = next(r for r in result if r['RESOURCEID'] == 'R002') + + assert r1['LOT_COUNT'] == 1 + assert r1['STATUS_CATEGORY'] == 'PRODUCTIVE' + assert r2['LOT_COUNT'] == 1 + assert r2['STATUS_CATEGORY'] == 'STANDBY' + + def test_handles_empty_records(self): + """Test handles empty record list.""" + from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid + + result = _aggregate_by_resourceid([]) + assert result == [] + + def test_handles_null_quantities(self): + """Test handles null quantities gracefully.""" + from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid + + records = [ + { + 'RESOURCEID': 'R001', + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'SBY', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': None, + 'JOBSTATUS': None, + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': None, + 'LOTTRACKINTIME': None, + } + ] + + result = _aggregate_by_resourceid(records) + + assert len(result) == 1 + assert result[0]['TOTAL_TRACKIN_QTY'] == 0 + assert result[0]['LATEST_TRACKIN_TIME'] is None + + def test_skips_records_without_resourceid(self): + """Test skips records without RESOURCEID.""" + from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid + + records = [ + { + 'RESOURCEID': None, + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': None, + 'JOBSTATUS': None, + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': 100, + 'LOTTRACKINTIME': '2024-01-15T10:00:00', + }, + { + 'RESOURCEID': 'R001', + 'EQUIPMENTID': 'E001', + 'OBJECTCATEGORY': 'ASSEMBLY', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'JOBORDER': None, + 'JOBSTATUS': None, + 'SYMPTOMCODE': None, + 
'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOTTRACKINQTY_PCS': 50, + 'LOTTRACKINTIME': '2024-01-15T10:00:00', + }, + ] + + result = _aggregate_by_resourceid(records) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + +class TestGetEquipmentStatusById: + """Test get_equipment_status_by_id function.""" + + @pytest.fixture(autouse=True) + def reset_modules(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_returns_none_when_redis_unavailable(self): + """Test returns None when Redis client unavailable.""" + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_id + + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=None): + result = get_equipment_status_by_id('R001') + assert result is None + + def test_returns_none_when_id_not_found(self): + """Test returns None when resource ID not in index.""" + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_id + + mock_client = MagicMock() + mock_client.hget.return_value = None + + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=mock_client): + with patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'): + result = get_equipment_status_by_id('R999') + assert result is None + + def test_returns_matching_record(self): + """Test returns matching record from cache.""" + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_id + + test_data = [ + {'RESOURCEID': 'R001', 'STATUS_CATEGORY': 'PRODUCTIVE'}, + {'RESOURCEID': 'R002', 'STATUS_CATEGORY': 'STANDBY'}, + ] + + mock_client = MagicMock() + mock_client.hget.return_value = '1' # Index 1 -> R002 + mock_client.get.return_value = json.dumps(test_data) + + with 
patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=mock_client): + with patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'): + result = get_equipment_status_by_id('R002') + + assert result is not None + assert result['RESOURCEID'] == 'R002' + assert result['STATUS_CATEGORY'] == 'STANDBY' + + +class TestGetEquipmentStatusByIds: + """Test get_equipment_status_by_ids function.""" + + @pytest.fixture(autouse=True) + def reset_modules(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_returns_empty_for_empty_input(self): + """Test returns empty list for empty input.""" + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_ids + + result = get_equipment_status_by_ids([]) + assert result == [] + + def test_returns_empty_when_redis_unavailable(self): + """Test returns empty list when Redis unavailable.""" + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_ids + + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=None): + result = get_equipment_status_by_ids(['R001', 'R002']) + assert result == [] + + def test_returns_matching_records(self): + """Test returns all matching records.""" + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_ids + + test_data = [ + {'RESOURCEID': 'R001', 'STATUS_CATEGORY': 'PRODUCTIVE'}, + {'RESOURCEID': 'R002', 'STATUS_CATEGORY': 'STANDBY'}, + {'RESOURCEID': 'R003', 'STATUS_CATEGORY': 'DOWN'}, + ] + + mock_client = MagicMock() + mock_client.hmget.return_value = ['0', '2', None] # R001 at idx 0, R003 at idx 2, R999 not found + mock_client.get.return_value = json.dumps(test_data) + + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=mock_client): + with 
patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'): + result = get_equipment_status_by_ids(['R001', 'R003', 'R999']) + + assert len(result) == 2 + ids = [r['RESOURCEID'] for r in result] + assert 'R001' in ids + assert 'R003' in ids + assert 'R999' not in ids + + +class TestGetAllEquipmentStatus: + """Test get_all_equipment_status function.""" + + @pytest.fixture(autouse=True) + def reset_modules(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_returns_empty_when_redis_unavailable(self): + """Test returns empty list when Redis unavailable.""" + from mes_dashboard.services.realtime_equipment_cache import get_all_equipment_status + + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=None): + result = get_all_equipment_status() + assert result == [] + + def test_returns_empty_when_no_data(self): + """Test returns empty list when no data in cache.""" + from mes_dashboard.services.realtime_equipment_cache import get_all_equipment_status + + mock_client = MagicMock() + mock_client.get.return_value = None + + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=mock_client): + with patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'): + result = get_all_equipment_status() + assert result == [] + + def test_returns_all_cached_data(self): + """Test returns all cached equipment status.""" + from mes_dashboard.services.realtime_equipment_cache import get_all_equipment_status + + test_data = [ + {'RESOURCEID': 'R001', 'STATUS_CATEGORY': 'PRODUCTIVE'}, + {'RESOURCEID': 'R002', 'STATUS_CATEGORY': 'STANDBY'}, + ] + + mock_client = MagicMock() + mock_client.get.return_value = json.dumps(test_data) + + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', 
return_value=mock_client): + with patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'): + result = get_all_equipment_status() + + assert len(result) == 2 + assert result[0]['RESOURCEID'] == 'R001' + assert result[1]['RESOURCEID'] == 'R002' + + +class TestGetEquipmentStatusCacheStatus: + """Test get_equipment_status_cache_status function.""" + + @pytest.fixture + def app(self): + """Create application for testing.""" + from mes_dashboard.app import create_app + import mes_dashboard.core.database as db + db._ENGINE = None + app = create_app('testing') + app.config['TESTING'] = True + return app + + def test_returns_disabled_when_cache_disabled(self, app): + """Test returns disabled status when cache is disabled.""" + app.config['REALTIME_EQUIPMENT_CACHE_ENABLED'] = False + + with app.app_context(): + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_cache_status + result = get_equipment_status_cache_status() + + assert result['enabled'] is False + assert result['loaded'] is False + + def test_returns_loaded_status_when_data_exists(self, app): + """Test returns loaded status when cache has data.""" + app.config['REALTIME_EQUIPMENT_CACHE_ENABLED'] = True + + mock_client = MagicMock() + mock_client.get.side_effect = lambda key: { + 'mes_wip:equipment_status:meta:updated': '2024-01-15T10:30:00', + 'mes_wip:equipment_status:meta:count': '1000', + }.get(key) + + with app.app_context(): + with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=mock_client): + with patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'): + from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_cache_status + result = get_equipment_status_cache_status() + + assert result['enabled'] is True + assert result['loaded'] is True + assert result['count'] == 1000 diff --git a/tests/test_redis_client.py 
b/tests/test_redis_client.py new file mode 100644 index 0000000..82520a9 --- /dev/null +++ b/tests/test_redis_client.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +"""Unit tests for Redis client module. + +Tests Redis connection management with mocked Redis. +""" + +import pytest +from unittest.mock import patch, MagicMock +import importlib + + +class TestRedisClient: + """Test Redis client connection management.""" + + @pytest.fixture(autouse=True) + def reset_module(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_get_redis_client_success(self, reset_module): + """Test successful Redis client creation.""" + import mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc.redis.Redis, 'from_url') as mock_from_url: + mock_client = MagicMock() + mock_client.ping.return_value = True + mock_from_url.return_value = mock_client + + client = rc.get_redis_client() + + assert client is mock_client + mock_from_url.assert_called_once() + + def test_get_redis_client_disabled(self, reset_module): + """Test Redis client returns None when disabled.""" + import mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_ENABLED', False): + client = rc.get_redis_client() + assert client is None + + def test_get_redis_client_connection_error(self, reset_module): + """Test Redis client handles connection errors gracefully.""" + import mes_dashboard.core.redis_client as rc + import redis as redis_lib + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc.redis.Redis, 'from_url') as mock_from_url: + mock_from_url.side_effect = redis_lib.RedisError("Connection refused") + + client = rc.get_redis_client() + + assert client is None + + def test_redis_available_true(self, reset_module): + """Test redis_available returns True when Redis is connected.""" + import 
mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc.redis.Redis, 'from_url') as mock_from_url: + mock_client = MagicMock() + mock_client.ping.return_value = True + mock_from_url.return_value = mock_client + + assert rc.redis_available() is True + + def test_redis_available_disabled(self, reset_module): + """Test redis_available returns False when disabled.""" + import mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_ENABLED', False): + assert rc.redis_available() is False + + def test_get_key_with_prefix(self): + """Test get_key adds prefix correctly.""" + import mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_KEY_PREFIX', 'test_prefix'): + key = rc.get_key('mykey') + assert key == 'test_prefix:mykey' + + def test_get_key_without_prefix(self): + """Test get_key works with empty prefix.""" + import mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_KEY_PREFIX', ''): + key = rc.get_key('mykey') + assert key == ':mykey' + + +class TestRedisClientSingleton: + """Test Redis client singleton behavior.""" + + @pytest.fixture(autouse=True) + def reset_module(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_client_is_singleton(self, reset_module): + """Test that get_redis_client returns same instance.""" + import mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc.redis.Redis, 'from_url') as mock_from_url: + mock_client = MagicMock() + mock_client.ping.return_value = True + mock_from_url.return_value = mock_client + + client1 = rc.get_redis_client() + client2 = rc.get_redis_client() + + assert client1 is client2 + # from_url should only be called once + assert mock_from_url.call_count == 1 + + +class TestCloseRedis: + """Test Redis client cleanup.""" + + 
@pytest.fixture(autouse=True) + def reset_module(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_close_redis(self, reset_module): + """Test close_redis properly closes connection.""" + import mes_dashboard.core.redis_client as rc + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc.redis.Redis, 'from_url') as mock_from_url: + mock_client = MagicMock() + mock_client.ping.return_value = True + mock_from_url.return_value = mock_client + + # Get client first + client = rc.get_redis_client() + assert client is not None + + # Close it + rc.close_redis() + + # Verify close was called + mock_client.close.assert_called_once() + assert rc._REDIS_CLIENT is None + + def test_close_redis_when_none(self, reset_module): + """Test close_redis does nothing when no client.""" + import mes_dashboard.core.redis_client as rc + + # Should not raise any errors + rc.close_redis() + assert rc._REDIS_CLIENT is None diff --git a/tests/test_resilience.py b/tests/test_resilience.py new file mode 100644 index 0000000..bb90652 --- /dev/null +++ b/tests/test_resilience.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +"""Tests for runtime resilience helper contracts.""" + +from __future__ import annotations + +from datetime import datetime, timedelta, timezone + +from mes_dashboard.core.resilience import ( + build_recovery_recommendation, + get_resilience_thresholds, + summarize_restart_history, +) + + +def test_get_resilience_thresholds_from_env(monkeypatch): + monkeypatch.setenv("RESILIENCE_RESTART_CHURN_WINDOW_SECONDS", "120") + monkeypatch.setenv("RESILIENCE_RESTART_CHURN_THRESHOLD", "2") + monkeypatch.setenv("RESILIENCE_POOL_SATURATION_WARNING", "0.8") + + thresholds = get_resilience_thresholds() + assert thresholds["restart_churn_window_seconds"] == 120 + assert thresholds["restart_churn_threshold"] == 2 + assert thresholds["pool_saturation_warning"] == 
def test_summarize_restart_history_counts_entries_in_window():
    """Only restarts inside the rolling window count toward churn."""
    reference = datetime(2026, 2, 7, 12, 0, tzinfo=timezone.utc)
    # 30s and 90s ago fall inside a 120s window; 700s ago falls outside.
    history = [
        {"completed_at": (reference - timedelta(seconds=offset)).isoformat()}
        for offset in (30, 90, 700)
    ]

    summary = summarize_restart_history(
        history, now=reference, window_seconds=120, threshold=2
    )

    assert summary["window_seconds"] == 120
    assert summary["threshold"] == 2
    assert summary["count"] == 2
    assert summary["exceeded"] is True
+""" + +import pytest +from unittest.mock import patch, MagicMock +import pandas as pd +import json + + +class TestGetDistinctValues: + """Test get_distinct_values function.""" + + @pytest.fixture(autouse=True) + def reset_modules(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_returns_sorted_unique_values(self): + """Test returns sorted unique values from resources.""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'WORKCENTERNAME': 'Station_B', 'RESOURCEFAMILYNAME': 'Family1'}, + {'WORKCENTERNAME': 'Station_A', 'RESOURCEFAMILYNAME': 'Family2'}, + {'WORKCENTERNAME': 'Station_B', 'RESOURCEFAMILYNAME': 'Family1'}, # duplicate + {'WORKCENTERNAME': 'Station_C', 'RESOURCEFAMILYNAME': None}, # None value + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_distinct_values('WORKCENTERNAME') + + assert result == ['Station_A', 'Station_B', 'Station_C'] + + def test_excludes_none_and_empty_strings(self): + """Test excludes None and empty string values.""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'RESOURCEFAMILYNAME': 'Family1'}, + {'RESOURCEFAMILYNAME': None}, + {'RESOURCEFAMILYNAME': ''}, + {'RESOURCEFAMILYNAME': 'Family2'}, + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_distinct_values('RESOURCEFAMILYNAME') + + assert result == ['Family1', 'Family2'] + + def test_handles_nan_values(self): + """Test handles NaN values (pandas float NaN).""" + import mes_dashboard.services.resource_cache as rc + import numpy as np + + mock_resources = [ + {'WORKCENTERNAME': 'Station_A'}, + {'WORKCENTERNAME': float('nan')}, # NaN + {'WORKCENTERNAME': np.nan}, # NumPy NaN + {'WORKCENTERNAME': 'Station_B'}, + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = 
rc.get_distinct_values('WORKCENTERNAME') + + assert result == ['Station_A', 'Station_B'] + + def test_handles_mixed_types(self): + """Test handles mixed types (converts to string).""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'PJ_DEPARTMENT': 'Dept_A'}, + {'PJ_DEPARTMENT': 123}, # int + {'PJ_DEPARTMENT': 'Dept_B'}, + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_distinct_values('PJ_DEPARTMENT') + + assert '123' in result + assert 'Dept_A' in result + assert 'Dept_B' in result + + def test_returns_empty_list_when_no_resources(self): + """Test returns empty list when no resources.""" + import mes_dashboard.services.resource_cache as rc + + with patch.object(rc, 'get_all_resources', return_value=[]): + result = rc.get_distinct_values('WORKCENTERNAME') + + assert result == [] + + +class TestConvenienceMethods: + """Test convenience methods for common columns.""" + + def test_get_resource_families_calls_get_distinct_values(self): + """Test get_resource_families calls get_distinct_values with correct column.""" + import mes_dashboard.services.resource_cache as rc + + with patch.object(rc, 'get_distinct_values', return_value=['Family1', 'Family2']) as mock: + result = rc.get_resource_families() + + mock.assert_called_once_with('RESOURCEFAMILYNAME') + assert result == ['Family1', 'Family2'] + + def test_get_workcenters_calls_get_distinct_values(self): + """Test get_workcenters calls get_distinct_values with correct column.""" + import mes_dashboard.services.resource_cache as rc + + with patch.object(rc, 'get_distinct_values', return_value=['WC1', 'WC2']) as mock: + result = rc.get_workcenters() + + mock.assert_called_once_with('WORKCENTERNAME') + assert result == ['WC1', 'WC2'] + + def test_get_departments_calls_get_distinct_values(self): + """Test get_departments calls get_distinct_values with correct column.""" + import mes_dashboard.services.resource_cache as rc + + with 
patch.object(rc, 'get_distinct_values', return_value=['Dept1', 'Dept2']) as mock: + result = rc.get_departments() + + mock.assert_called_once_with('PJ_DEPARTMENT') + assert result == ['Dept1', 'Dept2'] + + +class TestGetAllResources: + """Test get_all_resources function.""" + + @pytest.fixture(autouse=True) + def reset_modules(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_returns_cached_data_when_available(self): + """Test returns cached data from Redis when available.""" + import mes_dashboard.services.resource_cache as rc + + test_data = [ + {'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'}, + {'RESOURCEID': 'R002', 'RESOURCENAME': 'Machine2'} + ] + cached_json = json.dumps(test_data) + + mock_client = MagicMock() + mock_client.get.return_value = cached_json + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc, 'RESOURCE_CACHE_ENABLED', True): + with patch.object(rc, 'get_redis_client', return_value=mock_client): + result = rc.get_all_resources() + + assert len(result) == 2 + assert result[0]['RESOURCEID'] == 'R001' + + def test_falls_back_to_oracle_when_cache_miss(self): + """Test falls back to Oracle when cache is empty.""" + import mes_dashboard.services.resource_cache as rc + + mock_client = MagicMock() + mock_client.get.return_value = None + + oracle_df = pd.DataFrame({ + 'RESOURCEID': ['R001'], + 'RESOURCENAME': ['Machine1'] + }) + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc, 'RESOURCE_CACHE_ENABLED', True): + with patch.object(rc, 'get_redis_client', return_value=mock_client): + with patch.object(rc, '_load_from_oracle', return_value=oracle_df): + result = rc.get_all_resources() + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + def test_returns_empty_when_both_unavailable(self): + """Test returns empty list when both cache and Oracle fail.""" + import 
class TestGetResourceById:
    """Tests for get_resource_by_id."""

    def test_returns_matching_resource(self):
        """Lookup by an existing ID yields the full record."""
        import mes_dashboard.services.resource_cache as rc

        rows = [
            {'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'},
            {'RESOURCEID': 'R002', 'RESOURCENAME': 'Machine2'},
        ]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            found = rc.get_resource_by_id('R002')

        assert found is not None
        assert found['RESOURCEID'] == 'R002'
        assert found['RESOURCENAME'] == 'Machine2'

    def test_returns_none_when_not_found(self):
        """Lookup by an unknown ID yields None rather than raising."""
        import mes_dashboard.services.resource_cache as rc

        rows = [{'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'}]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            assert rc.get_resource_by_id('R999') is None
assert 'R001' in ids + assert 'R003' in ids + + def test_ignores_missing_ids(self): + """Test ignores IDs that don't exist.""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'} + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_resources_by_ids(['R001', 'R999']) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + +class TestGetResourcesByFilter: + """Test get_resources_by_filter function.""" + + def test_filters_by_workcenter(self): + """Test filters resources by workcenter.""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'RESOURCEID': 'R001', 'WORKCENTERNAME': 'WC1'}, + {'RESOURCEID': 'R002', 'WORKCENTERNAME': 'WC2'}, + {'RESOURCEID': 'R003', 'WORKCENTERNAME': 'WC1'} + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_resources_by_filter(workcenters=['WC1']) + + assert len(result) == 2 + + def test_filters_by_family(self): + """Test filters resources by family.""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'RESOURCEID': 'R001', 'RESOURCEFAMILYNAME': 'F1'}, + {'RESOURCEID': 'R002', 'RESOURCEFAMILYNAME': 'F2'} + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_resources_by_filter(families=['F1']) + + assert len(result) == 1 + assert result[0]['RESOURCEFAMILYNAME'] == 'F1' + + def test_filters_by_production_flag(self): + """Test filters resources by production flag.""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'RESOURCEID': 'R001', 'PJ_ISPRODUCTION': 1}, + {'RESOURCEID': 'R002', 'PJ_ISPRODUCTION': 0}, + {'RESOURCEID': 'R003', 'PJ_ISPRODUCTION': 1} + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_resources_by_filter(is_production=True) + + assert len(result) == 2 + + def 
test_combines_multiple_filters(self): + """Test combines multiple filter criteria.""" + import mes_dashboard.services.resource_cache as rc + + mock_resources = [ + {'RESOURCEID': 'R001', 'WORKCENTERNAME': 'WC1', 'RESOURCEFAMILYNAME': 'F1'}, + {'RESOURCEID': 'R002', 'WORKCENTERNAME': 'WC1', 'RESOURCEFAMILYNAME': 'F2'}, + {'RESOURCEID': 'R003', 'WORKCENTERNAME': 'WC2', 'RESOURCEFAMILYNAME': 'F1'} + ] + + with patch.object(rc, 'get_all_resources', return_value=mock_resources): + result = rc.get_resources_by_filter(workcenters=['WC1'], families=['F1']) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + +class TestGetCacheStatus: + """Test get_cache_status function.""" + + @pytest.fixture(autouse=True) + def reset_modules(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_returns_disabled_when_cache_disabled(self): + """Test returns disabled status when cache is disabled.""" + import mes_dashboard.services.resource_cache as rc + + with patch.object(rc, 'REDIS_ENABLED', False): + result = rc.get_cache_status() + + assert result['enabled'] is False + assert result['loaded'] is False + + def test_returns_loaded_status_when_data_exists(self): + """Test returns loaded status when cache has data.""" + import mes_dashboard.services.resource_cache as rc + + mock_client = MagicMock() + mock_client.exists.return_value = 1 + mock_client.get.side_effect = lambda key: { + 'mes_wip:resource:meta:count': '1000', + 'mes_wip:resource:meta:version': '2024-01-15T10:00:00', + 'mes_wip:resource:meta:updated': '2024-01-15T10:30:00', + }.get(key) + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc, 'RESOURCE_CACHE_ENABLED', True): + with patch.object(rc, 'get_redis_client', return_value=mock_client): + result = rc.get_cache_status() + + assert result['enabled'] is True + assert result['loaded'] is True + + +class 
TestRefreshCache: + """Test refresh_cache function.""" + + @pytest.fixture(autouse=True) + def reset_modules(self): + """Reset module state before each test.""" + import mes_dashboard.core.redis_client as rc + rc._REDIS_CLIENT = None + yield + rc._REDIS_CLIENT = None + + def test_returns_false_when_disabled(self): + """Test returns False when cache is disabled.""" + import mes_dashboard.services.resource_cache as rc + + with patch.object(rc, 'REDIS_ENABLED', False): + result = rc.refresh_cache() + + assert result is False + + def test_skips_sync_when_version_unchanged(self): + """Test skips sync when Oracle version matches Redis version.""" + import mes_dashboard.services.resource_cache as rc + + mock_client = MagicMock() + mock_client.get.return_value = '2024-01-15T10:00:00' + mock_client.ping.return_value = True + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc, 'RESOURCE_CACHE_ENABLED', True): + with patch.object(rc, 'redis_available', return_value=True): + with patch.object(rc, '_get_version_from_oracle', return_value='2024-01-15T10:00:00'): + with patch.object(rc, '_get_version_from_redis', return_value='2024-01-15T10:00:00'): + result = rc.refresh_cache(force=False) + + assert result is False + + def test_syncs_when_version_changed(self): + """Test syncs when Oracle version differs from Redis version.""" + import mes_dashboard.services.resource_cache as rc + + mock_df = pd.DataFrame({ + 'RESOURCEID': ['R001'], + 'RESOURCENAME': ['Machine1'] + }) + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc, 'RESOURCE_CACHE_ENABLED', True): + with patch.object(rc, 'redis_available', return_value=True): + with patch.object(rc, '_get_version_from_oracle', return_value='2024-01-15T11:00:00'): + with patch.object(rc, '_get_version_from_redis', return_value='2024-01-15T10:00:00'): + with patch.object(rc, '_load_from_oracle', return_value=mock_df): + with patch.object(rc, '_sync_to_redis', return_value=True) as mock_sync: + result = 
rc.refresh_cache(force=False) + + assert result is True + mock_sync.assert_called_once() + + def test_force_sync_ignores_version(self): + """Test force sync ignores version comparison.""" + import mes_dashboard.services.resource_cache as rc + + mock_df = pd.DataFrame({ + 'RESOURCEID': ['R001'], + 'RESOURCENAME': ['Machine1'] + }) + + with patch.object(rc, 'REDIS_ENABLED', True): + with patch.object(rc, 'RESOURCE_CACHE_ENABLED', True): + with patch.object(rc, 'redis_available', return_value=True): + with patch.object(rc, '_get_version_from_oracle', return_value='2024-01-15T10:00:00'): + with patch.object(rc, '_get_version_from_redis', return_value='2024-01-15T10:00:00'): + with patch.object(rc, '_load_from_oracle', return_value=mock_df): + with patch.object(rc, '_sync_to_redis', return_value=True) as mock_sync: + result = rc.refresh_cache(force=True) + + assert result is True + mock_sync.assert_called_once() + + +class TestBuildFilterBuilder: + """Test _build_filter_builder function.""" + + def test_includes_equipment_type_filter(self): + """Test includes equipment type filter.""" + import mes_dashboard.services.resource_cache as rc + + builder = rc._build_filter_builder() + builder.base_sql = "SELECT * FROM DWH.DW_MES_RESOURCE {{ WHERE_CLAUSE }}" + sql, params = builder.build() + + assert 'OBJECTCATEGORY' in sql + assert 'ASSEMBLY' in sql or 'WAFERSORT' in sql + + def test_includes_location_filter(self): + """Test includes location exclusion filter with parameterization.""" + import mes_dashboard.services.resource_cache as rc + + builder = rc._build_filter_builder() + builder.base_sql = "SELECT * FROM DWH.DW_MES_RESOURCE {{ WHERE_CLAUSE }}" + sql, params = builder.build() + + # Check SQL contains LOCATIONNAME condition + assert 'LOCATIONNAME' in sql + # Parameterized query should have bind variables + assert len(params) > 0 + + def test_includes_asset_status_filter(self): + """Test includes asset status exclusion filter with parameterization.""" + import 
mes_dashboard.services.resource_cache as rc + + builder = rc._build_filter_builder() + builder.base_sql = "SELECT * FROM DWH.DW_MES_RESOURCE {{ WHERE_CLAUSE }}" + sql, params = builder.build() + + # Check SQL contains PJ_ASSETSSTATUS condition + assert 'PJ_ASSETSSTATUS' in sql + # Parameterized query should have bind variables + assert len(params) > 0 + + +class TestResourceDerivedIndex: + """Test derived resource index and telemetry behavior.""" + + @pytest.fixture(autouse=True) + def reset_state(self): + import mes_dashboard.services.resource_cache as rc + rc._resource_index = rc._new_empty_index() + rc._resource_df_cache.invalidate("resource_data") + yield + rc._resource_index = rc._new_empty_index() + rc._resource_df_cache.invalidate("resource_data") + + def test_get_resource_by_id_uses_index_snapshot(self): + import mes_dashboard.services.resource_cache as rc + + snapshot = { + "records": [{"RESOURCEID": "R001", "RESOURCENAME": "Machine1"}], + "by_resource_id": {"R001": {"RESOURCEID": "R001", "RESOURCENAME": "Machine1"}}, + } + with patch.object(rc, "get_resource_index_snapshot", return_value=snapshot): + row = rc.get_resource_by_id("R001") + assert row is not None + assert row["RESOURCENAME"] == "Machine1" + + def test_get_cache_status_includes_derived_index_freshness(self): + import mes_dashboard.services.resource_cache as rc + + rc._resource_index = { + **rc._new_empty_index(), + "ready": True, + "source": "redis", + "version": "v1", + "updated_at": "2026-02-07T10:00:00", + "built_at": "2026-02-07T10:00:05", + "count": 2, + } + + mock_client = MagicMock() + mock_client.exists.return_value = 1 + mock_client.get.side_effect = lambda key: { + 'mes_wip:resource:meta:count': '2', + 'mes_wip:resource:meta:version': 'v1', + 'mes_wip:resource:meta:updated': '2026-02-07T10:00:00', + }.get(key) + + with patch.object(rc, "REDIS_ENABLED", True): + with patch.object(rc, "RESOURCE_CACHE_ENABLED", True): + with patch.object(rc, "get_redis_client", 
return_value=mock_client): + status = rc.get_cache_status() + assert status["derived_index"]["ready"] is True + assert status["derived_index"]["is_fresh"] is True + + def test_index_rebuilds_when_redis_version_changes(self): + import mes_dashboard.services.resource_cache as rc + + rc._resource_index = { + **rc._new_empty_index(), + "ready": True, + "source": "redis", + "version": "v1", + "updated_at": "2026-02-07T10:00:00", + "built_at": "2026-02-07T10:00:05", + "version_checked_at": 0.0, + "count": 1, + "records": [{"RESOURCEID": "OLD"}], + "by_resource_id": {"OLD": {"RESOURCEID": "OLD"}}, + } + + rebuilt_df = pd.DataFrame([ + {"RESOURCEID": "R002", "RESOURCENAME": "Machine2"} + ]) + + with patch.object(rc, "RESOURCE_INDEX_VERSION_CHECK_INTERVAL", 0): + with patch.object(rc, "_get_version_from_redis", return_value="v2"): + with patch.object(rc, "_get_cached_data", return_value=rebuilt_df): + with patch.object(rc, "_get_cache_meta", return_value=("v2", "2026-02-07T10:10:00")): + snapshot = rc.get_resource_index_snapshot() + assert snapshot["version"] == "v2" + assert snapshot["count"] == 1 + assert snapshot["by_resource_id"]["R002"]["RESOURCENAME"] == "Machine2" diff --git a/tests/test_resource_history_routes.py b/tests/test_resource_history_routes.py new file mode 100644 index 0000000..c72f4a6 --- /dev/null +++ b/tests/test_resource_history_routes.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +"""Integration tests for resource history API endpoints. + +Tests API endpoints for proper response format, error handling, +and parameter validation. 
+""" + +import unittest +from unittest.mock import patch, MagicMock +import json + +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) + +import mes_dashboard.core.database as db +from mes_dashboard.app import create_app + + +class TestResourceHistoryOptionsAPI(unittest.TestCase): + """Integration tests for /api/resource/history/options endpoint.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + @patch('mes_dashboard.routes.resource_history_routes.get_filter_options') + def test_options_success(self, mock_get_options): + """Successful options request should return workcenter_groups and families.""" + mock_get_options.return_value = { + 'workcenter_groups': [ + {'name': '焊接_DB', 'sequence': 1}, + {'name': '成型', 'sequence': 4} + ], + 'families': ['FAM01', 'FAM02'] + } + + response = self.client.get('/api/resource/history/options') + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + self.assertTrue(data['success']) + self.assertIn('data', data) + self.assertEqual(len(data['data']['workcenter_groups']), 2) + self.assertEqual(data['data']['workcenter_groups'][0]['name'], '焊接_DB') + self.assertEqual(data['data']['families'], ['FAM01', 'FAM02']) + + @patch('mes_dashboard.routes.resource_history_routes.get_filter_options') + def test_options_failure(self, mock_get_options): + """Failed options request should return error.""" + mock_get_options.return_value = None + + response = self.client.get('/api/resource/history/options') + + self.assertEqual(response.status_code, 500) + data = json.loads(response.data) + self.assertFalse(data['success']) + self.assertIn('error', data) + + +class TestResourceHistorySummaryAPI(unittest.TestCase): + """Integration tests for /api/resource/history/summary endpoint.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE 
= None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + def test_missing_start_date(self): + """Missing start_date should return 400.""" + response = self.client.get('/api/resource/history/summary?end_date=2024-01-31') + + self.assertEqual(response.status_code, 400) + data = json.loads(response.data) + self.assertFalse(data['success']) + self.assertIn('start_date', data['error']) + + def test_missing_end_date(self): + """Missing end_date should return 400.""" + response = self.client.get('/api/resource/history/summary?start_date=2024-01-01') + + self.assertEqual(response.status_code, 400) + data = json.loads(response.data) + self.assertFalse(data['success']) + self.assertIn('end_date', data['error']) + + @patch('mes_dashboard.routes.resource_history_routes.query_summary') + def test_date_range_exceeds_limit(self, mock_query): + """Date range exceeding 730 days should return error.""" + mock_query.return_value = {'error': '查詢範圍不可超過 730 天(兩年)'} + + response = self.client.get( + '/api/resource/history/summary?start_date=2024-01-01&end_date=2026-01-02' + ) + + self.assertEqual(response.status_code, 400) + data = json.loads(response.data) + self.assertFalse(data['success']) + self.assertIn('730', data['error']) + + @patch('mes_dashboard.routes.resource_history_routes.query_summary') + def test_successful_summary(self, mock_query): + """Successful summary request should return all data sections.""" + mock_query.return_value = { + 'kpi': { + 'ou_pct': 80.0, + 'prd_hours': 800, + 'sby_hours': 100, + 'udt_hours': 50, + 'sdt_hours': 30, + 'egt_hours': 20, + 'nst_hours': 100, + 'machine_count': 10 + }, + 'trend': [{'date': '2024-01-01', 'ou_pct': 80.0}], + 'heatmap': [{'workcenter': 'WC01', 'date': '2024-01-01', 'ou_pct': 80.0}], + 'workcenter_comparison': [{'workcenter': 'WC01', 'ou_pct': 80.0}] + } + + response = self.client.get( + '/api/resource/history/summary?start_date=2024-01-01&end_date=2024-01-07' + ) + 
+ self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + self.assertTrue(data['success']) + self.assertIn('kpi', data['data']) + self.assertIn('trend', data['data']) + self.assertIn('heatmap', data['data']) + self.assertIn('workcenter_comparison', data['data']) + + @patch('mes_dashboard.routes.resource_history_routes.query_summary') + def test_summary_with_filters(self, mock_query): + """Summary with filters should pass them to service.""" + mock_query.return_value = {'kpi': {}, 'trend': [], 'heatmap': [], 'workcenter_comparison': []} + + response = self.client.get( + '/api/resource/history/summary' + '?start_date=2024-01-01' + '&end_date=2024-01-07' + '&granularity=week' + '&workcenter_groups=焊接_DB' + '&workcenter_groups=成型' + '&families=FAM01' + '&families=FAM02' + '&is_production=1' + '&is_key=1' + ) + + self.assertEqual(response.status_code, 200) + mock_query.assert_called_once() + call_kwargs = mock_query.call_args[1] + self.assertEqual(call_kwargs['granularity'], 'week') + self.assertEqual(call_kwargs['workcenter_groups'], ['焊接_DB', '成型']) + self.assertEqual(call_kwargs['families'], ['FAM01', 'FAM02']) + self.assertTrue(call_kwargs['is_production']) + self.assertTrue(call_kwargs['is_key']) + + +class TestResourceHistoryDetailAPI(unittest.TestCase): + """Integration tests for /api/resource/history/detail endpoint.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + def test_missing_dates(self): + """Missing dates should return 400.""" + response = self.client.get('/api/resource/history/detail') + + self.assertEqual(response.status_code, 400) + data = json.loads(response.data) + self.assertFalse(data['success']) + + @patch('mes_dashboard.routes.resource_history_routes.query_detail') + def test_successful_detail(self, mock_query): + """Successful detail request should return data with total and truncated 
flag.""" + mock_query.return_value = { + 'data': [ + {'workcenter': 'WC01', 'family': 'FAM01', 'resource': 'RES01', 'ou_pct': 80.0} + ], + 'total': 100, + 'truncated': False, + 'max_records': None + } + + response = self.client.get( + '/api/resource/history/detail?start_date=2024-01-01&end_date=2024-01-07' + ) + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + self.assertTrue(data['success']) + self.assertIn('data', data) + self.assertIn('total', data) + self.assertIn('truncated', data) + self.assertFalse(data['truncated']) + + @patch('mes_dashboard.routes.resource_history_routes.query_detail') + def test_detail_truncated_warning(self, mock_query): + """Detail with truncated data should return truncated flag and max_records.""" + mock_query.return_value = { + 'data': [{'workcenter': 'WC01', 'family': 'FAM01', 'resource': 'RES01', 'ou_pct': 80.0}], + 'total': 6000, + 'truncated': True, + 'max_records': 5000 + } + + response = self.client.get( + '/api/resource/history/detail' + '?start_date=2024-01-01' + '&end_date=2024-01-07' + ) + + self.assertEqual(response.status_code, 200) + data = json.loads(response.data) + self.assertTrue(data['success']) + self.assertTrue(data['truncated']) + self.assertEqual(data['max_records'], 5000) + self.assertEqual(data['total'], 6000) + + +class TestResourceHistoryExportAPI(unittest.TestCase): + """Integration tests for /api/resource/history/export endpoint.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + def test_missing_dates(self): + """Missing dates should return 400.""" + response = self.client.get('/api/resource/history/export') + + self.assertEqual(response.status_code, 400) + data = json.loads(response.data) + self.assertFalse(data['success']) + + @patch('mes_dashboard.routes.resource_history_routes.export_csv') + def test_successful_export(self, 
class TestAPIContentType(unittest.TestCase):
    """Ensure JSON endpoints advertise the correct content type."""

    def setUp(self):
        """Build a fresh test client with a clean database engine singleton."""
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    @patch('mes_dashboard.routes.resource_history_routes.get_filter_options')
    def test_json_content_type(self, mock_get_options):
        """Responses must be served as application/json."""
        mock_get_options.return_value = {'workcenter_groups': [], 'families': []}

        response = self.client.get('/api/resource/history/options')

        self.assertIn('application/json', response.content_type)
resource_history_service.py. + +Tests the service layer functions for resource history analysis. +""" + +import unittest +from unittest.mock import patch, MagicMock +from datetime import datetime, timedelta + +import pandas as pd + +import sys +import os +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) + +from mes_dashboard.services.resource_history_service import ( + get_filter_options, + query_summary, + query_detail, + export_csv, + _validate_date_range, + _get_date_trunc, + _calc_ou_pct, + _calc_availability_pct, + _build_kpi_from_df, + _build_detail_from_raw_df, + MAX_QUERY_DAYS, +) + + +class TestValidateDateRange(unittest.TestCase): + """Test date range validation.""" + + def test_valid_date_range(self): + """Valid date range should return None.""" + result = _validate_date_range('2024-01-01', '2024-01-31') + self.assertIsNone(result) + + def test_date_range_exceeds_max(self): + """Date range exceeding MAX_QUERY_DAYS should return error message.""" + result = _validate_date_range('2024-01-01', '2026-01-02') + self.assertIsNotNone(result) + self.assertIn('730', result) + + def test_end_date_before_start_date(self): + """End date before start date should return error message.""" + result = _validate_date_range('2024-01-31', '2024-01-01') + self.assertIsNotNone(result) + self.assertIn('起始日期', result) + + def test_invalid_date_format(self): + """Invalid date format should return error message.""" + result = _validate_date_range('invalid', '2024-01-01') + self.assertIsNotNone(result) + self.assertIn('日期格式錯誤', result) + + +class TestGetDateTrunc(unittest.TestCase): + """Test date truncation SQL generation.""" + + def test_day_granularity(self): + """Day granularity should use TRUNC without format.""" + result = _get_date_trunc('day') + self.assertIn('TRUNC(TXNDATE)', result) + self.assertNotIn('IW', result) + + def test_week_granularity(self): + """Week granularity should use TRUNC with IW format.""" + result = _get_date_trunc('week') + 
self.assertIn("'IW'", result) + + def test_month_granularity(self): + """Month granularity should use TRUNC with MM format.""" + result = _get_date_trunc('month') + self.assertIn("'MM'", result) + + def test_year_granularity(self): + """Year granularity should use TRUNC with YYYY format.""" + result = _get_date_trunc('year') + self.assertIn("'YYYY'", result) + + def test_unknown_granularity(self): + """Unknown granularity should default to day.""" + result = _get_date_trunc('unknown') + self.assertIn('TRUNC(TXNDATE)', result) + self.assertNotIn("'IW'", result) + + +class TestCalcOuPct(unittest.TestCase): + """Test OU% calculation.""" + + def test_normal_calculation(self): + """OU% should be calculated correctly.""" + # PRD=800, SBY=100, UDT=50, SDT=30, EGT=20 + # OU% = 800 / (800+100+50+30+20) * 100 = 80% + result = _calc_ou_pct(800, 100, 50, 30, 20) + self.assertEqual(result, 80.0) + + def test_zero_denominator(self): + """Zero denominator should return 0, not error.""" + result = _calc_ou_pct(0, 0, 0, 0, 0) + self.assertEqual(result, 0) + + def test_all_prd(self): + """100% PRD should result in 100% OU.""" + result = _calc_ou_pct(100, 0, 0, 0, 0) + self.assertEqual(result, 100.0) + + def test_no_prd(self): + """No PRD should result in 0% OU.""" + result = _calc_ou_pct(0, 100, 50, 30, 20) + self.assertEqual(result, 0) + + +class TestCalcAvailabilityPct(unittest.TestCase): + """Test Availability% calculation.""" + + def test_normal_calculation(self): + """Availability% should be calculated correctly.""" + # PRD=800, SBY=100, UDT=50, SDT=30, EGT=20, NST=100 + # Availability% = (800+100+20) / (800+100+20+30+50+100) * 100 = 920 / 1100 * 100 = 83.6% + result = _calc_availability_pct(800, 100, 50, 30, 20, 100) + self.assertEqual(result, 83.6) + + def test_zero_denominator(self): + """Zero denominator should return 0, not error.""" + result = _calc_availability_pct(0, 0, 0, 0, 0, 0) + self.assertEqual(result, 0) + + def test_all_available(self): + """100% available (no 
SDT, UDT, NST) should result in 100%.""" + # PRD=100, SBY=50, EGT=50, no SDT/UDT/NST + # Availability% = (100+50+50) / (100+50+50+0+0+0) * 100 = 100% + result = _calc_availability_pct(100, 50, 0, 0, 50, 0) + self.assertEqual(result, 100.0) + + def test_no_available_time(self): + """No available time (all SDT/UDT/NST) should result in 0%.""" + # PRD=0, SBY=0, EGT=0, UDT=50, SDT=30, NST=20 + # Availability% = 0 / (0+0+0+50+30+20) * 100 = 0% + result = _calc_availability_pct(0, 0, 50, 30, 0, 20) + self.assertEqual(result, 0) + + def test_mixed_scenario(self): + """Mixed scenario with partial availability.""" + # PRD=500, SBY=200, UDT=100, SDT=100, EGT=50, NST=50 + # Numerator = PRD + SBY + EGT = 500 + 200 + 50 = 750 + # Denominator = 500 + 200 + 50 + 100 + 100 + 50 = 1000 + # Availability% = 750 / 1000 * 100 = 75% + result = _calc_availability_pct(500, 200, 100, 100, 50, 50) + self.assertEqual(result, 75.0) + + +class TestBuildKpiFromDf(unittest.TestCase): + """Test KPI building from DataFrame.""" + + def test_empty_dataframe(self): + """Empty DataFrame should return default KPI values.""" + df = pd.DataFrame() + result = _build_kpi_from_df(df) + + self.assertEqual(result['ou_pct'], 0) + self.assertEqual(result['availability_pct'], 0) + self.assertEqual(result['prd_hours'], 0) + self.assertEqual(result['machine_count'], 0) + + def test_normal_dataframe(self): + """Normal DataFrame should build correct KPI.""" + df = pd.DataFrame([{ + 'PRD_HOURS': 800, + 'SBY_HOURS': 100, + 'UDT_HOURS': 50, + 'SDT_HOURS': 30, + 'EGT_HOURS': 20, + 'NST_HOURS': 100, + 'MACHINE_COUNT': 10 + }]) + result = _build_kpi_from_df(df) + + self.assertEqual(result['ou_pct'], 80.0) + # Availability% = (800+100+20) / (800+100+20+30+50+100) * 100 = 920/1100 ≈ 83.6% + self.assertEqual(result['availability_pct'], 83.6) + self.assertEqual(result['prd_hours'], 800) + self.assertEqual(result['machine_count'], 10) + + def test_none_values_in_dataframe(self): + """None values should be treated as 0.""" + df 
= pd.DataFrame([{ + 'PRD_HOURS': None, + 'SBY_HOURS': None, + 'UDT_HOURS': None, + 'SDT_HOURS': None, + 'EGT_HOURS': None, + 'NST_HOURS': None, + 'MACHINE_COUNT': None + }]) + result = _build_kpi_from_df(df) + + self.assertEqual(result['ou_pct'], 0) + self.assertEqual(result['availability_pct'], 0) + self.assertEqual(result['prd_hours'], 0) + self.assertEqual(result['machine_count'], 0) + + +class TestBuildDetailFromDf(unittest.TestCase): + """Test detail data building from DataFrame.""" + + def test_empty_dataframe(self): + """Empty DataFrame should return empty list.""" + df = pd.DataFrame() + resource_lookup = {} + result = _build_detail_from_raw_df(df, resource_lookup) + self.assertEqual(result, []) + + @patch('mes_dashboard.services.filter_cache.get_workcenter_mapping') + def test_normal_dataframe(self, mock_wc_mapping): + """Normal DataFrame should build correct detail data.""" + mock_wc_mapping.return_value = { + 'WC01': {'group': 'Group01', 'sequence': 1} + } + df = pd.DataFrame([{ + 'HISTORYID': 'RES01', + 'PRD_HOURS': 80, + 'SBY_HOURS': 10, + 'UDT_HOURS': 5, + 'SDT_HOURS': 3, + 'EGT_HOURS': 2, + 'NST_HOURS': 10, + 'TOTAL_HOURS': 110 + }]) + resource_lookup = { + 'RES01': { + 'RESOURCEID': 'RES01', + 'WORKCENTERNAME': 'WC01', + 'RESOURCEFAMILYNAME': 'FAM01', + 'RESOURCENAME': 'RES01' + } + } + result = _build_detail_from_raw_df(df, resource_lookup) + + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['workcenter'], 'Group01') + self.assertEqual(result[0]['family'], 'FAM01') + self.assertEqual(result[0]['resource'], 'RES01') + self.assertEqual(result[0]['machine_count'], 1) + # OU% = 80 / (80+10+5+3+2) * 100 = 80% + self.assertEqual(result[0]['ou_pct'], 80.0) + + +class TestGetFilterOptions(unittest.TestCase): + """Test filter options retrieval.""" + + @patch('mes_dashboard.services.filter_cache.get_workcenter_groups') + @patch('mes_dashboard.services.resource_cache.get_resource_families') + def test_cache_failure(self, mock_families, 
mock_groups): + """Cache failure should return None.""" + mock_groups.return_value = None + mock_families.return_value = None + result = get_filter_options() + self.assertIsNone(result) + + @patch('mes_dashboard.services.filter_cache.get_workcenter_groups') + @patch('mes_dashboard.services.resource_cache.get_resource_families') + def test_successful_query(self, mock_families, mock_groups): + """Successful query should return workcenter groups and families.""" + mock_groups.return_value = [ + {'name': '焊接_DB', 'sequence': 1}, + {'name': '成型', 'sequence': 4}, + ] + mock_families.return_value = ['FAM01', 'FAM02'] + + result = get_filter_options() + + self.assertIsNotNone(result) + self.assertEqual(len(result['workcenter_groups']), 2) + self.assertEqual(result['workcenter_groups'][0]['name'], '焊接_DB') + self.assertEqual(result['families'], ['FAM01', 'FAM02']) + + +class TestQuerySummary(unittest.TestCase): + """Test summary query function.""" + + def test_invalid_date_range(self): + """Invalid date range should return error.""" + result = query_summary( + start_date='2024-01-01', + end_date='2026-01-02', # More than 730 days + granularity='day' + ) + self.assertIsNotNone(result) + self.assertIn('error', result) + + @patch('mes_dashboard.services.resource_history_service.read_sql_df') + def test_successful_query(self, mock_read_sql): + """Successful query should return all sections.""" + # Mock data for all queries + kpi_df = pd.DataFrame([{ + 'PRD_HOURS': 800, 'SBY_HOURS': 100, 'UDT_HOURS': 50, + 'SDT_HOURS': 30, 'EGT_HOURS': 20, 'NST_HOURS': 100, + 'MACHINE_COUNT': 10 + }]) + + trend_df = pd.DataFrame([{ + 'DATA_DATE': datetime(2024, 1, 1), + 'PRD_HOURS': 100, 'SBY_HOURS': 10, 'UDT_HOURS': 5, + 'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10, + 'MACHINE_COUNT': 5 + }]) + + heatmap_df = pd.DataFrame([{ + 'WORKCENTERNAME': 'WC01', 'DATA_DATE': datetime(2024, 1, 1), + 'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5, + 'SDT_HOURS': 3, 'EGT_HOURS': 2 + }]) + + 
comparison_df = pd.DataFrame([{ + 'WORKCENTERNAME': 'WC01', + 'PRD_HOURS': 800, 'SBY_HOURS': 100, 'UDT_HOURS': 50, + 'SDT_HOURS': 30, 'EGT_HOURS': 20, 'MACHINE_COUNT': 10 + }]) + + # Use a function to return appropriate mock based on SQL content + # (ThreadPoolExecutor runs queries in parallel, so side_effect list is unreliable) + def mock_sql(sql): + sql_upper = sql.upper() + if 'DATA_DATE' in sql_upper and 'WORKCENTERNAME' in sql_upper: + return heatmap_df # heatmap has both DATA_DATE and WORKCENTERNAME + elif 'DATA_DATE' in sql_upper: + return trend_df # trend has DATA_DATE but no WORKCENTERNAME + elif 'WORKCENTERNAME' in sql_upper: + return comparison_df # comparison has WORKCENTERNAME but no DATA_DATE + else: + return kpi_df # kpi has neither + + mock_read_sql.side_effect = mock_sql + + result = query_summary( + start_date='2024-01-01', + end_date='2024-01-07', + granularity='day' + ) + + self.assertIsNotNone(result) + self.assertIn('kpi', result) + self.assertIn('trend', result) + self.assertIn('heatmap', result) + self.assertIn('workcenter_comparison', result) + + +class TestQueryDetail(unittest.TestCase): + """Test detail query function.""" + + def test_invalid_date_range(self): + """Invalid date range should return error.""" + result = query_detail( + start_date='2024-01-01', + end_date='2026-01-02', # More than 730 days + granularity='day' + ) + self.assertIsNotNone(result) + self.assertIn('error', result) + + @patch('mes_dashboard.services.filter_cache.get_workcenter_mapping') + @patch('mes_dashboard.services.resource_history_service._get_filtered_resources') + @patch('mes_dashboard.services.resource_history_service.read_sql_df') + def test_successful_query(self, mock_read_sql, mock_get_resources, mock_wc_mapping): + """Successful query should return data with total count.""" + # Mock filtered resources + mock_get_resources.return_value = [ + {'RESOURCEID': 'RES01', 'WORKCENTERNAME': 'WC01', + 'RESOURCEFAMILYNAME': 'FAM01', 'RESOURCENAME': 'RES01'} + ] + 
mock_wc_mapping.return_value = { + 'WC01': {'group': 'Group01', 'sequence': 1} + } + + # Mock detail query with HISTORYID column + detail_df = pd.DataFrame([{ + 'HISTORYID': 'RES01', + 'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5, + 'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10, + 'TOTAL_HOURS': 110 + }]) + + mock_read_sql.return_value = detail_df + + result = query_detail( + start_date='2024-01-01', + end_date='2024-01-07', + granularity='day', + ) + + self.assertIsNotNone(result) + self.assertIn('data', result) + self.assertIn('total', result) + self.assertIn('truncated', result) + self.assertEqual(result['total'], 1) + self.assertFalse(result['truncated']) + + +class TestExportCsv(unittest.TestCase): + """Test CSV export function.""" + + def test_invalid_date_range(self): + """Invalid date range should yield error.""" + result = list(export_csv( + start_date='2024-01-01', + end_date='2026-01-02', # More than 730 days + )) + self.assertTrue(any('Error' in r for r in result)) + + @patch('mes_dashboard.services.filter_cache.get_workcenter_mapping') + @patch('mes_dashboard.services.resource_history_service._get_filtered_resources') + @patch('mes_dashboard.services.resource_history_service.read_sql_df') + def test_successful_export(self, mock_read_sql, mock_get_filtered_resources, mock_wc_mapping): + """Successful export should yield CSV rows.""" + mock_get_filtered_resources.return_value = [{ + 'RESOURCEID': 'RES01', + 'WORKCENTERNAME': 'WC01', + 'RESOURCEFAMILYNAME': 'FAM01', + 'RESOURCENAME': 'RES01', + }] + mock_wc_mapping.return_value = {'WC01': {'group': 'WC01', 'sequence': 1}} + + mock_read_sql.return_value = pd.DataFrame([{ + 'HISTORYID': 'RES01', + 'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5, + 'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10, + 'TOTAL_HOURS': 110 + }]) + + result = list(export_csv( + start_date='2024-01-01', + end_date='2024-01-07', + )) + + # Should have header row + data row + self.assertGreaterEqual(len(result), 2) + # Header 
should contain column names + self.assertIn('站點', result[0]) + self.assertIn('OU%', result[0]) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_resource_service.py b/tests/test_resource_service.py new file mode 100644 index 0000000..a17db9e --- /dev/null +++ b/tests/test_resource_service.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +"""Unit tests for resource_service module. + +Tests merged resource status queries and summary functions. +""" + +import pytest +from unittest.mock import patch, MagicMock + + +class TestGetMergedResourceStatus: + """Test get_merged_resource_status function.""" + + def test_returns_empty_when_no_resources(self): + """Test returns empty list when no resources available.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=[]): + result = get_merged_resource_status() + assert result == [] + + def test_merges_resource_and_status_data(self): + """Test merges resource-cache and realtime-equipment-cache data.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + mock_resources = [ + { + 'RESOURCEID': 'R001', + 'RESOURCENAME': 'Machine1', + 'WORKCENTERNAME': 'WC-01', + 'RESOURCEFAMILYNAME': 'Family1', + 'PJ_DEPARTMENT': 'Dept1', + 'PJ_ASSETSSTATUS': 'Active', + 'PJ_ISPRODUCTION': 1, + 'PJ_ISKEY': 0, + 'PJ_ISMONITOR': 0, + 'VENDORNAME': 'Vendor1', + 'VENDORMODEL': 'Model1', + 'LOCATIONNAME': 'Loc1', + } + ] + + mock_equipment_status = [ + { + 'RESOURCEID': 'R001', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'STATUS_CATEGORY': 'PRODUCTIVE', + 'JOBORDER': 'JO001', + 'JOBSTATUS': 'RUN', + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOT_COUNT': 2, + 'TOTAL_TRACKIN_QTY': 150, + 'LATEST_TRACKIN_TIME': '2024-01-15T10:00:00', + } + ] + + with patch('mes_dashboard.services.resource_service.get_all_resources', 
return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value='焊接'): + with patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value='DB'): + result = get_merged_resource_status() + + assert len(result) == 1 + r = result[0] + # Resource-cache data + assert r['RESOURCEID'] == 'R001' + assert r['RESOURCENAME'] == 'Machine1' + assert r['WORKCENTERNAME'] == 'WC-01' + # Workcenter mapping + assert r['WORKCENTER_GROUP'] == '焊接' + assert r['WORKCENTER_SHORT'] == 'DB' + # Realtime status + assert r['EQUIPMENTASSETSSTATUS'] == 'PRD' + assert r['STATUS_CATEGORY'] == 'PRODUCTIVE' + assert r['LOT_COUNT'] == 2 + + def test_handles_resources_without_status(self): + """Test handles resources that have no realtime status.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + mock_resources = [ + { + 'RESOURCEID': 'R001', + 'RESOURCENAME': 'Machine1', + 'WORKCENTERNAME': 'WC-01', + 'RESOURCEFAMILYNAME': 'Family1', + 'PJ_DEPARTMENT': 'Dept1', + 'PJ_ASSETSSTATUS': 'Active', + 'PJ_ISPRODUCTION': 1, + 'PJ_ISKEY': 0, + 'PJ_ISMONITOR': 0, + 'VENDORNAME': 'Vendor1', + 'VENDORMODEL': 'Model1', + 'LOCATIONNAME': 'Loc1', + } + ] + + # No matching equipment status + mock_equipment_status = [] + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value=None): + with patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None): + result = get_merged_resource_status() + + assert len(result) == 1 + r = result[0] + assert r['RESOURCEID'] == 'R001' + # Status fields should be None + assert 
r['EQUIPMENTASSETSSTATUS'] is None + assert r['STATUS_CATEGORY'] is None + assert r['LOT_COUNT'] is None + + +class TestGetMergedResourceStatusWithFilters: + """Test get_merged_resource_status with filter parameters.""" + + def _get_mock_data(self): + """Get mock test data.""" + mock_resources = [ + { + 'RESOURCEID': 'R001', + 'RESOURCENAME': 'Machine1', + 'WORKCENTERNAME': 'WC-01', + 'RESOURCEFAMILYNAME': 'Family1', + 'PJ_DEPARTMENT': 'Dept1', + 'PJ_ASSETSSTATUS': 'Active', + 'PJ_ISPRODUCTION': 1, + 'PJ_ISKEY': 1, + 'PJ_ISMONITOR': 0, + 'VENDORNAME': 'Vendor1', + 'VENDORMODEL': 'Model1', + 'LOCATIONNAME': 'Loc1', + }, + { + 'RESOURCEID': 'R002', + 'RESOURCENAME': 'Machine2', + 'WORKCENTERNAME': 'WC-02', + 'RESOURCEFAMILYNAME': 'Family2', + 'PJ_DEPARTMENT': 'Dept2', + 'PJ_ASSETSSTATUS': 'Active', + 'PJ_ISPRODUCTION': 0, + 'PJ_ISKEY': 0, + 'PJ_ISMONITOR': 1, + 'VENDORNAME': 'Vendor2', + 'VENDORMODEL': 'Model2', + 'LOCATIONNAME': 'Loc2', + }, + ] + + mock_equipment_status = [ + { + 'RESOURCEID': 'R001', + 'EQUIPMENTASSETSSTATUS': 'PRD', + 'EQUIPMENTASSETSSTATUSREASON': None, + 'STATUS_CATEGORY': 'PRODUCTIVE', + 'JOBORDER': 'JO001', + 'JOBSTATUS': 'RUN', + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOT_COUNT': 1, + 'TOTAL_TRACKIN_QTY': 100, + 'LATEST_TRACKIN_TIME': '2024-01-15T10:00:00', + }, + { + 'RESOURCEID': 'R002', + 'EQUIPMENTASSETSSTATUS': 'SBY', + 'EQUIPMENTASSETSSTATUSREASON': 'Waiting', + 'STATUS_CATEGORY': 'STANDBY', + 'JOBORDER': None, + 'JOBSTATUS': None, + 'SYMPTOMCODE': None, + 'CAUSECODE': None, + 'REPAIRCODE': None, + 'LOT_COUNT': 0, + 'TOTAL_TRACKIN_QTY': 0, + 'LATEST_TRACKIN_TIME': None, + }, + ] + + return mock_resources, mock_equipment_status + + def test_filters_by_workcenter_groups(self): + """Test filters by workcenter_groups parameter.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + mock_resources, mock_equipment_status = self._get_mock_data() + + def mock_get_group(wc_name): + 
return '焊接' if wc_name == 'WC-01' else '成型' + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', side_effect=mock_get_group): + with patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None): + result = get_merged_resource_status(workcenter_groups=['焊接']) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + def test_filters_by_is_production(self): + """Test filters by is_production parameter.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + mock_resources, mock_equipment_status = self._get_mock_data() + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value=None): + with patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None): + result = get_merged_resource_status(is_production=True) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + def test_filters_by_is_key(self): + """Test filters by is_key parameter.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + mock_resources, mock_equipment_status = self._get_mock_data() + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value=None): + with 
patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None): + result = get_merged_resource_status(is_key=True) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + def test_filters_by_is_monitor(self): + """Test filters by is_monitor parameter.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + mock_resources, mock_equipment_status = self._get_mock_data() + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value=None): + with patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None): + result = get_merged_resource_status(is_monitor=True) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R002' + + def test_filters_by_status_categories(self): + """Test filters by status_categories parameter.""" + from mes_dashboard.services.resource_service import get_merged_resource_status + + mock_resources, mock_equipment_status = self._get_mock_data() + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value=None): + with patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None): + result = get_merged_resource_status(status_categories=['PRODUCTIVE']) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + assert result[0]['STATUS_CATEGORY'] == 'PRODUCTIVE' + + def test_combines_multiple_filters(self): + """Test combines multiple filter criteria.""" + from mes_dashboard.services.resource_service 
import get_merged_resource_status + + mock_resources, mock_equipment_status = self._get_mock_data() + + with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=mock_resources): + with patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=mock_equipment_status): + with patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value=None): + with patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None): + # Filter: production AND key + result = get_merged_resource_status(is_production=True, is_key=True) + + assert len(result) == 1 + assert result[0]['RESOURCEID'] == 'R001' + + +class TestGetResourceStatusSummary: + """Test get_resource_status_summary function.""" + + def test_returns_empty_summary_when_no_data(self): + """Test returns empty summary when no data.""" + from mes_dashboard.services.resource_service import get_resource_status_summary + + with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=[]): + result = get_resource_status_summary() + + assert result['total_count'] == 0 + assert result['by_status_category'] == {} + assert result['by_workcenter_group'] == {} + + def test_calculates_summary_statistics(self): + """Test calculates correct summary statistics.""" + from mes_dashboard.services.resource_service import get_resource_status_summary + + mock_data = [ + { + 'RESOURCEID': 'R001', + 'STATUS_CATEGORY': 'PRODUCTIVE', + 'WORKCENTER_GROUP': '焊接', + 'JOBORDER': 'JO001', + 'LOT_COUNT': 2, + }, + { + 'RESOURCEID': 'R002', + 'STATUS_CATEGORY': 'PRODUCTIVE', + 'WORKCENTER_GROUP': '焊接', + 'JOBORDER': 'JO002', + 'LOT_COUNT': 1, + }, + { + 'RESOURCEID': 'R003', + 'STATUS_CATEGORY': 'STANDBY', + 'WORKCENTER_GROUP': '成型', + 'JOBORDER': None, + 'LOT_COUNT': 0, + }, + ] + + with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=mock_data): + result = 
get_resource_status_summary() + + assert result['total_count'] == 3 + assert result['by_status_category']['PRODUCTIVE'] == 2 + assert result['by_status_category']['STANDBY'] == 1 + assert result['by_workcenter_group']['焊接'] == 2 + assert result['by_workcenter_group']['成型'] == 1 + assert result['with_active_job'] == 2 + assert result['with_wip'] == 2 + + +class TestGetWorkcenterStatusMatrix: + """Test get_workcenter_status_matrix function.""" + + def test_returns_empty_when_no_data(self): + """Test returns empty list when no data.""" + from mes_dashboard.services.resource_service import get_workcenter_status_matrix + + with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=[]): + result = get_workcenter_status_matrix() + assert result == [] + + def test_builds_matrix_by_workcenter_and_status(self): + """Test builds matrix by workcenter group and status.""" + from mes_dashboard.services.resource_service import get_workcenter_status_matrix + + mock_data = [ + {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'PRD'}, + {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'PRD'}, + {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'SBY'}, + {'WORKCENTER_GROUP': '成型', 'EQUIPMENTASSETSSTATUS': 'UDT'}, + ] + + mock_groups = [ + {'name': '焊接', 'sequence': 1}, + {'name': '成型', 'sequence': 2}, + ] + + with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=mock_data): + with patch('mes_dashboard.services.resource_service.get_workcenter_groups', return_value=mock_groups): + result = get_workcenter_status_matrix() + + assert len(result) == 2 + + # Should be sorted by sequence + assert result[0]['workcenter_group'] == '焊接' + assert result[0]['total'] == 3 + assert result[0]['PRD'] == 2 + assert result[0]['SBY'] == 1 + + assert result[1]['workcenter_group'] == '成型' + assert result[1]['total'] == 1 + assert result[1]['UDT'] == 1 + + def test_handles_unknown_status(self): + """Test handles unknown 
status codes.""" + from mes_dashboard.services.resource_service import get_workcenter_status_matrix + + mock_data = [ + {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'CUSTOM_STATUS'}, + ] + + mock_groups = [{'name': '焊接', 'sequence': 1}] + + with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=mock_data): + with patch('mes_dashboard.services.resource_service.get_workcenter_groups', return_value=mock_groups): + result = get_workcenter_status_matrix() + + assert len(result) == 1 + assert result[0]['OTHER'] == 1 diff --git a/tests/test_sql_builder.py b/tests/test_sql_builder.py new file mode 100644 index 0000000..32b7bd4 --- /dev/null +++ b/tests/test_sql_builder.py @@ -0,0 +1,238 @@ +"""Tests for Query Builder.""" + +import pytest + +from mes_dashboard.sql.builder import QueryBuilder + + +class TestQueryBuilder: + """Test QueryBuilder class.""" + + def test_add_param_condition(self): + """Test adding a parameterized condition.""" + builder = QueryBuilder() + builder.add_param_condition("status", "RUN") + + assert len(builder.conditions) == 1 + assert "status = :p0" in builder.conditions[0] + assert builder.params["p0"] == "RUN" + + def test_add_param_condition_with_operator(self): + """Test adding a parameterized condition with custom operator.""" + builder = QueryBuilder() + builder.add_param_condition("count", 10, operator=">=") + + assert "count >= :p0" in builder.conditions[0] + assert builder.params["p0"] == 10 + + def test_add_in_condition(self): + """Test adding an IN condition.""" + builder = QueryBuilder() + builder.add_in_condition("status", ["RUN", "QUEUE", "HOLD"]) + + assert len(builder.conditions) == 1 + assert "status IN (:p0, :p1, :p2)" in builder.conditions[0] + assert builder.params["p0"] == "RUN" + assert builder.params["p1"] == "QUEUE" + assert builder.params["p2"] == "HOLD" + + def test_add_in_condition_empty_list(self): + """Test that empty list doesn't add condition.""" + builder = QueryBuilder() + 
builder.add_in_condition("status", []) + + assert len(builder.conditions) == 0 + assert len(builder.params) == 0 + + def test_add_not_in_condition(self): + """Test adding a NOT IN condition.""" + builder = QueryBuilder() + builder.add_not_in_condition("location", ["ATEC", "F區"]) + + assert len(builder.conditions) == 1 + assert "location NOT IN (:p0, :p1)" in builder.conditions[0] + assert builder.params["p0"] == "ATEC" + assert builder.params["p1"] == "F區" + + def test_add_not_in_condition_with_null(self): + """Test NOT IN condition allowing NULL values.""" + builder = QueryBuilder() + builder.add_not_in_condition("location", ["ATEC"], allow_null=True) + + assert len(builder.conditions) == 1 + assert "(location IS NULL OR location NOT IN (:p0))" in builder.conditions[0] + + def test_add_like_condition_both(self): + """Test LIKE condition with wildcards on both sides.""" + builder = QueryBuilder() + builder.add_like_condition("name", "test") + + assert "name LIKE :p0 ESCAPE '\\'" in builder.conditions[0] + assert builder.params["p0"] == "%test%" + + def test_add_like_condition_start(self): + """Test LIKE condition with wildcard at end only.""" + builder = QueryBuilder() + builder.add_like_condition("name", "prefix", position="start") + + assert builder.params["p0"] == "prefix%" + + def test_add_like_condition_end(self): + """Test LIKE condition with wildcard at start only.""" + builder = QueryBuilder() + builder.add_like_condition("name", "suffix", position="end") + + assert builder.params["p0"] == "%suffix" + + def test_add_like_condition_escapes_wildcards(self): + """Test that LIKE condition escapes SQL wildcards.""" + builder = QueryBuilder() + builder.add_like_condition("name", "test%value") + + assert builder.params["p0"] == "%test\\%value%" + + def test_add_like_condition_escapes_underscore(self): + """Test that LIKE condition escapes underscores.""" + builder = QueryBuilder() + builder.add_like_condition("name", "test_value") + + assert builder.params["p0"] 
== "%test\\_value%" + + def test_build_with_conditions(self): + """Test building SQL with multiple conditions.""" + builder = QueryBuilder("SELECT * FROM t {{ WHERE_CLAUSE }}") + builder.add_param_condition("status", "RUN") + builder.add_in_condition("type", ["A", "B"]) + + sql, params = builder.build() + + assert "WHERE" in sql + assert "status = :p0" in sql + assert "type IN (:p1, :p2)" in sql + assert "AND" in sql + assert params["p0"] == "RUN" + assert params["p1"] == "A" + assert params["p2"] == "B" + + def test_build_without_conditions(self): + """Test building SQL with no conditions.""" + builder = QueryBuilder("SELECT * FROM t {{ WHERE_CLAUSE }}") + sql, params = builder.build() + + assert "WHERE" not in sql + assert "{{ WHERE_CLAUSE }}" not in sql + assert params == {} + + def test_build_where_only(self): + """Test building only the WHERE clause.""" + builder = QueryBuilder() + builder.add_param_condition("status", "RUN") + + where_clause, params = builder.build_where_only() + + assert where_clause.startswith("WHERE") + assert "status = :p0" in where_clause + + def test_get_conditions_sql(self): + """Test getting conditions as string.""" + builder = QueryBuilder() + builder.add_param_condition("a", 1) + builder.add_param_condition("b", 2) + + conditions = builder.get_conditions_sql() + + assert "a = :p0 AND b = :p1" == conditions + + def test_reset(self): + """Test resetting the builder.""" + builder = QueryBuilder("SELECT * FROM t") + builder.add_param_condition("status", "RUN") + builder.reset() + + assert len(builder.conditions) == 0 + assert len(builder.params) == 0 + assert builder._param_counter == 0 + assert builder.base_sql == "SELECT * FROM t" + + def test_method_chaining(self): + """Test that methods support chaining.""" + builder = ( + QueryBuilder("SELECT * FROM t {{ WHERE_CLAUSE }}") + .add_param_condition("status", "RUN") + .add_in_condition("type", ["A", "B"]) + .add_like_condition("name", "test") + ) + + assert len(builder.conditions) == 3 
+ + def test_add_is_null(self): + """Test adding IS NULL condition.""" + builder = QueryBuilder() + builder.add_is_null("deleted_at") + + assert "deleted_at IS NULL" in builder.conditions[0] + + def test_add_is_not_null(self): + """Test adding IS NOT NULL condition.""" + builder = QueryBuilder() + builder.add_is_not_null("updated_at") + + assert "updated_at IS NOT NULL" in builder.conditions[0] + + def test_add_condition_fixed(self): + """Test adding a fixed condition.""" + builder = QueryBuilder() + builder.add_condition("1=1") + + assert "1=1" in builder.conditions[0] + assert len(builder.params) == 0 + + def test_add_or_like_conditions(self): + """Test adding multiple LIKE conditions combined with OR.""" + builder = QueryBuilder() + builder.add_or_like_conditions("name", ["foo", "bar", "baz"]) + + assert len(builder.conditions) == 1 + condition = builder.conditions[0] + assert "name LIKE :p0 ESCAPE '\\'" in condition + assert "name LIKE :p1 ESCAPE '\\'" in condition + assert "name LIKE :p2 ESCAPE '\\'" in condition + assert " OR " in condition + assert condition.startswith("(") + assert condition.endswith(")") + assert builder.params["p0"] == "%foo%" + assert builder.params["p1"] == "%bar%" + assert builder.params["p2"] == "%baz%" + + def test_add_or_like_conditions_case_insensitive(self): + """Test OR LIKE conditions with case insensitive matching.""" + builder = QueryBuilder() + builder.add_or_like_conditions("name", ["Foo", "BAR"], case_insensitive=True) + + condition = builder.conditions[0] + assert "UPPER(name)" in condition + assert builder.params["p0"] == "%FOO%" + assert builder.params["p1"] == "%BAR%" + + def test_add_or_like_conditions_escapes_wildcards(self): + """Test OR LIKE conditions escape SQL wildcards.""" + builder = QueryBuilder() + builder.add_or_like_conditions("name", ["test%val", "foo_bar"]) + + assert builder.params["p0"] == "%test\\%val%" + assert builder.params["p1"] == "%foo\\_bar%" + + def test_add_or_like_conditions_empty_list(self): 
+ """Test that empty list doesn't add condition.""" + builder = QueryBuilder() + builder.add_or_like_conditions("name", []) + + assert len(builder.conditions) == 0 + assert len(builder.params) == 0 + + def test_add_or_like_conditions_position(self): + """Test OR LIKE conditions with different positions.""" + builder = QueryBuilder() + builder.add_or_like_conditions("name", ["test"], position="start") + + assert builder.params["p0"] == "test%" diff --git a/tests/test_sql_loader.py b/tests/test_sql_loader.py new file mode 100644 index 0000000..7f3ff24 --- /dev/null +++ b/tests/test_sql_loader.py @@ -0,0 +1,109 @@ +"""Tests for SQL Loader.""" + +import pytest +from pathlib import Path +from unittest.mock import patch, MagicMock + +from mes_dashboard.sql.loader import SQLLoader + + +class TestSQLLoader: + """Test SQLLoader class.""" + + def setup_method(self): + """Clear cache before each test.""" + SQLLoader.clear_cache() + + def test_load_existing_file(self, tmp_path): + """Test loading an existing SQL file.""" + # Create a temporary SQL file + sql_dir = tmp_path / "wip" + sql_dir.mkdir() + sql_file = sql_dir / "summary.sql" + sql_file.write_text("SELECT * FROM DWH.DW_MES_LOT_V") + + # Patch the _sql_dir to use our temp directory + with patch.object(SQLLoader, "_sql_dir", tmp_path): + result = SQLLoader.load("wip/summary") + assert result == "SELECT * FROM DWH.DW_MES_LOT_V" + + def test_load_nonexistent_file(self): + """Test loading a non-existent SQL file raises FileNotFoundError.""" + with pytest.raises(FileNotFoundError) as exc_info: + SQLLoader.load("nonexistent/query") + assert "SQL file not found" in str(exc_info.value) + + def test_load_uses_cache(self, tmp_path): + """Test that repeated loads use the cache.""" + # Create a temporary SQL file + sql_dir = tmp_path / "test" + sql_dir.mkdir() + sql_file = sql_dir / "cached.sql" + sql_file.write_text("SELECT 1") + + with patch.object(SQLLoader, "_sql_dir", tmp_path): + SQLLoader.clear_cache() + + # First load + 
result1 = SQLLoader.load("test/cached") + info1 = SQLLoader.cache_info() + + # Second load (should hit cache) + result2 = SQLLoader.load("test/cached") + info2 = SQLLoader.cache_info() + + assert result1 == result2 + assert info1.misses == 1 + assert info2.hits == 1 + + def test_load_with_params_substitutes_values(self, tmp_path): + """Test structural parameter substitution.""" + sql_dir = tmp_path + sql_file = sql_dir / "query.sql" + sql_file.write_text("SELECT * FROM {{ table_name }}") + + with patch.object(SQLLoader, "_sql_dir", tmp_path): + result = SQLLoader.load_with_params("query", table_name="DWH.MY_TABLE") + assert result == "SELECT * FROM DWH.MY_TABLE" + + def test_load_with_params_preserves_unsubstituted(self, tmp_path): + """Test that unsubstituted parameters remain unchanged.""" + sql_dir = tmp_path + sql_file = sql_dir / "query.sql" + sql_file.write_text("SELECT * FROM {{ table_name }} {{ WHERE_CLAUSE }}") + + with patch.object(SQLLoader, "_sql_dir", tmp_path): + result = SQLLoader.load_with_params("query", table_name="T") + assert result == "SELECT * FROM T {{ WHERE_CLAUSE }}" + + def test_clear_cache(self, tmp_path): + """Test cache clearing.""" + sql_dir = tmp_path + sql_file = sql_dir / "test.sql" + sql_file.write_text("SELECT 1") + + with patch.object(SQLLoader, "_sql_dir", tmp_path): + SQLLoader.load("test") + info_before = SQLLoader.cache_info() + assert info_before.currsize > 0 + + SQLLoader.clear_cache() + info_after = SQLLoader.cache_info() + assert info_after.currsize == 0 + + def test_cache_info(self, tmp_path): + """Test cache_info returns valid statistics.""" + sql_dir = tmp_path + sql_file = sql_dir / "test.sql" + sql_file.write_text("SELECT 1") + + with patch.object(SQLLoader, "_sql_dir", tmp_path): + SQLLoader.clear_cache() + SQLLoader.load("test") + info = SQLLoader.cache_info() + + assert hasattr(info, "hits") + assert hasattr(info, "misses") + assert hasattr(info, "maxsize") + assert hasattr(info, "currsize") + assert info.maxsize 
== 100 diff --git a/tests/test_template_integration.py b/tests/test_template_integration.py new file mode 100644 index 0000000..9d46b32 --- /dev/null +++ b/tests/test_template_integration.py @@ -0,0 +1,249 @@ +# -*- coding: utf-8 -*- +"""Unit tests for template integration with _base.html. + +Verifies that all templates properly extend _base.html and include +required core JavaScript resources. +""" + +import unittest +from unittest.mock import patch + +from mes_dashboard.app import create_app +import mes_dashboard.core.database as db + + +def _login_as_admin(client): + with client.session_transaction() as sess: + sess['admin'] = {'displayName': 'Test Admin', 'employeeNo': 'A001'} + + +class TestTemplateIntegration(unittest.TestCase): + """Test that all templates properly extend _base.html.""" + + def setUp(self): + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + _login_as_admin(self.client) + + def test_portal_includes_base_scripts(self): + response = self.client.get('/') + self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + + self.assertIn('toast.js', html) + self.assertIn('mes-api.js', html) + self.assertIn('mes-toast-container', html) + + def test_wip_overview_includes_base_scripts(self): + response = self.client.get('/wip-overview') + self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + + self.assertIn('toast.js', html) + self.assertIn('mes-api.js', html) + self.assertIn('mes-toast-container', html) + + def test_wip_detail_includes_base_scripts(self): + response = self.client.get('/wip-detail') + self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + + self.assertIn('toast.js', html) + self.assertIn('mes-api.js', html) + self.assertIn('mes-toast-container', html) + + def test_tables_page_includes_base_scripts(self): + response = self.client.get('/tables') + 
self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + + self.assertIn('toast.js', html) + self.assertIn('mes-api.js', html) + self.assertIn('mes-toast-container', html) + + def test_resource_page_includes_base_scripts(self): + response = self.client.get('/resource') + self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + + self.assertIn('toast.js', html) + self.assertIn('mes-api.js', html) + self.assertIn('mes-toast-container', html) + + def test_excel_query_page_includes_base_scripts(self): + response = self.client.get('/excel-query') + self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + + self.assertIn('toast.js', html) + self.assertIn('mes-api.js', html) + self.assertIn('mes-toast-container', html) + + +class TestToastCSSIntegration(unittest.TestCase): + """Test that Toast CSS styles are included in pages.""" + + def setUp(self): + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + _login_as_admin(self.client) + + def test_portal_includes_toast_css(self): + response = self.client.get('/') + html = response.data.decode('utf-8') + + self.assertIn('.mes-toast-container', html) + self.assertIn('.mes-toast', html) + + def test_wip_overview_includes_toast_css(self): + response = self.client.get('/wip-overview') + html = response.data.decode('utf-8') + + self.assertIn('.mes-toast-container', html) + self.assertIn('.mes-toast', html) + + def test_wip_detail_includes_toast_css(self): + response = self.client.get('/wip-detail') + html = response.data.decode('utf-8') + + self.assertIn('.mes-toast-container', html) + self.assertIn('.mes-toast', html) + + +class TestMesApiUsageInTemplates(unittest.TestCase): + """Test that templates either inline MesApi usage or load Vite modules.""" + + def setUp(self): + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + 
self.client = self.app.test_client() + _login_as_admin(self.client) + + def test_wip_overview_uses_mesapi(self): + response = self.client.get('/wip-overview') + html = response.data.decode('utf-8') + + self.assertTrue('MesApi.get' in html or '/static/dist/wip-overview.js' in html) + self.assertNotIn('fetchWithTimeout', html) + + def test_wip_detail_uses_mesapi(self): + response = self.client.get('/wip-detail') + html = response.data.decode('utf-8') + + self.assertTrue('MesApi.get' in html or '/static/dist/wip-detail.js' in html) + self.assertNotIn('fetchWithTimeout', html) + + def test_tables_page_uses_mesapi_or_vite_module(self): + response = self.client.get('/tables') + html = response.data.decode('utf-8') + + self.assertTrue('MesApi.post' in html or '/static/dist/tables.js' in html) + + def test_resource_page_uses_mesapi_or_vite_module(self): + response = self.client.get('/resource') + html = response.data.decode('utf-8') + + self.assertTrue('MesApi.post' in html or '/static/dist/resource-status.js' in html) + + +class TestViteModuleFallbackIntegration(unittest.TestCase): + """Ensure page templates support Vite module assets with inline fallback.""" + + def setUp(self): + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + _login_as_admin(self.client) + + def test_pages_render_inline_fallback_when_asset_missing(self): + endpoints_and_markers = [ + ('/wip-overview', 'function applyFilters'), + ('/wip-detail', 'function init'), + ('/hold-detail?reason=test-reason', 'function loadAllData'), + ('/tables', 'function loadTableData'), + ('/resource', 'function loadData'), + ('/resource-history', 'function executeQuery'), + ('/job-query', 'function queryJobs'), + ('/excel-query', 'function uploadExcel'), + ] + for endpoint, marker in endpoints_and_markers: + with patch('mes_dashboard.app.os.path.exists', return_value=False): + response = self.client.get(endpoint) + 
self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + self.assertIn(marker, html) + + def test_pages_render_vite_module_when_asset_exists(self): + endpoints_and_assets = [ + ('/wip-overview', 'wip-overview.js'), + ('/wip-detail', 'wip-detail.js'), + ('/hold-detail?reason=test-reason', 'hold-detail.js'), + ('/tables', 'tables.js'), + ('/resource', 'resource-status.js'), + ('/resource-history', 'resource-history.js'), + ('/job-query', 'job-query.js'), + ('/excel-query', 'excel-query.js'), + ] + for endpoint, asset in endpoints_and_assets: + with patch('mes_dashboard.app.os.path.exists', return_value=True): + response = self.client.get(endpoint) + self.assertEqual(response.status_code, 200) + html = response.data.decode('utf-8') + self.assertIn(f'/static/dist/{asset}', html) + self.assertIn('type="module"', html) + + +class TestStaticFilesServing(unittest.TestCase): + """Test that static JavaScript files are served correctly.""" + + def setUp(self): + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + _login_as_admin(self.client) + + def test_toast_js_is_served(self): + response = self.client.get('/static/js/toast.js') + self.assertEqual(response.status_code, 200) + content = response.data.decode('utf-8') + + self.assertIn('Toast', content) + self.assertIn('info', content) + self.assertIn('success', content) + self.assertIn('error', content) + self.assertIn('loading', content) + + def test_mes_api_js_is_served(self): + response = self.client.get('/static/js/mes-api.js') + self.assertEqual(response.status_code, 200) + content = response.data.decode('utf-8') + + self.assertIn('MesApi', content) + self.assertIn('get', content) + self.assertIn('post', content) + self.assertIn('AbortController', content) + + def test_toast_js_contains_retry_button(self): + response = self.client.get('/static/js/toast.js') + content = response.data.decode('utf-8') + + 
self.assertIn('retry', content) + self.assertIn('mes-toast-retry', content) + + def test_mes_api_js_has_exponential_backoff(self): + response = self.client.get('/static/js/mes-api.js') + content = response.data.decode('utf-8') + + self.assertIn('1000', content) + self.assertIn('retry', content.lower()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_wip_routes.py b/tests/test_wip_routes.py new file mode 100644 index 0000000..ff30d8c --- /dev/null +++ b/tests/test_wip_routes.py @@ -0,0 +1,337 @@ +# -*- coding: utf-8 -*- +"""Unit tests for WIP API routes. + +Tests the WIP API endpoints in wip_routes.py. +""" + +import unittest +from unittest.mock import patch +import json + +from mes_dashboard.app import create_app +import mes_dashboard.core.database as db + + +class TestWipRoutesBase(unittest.TestCase): + """Base class for WIP routes tests.""" + + def setUp(self): + """Set up test client.""" + db._ENGINE = None + self.app = create_app('testing') + self.app.config['TESTING'] = True + self.client = self.app.test_client() + + +class TestOverviewSummaryRoute(TestWipRoutesBase): + """Test GET /api/wip/overview/summary endpoint.""" + + @patch('mes_dashboard.routes.wip_routes.get_wip_summary') + def test_returns_success_with_data(self, mock_get_summary): + """Should return success=True with summary data.""" + mock_get_summary.return_value = { + 'totalLots': 9073, + 'totalQtyPcs': 858878718, + 'byWipStatus': { + 'run': {'lots': 8000, 'qtyPcs': 800000000}, + 'queue': {'lots': 953, 'qtyPcs': 504645323}, + 'hold': {'lots': 120, 'qtyPcs': 8213395} + }, + 'dataUpdateDate': '2026-01-26 19:18:29' + } + + response = self.client.get('/api/wip/overview/summary') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertEqual(data['data']['totalLots'], 9073) + self.assertEqual(data['data']['byWipStatus']['hold']['lots'], 120) + + @patch('mes_dashboard.routes.wip_routes.get_wip_summary') 
+ def test_returns_error_on_failure(self, mock_get_summary): + """Should return success=False and 500 on failure.""" + mock_get_summary.return_value = None + + response = self.client.get('/api/wip/overview/summary') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + self.assertIn('error', data) + + +class TestOverviewMatrixRoute(TestWipRoutesBase): + """Test GET /api/wip/overview/matrix endpoint.""" + + @patch('mes_dashboard.routes.wip_routes.get_wip_matrix') + def test_returns_success_with_matrix(self, mock_get_matrix): + """Should return success=True with matrix data.""" + mock_get_matrix.return_value = { + 'workcenters': ['切割', '焊接_DB'], + 'packages': ['SOT-23', 'SOD-323'], + 'matrix': {'切割': {'SOT-23': 50000000}}, + 'workcenter_totals': {'切割': 50000000}, + 'package_totals': {'SOT-23': 50000000}, + 'grand_total': 50000000 + } + + response = self.client.get('/api/wip/overview/matrix') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertIn('workcenters', data['data']) + self.assertIn('packages', data['data']) + self.assertIn('matrix', data['data']) + + @patch('mes_dashboard.routes.wip_routes.get_wip_matrix') + def test_returns_error_on_failure(self, mock_get_matrix): + """Should return success=False and 500 on failure.""" + mock_get_matrix.return_value = None + + response = self.client.get('/api/wip/overview/matrix') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + + +class TestOverviewHoldRoute(TestWipRoutesBase): + """Test GET /api/wip/overview/hold endpoint.""" + + @patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary') + def test_returns_success_with_hold_items(self, mock_get_hold): + """Should return success=True with hold items.""" + mock_get_hold.return_value = { + 'items': [ + {'reason': '特殊需求管控', 'lots': 44, 'qty': 4235060}, + 
{'reason': 'YieldLimit', 'lots': 21, 'qty': 1084443} + ] + } + + response = self.client.get('/api/wip/overview/hold') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertEqual(len(data['data']['items']), 2) + + @patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary') + def test_returns_error_on_failure(self, mock_get_hold): + """Should return success=False and 500 on failure.""" + mock_get_hold.return_value = None + + response = self.client.get('/api/wip/overview/hold') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + + +class TestDetailRoute(TestWipRoutesBase): + """Test GET /api/wip/detail/ endpoint.""" + + @patch('mes_dashboard.routes.wip_routes.get_wip_detail') + def test_returns_success_with_detail(self, mock_get_detail): + """Should return success=True with detail data.""" + mock_get_detail.return_value = { + 'workcenter': '焊接_DB', + 'summary': { + 'total_lots': 859, + 'on_equipment_lots': 312, + 'waiting_lots': 547, + 'hold_lots': 15 + }, + 'specs': ['Spec1', 'Spec2'], + 'lots': [ + {'lot_id': 'GA25102485', 'equipment': 'GSMP-0054', + 'status': 'ACTIVE', 'hold_reason': None, + 'qty': 750, 'package': 'SOT-23', 'spec': 'Spec1'} + ], + 'pagination': { + 'page': 1, 'page_size': 100, + 'total_count': 859, 'total_pages': 9 + }, + 'sys_date': '2026-01-26 19:18:29' + } + + response = self.client.get('/api/wip/detail/焊接_DB') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertEqual(data['data']['workcenter'], '焊接_DB') + self.assertIn('summary', data['data']) + self.assertIn('lots', data['data']) + self.assertIn('pagination', data['data']) + + @patch('mes_dashboard.routes.wip_routes.get_wip_detail') + def test_passes_query_parameters(self, mock_get_detail): + """Should pass query parameters to service function.""" + 
mock_get_detail.return_value = { + 'workcenter': '焊接_DB', + 'summary': {'total_lots': 100, 'on_equipment_lots': 50, + 'waiting_lots': 50, 'hold_lots': 0}, + 'specs': [], + 'lots': [], + 'pagination': {'page': 2, 'page_size': 50, + 'total_count': 100, 'total_pages': 2}, + 'sys_date': None + } + + response = self.client.get( + '/api/wip/detail/焊接_DB?package=SOT-23&status=RUN&page=2&page_size=50' + ) + + mock_get_detail.assert_called_once_with( + workcenter='焊接_DB', + package='SOT-23', + status='RUN', + hold_type=None, + workorder=None, + lotid=None, + include_dummy=False, + page=2, + page_size=50 + ) + + @patch('mes_dashboard.routes.wip_routes.get_wip_detail') + def test_limits_page_size_to_500(self, mock_get_detail): + """Page size should be capped at 500.""" + mock_get_detail.return_value = { + 'workcenter': '切割', + 'summary': {'total_lots': 0, 'on_equipment_lots': 0, + 'waiting_lots': 0, 'hold_lots': 0}, + 'specs': [], + 'lots': [], + 'pagination': {'page': 1, 'page_size': 500, + 'total_count': 0, 'total_pages': 1}, + 'sys_date': None + } + + response = self.client.get('/api/wip/detail/切割?page_size=1000') + + # Should be capped to 500 + call_args = mock_get_detail.call_args + self.assertEqual(call_args.kwargs['page_size'], 500) + + @patch('mes_dashboard.routes.wip_routes.get_wip_detail') + def test_handles_page_less_than_one(self, mock_get_detail): + """Page number less than 1 should be set to 1.""" + mock_get_detail.return_value = { + 'workcenter': '切割', + 'summary': {'total_lots': 0, 'on_equipment_lots': 0, + 'waiting_lots': 0, 'hold_lots': 0}, + 'specs': [], + 'lots': [], + 'pagination': {'page': 1, 'page_size': 100, + 'total_count': 0, 'total_pages': 1}, + 'sys_date': None + } + + response = self.client.get('/api/wip/detail/切割?page=0') + + call_args = mock_get_detail.call_args + self.assertEqual(call_args.kwargs['page'], 1) + + @patch('mes_dashboard.routes.wip_routes.get_wip_detail') + def test_returns_error_on_failure(self, mock_get_detail): + """Should 
return success=False and 500 on failure.""" + mock_get_detail.return_value = None + + response = self.client.get('/api/wip/detail/不存在的工站') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + + +class TestMetaWorkcentersRoute(TestWipRoutesBase): + """Test GET /api/wip/meta/workcenters endpoint.""" + + @patch('mes_dashboard.routes.wip_routes.get_workcenters') + def test_returns_success_with_workcenters(self, mock_get_wcs): + """Should return success=True with workcenters list.""" + mock_get_wcs.return_value = [ + {'name': '切割', 'lot_count': 1377}, + {'name': '焊接_DB', 'lot_count': 859} + ] + + response = self.client.get('/api/wip/meta/workcenters') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertEqual(len(data['data']), 2) + self.assertEqual(data['data'][0]['name'], '切割') + + @patch('mes_dashboard.routes.wip_routes.get_workcenters') + def test_returns_error_on_failure(self, mock_get_wcs): + """Should return success=False and 500 on failure.""" + mock_get_wcs.return_value = None + + response = self.client.get('/api/wip/meta/workcenters') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + + +class TestMetaPackagesRoute(TestWipRoutesBase): + """Test GET /api/wip/meta/packages endpoint.""" + + @patch('mes_dashboard.routes.wip_routes.get_packages') + def test_returns_success_with_packages(self, mock_get_pkgs): + """Should return success=True with packages list.""" + mock_get_pkgs.return_value = [ + {'name': 'SOT-23', 'lot_count': 2234}, + {'name': 'SOD-323', 'lot_count': 1392} + ] + + response = self.client.get('/api/wip/meta/packages') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 200) + self.assertTrue(data['success']) + self.assertEqual(len(data['data']), 2) + self.assertEqual(data['data'][0]['name'], 'SOT-23') + + 
@patch('mes_dashboard.routes.wip_routes.get_packages') + def test_returns_error_on_failure(self, mock_get_pkgs): + """Should return success=False and 500 on failure.""" + mock_get_pkgs.return_value = None + + response = self.client.get('/api/wip/meta/packages') + data = json.loads(response.data) + + self.assertEqual(response.status_code, 500) + self.assertFalse(data['success']) + + +class TestPageRoutes(TestWipRoutesBase): + """Test page routes for WIP dashboards.""" + + def test_wip_overview_page_exists(self): + """GET /wip-overview should return 200.""" + response = self.client.get('/wip-overview') + self.assertEqual(response.status_code, 200) + + def test_wip_detail_page_exists(self): + """GET /wip-detail should return 200.""" + response = self.client.get('/wip-detail') + self.assertEqual(response.status_code, 200) + + def test_wip_detail_page_with_workcenter(self): + """GET /wip-detail?workcenter=xxx should return 200.""" + response = self.client.get('/wip-detail?workcenter=焊接_DB') + self.assertEqual(response.status_code, 200) + + def test_old_wip_route_removed(self): + """GET /wip should return 404 (route removed).""" + response = self.client.get('/wip') + self.assertEqual(response.status_code, 404) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_wip_service.py b/tests/test_wip_service.py new file mode 100644 index 0000000..3c40c1c --- /dev/null +++ b/tests/test_wip_service.py @@ -0,0 +1,767 @@ +# -*- coding: utf-8 -*- +"""Unit tests for WIP service layer. + +Tests the WIP query functions that use DW_MES_LOT_V view. 
+""" + +import unittest +from unittest.mock import patch, MagicMock +from functools import wraps +import pandas as pd + +from mes_dashboard.services.wip_service import ( + WIP_VIEW, + get_wip_summary, + get_wip_matrix, + get_wip_hold_summary, + get_wip_detail, + get_workcenters, + get_packages, + search_workorders, + search_lot_ids, +) + + +def disable_cache(func): + """Decorator to disable Redis cache for Oracle fallback tests.""" + @wraps(func) + def wrapper(*args, **kwargs): + with patch('mes_dashboard.services.wip_service.get_cached_wip_data', return_value=None): + with patch('mes_dashboard.services.wip_service.get_cached_sys_date', return_value=None): + return func(*args, **kwargs) + return wrapper + + +class TestWipServiceConfig(unittest.TestCase): + """Test WIP service configuration.""" + + def test_wip_view_configured(self): + """WIP_VIEW should be configured correctly.""" + self.assertEqual(WIP_VIEW, "DWH.DW_MES_LOT_V") + + +class TestGetWipSummary(unittest.TestCase): + """Test get_wip_summary function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_none_on_empty_result(self, mock_read_sql): + """Should return None when query returns empty DataFrame.""" + mock_read_sql.return_value = pd.DataFrame() + + result = get_wip_summary() + + self.assertIsNone(result) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_none_on_exception(self, mock_read_sql): + """Should return None when query raises exception.""" + mock_read_sql.side_effect = Exception("Database error") + + result = get_wip_summary() + + self.assertIsNone(result) + + + +class TestGetWipMatrix(unittest.TestCase): + """Test get_wip_matrix function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_matrix_structure(self, mock_read_sql): + """Should return dict with matrix structure.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['切割', '切割', '焊接_DB'], 
+ 'WORKCENTERSEQUENCE_GROUP': [1, 1, 2], + 'PACKAGE_LEF': ['SOT-23', 'SOD-323', 'SOT-23'], + 'QTY': [50000000, 30000000, 40000000] + }) + mock_read_sql.return_value = mock_df + + result = get_wip_matrix() + + self.assertIsNotNone(result) + self.assertIn('workcenters', result) + self.assertIn('packages', result) + self.assertIn('matrix', result) + self.assertIn('workcenter_totals', result) + self.assertIn('package_totals', result) + self.assertIn('grand_total', result) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_workcenters_sorted_by_sequence(self, mock_read_sql): + """Workcenters should be sorted by WORKCENTERSEQUENCE_GROUP.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['焊接_DB', '切割'], + 'WORKCENTERSEQUENCE_GROUP': [2, 1], + 'PACKAGE_LEF': ['SOT-23', 'SOT-23'], + 'QTY': [40000000, 50000000] + }) + mock_read_sql.return_value = mock_df + + result = get_wip_matrix() + + self.assertEqual(result['workcenters'], ['切割', '焊接_DB']) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_packages_sorted_by_qty_desc(self, mock_read_sql): + """Packages should be sorted by total QTY descending.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['切割', '切割'], + 'WORKCENTERSEQUENCE_GROUP': [1, 1], + 'PACKAGE_LEF': ['SOD-323', 'SOT-23'], + 'QTY': [30000000, 50000000] + }) + mock_read_sql.return_value = mock_df + + result = get_wip_matrix() + + self.assertEqual(result['packages'][0], 'SOT-23') # Higher QTY first + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_structure_on_empty_result(self, mock_read_sql): + """Should return empty structure when no data.""" + mock_read_sql.return_value = pd.DataFrame() + + result = get_wip_matrix() + + self.assertIsNotNone(result) + self.assertEqual(result['workcenters'], []) + self.assertEqual(result['packages'], []) + self.assertEqual(result['grand_total'], 0) + + @disable_cache + 
@patch('mes_dashboard.services.wip_service.read_sql_df') + def test_calculates_totals_correctly(self, mock_read_sql): + """Should calculate workcenter and package totals correctly.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['切割', '切割'], + 'WORKCENTERSEQUENCE_GROUP': [1, 1], + 'PACKAGE_LEF': ['SOT-23', 'SOD-323'], + 'QTY': [50000000, 30000000] + }) + mock_read_sql.return_value = mock_df + + result = get_wip_matrix() + + self.assertEqual(result['workcenter_totals']['切割'], 80000000) + self.assertEqual(result['package_totals']['SOT-23'], 50000000) + self.assertEqual(result['grand_total'], 80000000) + + +class TestGetWipHoldSummary(unittest.TestCase): + """Test get_wip_hold_summary function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_hold_items(self, mock_read_sql): + """Should return list of hold items.""" + mock_df = pd.DataFrame({ + 'REASON': ['YieldLimit', '特殊需求管控'], + 'LOTS': [21, 44], + 'QTY': [1084443, 4235060] + }) + mock_read_sql.return_value = mock_df + + result = get_wip_hold_summary() + + self.assertIsNotNone(result) + self.assertIn('items', result) + self.assertEqual(len(result['items']), 2) + self.assertEqual(result['items'][0]['reason'], 'YieldLimit') + self.assertEqual(result['items'][0]['lots'], 21) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_items_on_no_holds(self, mock_read_sql): + """Should return empty items list when no holds.""" + mock_read_sql.return_value = pd.DataFrame() + + result = get_wip_hold_summary() + + self.assertIsNotNone(result) + self.assertEqual(result['items'], []) + + +class TestGetWipDetail(unittest.TestCase): + """Test get_wip_detail function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_none_on_empty_summary(self, mock_read_sql): + """Should return None when summary query returns empty.""" + mock_read_sql.return_value = pd.DataFrame() + + result = 
get_wip_detail('不存在的工站') + + self.assertIsNone(result) + + +class TestGetWorkcenters(unittest.TestCase): + """Test get_workcenters function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_workcenter_list(self, mock_read_sql): + """Should return list of workcenters with lot counts.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['切割', '焊接_DB'], + 'WORKCENTERSEQUENCE_GROUP': [1, 2], + 'LOT_COUNT': [1377, 859] + }) + mock_read_sql.return_value = mock_df + + result = get_workcenters() + + self.assertIsNotNone(result) + self.assertEqual(len(result), 2) + self.assertEqual(result[0]['name'], '切割') + self.assertEqual(result[0]['lot_count'], 1377) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_list_on_no_data(self, mock_read_sql): + """Should return empty list when no workcenters.""" + mock_read_sql.return_value = pd.DataFrame() + + result = get_workcenters() + + self.assertEqual(result, []) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_none_on_exception(self, mock_read_sql): + """Should return None on exception.""" + mock_read_sql.side_effect = Exception("Database error") + + result = get_workcenters() + + self.assertIsNone(result) + + +class TestGetPackages(unittest.TestCase): + """Test get_packages function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_package_list(self, mock_read_sql): + """Should return list of packages with lot counts.""" + mock_df = pd.DataFrame({ + 'PACKAGE_LEF': ['SOT-23', 'SOD-323'], + 'LOT_COUNT': [2234, 1392] + }) + mock_read_sql.return_value = mock_df + + result = get_packages() + + self.assertIsNotNone(result) + self.assertEqual(len(result), 2) + self.assertEqual(result[0]['name'], 'SOT-23') + self.assertEqual(result[0]['lot_count'], 2234) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def 
test_returns_empty_list_on_no_data(self, mock_read_sql): + """Should return empty list when no packages.""" + mock_read_sql.return_value = pd.DataFrame() + + result = get_packages() + + self.assertEqual(result, []) + + +class TestSearchWorkorders(unittest.TestCase): + """Test search_workorders function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_matching_workorders(self, mock_read_sql): + """Should return list of matching WORKORDER values.""" + mock_df = pd.DataFrame({ + 'WORKORDER': ['GA26012001', 'GA26012002', 'GA26012003'] + }) + mock_read_sql.return_value = mock_df + + result = search_workorders('GA26') + + self.assertIsNotNone(result) + self.assertEqual(len(result), 3) + self.assertEqual(result[0], 'GA26012001') + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_list_for_short_query(self, mock_read_sql): + """Should return empty list for query < 2 characters.""" + result = search_workorders('G') + + self.assertEqual(result, []) + mock_read_sql.assert_not_called() + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_list_for_empty_query(self, mock_read_sql): + """Should return empty list for empty query.""" + result = search_workorders('') + + self.assertEqual(result, []) + mock_read_sql.assert_not_called() + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_list_on_no_matches(self, mock_read_sql): + """Should return empty list when no matches found.""" + mock_read_sql.return_value = pd.DataFrame() + + result = search_workorders('NONEXISTENT') + + self.assertEqual(result, []) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_respects_limit_parameter(self, mock_read_sql): + """Should respect the limit parameter.""" + mock_df = pd.DataFrame({ + 'WORKORDER': ['GA26012001', 'GA26012002'] + }) + mock_read_sql.return_value 
= mock_df + + result = search_workorders('GA26', limit=2) + + self.assertEqual(len(result), 2) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_caps_limit_at_50(self, mock_read_sql): + """Should cap limit at 50.""" + mock_df = pd.DataFrame({'WORKORDER': ['GA26012001']}) + mock_read_sql.return_value = mock_df + + search_workorders('GA26', limit=100) + + # Verify params contain row_limit=50 (capped from 100) + call_args = mock_read_sql.call_args + params = call_args[0][1] if len(call_args[0]) > 1 else call_args[1].get('params', {}) + self.assertEqual(params.get('row_limit'), 50) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_none_on_exception(self, mock_read_sql): + """Should return None on exception.""" + mock_read_sql.side_effect = Exception("Database error") + + result = search_workorders('GA26') + + self.assertIsNone(result) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_excludes_dummy_by_default(self, mock_read_sql): + """Should exclude DUMMY lots by default.""" + mock_df = pd.DataFrame({'WORKORDER': []}) + mock_read_sql.return_value = mock_df + + search_workorders('GA26') + + call_args = mock_read_sql.call_args[0][0] + self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_includes_dummy_when_specified(self, mock_read_sql): + """Should include DUMMY lots when include_dummy=True.""" + mock_df = pd.DataFrame({'WORKORDER': []}) + mock_read_sql.return_value = mock_df + + search_workorders('GA26', include_dummy=True) + + call_args = mock_read_sql.call_args[0][0] + self.assertNotIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + +class TestSearchLotIds(unittest.TestCase): + """Test search_lot_ids function.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_matching_lotids(self, mock_read_sql): + """Should 
return list of matching LOTID values.""" + mock_df = pd.DataFrame({ + 'LOTID': ['GA26012345-A00-001', 'GA26012345-A00-002'] + }) + mock_read_sql.return_value = mock_df + + result = search_lot_ids('GA26012345') + + self.assertIsNotNone(result) + self.assertEqual(len(result), 2) + self.assertEqual(result[0], 'GA26012345-A00-001') + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_list_for_short_query(self, mock_read_sql): + """Should return empty list for query < 2 characters.""" + result = search_lot_ids('G') + + self.assertEqual(result, []) + mock_read_sql.assert_not_called() + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_empty_list_on_no_matches(self, mock_read_sql): + """Should return empty list when no matches found.""" + mock_read_sql.return_value = pd.DataFrame() + + result = search_lot_ids('NONEXISTENT') + + self.assertEqual(result, []) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_returns_none_on_exception(self, mock_read_sql): + """Should return None on exception.""" + mock_read_sql.side_effect = Exception("Database error") + + result = search_lot_ids('GA26') + + self.assertIsNone(result) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_excludes_dummy_by_default(self, mock_read_sql): + """Should exclude DUMMY lots by default.""" + mock_df = pd.DataFrame({'LOTID': []}) + mock_read_sql.return_value = mock_df + + search_lot_ids('GA26') + + call_args = mock_read_sql.call_args[0][0] + self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + +class TestWipSearchIndexShortcut(unittest.TestCase): + """Test derived search index fast-path behavior.""" + + @patch('mes_dashboard.services.wip_service._search_workorders_from_oracle') + @patch('mes_dashboard.services.wip_service._get_wip_search_index') + def test_workorder_search_uses_index_without_cross_filters(self, mock_index, 
mock_oracle): + mock_index.return_value = { + "workorders": ["GA26012001", "GA26012002", "GB00000001"] + } + + result = search_workorders("GA26", limit=10) + + self.assertEqual(result, ["GA26012001", "GA26012002"]) + mock_oracle.assert_not_called() + + @patch('mes_dashboard.services.wip_service._search_workorders_from_oracle') + @patch('mes_dashboard.services.wip_service._get_wip_search_index') + def test_workorder_search_with_cross_filters_falls_back(self, mock_index, mock_oracle): + mock_index.return_value = { + "workorders": ["GA26012001", "GA26012002"] + } + mock_oracle.return_value = ["GA26012001"] + + result = search_workorders("GA26", package="SOT-23") + + self.assertEqual(result, ["GA26012001"]) + mock_oracle.assert_called_once() + + +class TestDummyExclusionInAllFunctions(unittest.TestCase): + """Test DUMMY exclusion is applied in all WIP functions.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_wip_summary_excludes_dummy_by_default(self, mock_read_sql): + """get_wip_summary should exclude DUMMY by default.""" + mock_df = pd.DataFrame({ + 'TOTAL_LOTS': [100], + 'TOTAL_QTY_PCS': [1000], + 'RUN_LOTS': [80], + 'RUN_QTY_PCS': [800], + 'QUEUE_LOTS': [10], + 'QUEUE_QTY_PCS': [100], + 'HOLD_LOTS': [10], + 'HOLD_QTY_PCS': [100], + 'DATA_UPDATE_DATE': ['2026-01-26'] + }) + mock_read_sql.return_value = mock_df + + get_wip_summary() + + call_args = mock_read_sql.call_args[0][0] + self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_wip_summary_includes_dummy_when_specified(self, mock_read_sql): + """get_wip_summary should include DUMMY when specified.""" + mock_df = pd.DataFrame({ + 'TOTAL_LOTS': [100], + 'TOTAL_QTY_PCS': [1000], + 'RUN_LOTS': [80], + 'RUN_QTY_PCS': [800], + 'QUEUE_LOTS': [10], + 'QUEUE_QTY_PCS': [100], + 'HOLD_LOTS': [10], + 'HOLD_QTY_PCS': [100], + 'DATA_UPDATE_DATE': ['2026-01-26'] + }) + 
mock_read_sql.return_value = mock_df + + get_wip_summary(include_dummy=True) + + call_args = mock_read_sql.call_args[0][0] + self.assertNotIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_wip_matrix_excludes_dummy_by_default(self, mock_read_sql): + """get_wip_matrix should exclude DUMMY by default.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['切割'], + 'WORKCENTERSEQUENCE_GROUP': [1], + 'PACKAGE_LEF': ['SOT-23'], + 'QTY': [1000] + }) + mock_read_sql.return_value = mock_df + + get_wip_matrix() + + call_args = mock_read_sql.call_args[0][0] + self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_wip_hold_summary_excludes_dummy_by_default(self, mock_read_sql): + """get_wip_hold_summary should exclude DUMMY by default.""" + mock_df = pd.DataFrame({ + 'REASON': ['YieldLimit'], 'LOTS': [10], 'QTY': [1000] + }) + mock_read_sql.return_value = mock_df + + get_wip_hold_summary() + + call_args = mock_read_sql.call_args[0][0] + self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_workcenters_excludes_dummy_by_default(self, mock_read_sql): + """get_workcenters should exclude DUMMY by default.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['切割'], + 'WORKCENTERSEQUENCE_GROUP': [1], + 'LOT_COUNT': [100] + }) + mock_read_sql.return_value = mock_df + + get_workcenters() + + call_args = mock_read_sql.call_args[0][0] + self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_packages_excludes_dummy_by_default(self, mock_read_sql): + """get_packages should exclude DUMMY by default.""" + mock_df = pd.DataFrame({ + 'PACKAGE_LEF': ['SOT-23'], 'LOT_COUNT': [100] + }) + mock_read_sql.return_value = mock_df + + get_packages() 
+ + call_args = mock_read_sql.call_args[0][0] + self.assertIn("LOTID NOT LIKE '%DUMMY%'", call_args) + + +class TestMultipleFilterConditions(unittest.TestCase): + """Test multiple filter conditions work together.""" + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_wip_summary_with_all_filters(self, mock_read_sql): + """get_wip_summary should combine all filter conditions via parameterized queries.""" + mock_df = pd.DataFrame({ + 'TOTAL_LOTS': [50], + 'TOTAL_QTY_PCS': [500], + 'RUN_LOTS': [40], + 'RUN_QTY_PCS': [400], + 'QUEUE_LOTS': [5], + 'QUEUE_QTY_PCS': [50], + 'HOLD_LOTS': [5], + 'HOLD_QTY_PCS': [50], + 'QUALITY_HOLD_LOTS': [3], + 'QUALITY_HOLD_QTY_PCS': [30], + 'NON_QUALITY_HOLD_LOTS': [2], + 'NON_QUALITY_HOLD_QTY_PCS': [20], + 'DATA_UPDATE_DATE': ['2026-01-26'] + }) + mock_read_sql.return_value = mock_df + + get_wip_summary(workorder='GA26', lotid='A00') + + # Check SQL contains parameterized LIKE conditions + call_args = mock_read_sql.call_args + sql = call_args[0][0] + params = call_args[0][1] if len(call_args[0]) > 1 else {} + + self.assertIn("WORKORDER LIKE", sql) + self.assertIn("LOTID LIKE", sql) + self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql) + # Verify params contain the search patterns + self.assertTrue(any('%GA26%' in str(v) for v in params.values())) + self.assertTrue(any('%A00%' in str(v) for v in params.values())) + + @disable_cache + @patch('mes_dashboard.services.wip_service.read_sql_df') + def test_get_wip_matrix_with_all_filters(self, mock_read_sql): + """get_wip_matrix should combine all filter conditions via parameterized queries.""" + mock_df = pd.DataFrame({ + 'WORKCENTER_GROUP': ['切割'], + 'WORKCENTERSEQUENCE_GROUP': [1], + 'PACKAGE_LEF': ['SOT-23'], + 'QTY': [500] + }) + mock_read_sql.return_value = mock_df + + get_wip_matrix(workorder='GA26', lotid='A00', include_dummy=True) + + # Check SQL contains parameterized LIKE conditions + call_args = mock_read_sql.call_args + sql = call_args[0][0] + 
params = call_args[0][1] if len(call_args[0]) > 1 else {} + + self.assertIn("WORKORDER LIKE", sql) + self.assertIn("LOTID LIKE", sql) + # Should NOT contain DUMMY exclusion since include_dummy=True + self.assertNotIn("LOTID NOT LIKE '%DUMMY%'", sql) + # Verify params contain the search patterns + self.assertTrue(any('%GA26%' in str(v) for v in params.values())) + self.assertTrue(any('%A00%' in str(v) for v in params.values())) + + + +import pytest + + +class TestWipServiceIntegration: + """Integration tests that hit the actual database. + + These tests are skipped by default. Run with: + python -m pytest tests/test_wip_service.py -k Integration --run-integration + """ + + @pytest.mark.integration + def test_get_wip_summary_integration(self): + """Integration test for get_wip_summary.""" + result = get_wip_summary() + assert result is not None + assert result['totalLots'] > 0 + assert 'dataUpdateDate' in result + + @pytest.mark.integration + def test_get_wip_matrix_integration(self): + """Integration test for get_wip_matrix.""" + result = get_wip_matrix() + assert result is not None + assert len(result['workcenters']) > 0 + assert result['grand_total'] > 0 + + @pytest.mark.integration + def test_get_wip_hold_summary_integration(self): + """Integration test for get_wip_hold_summary.""" + result = get_wip_hold_summary() + assert result is not None + assert 'items' in result + + @pytest.mark.integration + def test_get_wip_detail_integration(self): + """Integration test for get_wip_detail.""" + # First get a valid workcenter + workcenters = get_workcenters() + assert workcenters is not None and len(workcenters) > 0 + + wc_name = workcenters[0]['name'] + result = get_wip_detail(wc_name, page=1, page_size=10) + + assert result is not None + assert result['workcenter'] == wc_name + assert 'summary' in result + assert 'lots' in result + assert 'pagination' in result + + @pytest.mark.integration + def test_get_workcenters_integration(self): + """Integration test for 
get_workcenters.""" + result = get_workcenters() + assert result is not None + assert len(result) > 0 + assert 'name' in result[0] + assert 'lot_count' in result[0] + + @pytest.mark.integration + def test_get_packages_integration(self): + """Integration test for get_packages.""" + result = get_packages() + assert result is not None + assert len(result) > 0 + assert 'name' in result[0] + assert 'lot_count' in result[0] + + @pytest.mark.integration + def test_search_workorders_integration(self): + """Integration test for search_workorders.""" + # Use a common prefix that likely exists + result = search_workorders('GA') + assert result is not None + # Should return a list (possibly empty if no GA* workorders) + assert isinstance(result, list) + + @pytest.mark.integration + def test_search_lot_ids_integration(self): + """Integration test for search_lot_ids.""" + # Use a common prefix that likely exists + result = search_lot_ids('GA') + assert result is not None + assert isinstance(result, list) + + @pytest.mark.integration + def test_dummy_exclusion_integration(self): + """Integration test to verify DUMMY exclusion works.""" + # Get summary with and without DUMMY + result_without_dummy = get_wip_summary(include_dummy=False) + result_with_dummy = get_wip_summary(include_dummy=True) + + assert result_without_dummy is not None + assert result_with_dummy is not None + + # If there are DUMMY lots, with_dummy should have more + # (or equal if no DUMMY lots exist) + assert result_with_dummy['totalLots'] >= result_without_dummy['totalLots'] + + @pytest.mark.integration + def test_workorder_filter_integration(self): + """Integration test for workorder filter.""" + # Get all data first + all_result = get_wip_summary() + assert all_result is not None + + # Search for a workorder that exists + workorders = search_workorders('GA', limit=1) + if workorders and len(workorders) > 0: + # Filter by that workorder + filtered_result = get_wip_summary(workorder=workorders[0]) + assert 
filtered_result is not None + # Filtered count should be less than or equal to total + assert filtered_result['totalLots'] <= all_result['totalLots'] + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_workcenter_mapping.py b/tests/test_workcenter_mapping.py new file mode 100644 index 0000000..758dc61 --- /dev/null +++ b/tests/test_workcenter_mapping.py @@ -0,0 +1,349 @@ +# -*- coding: utf-8 -*- +"""Unit tests for workcenter mapping in filter_cache module. + +Tests workcenter group lookup and mapping functionality. +""" + +import pytest +from unittest.mock import patch, MagicMock +import pandas as pd + + +class TestGetWorkcenterGroup: + """Test get_workcenter_group function.""" + + @pytest.fixture(autouse=True) + def reset_cache(self): + """Reset cache state before each test.""" + import mes_dashboard.services.filter_cache as fc + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + yield + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + + def test_returns_group_for_valid_workcenter(self): + """Test returns group for valid workcenter name.""" + import mes_dashboard.services.filter_cache as fc + + mock_mapping = { + 'DB-01': {'group': '焊接', 'sequence': 1}, + 'WB-01': {'group': '焊線', 'sequence': 2}, + } + + with patch.object(fc, 'get_workcenter_mapping', return_value=mock_mapping): + result = fc.get_workcenter_group('DB-01') + assert result == '焊接' + + def test_returns_none_for_unknown_workcenter(self): + """Test returns None for unknown workcenter name.""" + import mes_dashboard.services.filter_cache as fc + + mock_mapping = { + 'DB-01': {'group': '焊接', 'sequence': 1}, + } + + with patch.object(fc, 
'get_workcenter_mapping', return_value=mock_mapping): + result = fc.get_workcenter_group('UNKNOWN') + assert result is None + + def test_returns_none_when_mapping_unavailable(self): + """Test returns None when mapping is unavailable.""" + import mes_dashboard.services.filter_cache as fc + + with patch.object(fc, 'get_workcenter_mapping', return_value=None): + result = fc.get_workcenter_group('DB-01') + assert result is None + + +class TestGetWorkcenterShort: + """Test get_workcenter_short function.""" + + @pytest.fixture(autouse=True) + def reset_cache(self): + """Reset cache state before each test.""" + import mes_dashboard.services.filter_cache as fc + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + yield + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + + def test_returns_short_name_for_valid_workcenter(self): + """Test returns short name for valid workcenter.""" + import mes_dashboard.services.filter_cache as fc + from datetime import datetime + + # Set up cache directly + with fc._CACHE_LOCK: + fc._CACHE['workcenter_to_short'] = { + 'DB-01': 'DB', + 'WB-01': 'WB', + } + fc._CACHE['workcenter_groups'] = [{'name': '焊接', 'sequence': 1}] + fc._CACHE['workcenter_mapping'] = {} + fc._CACHE['last_refresh'] = datetime.now() + + result = fc.get_workcenter_short('DB-01') + assert result == 'DB' + + def test_returns_none_for_unknown_workcenter(self): + """Test returns None for unknown workcenter.""" + import mes_dashboard.services.filter_cache as fc + from datetime import datetime + + with fc._CACHE_LOCK: + fc._CACHE['workcenter_to_short'] = { + 'DB-01': 'DB', + } + fc._CACHE['workcenter_groups'] = [{'name': '焊接', 'sequence': 1}] + 
fc._CACHE['workcenter_mapping'] = {} + fc._CACHE['last_refresh'] = datetime.now() + + result = fc.get_workcenter_short('UNKNOWN') + assert result is None + + +class TestGetWorkcentersByGroup: + """Test get_workcenters_by_group function.""" + + def test_returns_workcenters_in_group(self): + """Test returns all workcenters in specified group.""" + import mes_dashboard.services.filter_cache as fc + + mock_mapping = { + 'DB-01': {'group': '焊接', 'sequence': 1}, + 'DB-02': {'group': '焊接', 'sequence': 1}, + 'WB-01': {'group': '焊線', 'sequence': 2}, + } + + with patch.object(fc, 'get_workcenter_mapping', return_value=mock_mapping): + result = fc.get_workcenters_by_group('焊接') + + assert len(result) == 2 + assert 'DB-01' in result + assert 'DB-02' in result + assert 'WB-01' not in result + + def test_returns_empty_for_unknown_group(self): + """Test returns empty list for unknown group.""" + import mes_dashboard.services.filter_cache as fc + + mock_mapping = { + 'DB-01': {'group': '焊接', 'sequence': 1}, + } + + with patch.object(fc, 'get_workcenter_mapping', return_value=mock_mapping): + result = fc.get_workcenters_by_group('UNKNOWN') + assert result == [] + + def test_returns_empty_when_mapping_unavailable(self): + """Test returns empty list when mapping unavailable.""" + import mes_dashboard.services.filter_cache as fc + + with patch.object(fc, 'get_workcenter_mapping', return_value=None): + result = fc.get_workcenters_by_group('焊接') + assert result == [] + + +class TestGetWorkcentersForGroups: + """Test get_workcenters_for_groups function.""" + + def test_returns_workcenters_for_multiple_groups(self): + """Test returns workcenters for multiple groups.""" + import mes_dashboard.services.filter_cache as fc + + mock_mapping = { + 'DB-01': {'group': '焊接', 'sequence': 1}, + 'WB-01': {'group': '焊線', 'sequence': 2}, + 'MD-01': {'group': '成型', 'sequence': 3}, + } + + with patch.object(fc, 'get_workcenter_mapping', return_value=mock_mapping): + result = 
fc.get_workcenters_for_groups(['焊接', '焊線']) + + assert len(result) == 2 + assert 'DB-01' in result + assert 'WB-01' in result + assert 'MD-01' not in result + + def test_returns_empty_for_empty_groups_list(self): + """Test returns empty list for empty groups list.""" + import mes_dashboard.services.filter_cache as fc + + mock_mapping = { + 'DB-01': {'group': '焊接', 'sequence': 1}, + } + + with patch.object(fc, 'get_workcenter_mapping', return_value=mock_mapping): + result = fc.get_workcenters_for_groups([]) + assert result == [] + + +class TestGetWorkcenterGroups: + """Test get_workcenter_groups function.""" + + @pytest.fixture(autouse=True) + def reset_cache(self): + """Reset cache state before each test.""" + import mes_dashboard.services.filter_cache as fc + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + yield + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + + def test_returns_groups_sorted_by_sequence(self): + """Test returns groups sorted by sequence.""" + import mes_dashboard.services.filter_cache as fc + from datetime import datetime + + # Set up cache directly + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = [ + {'name': '成型', 'sequence': 3}, + {'name': '焊接', 'sequence': 1}, + {'name': '焊線', 'sequence': 2}, + ] + fc._CACHE['workcenter_mapping'] = {} + fc._CACHE['workcenter_to_short'] = {} + fc._CACHE['last_refresh'] = datetime.now() + + result = fc.get_workcenter_groups() + + # Should preserve original order (as stored) + assert len(result) == 3 + names = [g['name'] for g in result] + assert '成型' in names + assert '焊接' in names + assert '焊線' in names + + +class TestLoadWorkcenterMappingFromSpec: + """Test 
class TestLoadWorkcenterMappingFromSpec:
    """Tests for _load_workcenter_mapping_from_spec."""

    def test_builds_mapping_from_spec_view(self):
        """Rows from SPEC_WORKCENTER_V fold into groups/mapping/short-name tables."""
        import mes_dashboard.services.filter_cache as fc

        spec_rows = pd.DataFrame({
            'WORK_CENTER': ['DB-01', 'DB-02', 'WB-01'],
            'WORK_CENTER_GROUP': ['焊接', '焊接', '焊線'],
            'WORKCENTERSEQUENCE_GROUP': [1, 1, 2],
            'WORK_CENTER_SHORT': ['DB', 'DB', 'WB'],
        })

        with patch.object(fc, 'read_sql_df', return_value=spec_rows):
            groups, mapping, short_mapping = fc._load_workcenter_mapping_from_spec()

        # Two distinct groups, exactly the ones present in the view.
        assert len(groups) == 2
        assert {g['name'] for g in groups} == {'焊接', '焊線'}

        # Every workcenter is mapped to its group.
        assert len(mapping) == 3
        assert mapping['DB-01']['group'] == '焊接'
        assert mapping['WB-01']['group'] == '焊線'

        # Short-name lookup follows WORK_CENTER_SHORT.
        assert short_mapping['DB-01'] == 'DB'
        assert short_mapping['WB-01'] == 'WB'

    def test_returns_empty_when_no_data(self):
        """No result set at all yields three empty structures."""
        import mes_dashboard.services.filter_cache as fc

        with patch.object(fc, 'read_sql_df', return_value=None):
            groups, mapping, short_mapping = fc._load_workcenter_mapping_from_spec()

        assert (groups, mapping, short_mapping) == ([], {}, {})

    def test_handles_empty_dataframe(self):
        """A headers-only frame also yields three empty structures."""
        import mes_dashboard.services.filter_cache as fc

        empty = pd.DataFrame(columns=[
            'WORK_CENTER', 'WORK_CENTER_GROUP',
            'WORKCENTERSEQUENCE_GROUP', 'WORK_CENTER_SHORT',
        ])

        with patch.object(fc, 'read_sql_df', return_value=empty):
            groups, mapping, short_mapping = fc._load_workcenter_mapping_from_spec()

        assert (groups, mapping, short_mapping) == ([], {}, {})
mes_dashboard.services.filter_cache as fc + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + yield + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = None + fc._CACHE['workcenter_mapping'] = None + fc._CACHE['workcenter_to_short'] = None + fc._CACHE['last_refresh'] = None + fc._CACHE['is_loading'] = False + + def test_returns_not_loaded_when_empty(self): + """Test returns loaded=False when cache empty.""" + import mes_dashboard.services.filter_cache as fc + + result = fc.get_cache_status() + + assert result['loaded'] is False + assert result['last_refresh'] is None + + def test_returns_loaded_when_data_exists(self): + """Test returns loaded=True when cache has data.""" + import mes_dashboard.services.filter_cache as fc + from datetime import datetime + + now = datetime.now() + with fc._CACHE_LOCK: + fc._CACHE['workcenter_groups'] = [{'name': 'G1', 'sequence': 1}] + fc._CACHE['workcenter_mapping'] = {'WC1': {'group': 'G1', 'sequence': 1}} + fc._CACHE['last_refresh'] = now + + result = fc.get_cache_status() + + assert result['loaded'] is True + assert result['last_refresh'] is not None + assert result['workcenter_groups_count'] == 1 + assert result['workcenter_mapping_count'] == 1 diff --git a/tools/generate_documentation.py b/tools/generate_documentation.py new file mode 100644 index 0000000..2a81b88 --- /dev/null +++ b/tools/generate_documentation.py @@ -0,0 +1,344 @@ +""" +生成 MES 数据库参考文档 +用于报表开发参考 +""" + +import json +from pathlib import Path +from datetime import datetime + +# 读取表结构信息 +ROOT_DIR = Path(__file__).resolve().parent.parent +DATA_FILE = ROOT_DIR / 'data' / 'table_schema_info.json' +with open(DATA_FILE, 'r', encoding='utf-8') as f: + table_info = json.load(f) + +# 表用途描述(根据表名推断) +TABLE_DESCRIPTIONS = { + 'DW_MES_CONTAINER': '容器/批次主檔 - 目前在製容器狀態、數量與流程資訊', + 'DW_MES_HOLDRELEASEHISTORY': 
'Hold/Release 歷史表 - 批次停工與解除紀錄', + 'DW_MES_JOB': '設備維修工單表 - 維修工單的當前狀態與流程', + 'DW_MES_LOTREJECTHISTORY': '批次不良/報廢歷史表 - 不良原因與數量', + 'DW_MES_LOTWIPDATAHISTORY': '在製數據採集歷史表 - 製程量測/參數紀錄', + 'DW_MES_LOTWIPHISTORY': '在製流轉歷史表 - 批次進出站與流程軌跡', + 'DW_MES_MAINTENANCE': '設備保養/維護紀錄表 - 保養計畫與點檢數據', + 'DW_MES_PARTREQUESTORDER': '維修用料請求表 - 維修/設備零件請領', + 'DW_MES_PJ_COMBINEDASSYLOTS': '併批紀錄表 - 合批/合併批次關聯與數量資訊', + 'DW_MES_RESOURCESTATUS': '設備狀態變更歷史表 - 狀態切換與原因', + 'DW_MES_RESOURCESTATUS_SHIFT': '設備狀態班次彙總表 - 班次級狀態/工時', + 'DW_MES_WIP': '在製品現況表(含歷史累積)- 當前 WIP 狀態/數量', + 'DW_MES_HM_LOTMOVEOUT': '批次出站事件歷史表 - 出站/移出交易', + 'DW_MES_JOBTXNHISTORY': '維修工單交易歷史表 - 工單狀態變更紀錄', + 'DW_MES_LOTMATERIALSHISTORY': '批次物料消耗歷史表 - 用料與批次關聯', + 'DW_MES_RESOURCE': '資源表 - 設備/載具等資源基本資料(OBJECTCATEGORY=ASSEMBLY 時,RESOURCENAME 為設備編號)' +} + +# 常见字段说明 +COMMON_FIELD_NOTES = { + 'ID': '唯一标识符', + 'NAME': '名称', + 'STATUS': '状态', + 'TIMESTAMP': '时间戳', + 'CREATEDATE': '创建日期', + 'UPDATEDATE': '更新日期', + 'LOTID': '批次ID', + 'CONTAINERID': '容器ID', + 'RESOURCEID': '资源ID', + 'EQUIPMENTID': '设备ID', + 'OPERATIONID': '工序ID', + 'JOBID': '工单ID', + 'PRODUCTID': '产品ID', + 'CUSTOMERID': '客户ID', + 'QTY': '数量', + 'QUANTITY': '数量' +} + + +def generate_markdown(): + """生成 Markdown 文档""" + + md = [] + + # 标题和简介 + md.append("# MES 数据库报表开发参考文档\n") + md.append(f"**生成时间**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n") + md.append("---\n") + + # 目录 + md.append("## 目录\n") + md.append("1. [数据库连接信息](#数据库连接信息)") + md.append("2. [数据库概览](#数据库概览)") + md.append("3. [表结构详细说明](#表结构详细说明)") + md.append("4. [报表开发注意事项](#报表开发注意事项)") + md.append("5. [常用查询示例](#常用查询示例)\n") + md.append("---\n") + + # 1. 
数据库连接信息 + md.append("## 数据库连接信息\n") + md.append("### 连接参数\n") + md.append("| 参数 | 值 |") + md.append("|------|------|") + md.append("| 数据库类型 | Oracle Database 19c Enterprise Edition |") + md.append("| 主机地址 | 請參考 .env 檔案 (DB_HOST) |") + md.append("| 端口 | 請參考 .env 檔案 (DB_PORT) |") + md.append("| 服务名 | 請參考 .env 檔案 (DB_SERVICE) |") + md.append("| 用户名 | 請參考 .env 檔案 (DB_USER) |") + md.append("| 密码 | 請參考 .env 檔案 (DB_PASSWORD) |\n") + + md.append("### Python 连接示例\n") + md.append("```python") + md.append("import os") + md.append("import oracledb") + md.append("from dotenv import load_dotenv") + md.append("") + md.append("# 載入環境變數") + md.append("load_dotenv()") + md.append("") + md.append("# 连接配置 (從環境變數讀取)") + md.append("DB_CONFIG = {") + md.append(" 'user': os.getenv('DB_USER'),") + md.append(" 'password': os.getenv('DB_PASSWORD'),") + md.append(" 'dsn': f\"(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST={os.getenv('DB_HOST')})(PORT={os.getenv('DB_PORT')})))(CONNECT_DATA=(SERVICE_NAME={os.getenv('DB_SERVICE')})))\"") + md.append("}") + md.append("") + md.append("# 建立连接") + md.append("connection = oracledb.connect(**DB_CONFIG)") + md.append("cursor = connection.cursor()") + md.append("") + md.append("# 执行查询") + md.append("cursor.execute('SELECT * FROM DW_MES_WIP WHERE ROWNUM <= 10')") + md.append("results = cursor.fetchall()") + md.append("") + md.append("# 关闭连接") + md.append("cursor.close()") + md.append("connection.close()") + md.append("```\n") + + md.append("### JDBC 连接字符串\n") + md.append("```") + md.append("jdbc:oracle:thin:@${DB_HOST}:${DB_PORT}:${DB_SERVICE}") + md.append("```\n") + + # 2. 
数据库概览 + md.append("---\n") + md.append("## 数据库概览\n") + md.append("### 表统计信息\n") + md.append("| # | 表名 | 用途 | 数据量 |") + md.append("|---|------|------|--------|") + + for idx, (table_name, info) in enumerate(sorted(table_info.items()), 1): + if 'error' not in info: + row_count = f"{info['row_count']:,}" + description = TABLE_DESCRIPTIONS.get(table_name, '待补充') + md.append(f"| {idx} | `{table_name}` | {description} | {row_count} |") + + md.append("") + + # 计算总数据量 + total_rows = sum(info['row_count'] for info in table_info.values() if 'error' not in info) + md.append(f"**总数据量**: {total_rows:,} 行\n") + + # 3. 表结构详细说明 + md.append("---\n") + md.append("## 表结构详细说明\n") + + for table_name in sorted(table_info.keys()): + info = table_info[table_name] + + if 'error' in info: + continue + + md.append(f"### {table_name}\n") + + # 表说明 + md.append(f"**用途**: {TABLE_DESCRIPTIONS.get(table_name, '待补充')}\n") + md.append(f"**数据量**: {info['row_count']:,} 行\n") + + if info.get('table_comment'): + md.append(f"**表注释**: {info['table_comment']}\n") + + # 字段列表 + md.append("#### 字段列表\n") + md.append("| # | 字段名 | 数据类型 | 长度 | 可空 | 说明 |") + md.append("|---|--------|----------|------|------|------|") + + schema = info.get('schema', []) + for col in schema: + col_num = col['column_id'] + col_name = col['column_name'] + + # 构建数据类型显示 + if col['data_type'] in ['VARCHAR2', 'CHAR']: + data_type = f"{col['data_type']}({col['data_length']})" + elif col['data_type'] == 'NUMBER' and col['data_precision']: + if col['data_scale']: + data_type = f"NUMBER({col['data_precision']},{col['data_scale']})" + else: + data_type = f"NUMBER({col['data_precision']})" + else: + data_type = col['data_type'] + + nullable = "是" if col['nullable'] == 'Y' else "否" + + # 获取字段说明 + column_comments = info.get('column_comments', {}) + comment = column_comments.get(col_name, '') + + # 如果没有注释,尝试从常见字段说明中获取 + if not comment: + for key, value in COMMON_FIELD_NOTES.items(): + if key in col_name: + comment = value + break + + md.append(f"| 
{col_num} | `{col_name}` | {data_type} | {col.get('data_length', '-')} | {nullable} | {comment} |") + + md.append("") + + # 索引信息 + indexes = info.get('indexes', []) + if indexes: + md.append("#### 索引\n") + md.append("| 索引名 | 类型 | 字段 |") + md.append("|--------|------|------|") + for idx_info in indexes: + idx_type = "唯一索引" if idx_info[1] == 'UNIQUE' else "普通索引" + md.append(f"| `{idx_info[0]}` | {idx_type} | {idx_info[2]} |") + md.append("") + + md.append("---\n") + + # 4. 报表开发注意事项 + md.append("## 报表开发注意事项\n") + md.append("### 性能优化建议\n") + md.append("1. **大数据量表查询优化**") + md.append(" - 以下表数据量较大,查询时务必添加时间范围限制:") + + large_tables = [(name, info['row_count']) for name, info in table_info.items() + if 'error' not in info and info['row_count'] > 10000000] + large_tables.sort(key=lambda x: x[1], reverse=True) + + for table_name, count in large_tables: + md.append(f" - `{table_name}`: {count:,} 行") + + md.append("") + md.append("2. **索引使用**") + md.append(" - 查询时尽量使用已建立索引的字段作为查询条件") + md.append(" - 避免在索引字段上使用函数,会导致索引失效") + md.append("") + md.append("3. **连接池配置**") + md.append(" - 建议使用连接池管理数据库连接") + md.append(" - 推荐连接池大小:5-10 个连接") + md.append("") + md.append("4. **查询超时设置**") + md.append(" - 建议设置查询超时时间为 30-60 秒") + md.append(" - 避免长时间运行的查询影响系统性能") + md.append("") + + md.append("### 数据时效性\n") + md.append("- **实时数据表**: `DW_MES_WIP`(含歷史累積), `DW_MES_RESOURCESTATUS`") + md.append("- **历史数据表**: 带有 `HISTORY` 后缀的表") + md.append("- **主数据表**: `DW_MES_RESOURCE`, `DW_MES_CONTAINER`") + md.append("") + + md.append("### 常用时间字段\n") + md.append("大多数历史表包含以下时间相关字段:") + md.append("- `CREATEDATE` / `CREATETIMESTAMP`: 记录创建时间") + md.append("- `UPDATEDATE` / `UPDATETIMESTAMP`: 记录更新时间") + md.append("- `TRANSACTIONDATE`: 交易发生时间") + md.append("") + + md.append("### 数据权限\n") + md.append("- 當前帳號為唯讀帳號 (詳見 .env 中的 DB_USER)") + md.append("- 仅可执行 SELECT 查询") + md.append("- 无法进行 INSERT, UPDATE, DELETE 操作") + md.append("") + + # 5. 常用查询示例 + md.append("---\n") + md.append("## 常用查询示例\n") + + md.append("### 1. 
查询当前在制品数量\n") + md.append("```sql") + md.append("SELECT COUNT(*) as WIP_COUNT") + md.append("FROM DW_MES_WIP") + md.append("WHERE CURRENTSTATUSID IS NOT NULL;") + md.append("```\n") + + md.append("### 2. 查询设备状态统计\n") + md.append("```sql") + md.append("SELECT") + md.append(" CURRENTSTATUSID,") + md.append(" COUNT(*) as COUNT") + md.append("FROM DW_MES_RESOURCESTATUS") + md.append("GROUP BY CURRENTSTATUSID") + md.append("ORDER BY COUNT DESC;") + md.append("```\n") + + md.append("### 3. 查询最近 7 天的批次历史\n") + md.append("```sql") + md.append("SELECT *") + md.append("FROM DW_MES_LOTWIPHISTORY") + md.append("WHERE CREATEDATE >= SYSDATE - 7") + md.append("ORDER BY CREATEDATE DESC;") + md.append("```\n") + + md.append("### 4. 查询工单完成情况\n") + md.append("```sql") + md.append("SELECT") + md.append(" JOBID,") + md.append(" JOBSTATUS,") + md.append(" COUNT(*) as COUNT") + md.append("FROM DW_MES_JOB") + md.append("GROUP BY JOBID, JOBSTATUS") + md.append("ORDER BY JOBID;") + md.append("```\n") + + md.append("### 5. 按日期统计生产数量\n") + md.append("```sql") + md.append("SELECT") + md.append(" TRUNC(CREATEDATE) as PRODUCTION_DATE,") + md.append(" COUNT(*) as LOT_COUNT") + md.append("FROM DW_MES_HM_LOTMOVEOUT") + md.append("WHERE CREATEDATE >= SYSDATE - 30") + md.append("GROUP BY TRUNC(CREATEDATE)") + md.append("ORDER BY PRODUCTION_DATE DESC;") + md.append("```\n") + + md.append("### 6. 
联表查询示例(批次与容器)\n") + md.append("```sql") + md.append("SELECT") + md.append(" w.LOTID,") + md.append(" w.CONTAINERNAME,") + md.append(" c.CURRENTSTATUSID,") + md.append(" c.CUSTOMERID") + md.append("FROM DW_MES_WIP w") + md.append("LEFT JOIN DW_MES_CONTAINER c ON w.CONTAINERID = c.CONTAINERID") + md.append("WHERE w.CREATEDATE >= SYSDATE - 1") + md.append("ORDER BY w.CREATEDATE DESC;") + md.append("```\n") + + md.append("---\n") + md.append("## 附录\n") + md.append("### 文档更新记录\n") + md.append(f"- {datetime.now().strftime('%Y-%m-%d')}: 初始版本创建") + md.append("") + md.append("### 联系方式\n") + md.append("如有疑问或需要补充信息,请联系数据库管理员。\n") + + return '\n'.join(md) + + +if __name__ == "__main__": + print("Generating documentation...") + markdown_content = generate_markdown() + + output_file = ROOT_DIR / 'docs' / 'MES_Database_Reference.md' + with open(output_file, 'w', encoding='utf-8') as f: + f.write(markdown_content) + + print(f"[OK] Documentation generated: {output_file}") + + + + diff --git a/tools/query_table_schema.py b/tools/query_table_schema.py new file mode 100644 index 0000000..2ea195a --- /dev/null +++ b/tools/query_table_schema.py @@ -0,0 +1,261 @@ +""" +查询 MES 表结构信息脚本 +用于生成报表开发参考文档 +""" + +import sys +import io +import os +import json +import argparse +from pathlib import Path + +import oracledb + +# 设置 UTF-8 编码输出 +sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace') + +# Load .env file +try: + from dotenv import load_dotenv + env_path = Path(__file__).resolve().parent.parent / '.env' + load_dotenv(env_path) +except ImportError: + pass + +# 数据库连接信息 (从环境变量读取,必须在 .env 中设置) +DB_HOST = os.getenv('DB_HOST', '') +DB_PORT = os.getenv('DB_PORT', '1521') +DB_SERVICE = os.getenv('DB_SERVICE', '') +DB_USER = os.getenv('DB_USER', '') +DB_PASSWORD = os.getenv('DB_PASSWORD', '') + +DB_CONFIG = { + 'user': DB_USER, + 'password': DB_PASSWORD, + 'dsn': 
def get_table_schema(cursor, table_name, owner=None):
    """Return column metadata for *table_name* as a list of dicts.

    Queries ALL_TAB_COLUMNS; when *owner* is given the lookup is scoped to
    that schema. Rows come back ordered by COLUMN_ID.
    """
    query = """
    SELECT
        COLUMN_NAME,
        DATA_TYPE,
        DATA_LENGTH,
        DATA_PRECISION,
        DATA_SCALE,
        NULLABLE,
        DATA_DEFAULT,
        COLUMN_ID
    FROM ALL_TAB_COLUMNS
    WHERE TABLE_NAME = :table_name
    """
    binds = {'table_name': table_name}
    if owner:
        query += " AND OWNER = :owner ORDER BY COLUMN_ID"
        binds['owner'] = owner
    else:
        query += " ORDER BY COLUMN_ID"
    cursor.execute(query, **binds)

    field_names = (
        'column_name', 'data_type', 'data_length', 'data_precision',
        'data_scale', 'nullable', 'default_value', 'column_id',
    )
    return [dict(zip(field_names, row)) for row in cursor.fetchall()]
def get_table_indexes(cursor, table_name, owner=None):
    """Return index metadata for *table_name*.

    Each row is (INDEX_NAME, UNIQUENESS, "COL1, COL2, ...") with the column
    list aggregated in column-position order.

    Bug fix: the join between ALL_INDEXES and ALL_IND_COLUMNS now also
    correlates the owner (i.OWNER = ic.INDEX_OWNER). Joining on names alone
    can merge identically named indexes/tables from other visible schemas
    and produce duplicated or wrong column lists.
    """
    query = """
    SELECT
        i.INDEX_NAME,
        i.UNIQUENESS,
        LISTAGG(ic.COLUMN_NAME, ', ') WITHIN GROUP (ORDER BY ic.COLUMN_POSITION) as COLUMNS
    FROM ALL_INDEXES i
    JOIN ALL_IND_COLUMNS ic
        ON i.OWNER = ic.INDEX_OWNER
        AND i.INDEX_NAME = ic.INDEX_NAME
        AND i.TABLE_NAME = ic.TABLE_NAME
    WHERE i.TABLE_NAME = :table_name
    GROUP BY i.INDEX_NAME, i.UNIQUENESS
    ORDER BY i.INDEX_NAME
    """
    if owner:
        # Scope to one schema; splice the extra predicate into the WHERE.
        query = query.replace(
            "WHERE i.TABLE_NAME = :table_name",
            "WHERE i.TABLE_NAME = :table_name AND i.TABLE_OWNER = :owner",
        )
        cursor.execute(query, table_name=table_name, owner=owner)
    else:
        cursor.execute(query, table_name=table_name)
    return cursor.fetchall()


def get_sample_data(cursor, table_name, owner=None, limit=5):
    """Fetch up to *limit* sample rows from the table.

    Returns (column_names, rows) on success, or (None, error_message) when
    the query fails (e.g. insufficient privileges) — callers must check for
    the None sentinel.
    """
    try:
        # Identifiers come from the data dictionary (ALL_OBJECTS), not user
        # input, and bind variables cannot name identifiers anyway — so the
        # f-string interpolation here is acceptable.
        target = f"{owner}.{table_name}" if owner else table_name
        cursor.execute(f"SELECT * FROM {target} WHERE ROWNUM <= {limit}")
        columns = [col[0] for col in cursor.description]
        return columns, cursor.fetchall()
    except Exception as e:
        return None, str(e)


def main():
    """Scan the configured tables (or a whole schema) and dump schema info to JSON.

    CLI:
        --schema OWNER   scan every TABLE/VIEW in that schema instead of
                         the hard-coded MES_TABLES list
        --output PATH    JSON output path (default: data/table_schema_info.json)
    """
    parser = argparse.ArgumentParser(description="Query Oracle table/view schema information")
    parser.add_argument(
        "--schema",
        help="Schema/owner to scan (e.g. DWH). If set, scans all TABLE/VIEW in that schema.",
    )
    parser.add_argument(
        "--output",
        help="Output JSON path (default: data/table_schema_info.json)",
        default=None,
    )
    args = parser.parse_args()

    print("Connecting to database...")
    connection = oracledb.connect(**DB_CONFIG)
    cursor = connection.cursor()

    # try/finally so the connection is released even if a scan blows up
    # part-way through (the original leaked it on any uncaught error).
    try:
        all_table_info = {}

        owner = args.schema.strip().upper() if args.schema else None
        if owner:
            cursor.execute(
                """
                SELECT OBJECT_NAME
                FROM ALL_OBJECTS
                WHERE OWNER = :owner
                AND OBJECT_TYPE IN ('TABLE', 'VIEW')
                ORDER BY OBJECT_NAME
                """,
                owner=owner,
            )
            table_list = [row[0] for row in cursor.fetchall()]
        else:
            table_list = MES_TABLES

        print(f"\nQuerying schema information for {len(table_list)} objects...\n")

        for idx, table_name in enumerate(table_list, 1):
            print(f"[{idx}/{len(table_list)}] Processing {table_name}...")

            try:
                schema = get_table_schema(cursor, table_name, owner=owner)
                table_comment, column_comments = get_table_comments(cursor, table_name, owner=owner)
                indexes = get_table_indexes(cursor, table_name, owner=owner)

                if owner:
                    cursor.execute(f"SELECT COUNT(*) FROM {owner}.{table_name}")
                else:
                    cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
                row_count = cursor.fetchone()[0]

                sample_columns, sample_data = get_sample_data(cursor, table_name, owner=owner, limit=3)

                all_table_info[table_name] = {
                    'owner': owner,
                    'table_comment': table_comment,
                    'row_count': row_count,
                    'schema': schema,
                    'column_comments': column_comments,
                    'indexes': indexes,
                    'sample_columns': sample_columns,
                    'sample_data': sample_data
                }

            except Exception as e:
                # Record the failure per table and keep scanning the rest.
                print(f"    Error: {str(e)}")
                all_table_info[table_name] = {'error': str(e)}

        if args.output:
            output_file = Path(args.output)
        else:
            output_file = Path(__file__).resolve().parent.parent / 'data' / 'table_schema_info.json'
        with open(output_file, 'w', encoding='utf-8') as f:
            # default=str: dates and other non-JSON types are stringified.
            json.dump(all_table_info, f, ensure_ascii=False, indent=2, default=str)

        print(f"\n[OK] Schema information saved to {output_file}")
    finally:
        cursor.close()
        connection.close()
        print("[OK] Connection closed")


if __name__ == "__main__":
    main()
{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + print(f"Host: {DB_HOST}:{DB_PORT}") + print(f"Service Name: {DB_SERVICE}") + print(f"User: {DB_USER}") + print("=" * 60) + + try: + # 尝试连接数据库 + print("\n[1/3] Attempting to connect to database...") + connection = oracledb.connect(**DB_CONFIG) + print("[OK] Connection successful!") + + # 获取数据库版本信息 + print("\n[2/3] Retrieving database version...") + cursor = connection.cursor() + cursor.execute("SELECT * FROM v$version WHERE banner LIKE 'Oracle%'") + version = cursor.fetchone() + if version: + print(f"[OK] Database Version: {version[0]}") + + # 测试每个表的访问权限 + print("\n[3/3] Testing access to MES tables...") + print("-" * 60) + + accessible_tables = [] + inaccessible_tables = [] + + for table_name in MES_TABLES: + try: + # 尝试查询表的行数 + cursor.execute(f"SELECT COUNT(*) FROM {table_name}") + count = cursor.fetchone()[0] + print(f"[OK] {table_name:<35} - {count:,} rows") + accessible_tables.append(table_name) + except oracledb.DatabaseError as e: + error_obj, = e.args + print(f"[FAIL] {table_name:<35} - Error: {error_obj.message}") + inaccessible_tables.append((table_name, error_obj.message)) + + # 汇总结果 + print("\n" + "=" * 60) + print("Test Summary") + print("=" * 60) + print(f"Total tables tested: {len(MES_TABLES)}") + print(f"Accessible tables: {len(accessible_tables)}") + print(f"Inaccessible tables: {len(inaccessible_tables)}") + + if inaccessible_tables: + print("\nInaccessible Tables:") + for table, error in inaccessible_tables: + print(f" - {table}: {error}") + + # 关闭连接 + cursor.close() + connection.close() + print("\n[OK] Connection closed successfully") + + return len(inaccessible_tables) == 0 + + except oracledb.DatabaseError as e: + error_obj, = e.args + print(f"\n[FAIL] Database Error: {error_obj.message}") + print(f" Error Code: {error_obj.code}") + return False + + except Exception as e: + print(f"\n[FAIL] Unexpected Error: {str(e)}") + return False + + +def main(): + """主函数""" + try: + success = 
#!/usr/bin/env python3
"""
Generate a list of accessible TABLE/VIEW objects under a specific owner (default: DWH)
and update docs/Oracle_Authorized_Objects.md.
"""

import os
import sys
from collections import Counter
from datetime import datetime
from pathlib import Path

# Repo root: this tool lives under tools/, so two levels up.
ROOT_DIR = Path(__file__).resolve().parent.parent


def load_env() -> None:
    """Load .env if available (best-effort).

    Prefers python-dotenv; falls back to a minimal hand parser so the tool
    still works without the dependency. Existing environment variables are
    never overwritten (setdefault).
    """
    env_path = ROOT_DIR / ".env"
    try:
        from dotenv import load_dotenv  # type: ignore

        load_dotenv(env_path)
        return
    except Exception:
        pass

    if not env_path.exists():
        return
    # Explicit utf-8: .env may contain non-ASCII values and the platform
    # default encoding is not reliable (e.g. cp950 on Windows).
    for line in env_path.read_text(encoding="utf-8").splitlines():
        if not line or line.strip().startswith("#") or "=" not in line:
            continue
        key, value = line.split("=", 1)
        os.environ.setdefault(key.strip(), value.strip())


def get_connection():
    """Open an Oracle connection built from the DB_* environment variables."""
    # Imported lazily so DB-free helpers (load_env) are usable without the driver.
    import oracledb

    host = os.getenv("DB_HOST", "")
    port = os.getenv("DB_PORT", "1521")
    service = os.getenv("DB_SERVICE", "")
    user = os.getenv("DB_USER", "")
    password = os.getenv("DB_PASSWORD", "")
    dsn = (
        "(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)"
        f"(HOST={host})(PORT={port})))(CONNECT_DATA=(SERVICE_NAME={service})))"
    )
    return oracledb.connect(user=user, password=password, dsn=dsn)


def main() -> int:
    """Collect accessible TABLE/VIEW objects under *owner* and write the report.

    Usage: update_oracle_authorized_objects.py [OWNER]   (default: DWH)
    Returns 0 on success; used as the process exit code.
    """
    owner = "DWH"
    if len(sys.argv) > 1:
        owner = sys.argv[1].strip().upper()
    # Anchored on the repo root so the tool works from any CWD (the
    # original used a CWD-relative "docs/..." path, unlike the other tools).
    output_path = ROOT_DIR / "docs" / "Oracle_Authorized_Objects.md"

    load_env()
    conn = get_connection()
    cur = conn.cursor()

    cur.execute("SELECT USER FROM DUAL")
    user = cur.fetchone()[0]

    # Roles granted to the current user
    cur.execute("SELECT GRANTED_ROLE FROM USER_ROLE_PRIVS")
    roles = [r[0] for r in cur.fetchall()]

    # Accessible objects under owner
    cur.execute(
        """
        SELECT OBJECT_NAME, OBJECT_TYPE
        FROM ALL_OBJECTS
        WHERE OWNER = :p_owner
        AND OBJECT_TYPE IN ('TABLE', 'VIEW')
        ORDER BY OBJECT_NAME
        """,
        p_owner=owner,
    )
    objects = cur.fetchall()

    # Direct + PUBLIC grants
    cur.execute(
        """
        SELECT o.object_name, o.object_type, p.privilege,
               CASE WHEN p.grantee = 'PUBLIC' THEN 'PUBLIC' ELSE 'DIRECT' END AS source
        FROM all_tab_privs p
        JOIN all_objects o
          ON o.owner = p.table_schema
         AND o.object_name = p.table_name
        WHERE p.grantee IN (:p_user, 'PUBLIC')
          AND o.owner = :p_owner
          AND o.object_type IN ('TABLE', 'VIEW')
        """,
        p_user=user,
        p_owner=owner,
    )
    direct_rows = cur.fetchall()

    # Grants received through each granted role
    role_rows = []
    for role in roles:
        cur.execute(
            """
            SELECT o.object_name, o.object_type, p.privilege, p.role AS source
            FROM role_tab_privs p
            JOIN all_objects o
              ON o.owner = p.owner
             AND o.object_name = p.table_name
            WHERE p.role = :p_role
              AND o.owner = :p_owner
              AND o.object_type IN ('TABLE', 'VIEW')
            """,
            p_role=role,
            p_owner=owner,
        )
        role_rows.extend(cur.fetchall())

    # Aggregate privileges by (name, type) object key
    info = {}
    for name, otype in objects:
        info[(name, otype)] = {"privs": set(), "sources": set()}

    for name, otype, priv, source in direct_rows + role_rows:
        key = (name, otype)
        if key not in info:
            info[key] = {"privs": set(), "sources": set()}
        info[key]["privs"].add(priv)
        info[key]["sources"].add(source)

    # Objects visible via ALL_OBJECTS but absent from the grant views are
    # reachable through system privileges — label them explicitly.
    for key, data in info.items():
        if not data["privs"]:
            data["privs"].add("UNKNOWN")
        if not data["sources"]:
            data["sources"].add("SYSTEM")

    type_counts = Counter(k[1] for k in info.keys())
    source_counts = Counter()
    for data in info.values():
        for s in data["sources"]:
            if s in ("DIRECT", "PUBLIC"):
                source_counts[s] += 1
            elif s == "SYSTEM":
                source_counts["SYSTEM"] += 1
            else:
                source_counts["ROLE"] += 1

    # Render markdown
    lines = []
    lines.append("# Oracle 可使用 TABLE/VIEW 清單(DWH)")
    lines.append("")
    lines.append(f"**產生時間**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    lines.append(f"**使用者**: {user}")
    lines.append(f"**Schema**: {owner}")
    lines.append("")
    lines.append("## 摘要")
    lines.append("")
    lines.append(f"- 可使用物件總數: {len(info):,}")
    lines.append(f"- TABLE: {type_counts.get('TABLE', 0):,}")
    lines.append(f"- VIEW: {type_counts.get('VIEW', 0):,}")
    lines.append(
        "- 來源 (去重後物件數): "
        f"DIRECT {source_counts.get('DIRECT', 0):,}, "
        f"PUBLIC {source_counts.get('PUBLIC', 0):,}, "
        f"ROLE {source_counts.get('ROLE', 0):,}, "
        f"SYSTEM {source_counts.get('SYSTEM', 0):,}"
    )
    lines.append("")
    lines.append("## 物件清單")
    lines.append("")
    lines.append("| 物件 | 類型 | 權限 | 授權來源 |")
    lines.append("|------|------|------|----------|")

    for name, otype in sorted(info.keys()):
        data = info[(name, otype)]
        obj = f"{owner}.{name}"
        privs = ", ".join(sorted(data["privs"]))
        # Collapse every concrete role name to the generic "ROLE" label.
        sources = ", ".join(
            sorted(
                "ROLE" if s not in ("DIRECT", "PUBLIC", "SYSTEM") else s
                for s in data["sources"]
            )
        )
        lines.append(f"| `{obj}` | {otype} | {privs} | {sources} |")

    output_path.write_text("\n".join(lines), encoding="utf-8")

    cur.close()
    conn.close()
    print(f"Wrote {output_path} ({len(info)} objects)")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())