# Commit context (from the change that last touched this file):
# - Fix 6 Playwright strict-mode violations in query tool E2E (v-show dual-tab selectors)
# - Update 5 resource history E2E tests for POST /query API restructure
# - Add 22 trace pipeline E2E tests: admission control, async job queue, NDJSON streaming
# - Fix 3 health endpoint tests: add circuit breaker + route cache mocks
# - Fix WIP integration tests: load .env before DB module import for --run-integration
# - Remove 4 dead migration test files (20 permanently-skipped tests)
# Final: 1101 unit + 10 integration + 121 E2E + 23 stress = 1255 passed, 0 failed
# -*- coding: utf-8 -*-
"""End-to-end tests for resource history analysis page.

These tests simulate real user workflows through the resource history analysis feature.
Run with: pytest tests/e2e/test_resource_history_e2e.py -v --run-integration
"""

# Standard library imports, grouped and sorted per PEP 8.
import json
import os
import sys
from datetime import datetime
from unittest.mock import patch

# Third-party imports.
import pandas as pd
import pytest

# Make the application package importable when tests run from the repo root.
# Must happen before the mes_dashboard imports below.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src'))

import mes_dashboard.core.database as db
from mes_dashboard.app import create_app
@pytest.fixture
def app():
    """Build a fresh Flask application configured for testing."""
    # Reset the module-level engine cache so each test gets a clean handle.
    db._ENGINE = None
    test_app = create_app('testing')
    test_app.config['TESTING'] = True
    return test_app
@pytest.fixture
def client(app):
    """Provide a Werkzeug test client bound to the testing application."""
    test_client = app.test_client()
    return test_client
class TestResourceHistoryPageAccess:
    """E2E tests for page access and navigation."""

    @staticmethod
    def _load_resource_history_entry(client):
        """Fetch the resource-history entry point, following the SPA redirect.

        Returns a ``(response, spa_enabled)`` pair so each test can branch on
        whether the portal SPA shell is active.
        """
        spa_mode = bool(client.application.config.get("PORTAL_SPA_ENABLED", False))
        entry = client.get('/resource-history', follow_redirects=False)
        if not spa_mode:
            return entry, False
        # SPA mode: the legacy route must redirect into the portal shell.
        assert entry.status_code == 302
        assert entry.location.endswith('/portal-shell/resource-history')
        shell = client.get('/portal-shell/resource-history')
        assert shell.status_code == 200
        return shell, True

    def test_page_loads_successfully(self, client):
        """Resource history page should load without errors."""
        response, spa_mode = self._load_resource_history_entry(client)
        assert response.status_code == 200
        body = response.data.decode('utf-8')
        expected = '/static/dist/portal-shell.js' if spa_mode else '設備歷史績效'
        assert expected in body

    def test_page_bootstrap_container_exists(self, client):
        """Resource history page should expose the Vue mount container."""
        response, _ = self._load_resource_history_entry(client)
        body = response.data.decode('utf-8')

        # Accept either quoting style for the mount-point id attribute.
        assert any(marker in body for marker in ("id='app'", 'id="app"'))

    def test_page_references_vite_module(self, client):
        """Resource history page should load the Vite module bundle."""
        response, spa_mode = self._load_resource_history_entry(client)
        body = response.data.decode('utf-8')

        if spa_mode:
            assert '/static/dist/portal-shell.js' in body
        else:
            assert '/static/dist/resource-history.js' in body
            assert 'type="module"' in body
class TestResourceHistoryAPIWorkflow:
    """E2E tests for API workflows.

    Service-layer helpers are patched so the Flask routes run end-to-end
    without a live database; only the HTTP contract is exercised here.
    """

    @patch('mes_dashboard.services.resource_history_service.get_filter_options')
    def test_filter_options_workflow(self, mock_get_filter_options, client):
        """Filter options should be loadable."""
        mock_get_filter_options.return_value = {
            'workcenter_groups': [
                {'name': '焊接_DB', 'sequence': 1},
                {'name': '焊接_WB', 'sequence': 2},
                {'name': '成型', 'sequence': 4},
            ],
            'families': ['FAM001', 'FAM002'],
        }

        response = client.get('/api/resource/history/options')

        assert response.status_code == 200
        data = json.loads(response.data)
        assert data['success'] is True
        assert 'workcenter_groups' in data['data']
        assert 'families' in data['data']

    # NOTE: stacked @patch decorators apply bottom-up, so mock arguments
    # arrive in reverse order: the last decorator maps to the first parameter.
    @patch('mes_dashboard.services.resource_dataset_cache._get_workcenter_mapping')
    @patch('mes_dashboard.services.resource_dataset_cache._get_resource_lookup')
    @patch('mes_dashboard.services.resource_dataset_cache.read_sql_df')
    @patch('mes_dashboard.services.resource_dataset_cache._get_filtered_resources_and_lookup')
    def test_complete_query_workflow(self, mock_res_lookup, mock_read_sql,
                                     mock_get_lookup, mock_get_wc, client):
        """Complete query workflow via POST /query should return summary + detail."""
        # Two resources in different workcenters so the comparison section
        # has two groups to aggregate.
        resources = [
            {
                'RESOURCEID': 'RES001',
                'WORKCENTERNAME': '焊接_DB',
                'RESOURCEFAMILYNAME': 'FAM001',
                'RESOURCENAME': 'RES001',
            },
            {
                'RESOURCEID': 'RES002',
                'WORKCENTERNAME': '成型',
                'RESOURCEFAMILYNAME': 'FAM002',
                'RESOURCENAME': 'RES002',
            },
        ]
        resource_lookup = {r['RESOURCEID']: r for r in resources}
        # The cache helper returns a (resources, lookup, SQL predicate) triple
        # — presumably the predicate scopes the base-facts query; confirm
        # against resource_dataset_cache.
        mock_res_lookup.return_value = (
            resources,
            resource_lookup,
            "HISTORYID IN ('RES001', 'RES002')",
        )
        mock_get_lookup.return_value = resource_lookup
        mock_get_wc.return_value = {
            '焊接_DB': {'group': '焊接_DB', 'sequence': 1},
            '成型': {'group': '成型', 'sequence': 4},
        }

        # Base facts DataFrame (per-resource × per-day, single Oracle query)
        base_df = pd.DataFrame([
            {'HISTORYID': 'RES001', 'DATA_DATE': datetime(2024, 1, 1),
             'PRD_HOURS': 4000, 'SBY_HOURS': 500, 'UDT_HOURS': 250,
             'SDT_HOURS': 150, 'EGT_HOURS': 100, 'NST_HOURS': 500, 'TOTAL_HOURS': 5500},
            {'HISTORYID': 'RES002', 'DATA_DATE': datetime(2024, 1, 1),
             'PRD_HOURS': 4000, 'SBY_HOURS': 500, 'UDT_HOURS': 250,
             'SDT_HOURS': 150, 'EGT_HOURS': 100, 'NST_HOURS': 500, 'TOTAL_HOURS': 5500},
        ])
        mock_read_sql.return_value = base_df

        response = client.post(
            '/api/resource/history/query',
            json={
                'start_date': '2024-01-01',
                'end_date': '2024-01-07',
                'granularity': 'day',
            },
        )

        assert response.status_code == 200
        data = json.loads(response.data)
        assert data['success'] is True
        assert 'query_id' in data

        # Verify KPI (derived from base_df)
        # Total PRD=8000, SBY=1000, UDT=500, SDT=300, EGT=200
        # OU% = 8000/(8000+1000+500+300+200)*100 = 80.0
        assert data['summary']['kpi']['ou_pct'] == 80.0
        # Availability% = (8000+1000+200)/(8000+1000+200+300+500+1000)*100 = 83.6
        assert data['summary']['kpi']['availability_pct'] == 83.6
        assert data['summary']['kpi']['machine_count'] == 2

        # Verify trend (one period since both rows are same date)
        assert len(data['summary']['trend']) >= 1
        assert 'availability_pct' in data['summary']['trend'][0]

        # Verify heatmap
        assert len(data['summary']['heatmap']) >= 1

        # Verify comparison
        assert len(data['summary']['workcenter_comparison']) == 2

        # Verify detail
        assert data['detail']['total'] == 2
        assert len(data['detail']['data']) == 2

    @patch('mes_dashboard.services.resource_dataset_cache._get_workcenter_mapping')
    @patch('mes_dashboard.services.resource_dataset_cache._get_resource_lookup')
    @patch('mes_dashboard.services.resource_dataset_cache.read_sql_df')
    @patch('mes_dashboard.services.resource_dataset_cache._get_filtered_resources_and_lookup')
    def test_detail_query_workflow(self, mock_res_lookup, mock_read_sql,
                                   mock_get_lookup, mock_get_wc, client):
        """Detail query via POST /query should return hierarchical data."""
        # Both resources share a workcenter and family here, exercising the
        # workcenter → family → resource hierarchy with siblings.
        resources = [
            {
                'RESOURCEID': 'RES001',
                'WORKCENTERNAME': '焊接_DB',
                'RESOURCEFAMILYNAME': 'FAM001',
                'RESOURCENAME': 'RES001',
            },
            {
                'RESOURCEID': 'RES002',
                'WORKCENTERNAME': '焊接_DB',
                'RESOURCEFAMILYNAME': 'FAM001',
                'RESOURCENAME': 'RES002',
            },
        ]
        resource_lookup = {r['RESOURCEID']: r for r in resources}
        mock_res_lookup.return_value = (
            resources,
            resource_lookup,
            "HISTORYID IN ('RES001', 'RES002')",
        )
        mock_get_lookup.return_value = resource_lookup
        mock_get_wc.return_value = {
            '焊接_DB': {'group': '焊接_DB', 'sequence': 1},
        }

        base_df = pd.DataFrame([
            {'HISTORYID': 'RES001', 'DATA_DATE': datetime(2024, 1, 1),
             'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2,
             'NST_HOURS': 10, 'TOTAL_HOURS': 110},
            {'HISTORYID': 'RES002', 'DATA_DATE': datetime(2024, 1, 1),
             'PRD_HOURS': 75, 'SBY_HOURS': 15, 'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2,
             'NST_HOURS': 10, 'TOTAL_HOURS': 110},
        ])
        mock_read_sql.return_value = base_df

        # No 'granularity' supplied — exercises the endpoint's default.
        response = client.post(
            '/api/resource/history/query',
            json={
                'start_date': '2024-01-01',
                'end_date': '2024-01-07',
            },
        )

        assert response.status_code == 200
        data = json.loads(response.data)
        assert data['success'] is True
        assert data['detail']['total'] == 2
        assert len(data['detail']['data']) == 2
        assert data['detail']['truncated'] is False

        # Verify data structure
        first_row = data['detail']['data'][0]
        assert 'workcenter' in first_row
        assert 'family' in first_row
        assert 'resource' in first_row
        assert 'ou_pct' in first_row
        assert 'availability_pct' in first_row
        assert 'prd_hours' in first_row
        assert 'prd_pct' in first_row

    # Export goes through resource_history_service (not the dataset cache),
    # hence the different patch targets.
    @patch('mes_dashboard.services.resource_history_service._get_filtered_resources')
    @patch('mes_dashboard.services.resource_history_service.read_sql_df')
    def test_export_workflow(self, mock_read_sql, mock_resources, client):
        """Export workflow should return valid CSV."""
        mock_resources.return_value = [
            {
                'RESOURCEID': 'RES001',
                'WORKCENTERNAME': '焊接_DB',
                'RESOURCEFAMILYNAME': 'FAM001',
                'RESOURCENAME': 'RES001',
            }
        ]
        mock_read_sql.return_value = pd.DataFrame([
            {'HISTORYID': 'RES001',
             'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2,
             'NST_HOURS': 10, 'TOTAL_HOURS': 110},
        ])

        response = client.get(
            '/api/resource/history/export'
            '?start_date=2024-01-01'
            '&end_date=2024-01-07'
        )

        assert response.status_code == 200
        assert 'text/csv' in response.content_type

        # utf-8-sig strips the BOM the export emits for Excel compatibility
        # — TODO confirm the BOM is intentional in the service.
        content = response.data.decode('utf-8-sig')
        lines = content.strip().split('\n')

        # Should have header + data rows
        assert len(lines) >= 2

        # Verify header
        header = lines[0]
        assert '站點' in header
        assert 'OU%' in header
        assert 'Availability%' in header
class TestResourceHistoryValidation:
    """E2E tests for input validation."""

    def test_date_range_validation(self, client):
        """Inverted date range (end_date < start_date) should be rejected."""
        response = client.post(
            '/api/resource/history/query',
            json={
                'start_date': '2026-01-02',
                'end_date': '2024-01-01',
            },
        )

        assert response.status_code == 400
        data = json.loads(response.data)
        assert data['success'] is False

    def test_missing_required_params(self, client):
        """Missing required parameters should return error."""
        response = client.post(
            '/api/resource/history/query',
            json={},
        )

        assert response.status_code == 400
        data = json.loads(response.data)
        assert data['success'] is False

    # NOTE: stacked @patch decorators apply bottom-up — the last decorator
    # supplies the first mock parameter.
    @patch('mes_dashboard.services.resource_dataset_cache._get_workcenter_mapping')
    @patch('mes_dashboard.services.resource_dataset_cache._get_resource_lookup')
    @patch('mes_dashboard.services.resource_dataset_cache.read_sql_df')
    @patch('mes_dashboard.services.resource_dataset_cache._get_filtered_resources_and_lookup')
    def test_granularity_options(self, mock_res_lookup, mock_read_sql,
                                 mock_get_lookup, mock_get_wc, client):
        """Different granularity options should work via POST /query."""
        # Single resource is enough: this test only checks that each
        # granularity value is accepted, not the aggregation output.
        resources = [{
            'RESOURCEID': 'RES001',
            'WORKCENTERNAME': '焊接_DB',
            'RESOURCEFAMILYNAME': 'FAM001',
            'RESOURCENAME': 'RES001',
        }]
        resource_lookup = {r['RESOURCEID']: r for r in resources}
        mock_res_lookup.return_value = (
            resources,
            resource_lookup,
            "HISTORYID IN ('RES001')",
        )
        mock_get_lookup.return_value = resource_lookup
        mock_get_wc.return_value = {
            '焊接_DB': {'group': '焊接_DB', 'sequence': 1},
        }

        base_df = pd.DataFrame([{
            'HISTORYID': 'RES001',
            'DATA_DATE': datetime(2024, 1, 1),
            'PRD_HOURS': 100, 'SBY_HOURS': 10, 'UDT_HOURS': 5,
            'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10,
            'TOTAL_HOURS': 130,
        }])
        mock_read_sql.return_value = base_df

        for granularity in ['day', 'week', 'month', 'year']:
            response = client.post(
                '/api/resource/history/query',
                json={
                    'start_date': '2024-01-01',
                    'end_date': '2024-01-31',
                    'granularity': granularity,
                },
            )

            assert response.status_code == 200, f"Failed for granularity={granularity}"
class TestResourceHistoryNavigation:
    """E2E tests for navigation integration."""

    def test_portal_includes_history_tab(self, client):
        """Portal should include resource history tab."""
        spa_mode = bool(client.application.config.get("PORTAL_SPA_ENABLED", False))
        if not spa_mode:
            # Legacy portal: the tab is rendered server-side on the index page.
            home = client.get('/')
            body = home.data.decode('utf-8')
            assert '設備歷史績效' in body
            assert 'resourceHistoryFrame' in body
            return
        # SPA portal: the tab must appear in the navigation contract.
        nav = client.get('/api/portal/navigation')
        assert nav.status_code == 200
        payload = nav.get_json()
        pages = []
        for drawer in payload.get("drawers", []):
            pages.extend(drawer.get("pages", []))
        matches = [p for p in pages if p.get("route") == "/resource-history"]
        assert matches, "resource-history route missing from portal navigation contract"
        assert matches[0].get("name") == "設備歷史績效"
if __name__ == '__main__':
    # Propagate pytest's exit status so shell/CI callers see failures;
    # the original discarded the return value and always exited 0.
    raise SystemExit(pytest.main([__file__, '-v']))