chore: reinitialize project with vite architecture
This commit is contained in:
77
tests/conftest.py
Normal file
77
tests/conftest.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Pytest configuration and fixtures for MES Dashboard tests."""
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add the src directory to Python path
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def app():
    """Build a fresh Flask application configured for the test suite."""
    # Drop any cached SQLAlchemy engine so each test starts clean.
    db._ENGINE = None
    application = create_app('testing')
    application.config['TESTING'] = True
    return application
|
||||
|
||||
|
||||
@pytest.fixture
def client(app):
    """Flask test client bound to the `app` fixture."""
    test_client = app.test_client()
    return test_client
|
||||
|
||||
|
||||
@pytest.fixture
def runner(app):
    """CLI runner for exercising the app's Flask commands."""
    cli_runner = app.test_cli_runner()
    return cli_runner
|
||||
|
||||
|
||||
def pytest_configure(config):
    """Register the suite's custom markers with pytest."""
    marker_specs = (
        "integration: mark test as integration test (requires database)",
        "e2e: mark test as end-to-end test (requires running server)",
        "redis: mark test as requiring Redis connection",
    )
    # Registering markers here silences PytestUnknownMarkWarning.
    for spec in marker_specs:
        config.addinivalue_line("markers", spec)
|
||||
|
||||
|
||||
def pytest_addoption(parser):
    """Register the suite's custom command line flags.

    Both flags are opt-in booleans that gate slow test categories
    (see pytest_collection_modifyitems).
    """
    option_specs = [
        ("--run-integration",
         "Run integration tests that require database connection"),
        ("--run-e2e",
         "Run end-to-end tests that require running server"),
    ]
    for flag, help_text in option_specs:
        parser.addoption(flag, action="store_true", default=False, help=help_text)
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(config, items):
    """Auto-skip integration/e2e tests unless their opt-in flag was given."""
    # (marker keyword, flag enabled?, skip marker) — integration gate is
    # checked first for each item, matching the option registration order.
    gates = [
        ("integration",
         config.getoption("--run-integration"),
         pytest.mark.skip(reason="need --run-integration option to run")),
        ("e2e",
         config.getoption("--run-e2e"),
         pytest.mark.skip(reason="need --run-e2e option to run")),
    ]
    for item in items:
        for keyword, enabled, skip_marker in gates:
            if keyword in item.keywords and not enabled:
                item.add_marker(skip_marker)
|
||||
50
tests/e2e/conftest.py
Normal file
50
tests/e2e/conftest.py
Normal file
@@ -0,0 +1,50 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Pytest configuration for Playwright E2E tests."""
|
||||
|
||||
import pytest
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Add src to path
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src'))
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def app_server() -> str:
    """Resolve the base URL targeted by the E2E suite.

    Honors the E2E_BASE_URL environment variable, falling back to the
    local production server when it is unset.
    """
    default_url = 'http://127.0.0.1:8080'
    return os.environ.get('E2E_BASE_URL', default_url)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def browser_context_args(browser_context_args):
    """Extend pytest-playwright's default browser context settings."""
    # Fixed viewport keeps layout assertions stable; zh-TW matches the
    # UI strings asserted by the tests.
    overrides = {
        "viewport": {"width": 1280, "height": 720},
        "locale": "zh-TW",
    }
    return {**browser_context_args, **overrides}
|
||||
|
||||
|
||||
def pytest_configure(config):
    """Register the E2E suite's custom markers."""
    marker_specs = (
        "e2e: mark test as end-to-end test (requires running server)",
        "redis: mark test as requiring Redis connection",
    )
    for spec in marker_specs:
        config.addinivalue_line("markers", spec)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def api_base_url(app_server):
    """Base URL for the JSON API under the E2E server."""
    return "{}/api".format(app_server)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def health_url(app_server):
    """URL of the server's health-check endpoint."""
    return app_server + "/health"
|
||||
350
tests/e2e/test_admin_auth_e2e.py
Normal file
350
tests/e2e/test_admin_auth_e2e.py
Normal file
@@ -0,0 +1,350 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""End-to-end tests for admin authentication flow.
|
||||
|
||||
These tests simulate real user workflows through the admin authentication system.
|
||||
Run with: pytest tests/e2e/test_admin_auth_e2e.py -v --run-integration
|
||||
"""
|
||||
|
||||
import json
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src'))
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
from mes_dashboard.services import page_registry
|
||||
|
||||
|
||||
@pytest.fixture
def temp_page_status(tmp_path):
    """Seed a temporary page_status.json and return its path.

    Three pages start 'released' and two start 'dev' so both access
    outcomes are exercised by the tests below.
    """
    page_rows = [
        ("/", "首頁", "released"),
        ("/wip-overview", "WIP 即時概況", "released"),
        ("/wip-detail", "WIP 明細", "released"),
        ("/tables", "表格總覽", "dev"),
        ("/resource", "機台狀態", "dev"),
    ]
    seed = {
        "pages": [
            {"route": route, "name": name, "status": status}
            for route, name, status in page_rows
        ],
        "api_public": True,
    }
    target = tmp_path / "page_status.json"
    # ensure_ascii=False keeps the Chinese page names human-readable.
    target.write_text(json.dumps(seed, ensure_ascii=False), encoding="utf-8")
    return target
|
||||
|
||||
|
||||
@pytest.fixture
def app(temp_page_status):
    """Create application for testing.

    Points the page registry at the temporary status file, clears the
    cached DB engine, then restores the registry globals on teardown.
    """
    # Reset the module-level engine so the app builds a fresh one.
    db._ENGINE = None

    # Mock page registry
    # Capture the real module globals first; the teardown below must
    # restore them in the same pairing to avoid leaking into other tests.
    original_data_file = page_registry.DATA_FILE
    original_cache = page_registry._cache
    page_registry.DATA_FILE = temp_page_status
    page_registry._cache = None

    app = create_app('testing')
    app.config['TESTING'] = True
    # CSRF disabled so login forms can be posted without tokens.
    app.config['WTF_CSRF_ENABLED'] = False

    yield app

    # Teardown: put the page registry back exactly as it was found.
    page_registry.DATA_FILE = original_data_file
    page_registry._cache = original_cache
|
||||
|
||||
|
||||
@pytest.fixture
def client(app):
    """HTTP test client for the patched application."""
    return app.test_client()
|
||||
|
||||
|
||||
def mock_ldap_success(mail="ymirliu@panjit.com.tw"):
    """Build a MagicMock mimicking a successful LDAP auth HTTP response.

    The `mail` parameter controls the authenticated user's email so
    tests can simulate admin vs non-admin accounts.
    """
    fake_user = {
        "username": "92367",
        "displayName": "Test Admin",
        "mail": mail,
        "department": "Test Department",
    }
    response = MagicMock()
    response.json.return_value = {"success": True, "user": fake_user}
    return response
|
||||
|
||||
|
||||
class TestFullLoginLogoutFlow:
    """E2E tests for complete login/logout flow.

    LDAP is never contacted: requests.post inside auth_service is
    patched to return a canned successful response.
    """

    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_complete_admin_login_workflow(self, mock_post, client):
        """Test complete admin login workflow."""
        mock_post.return_value = mock_ldap_success()

        # 1. Access portal - should see login link
        response = client.get("/")
        assert response.status_code == 200
        content = response.data.decode("utf-8")
        assert "管理員登入" in content

        # 2. Go to login page
        response = client.get("/admin/login")
        assert response.status_code == 200

        # 3. Submit login form
        response = client.post("/admin/login", data={
            "username": "92367",
            "password": "password123"
        }, follow_redirects=True)

        assert response.status_code == 200
        content = response.data.decode("utf-8")
        # Should see admin name and logout option
        assert "Test Admin" in content or "登出" in content

        # 4. Verify session has admin
        with client.session_transaction() as sess:
            assert "admin" in sess
            assert sess["admin"]["mail"] == "ymirliu@panjit.com.tw"

        # 5. Access admin pages
        response = client.get("/admin/pages")
        assert response.status_code == 200

        # 6. Logout
        response = client.get("/admin/logout", follow_redirects=True)
        assert response.status_code == 200

        # 7. Verify logged out
        with client.session_transaction() as sess:
            assert "admin" not in sess

        # 8. Admin pages should redirect now (302 back to login)
        response = client.get("/admin/pages", follow_redirects=False)
        assert response.status_code == 302
|
||||
|
||||
|
||||
class TestPageAccessControlFlow:
    """E2E tests for page access control flow.

    Relies on temp_page_status seeding /wip-overview as 'released'
    and /tables as 'dev'.
    """

    def test_non_admin_cannot_access_dev_pages(self, client, temp_page_status):
        """Test non-admin users cannot access dev pages."""
        # 1. Access released page - should work
        response = client.get("/wip-overview")
        assert response.status_code != 403

        # 2. Access dev page - should get 403
        response = client.get("/tables")
        assert response.status_code == 403
        content = response.data.decode("utf-8")
        assert "開發中" in content or "403" in content

    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_admin_can_access_all_pages(self, mock_post, client, temp_page_status):
        """Test admin users can access all pages."""
        mock_post.return_value = mock_ldap_success()

        # 1. Login as admin
        client.post("/admin/login", data={
            "username": "92367",
            "password": "password123"
        })

        # 2. Access released page - should work
        response = client.get("/wip-overview")
        assert response.status_code != 403

        # 3. Access dev page - should work for admin
        response = client.get("/tables")
        assert response.status_code != 403
|
||||
|
||||
|
||||
class TestPageManagementFlow:
    """E2E tests for page management flow."""

    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_admin_can_change_page_status(self, mock_post, client, temp_page_status):
        """Test admin can change page status via management interface."""
        mock_post.return_value = mock_ldap_success()

        # 1. Login as admin
        client.post("/admin/login", data={
            "username": "92367",
            "password": "password123"
        })

        # 2. Get current pages list
        response = client.get("/admin/api/pages")
        assert response.status_code == 200
        data = json.loads(response.data)
        assert data["success"] is True

        # 3. Change /wip-overview from released to dev
        response = client.put(
            "/admin/api/pages/wip-overview",
            data=json.dumps({"status": "dev"}),
            content_type="application/json"
        )
        assert response.status_code == 200

        # 4. Verify change persisted (drop the in-module cache so the
        #    registry re-reads the status file from disk)
        page_registry._cache = None
        status = page_registry.get_page_status("/wip-overview")
        assert status == "dev"

        # 5. Logout
        client.get("/admin/logout")

        # 6. Now non-admin should get 403 on /wip-overview
        response = client.get("/wip-overview")
        assert response.status_code == 403

    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_release_dev_page_makes_it_public(self, mock_post, client, temp_page_status):
        """Test releasing a dev page makes it publicly accessible."""
        mock_post.return_value = mock_ldap_success()

        # 1. Verify /tables is currently dev (403 for non-admin)
        response = client.get("/tables")
        assert response.status_code == 403

        # 2. Login as admin
        client.post("/admin/login", data={
            "username": "92367",
            "password": "password123"
        })

        # 3. Release the page
        response = client.put(
            "/admin/api/pages/tables",
            data=json.dumps({"status": "released"}),
            content_type="application/json"
        )
        assert response.status_code == 200

        # 4. Logout
        client.get("/admin/logout")

        # 5. Clear cache and verify non-admin can access
        page_registry._cache = None
        response = client.get("/tables")
        assert response.status_code != 403
|
||||
|
||||
|
||||
class TestPortalDynamicTabs:
    """E2E tests for dynamic portal tabs based on page status."""

    def test_portal_hides_dev_tabs_for_non_admin(self, client, temp_page_status):
        """Test portal hides dev page tabs for non-admin users."""
        response = client.get("/")
        assert response.status_code == 200
        content = response.data.decode("utf-8")

        # Released pages should show
        assert "WIP 即時概況" in content

        # Dev pages should NOT show (tables and resource are dev)
        # Note: This depends on the can_view_page implementation in portal.html
        # — no negative assertion is made here, only the positive one above.

    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_portal_shows_all_tabs_for_admin(self, mock_post, client, temp_page_status):
        """Test portal shows all tabs for admin users."""
        mock_post.return_value = mock_ldap_success()

        # Login as admin
        client.post("/admin/login", data={
            "username": "92367",
            "password": "password123"
        })

        response = client.get("/")
        assert response.status_code == 200
        content = response.data.decode("utf-8")

        # Admin should see all pages
        assert "WIP 即時概況" in content
|
||||
|
||||
|
||||
class TestSessionPersistence:
    """E2E tests for session persistence."""

    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_session_persists_across_requests(self, mock_post, client):
        """Test admin session persists across multiple requests."""
        mock_post.return_value = mock_ldap_success()

        # Login
        client.post("/admin/login", data={
            "username": "92367",
            "password": "password123"
        })

        # Make multiple requests; each must still be authorized
        for _ in range(5):
            response = client.get("/admin/pages")
            assert response.status_code == 200

        # Session should still be valid
        with client.session_transaction() as sess:
            assert "admin" in sess
|
||||
|
||||
|
||||
class TestSecurityScenarios:
    """E2E tests for security scenarios."""

    def test_cannot_access_admin_api_without_login(self, client):
        """Test admin APIs are protected."""
        # Try to get pages without login — expect a redirect to login
        response = client.get("/admin/api/pages", follow_redirects=False)
        assert response.status_code == 302

        # Try to update page without login
        response = client.put(
            "/admin/api/pages/wip-overview",
            data=json.dumps({"status": "dev"}),
            content_type="application/json",
            follow_redirects=False
        )
        assert response.status_code == 302

    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_non_admin_user_cannot_login(self, mock_post, client):
        """Test non-admin user cannot access admin features."""
        # Mock LDAP success but with non-admin email — authentication
        # succeeds at the LDAP layer, authorization must still fail.
        mock_response = MagicMock()
        mock_response.json.return_value = {
            "success": True,
            "user": {
                "username": "99999",
                "displayName": "Regular User",
                "mail": "regular@panjit.com.tw",
                "department": "Test"
            }
        }
        mock_post.return_value = mock_response

        # Try to login
        response = client.post("/admin/login", data={
            "username": "99999",
            "password": "password123"
        })

        # Should fail (show error, not redirect)
        assert response.status_code == 200
        content = response.data.decode("utf-8")
        assert "管理員" in content or "error" in content.lower()

        # Should NOT have admin session
        with client.session_transaction() as sess:
            assert "admin" not in sess
|
||||
|
||||
|
||||
# Allow running this module directly: python test_admin_auth_e2e.py
if __name__ == "__main__":
    pytest.main([__file__, "-v"])
|
||||
281
tests/e2e/test_cache_e2e.py
Normal file
281
tests/e2e/test_cache_e2e.py
Normal file
@@ -0,0 +1,281 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""End-to-end tests for Redis cache functionality.
|
||||
|
||||
These tests require a running server with Redis enabled.
|
||||
Run with: pytest tests/e2e/test_cache_e2e.py -v
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import time
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestHealthEndpointE2E:
    """E2E tests for /health endpoint.

    These hit a live server via the health_url fixture; they only
    assert response *shape*, not specific service health.
    """

    def test_health_endpoint_accessible(self, health_url):
        """Test health endpoint is accessible."""
        response = requests.get(health_url, timeout=10)

        # 503 is acceptable: the endpoint reports degraded services with
        # that status while still returning the JSON body asserted below.
        assert response.status_code in [200, 503]
        data = response.json()
        assert 'status' in data
        assert 'services' in data
        assert 'cache' in data

    def test_health_shows_database_status(self, health_url):
        """Test health endpoint shows database status."""
        response = requests.get(health_url, timeout=10)
        data = response.json()

        assert 'database' in data['services']
        assert data['services']['database'] in ['ok', 'error']

    def test_health_shows_redis_status(self, health_url):
        """Test health endpoint shows Redis status."""
        response = requests.get(health_url, timeout=10)
        data = response.json()

        assert 'redis' in data['services']
        assert data['services']['redis'] in ['ok', 'error', 'disabled']

    def test_health_shows_cache_info(self, health_url):
        """Test health endpoint shows cache information."""
        response = requests.get(health_url, timeout=10)
        data = response.json()

        assert 'cache' in data
        assert 'enabled' in data['cache']
        assert 'sys_date' in data['cache']
        assert 'updated_at' in data['cache']
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestCachedWipApiE2E:
    """E2E tests for cached WIP API endpoints."""

    def _unwrap(self, resp_json):
        """Unwrap API response to get data.

        NOTE(review): this helper is duplicated in several classes in
        this module — consider hoisting to a shared module-level helper.
        """
        if isinstance(resp_json, dict) and 'data' in resp_json:
            return resp_json['data']
        return resp_json

    def test_wip_summary_returns_data(self, api_base_url):
        """Test WIP summary endpoint returns valid data."""
        response = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30)

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert 'totalLots' in data
        assert 'totalQtyPcs' in data
        assert 'byWipStatus' in data
        assert 'dataUpdateDate' in data

    def test_wip_summary_status_breakdown(self, api_base_url):
        """Test WIP summary contains correct status breakdown."""
        response = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30)
        data = self._unwrap(response.json())

        by_status = data['byWipStatus']
        assert 'run' in by_status
        assert 'queue' in by_status
        assert 'hold' in by_status
        assert 'qualityHold' in by_status
        assert 'nonQualityHold' in by_status

        # Each status should have lots and qtyPcs
        for status in ['run', 'queue', 'hold']:
            assert 'lots' in by_status[status]
            assert 'qtyPcs' in by_status[status]

    def test_wip_matrix_returns_data(self, api_base_url):
        """Test WIP matrix endpoint returns valid data."""
        response = requests.get(f"{api_base_url}/wip/overview/matrix", timeout=30)

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert 'workcenters' in data
        assert 'packages' in data
        assert 'matrix' in data
        assert 'workcenter_totals' in data
        assert 'package_totals' in data
        assert 'grand_total' in data

    def test_wip_workcenters_returns_list(self, api_base_url):
        """Test workcenters endpoint returns list."""
        response = requests.get(f"{api_base_url}/wip/meta/workcenters", timeout=30)

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert isinstance(data, list)

        # Item shape is only checked when the live DB has rows.
        if len(data) > 0:
            assert 'name' in data[0]
            assert 'lot_count' in data[0]

    def test_wip_packages_returns_list(self, api_base_url):
        """Test packages endpoint returns list."""
        response = requests.get(f"{api_base_url}/wip/meta/packages", timeout=30)

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert isinstance(data, list)

        if len(data) > 0:
            assert 'name' in data[0]
            assert 'lot_count' in data[0]

    def test_wip_hold_summary_returns_data(self, api_base_url):
        """Test hold summary endpoint returns valid data."""
        response = requests.get(f"{api_base_url}/wip/overview/hold", timeout=30)

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert 'items' in data
        assert isinstance(data['items'], list)
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestCachePerformanceE2E:
    """E2E tests for cache performance."""

    def _unwrap(self, resp_json):
        """Unwrap API response to get data."""
        if isinstance(resp_json, dict) and 'data' in resp_json:
            return resp_json['data']
        return resp_json

    def test_cached_response_is_fast(self, api_base_url):
        """Test cached responses are faster than 2 seconds."""
        # First request may load cache (cold path, result discarded)
        requests.get(f"{api_base_url}/wip/overview/summary", timeout=30)

        # Second request should be from cache
        start = time.time()
        response = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30)
        elapsed = time.time() - start

        assert response.status_code == 200
        # Cached response should be fast (< 2 seconds)
        assert elapsed < 2.0, f"Response took {elapsed:.2f}s, expected < 2s"

    def test_multiple_endpoints_consistent(self, api_base_url):
        """Test multiple endpoints return consistent data."""
        # Get summary
        summary_resp = requests.get(f"{api_base_url}/wip/overview/summary", timeout=30)
        summary = self._unwrap(summary_resp.json())

        # Get matrix
        matrix_resp = requests.get(f"{api_base_url}/wip/overview/matrix", timeout=30)
        matrix = self._unwrap(matrix_resp.json())

        # Grand total from matrix should match total from summary (approximately)
        # There may be slight differences due to filtering, so the check
        # is deliberately loose.
        if summary['totalLots'] > 0 and matrix['grand_total'] > 0:
            assert summary['totalQtyPcs'] > 0 or matrix['grand_total'] > 0
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestSearchEndpointsE2E:
    """E2E tests for search endpoints with cache."""

    def _unwrap(self, resp_json):
        """Unwrap API response to get data.

        Unlike the other _unwrap helpers in this module, this variant
        also flattens the search envelope {'items': [...]}.
        """
        if isinstance(resp_json, dict) and 'data' in resp_json:
            data = resp_json['data']
            # Search returns {'items': [...]}
            if isinstance(data, dict) and 'items' in data:
                return data['items']
            return data
        return resp_json

    def test_search_workorders(self, api_base_url):
        """Test workorder search returns results."""
        # Use a common pattern that should exist
        response = requests.get(
            f"{api_base_url}/wip/meta/search",
            params={'type': 'workorder', 'q': 'WO', 'limit': 10},
            timeout=30
        )

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert isinstance(data, list)

    def test_search_lotids(self, api_base_url):
        """Test lot ID search returns results."""
        response = requests.get(
            f"{api_base_url}/wip/meta/search",
            params={'type': 'lotid', 'q': 'LOT', 'limit': 10},
            timeout=30
        )

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert isinstance(data, list)

    def test_search_with_short_query_returns_empty(self, api_base_url):
        """Test search with short query returns empty list."""
        response = requests.get(
            f"{api_base_url}/wip/meta/search",
            params={'type': 'workorder', 'q': 'W'},  # Too short
            timeout=30
        )

        assert response.status_code == 200
        data = self._unwrap(response.json())
        assert data == []
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestWipDetailE2E:
    """E2E tests for WIP detail endpoint with cache."""

    def _unwrap(self, resp_json):
        """Unwrap API response to get data."""
        if isinstance(resp_json, dict) and 'data' in resp_json:
            return resp_json['data']
        return resp_json

    def test_wip_detail_with_workcenter(self, api_base_url):
        """Test WIP detail endpoint for a workcenter."""
        # First get list of workcenters; detail is only exercised when
        # the live system actually has at least one.
        wc_resp = requests.get(f"{api_base_url}/wip/meta/workcenters", timeout=30)
        workcenters = self._unwrap(wc_resp.json())

        if len(workcenters) > 0:
            wc_name = workcenters[0]['name']
            response = requests.get(
                f"{api_base_url}/wip/detail/{wc_name}",
                timeout=30
            )

            assert response.status_code == 200
            data = self._unwrap(response.json())
            assert 'workcenter' in data
            assert 'summary' in data
            assert 'lots' in data
            assert 'pagination' in data

    def test_wip_detail_pagination(self, api_base_url):
        """Test WIP detail pagination."""
        wc_resp = requests.get(f"{api_base_url}/wip/meta/workcenters", timeout=30)
        workcenters = self._unwrap(wc_resp.json())

        if len(workcenters) > 0:
            wc_name = workcenters[0]['name']
            response = requests.get(
                f"{api_base_url}/wip/detail/{wc_name}",
                params={'page': 1, 'page_size': 10},
                timeout=30
            )

            assert response.status_code == 200
            data = self._unwrap(response.json())
            # Server must echo back the requested pagination parameters.
            assert data['pagination']['page'] == 1
            assert data['pagination']['page_size'] == 10
|
||||
362
tests/e2e/test_global_connection.py
Normal file
362
tests/e2e/test_global_connection.py
Normal file
@@ -0,0 +1,362 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""E2E tests for global connection management features.
|
||||
|
||||
Tests the MesApi client, Toast notifications, and page functionality
|
||||
using Playwright.
|
||||
|
||||
Run with: pytest tests/e2e/ --headed (to see browser)
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import re
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestPortalPage:
    """E2E tests for the Portal page (Playwright, requires live server)."""

    def test_portal_loads_successfully(self, page: Page, app_server: str):
        """Portal page should load without errors."""
        page.goto(app_server)

        # Wait for page to load
        expect(page.locator('h1')).to_contain_text('MES 報表入口')

    def test_portal_has_all_tabs(self, page: Page, app_server: str):
        """Portal should have all navigation tabs."""
        page.goto(app_server)

        # Check all tabs exist
        expect(page.locator('.tab:has-text("WIP 即時概況")')).to_be_visible()
        expect(page.locator('.tab:has-text("機台狀態報表")')).to_be_visible()
        expect(page.locator('.tab:has-text("數據表查詢工具")')).to_be_visible()
        expect(page.locator('.tab:has-text("Excel 批次查詢")')).to_be_visible()

    def test_portal_tab_switching(self, page: Page, app_server: str):
        """Portal tabs should switch iframe content."""
        page.goto(app_server)

        # Click on a different tab
        page.locator('.tab:has-text("機台狀態報表")').click()

        # Verify the tab is active (class list contains 'active')
        expect(page.locator('.tab:has-text("機台狀態報表")')).to_have_class(re.compile(r'active'))
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestToastNotifications:
    """E2E tests for Toast notification system.

    Toasts are triggered by evaluating JS in the page context rather
    than through UI interactions, so these tests exercise the Toast
    API surface directly.
    """

    def test_toast_container_exists(self, page: Page, app_server: str):
        """Toast container should be present in the DOM."""
        page.goto(f"{app_server}/wip-overview")

        # Toast container should exist in DOM (hidden when empty, which is expected)
        page.wait_for_selector('#mes-toast-container', state='attached', timeout=5000)

    def test_toast_info_display(self, page: Page, app_server: str):
        """Toast.info() should display info notification."""
        page.goto(f"{app_server}/wip-overview")

        # Execute Toast.info() in browser context
        page.evaluate("Toast.info('Test info message')")

        # Verify toast appears
        toast = page.locator('.mes-toast-info')
        expect(toast).to_be_visible()
        expect(toast).to_contain_text('Test info message')

    def test_toast_success_display(self, page: Page, app_server: str):
        """Toast.success() should display success notification."""
        page.goto(f"{app_server}/wip-overview")

        page.evaluate("Toast.success('Operation successful')")

        toast = page.locator('.mes-toast-success')
        expect(toast).to_be_visible()
        expect(toast).to_contain_text('Operation successful')

    def test_toast_error_display(self, page: Page, app_server: str):
        """Toast.error() should display error notification."""
        page.goto(f"{app_server}/wip-overview")

        page.evaluate("Toast.error('An error occurred')")

        toast = page.locator('.mes-toast-error')
        expect(toast).to_be_visible()
        expect(toast).to_contain_text('An error occurred')

    def test_toast_error_with_retry(self, page: Page, app_server: str):
        """Toast.error() with retry callback should show retry button."""
        page.goto(f"{app_server}/wip-overview")

        page.evaluate("Toast.error('Connection failed', { retry: () => console.log('retry clicked') })")

        # Verify retry button exists
        retry_btn = page.locator('.mes-toast-retry')
        expect(retry_btn).to_be_visible()
        expect(retry_btn).to_contain_text('重試')

    def test_toast_loading_display(self, page: Page, app_server: str):
        """Toast.loading() should display loading notification."""
        page.goto(f"{app_server}/wip-overview")

        page.evaluate("Toast.loading('Loading data...')")

        toast = page.locator('.mes-toast-loading')
        expect(toast).to_be_visible()

    def test_toast_dismiss(self, page: Page, app_server: str):
        """Toast.dismiss() should remove toast."""
        page.goto(f"{app_server}/wip-overview")

        # Create and dismiss a toast
        # NOTE(review): assumes Toast.info() returns a numeric id — if it
        # returns a string, the interpolation below would need quoting.
        toast_id = page.evaluate("Toast.info('Will be dismissed')")
        page.evaluate(f"Toast.dismiss({toast_id})")

        # Wait for animation
        page.wait_for_timeout(500)

        # Toast should be gone
        expect(page.locator('.mes-toast-info')).not_to_be_visible()

    def test_toast_max_limit(self, page: Page, app_server: str):
        """Toast system should enforce max 5 toasts."""
        page.goto(f"{app_server}/wip-overview")

        # Create 7 toasts
        for i in range(7):
            page.evaluate(f"Toast.info('Toast {i}')")

        # Should only have 5 toasts visible
        toasts = page.locator('.mes-toast')
        expect(toasts).to_have_count(5)
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestMesApiClient:
    """E2E tests for the MesApi client."""

    def test_mesapi_exists_on_page(self, page: Page, app_server: str):
        """MesApi should be available in window scope."""
        page.goto(f"{app_server}/wip-overview")

        assert page.evaluate("typeof MesApi !== 'undefined'"), \
            "MesApi should be defined"

    def test_mesapi_has_get_method(self, page: Page, app_server: str):
        """MesApi should have get() method."""
        page.goto(f"{app_server}/wip-overview")

        assert page.evaluate("typeof MesApi.get === 'function'"), \
            "MesApi.get should be a function"

    def test_mesapi_has_post_method(self, page: Page, app_server: str):
        """MesApi should have post() method."""
        page.goto(f"{app_server}/wip-overview")

        assert page.evaluate("typeof MesApi.post === 'function'"), \
            "MesApi.post should be a function"

    def test_mesapi_request_logging(self, page: Page, app_server: str):
        """MesApi should log requests to the console with a [MesApi] prefix."""
        page.goto(f"{app_server}/wip-overview")

        # Collect every console message emitted by the page.
        captured = []
        page.on("console", lambda msg: captured.append(msg.text))

        # Fire a request; it may fail, but the client should still log it.
        page.evaluate("""
            (async () => {
                try {
                    await MesApi.get('/api/test-endpoint');
                } catch (e) {
                    // Expected to fail
                }
            })()
        """)

        page.wait_for_timeout(1000)

        prefixed = [message for message in captured if '[MesApi]' in message]
        assert len(prefixed) > 0, "MesApi should log requests with [MesApi] prefix"
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestWIPOverviewPage:
    """E2E tests for the WIP Overview page."""

    def test_wip_overview_loads(self, page: Page, app_server: str):
        """WIP Overview page should load."""
        page.goto(f"{app_server}/wip-overview")

        # A visible <body> is the minimal signal that the page rendered.
        expect(page.locator('body')).to_be_visible()

    def test_wip_overview_has_toast_system(self, page: Page, app_server: str):
        """WIP Overview should have Toast system loaded."""
        page.goto(f"{app_server}/wip-overview")

        assert page.evaluate("typeof Toast !== 'undefined'"), \
            "Toast should be defined on WIP Overview page"

    def test_wip_overview_has_mesapi(self, page: Page, app_server: str):
        """WIP Overview should have MesApi loaded."""
        page.goto(f"{app_server}/wip-overview")

        assert page.evaluate("typeof MesApi !== 'undefined'"), \
            "MesApi should be defined on WIP Overview page"
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestWIPDetailPage:
    """E2E tests for the WIP Detail page."""

    def test_wip_detail_loads(self, page: Page, app_server: str):
        """WIP Detail page should load."""
        page.goto(f"{app_server}/wip-detail")

        # A visible <body> is the minimal signal that the page rendered.
        expect(page.locator('body')).to_be_visible()

    def test_wip_detail_has_toast_system(self, page: Page, app_server: str):
        """WIP Detail should have Toast system loaded."""
        page.goto(f"{app_server}/wip-detail")

        assert page.evaluate("typeof Toast !== 'undefined'"), \
            "Toast should be defined on WIP Detail page"

    def test_wip_detail_has_mesapi(self, page: Page, app_server: str):
        """WIP Detail should have MesApi loaded."""
        page.goto(f"{app_server}/wip-detail")

        assert page.evaluate("typeof MesApi !== 'undefined'"), \
            "MesApi should be defined on WIP Detail page"
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestTablesPage:
    """E2E tests for the Tables page."""

    def test_tables_page_loads(self, page: Page, app_server: str):
        """Tables page should load."""
        page.goto(f"{app_server}/tables")

        # The page header identifies the query tool.
        expect(page.locator('h1')).to_contain_text('MES 數據表查詢工具')

    def test_tables_has_toast_system(self, page: Page, app_server: str):
        """Tables page should have Toast system loaded."""
        page.goto(f"{app_server}/tables")

        assert page.evaluate("typeof Toast !== 'undefined'"), \
            "Toast should be defined on Tables page"

    def test_tables_has_mesapi(self, page: Page, app_server: str):
        """Tables page should have MesApi loaded."""
        page.goto(f"{app_server}/tables")

        assert page.evaluate("typeof MesApi !== 'undefined'"), \
            "MesApi should be defined on Tables page"
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestResourcePage:
    """E2E tests for the Resource Status page."""

    def test_resource_page_loads(self, page: Page, app_server: str):
        """Resource page should load."""
        page.goto(f"{app_server}/resource")

        # A visible <body> is the minimal signal that the page rendered.
        expect(page.locator('body')).to_be_visible()

    def test_resource_has_toast_system(self, page: Page, app_server: str):
        """Resource page should have Toast system loaded."""
        page.goto(f"{app_server}/resource")

        assert page.evaluate("typeof Toast !== 'undefined'"), \
            "Toast should be defined on Resource page"

    def test_resource_has_mesapi(self, page: Page, app_server: str):
        """Resource page should have MesApi loaded."""
        page.goto(f"{app_server}/resource")

        assert page.evaluate("typeof MesApi !== 'undefined'"), \
            "MesApi should be defined on Resource page"
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestExcelQueryPage:
    """E2E tests for the Excel Query page."""

    def test_excel_query_page_loads(self, page: Page, app_server: str):
        """Excel Query page should load."""
        page.goto(f"{app_server}/excel-query")

        # A visible <body> is the minimal signal that the page rendered.
        expect(page.locator('body')).to_be_visible()

    def test_excel_query_has_toast_system(self, page: Page, app_server: str):
        """Excel Query page should have Toast system loaded."""
        page.goto(f"{app_server}/excel-query")

        assert page.evaluate("typeof Toast !== 'undefined'"), \
            "Toast should be defined on Excel Query page"

    def test_excel_query_has_mesapi(self, page: Page, app_server: str):
        """Excel Query page should have MesApi loaded."""
        page.goto(f"{app_server}/excel-query")

        assert page.evaluate("typeof MesApi !== 'undefined'"), \
            "MesApi should be defined on Excel Query page"
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestConsoleLogVerification:
    """E2E tests for console log verification (Phase 4.2 tasks)."""

    def test_request_has_request_id(self, page: Page, app_server: str):
        """API requests should log with req_xxx ID format."""
        page.goto(f"{app_server}/wip-overview")

        captured = []
        page.on("console", lambda msg: captured.append(msg.text))

        # Trigger an API request; failures are tolerated, only the log matters.
        page.evaluate("""
            (async () => {
                try {
                    await MesApi.get('/api/wip/overview/summary');
                } catch (e) {}
            })()
        """)

        page.wait_for_timeout(2000)

        # Every request should be tagged with an id like req_0001.
        # NOTE(review): relies on `re` being imported at module level — confirm.
        id_pattern = re.compile(r'req_\d{4}')
        assert any(id_pattern.search(message) for message in captured), \
            "Console should show request ID like req_0001"

    def test_successful_request_shows_checkmark(self, page: Page, app_server: str):
        """Successful requests should show checkmark in console."""
        page.goto(f"{app_server}/wip-overview")

        captured = []
        page.on("console", lambda msg: captured.append(msg.text))

        # Hit an endpoint that normally succeeds.
        page.evaluate("""
            (async () => {
                try {
                    await MesApi.get('/api/wip/overview/summary');
                } catch (e) {}
            })()
        """)

        page.wait_for_timeout(3000)

        # The exact checkmark glyph depends on implementation (✓ or similar),
        # so only the presence of [MesApi]-prefixed log lines is asserted.
        prefixed = [message for message in captured if '[MesApi]' in message]
        assert len(prefixed) > 0, "Should have MesApi console logs"
|
||||
216
tests/e2e/test_realtime_equipment_e2e.py
Normal file
216
tests/e2e/test_realtime_equipment_e2e.py
Normal file
@@ -0,0 +1,216 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""End-to-end tests for realtime equipment status cache.
|
||||
|
||||
Tests the full flow from cache sync to API response.
|
||||
Requires a running server with --run-e2e flag.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestEquipmentStatusCacheSync:
    """Test equipment status cache synchronization."""

    def test_health_check_includes_equipment_status_cache(self, health_url):
        """Test health check includes equipment_status_cache status."""
        # Explicit timeout so a hung server fails the test instead of
        # blocking the run (matches sibling e2e modules using timeout=10).
        response = requests.get(health_url, timeout=10)

        assert response.status_code == 200
        data = response.json()

        # Should have equipment_status_cache in response.
        assert 'equipment_status_cache' in data
        cache_status = data['equipment_status_cache']

        # Should have expected fields.
        assert 'enabled' in cache_status
        assert 'loaded' in cache_status
        assert 'count' in cache_status
        assert 'updated_at' in cache_status

    def test_health_check_includes_workcenter_mapping(self, health_url):
        """Test health check includes workcenter_mapping status."""
        response = requests.get(health_url, timeout=10)

        assert response.status_code == 200
        data = response.json()

        # Should have workcenter_mapping in response.
        assert 'workcenter_mapping' in data
        wc_status = data['workcenter_mapping']

        # Should have expected fields.
        assert 'loaded' in wc_status
        assert 'workcenter_count' in wc_status
        assert 'group_count' in wc_status
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestMergedQueryApi:
    """Test merged resource status API endpoints."""

    def test_resource_status_endpoint(self, api_base_url):
        """Test /api/resource/status endpoint."""
        url = f"{api_base_url}/resource/status"
        # Explicit timeout keeps a hung server from stalling the run
        # (consistent with the sibling e2e modules, which pass timeout=30).
        response = requests.get(url, timeout=30)

        assert response.status_code == 200
        data = response.json()

        assert data['success'] is True
        assert 'data' in data
        assert 'count' in data

        # If data exists, verify the merged record structure.
        if data['data']:
            record = data['data'][0]
            # Should have merged fields.
            assert 'RESOURCEID' in record
            assert 'RESOURCENAME' in record
            # Should have workcenter mapping fields.
            assert 'WORKCENTER_GROUP' in record
            assert 'WORKCENTER_SHORT' in record
            # Should have realtime status fields.
            assert 'STATUS_CATEGORY' in record

    def test_resource_status_with_workcenter_filter(self, api_base_url):
        """Test /api/resource/status with workcenter_groups filter."""
        url = f"{api_base_url}/resource/status"
        response = requests.get(url, params={'workcenter_groups': '焊接'}, timeout=30)

        assert response.status_code == 200
        data = response.json()

        assert data['success'] is True

        # All results should be in the specified group
        # (WORKCENTER_GROUP may be None if the mapping is not found).
        for record in data['data']:
            if record.get('WORKCENTER_GROUP'):
                assert record['WORKCENTER_GROUP'] == '焊接'

    def test_resource_status_with_production_filter(self, api_base_url):
        """Test /api/resource/status with is_production filter."""
        url = f"{api_base_url}/resource/status"
        response = requests.get(url, params={'is_production': 'true'}, timeout=30)

        assert response.status_code == 200
        data = response.json()

        assert data['success'] is True

    def test_resource_status_with_status_category_filter(self, api_base_url):
        """Test /api/resource/status with status_categories filter."""
        url = f"{api_base_url}/resource/status"
        response = requests.get(url, params={'status_categories': 'PRODUCTIVE,DOWN'}, timeout=30)

        assert response.status_code == 200
        data = response.json()

        assert data['success'] is True

        # All results should be in the specified categories.
        for record in data['data']:
            if record.get('STATUS_CATEGORY'):
                assert record['STATUS_CATEGORY'] in ['PRODUCTIVE', 'DOWN']

    def test_resource_status_summary_endpoint(self, api_base_url):
        """Test /api/resource/status/summary endpoint."""
        url = f"{api_base_url}/resource/status/summary"
        response = requests.get(url, timeout=30)

        assert response.status_code == 200
        data = response.json()

        assert data['success'] is True
        assert 'data' in data

        summary = data['data']
        assert 'total_count' in summary
        assert 'by_status_category' in summary
        assert 'by_workcenter_group' in summary
        assert 'with_active_job' in summary
        assert 'with_wip' in summary

    def test_resource_status_matrix_endpoint(self, api_base_url):
        """Test /api/resource/status/matrix endpoint."""
        url = f"{api_base_url}/resource/status/matrix"
        response = requests.get(url, timeout=30)

        assert response.status_code == 200
        data = response.json()

        assert data['success'] is True
        assert 'data' in data

        # If data exists, verify the row structure.
        if data['data']:
            row = data['data'][0]
            assert 'workcenter_group' in row
            assert 'workcenter_sequence' in row
            assert 'total' in row
            # Should have the standard status columns.
            for status_column in ('PRD', 'SBY', 'UDT', 'SDT', 'EGT', 'NST', 'OTHER'):
                assert status_column in row
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestFilterOptionsIncludeNewFields:
    """Test filter options API includes new fields."""

    def test_status_options_endpoint(self, api_base_url):
        """Test /api/resource/status/options endpoint."""
        url = f"{api_base_url}/resource/status/options"
        # Explicit timeout so a hung server fails the test instead of
        # blocking the run (consistent with the sibling e2e modules).
        response = requests.get(url, timeout=30)

        assert response.status_code == 200
        data = response.json()

        assert data['success'] is True
        assert 'data' in data

        options = data['data']
        # Should have workcenter_groups.
        assert 'workcenter_groups' in options
        assert isinstance(options['workcenter_groups'], list)

        # Should have status_categories.
        assert 'status_categories' in options
        assert isinstance(options['status_categories'], list)
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestCacheIntegration:
    """Test cache integration (requires Redis)."""

    def test_cache_data_consistency(self, api_base_url, health_url):
        """Test cache data is consistent between health and API."""
        # Explicit timeouts so a hung server fails the test instead of
        # blocking the run (consistent with the sibling e2e modules).
        health_resp = requests.get(health_url, timeout=10)
        health_data = health_resp.json()

        cache_status = health_data.get('equipment_status_cache', {})

        if not cache_status.get('enabled') or not cache_status.get('loaded'):
            pytest.skip("Equipment status cache not enabled or loaded")

        cache_count = cache_status.get('count', 0)

        # Get all equipment status via the API.
        api_resp = requests.get(f"{api_base_url}/resource/status", timeout=30)
        api_data = api_resp.json()

        api_count = api_data.get('count', 0)

        # API may have filters applied from resource-cache, so it could be less
        # but should never exceed the cache count.
        assert api_count <= cache_count or cache_count == 0
|
||||
250
tests/e2e/test_resource_cache_e2e.py
Normal file
250
tests/e2e/test_resource_cache_e2e.py
Normal file
@@ -0,0 +1,250 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""End-to-end tests for Resource Cache functionality.
|
||||
|
||||
These tests require a running server with Redis enabled.
|
||||
Run with: pytest tests/e2e/test_resource_cache_e2e.py -v --run-e2e
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
@pytest.mark.e2e
class TestHealthEndpointResourceCacheE2E:
    """E2E tests for /health endpoint resource cache status."""

    def test_health_includes_resource_cache(self, health_url):
        """Test health endpoint includes resource_cache field."""
        response = requests.get(health_url, timeout=10)

        # 503 is acceptable: the endpoint reports status even when degraded.
        assert response.status_code in (200, 503)
        assert 'resource_cache' in response.json()

    def test_resource_cache_has_required_fields(self, health_url):
        """Test resource_cache has all required fields."""
        payload = requests.get(health_url, timeout=10).json()

        cache_info = payload['resource_cache']
        assert 'enabled' in cache_info

        # Detail fields are only reported when the cache is enabled.
        if cache_info['enabled']:
            for field in ('loaded', 'count', 'version', 'updated_at'):
                assert field in cache_info

    def test_resource_cache_loaded_has_positive_count(self, health_url):
        """Test resource cache has positive count when loaded."""
        payload = requests.get(health_url, timeout=10).json()

        cache_info = payload['resource_cache']
        if cache_info.get('enabled') and cache_info.get('loaded'):
            assert cache_info['count'] > 0, "Resource cache should have data when loaded"
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestResourceHistoryOptionsE2E:
    """E2E tests for resource history filter options endpoint."""

    def _get_options(self, api_base_url):
        """GET the history options endpoint with the standard e2e timeout."""
        return requests.get(f"{api_base_url}/resource/history/options", timeout=30)

    def test_options_endpoint_accessible(self, api_base_url):
        """Test resource history options endpoint is accessible."""
        assert self._get_options(api_base_url).status_code == 200

    def test_options_returns_families(self, api_base_url):
        """Test options endpoint returns families list."""
        response = self._get_options(api_base_url)

        assert response.status_code == 200
        payload = response.json()

        if payload.get('success'):
            options = payload.get('data', {})
            assert 'families' in options
            assert isinstance(options['families'], list)

    def test_options_returns_workcenter_groups(self, api_base_url):
        """Test options endpoint returns workcenter groups."""
        response = self._get_options(api_base_url)

        assert response.status_code == 200
        payload = response.json()

        if payload.get('success'):
            options = payload.get('data', {})
            assert 'workcenter_groups' in options
            assert isinstance(options['workcenter_groups'], list)
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestResourceFilterOptionsE2E:
    """E2E tests for resource filter options endpoint.

    The five original test methods repeated the same request/assert
    boilerplate; the shared logic is factored into private helpers so each
    test states only the option key it verifies. Behavior is unchanged.
    """

    def _get_filter_options(self, api_base_url):
        """GET /resource/filter_options with the standard e2e timeout."""
        return requests.get(f"{api_base_url}/resource/filter_options", timeout=30)

    def _assert_option_list(self, api_base_url, key):
        """Shared check: 200 response and data[key] is a list (when successful)."""
        response = self._get_filter_options(api_base_url)
        assert response.status_code == 200
        data = response.json()

        if data.get('success'):
            options = data.get('data', {})
            assert key in options
            assert isinstance(options[key], list)

    def test_filter_options_endpoint_accessible(self, api_base_url):
        """Test resource filter options endpoint is accessible."""
        assert self._get_filter_options(api_base_url).status_code == 200

    def test_filter_options_returns_workcenters(self, api_base_url):
        """Test filter options returns workcenters list."""
        self._assert_option_list(api_base_url, 'workcenters')

    def test_filter_options_returns_families(self, api_base_url):
        """Test filter options returns families list."""
        self._assert_option_list(api_base_url, 'families')

    def test_filter_options_returns_departments(self, api_base_url):
        """Test filter options returns departments list."""
        self._assert_option_list(api_base_url, 'departments')

    def test_filter_options_returns_statuses(self, api_base_url):
        """Test filter options returns statuses list (from Oracle)."""
        self._assert_option_list(api_base_url, 'statuses')
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestResourceCachePerformanceE2E:
    """E2E tests for resource cache performance."""

    def test_filter_options_response_time(self, api_base_url):
        """Test filter options responds within acceptable time."""
        import time

        # First request may trigger cache load.
        requests.get(f"{api_base_url}/resource/filter_options", timeout=30)

        # Second request should be served from cache. Use perf_counter (a
        # monotonic clock) instead of time.time(): wall-clock adjustments
        # cannot skew the measured interval.
        start = time.perf_counter()
        response = requests.get(f"{api_base_url}/resource/filter_options", timeout=30)
        elapsed = time.perf_counter() - start

        assert response.status_code == 200
        # Note: statuses still queries Oracle, so allow more time.
        # Other fields (workcenters, families, departments) come from Redis cache.
        assert elapsed < 30.0, f"Response took {elapsed:.2f}s, expected < 30s"

    def test_history_options_response_time(self, api_base_url):
        """Test history options responds within acceptable time."""
        import time

        # First request may trigger cache load.
        requests.get(f"{api_base_url}/resource/history/options", timeout=30)

        # Second request should be served from cache.
        start = time.perf_counter()
        response = requests.get(f"{api_base_url}/resource/history/options", timeout=30)
        elapsed = time.perf_counter() - start

        assert response.status_code == 200
        # Should be fast (< 2 seconds).
        assert elapsed < 2.0, f"Response took {elapsed:.2f}s, expected < 2s"
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.redis
class TestResourceCacheDataConsistencyE2E:
    """E2E tests for resource cache data consistency."""

    def test_cache_count_matches_health_report(self, health_url, api_base_url):
        """Test cache count in health matches actual data count."""
        health_payload = requests.get(health_url, timeout=10).json()

        cache_info = health_payload.get('resource_cache', {})
        if not cache_info.get('enabled') or not cache_info.get('loaded'):
            pytest.skip("Resource cache not enabled or loaded")

        reported_count = cache_info.get('count', 0)

        # Filter options are derived from the same cached data.
        options_payload = requests.get(
            f"{api_base_url}/resource/filter_options", timeout=30
        ).json()

        # Exact count comparison is complex; just verify data exists
        # whenever the health endpoint reports a non-empty cache.
        if options_payload.get('success'):
            workcenters = options_payload.get('data', {}).get('workcenters', [])
            assert len(workcenters) > 0 or reported_count == 0

    def test_families_consistent_across_endpoints(self, api_base_url):
        """Test families list is consistent across endpoints."""
        filter_payload = requests.get(
            f"{api_base_url}/resource/filter_options", timeout=30
        ).json()
        history_payload = requests.get(
            f"{api_base_url}/resource/history/options", timeout=30
        ).json()

        if filter_payload.get('success') and history_payload.get('success'):
            filter_families = set(filter_payload.get('data', {}).get('families', []))
            history_families = set(history_payload.get('data', {}).get('families', []))

            # Both endpoints read the same cache, so the sets must agree.
            assert filter_families == history_families, \
                f"Families mismatch: filter has {len(filter_families)}, history has {len(history_families)}"
|
||||
319
tests/e2e/test_resource_history_e2e.py
Normal file
319
tests/e2e/test_resource_history_e2e.py
Normal file
@@ -0,0 +1,319 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""End-to-end tests for resource history analysis page.
|
||||
|
||||
These tests simulate real user workflows through the resource history analysis feature.
|
||||
Run with: pytest tests/e2e/test_resource_history_e2e.py -v --run-integration
|
||||
"""
|
||||
|
||||
import json
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src'))
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def app():
    """Flask application configured for testing."""
    # Reset the cached engine so each test builds a fresh connection.
    db._ENGINE = None
    application = create_app('testing')
    application.config['TESTING'] = True
    return application
|
||||
|
||||
|
||||
@pytest.fixture
def client(app):
    """Flask test client bound to the test application."""
    test_client = app.test_client()
    return test_client
|
||||
|
||||
|
||||
class TestResourceHistoryPageAccess:
    """E2E tests for page access and navigation."""

    def _load_page(self, client):
        """GET /resource-history and return (status_code, decoded body)."""
        response = client.get('/resource-history')
        return response.status_code, response.data.decode('utf-8')

    def test_page_loads_successfully(self, client):
        """Resource history page should load without errors."""
        status, content = self._load_page(client)

        assert status == 200
        assert '設備歷史績效' in content

    def test_page_contains_filter_elements(self, client):
        """Page should contain all filter elements."""
        _, content = self._load_page(client)

        # Date range inputs, multi-select dropdowns and boolean filters.
        for marker in ('startDate', 'endDate',
                       'workcenterGroupsDropdown', 'familiesDropdown',
                       'isProduction', 'isKey', 'isMonitor'):
            assert marker in content

    def test_page_contains_kpi_cards(self, client):
        """Page should contain KPI card elements."""
        _, content = self._load_page(client)

        for marker in ('kpiOuPct', 'kpiAvailabilityPct', 'kpiPrdHours',
                       'kpiUdtHours', 'kpiSdtHours', 'kpiEgtHours',
                       'kpiMachineCount'):
            assert marker in content

    def test_page_contains_chart_containers(self, client):
        """Page should contain chart container elements."""
        _, content = self._load_page(client)

        for marker in ('trendChart', 'stackedChart',
                       'comparisonChart', 'heatmapChart'):
            assert marker in content

    def test_page_contains_table_elements(self, client):
        """Page should contain table elements."""
        _, content = self._load_page(client)

        for marker in ('detailTableBody', 'expandAllBtn',
                       'collapseAllBtn', 'exportBtn'):
            assert marker in content
|
||||
|
||||
|
||||
class TestResourceHistoryAPIWorkflow:
|
||||
"""E2E tests for API workflows."""
|
||||
|
||||
@patch('mes_dashboard.services.filter_cache.get_workcenter_groups')
@patch('mes_dashboard.services.filter_cache.get_resource_families')
def test_filter_options_workflow(self, mock_families, mock_groups, client):
    """Filter options should be loadable."""
    # Decorators apply bottom-up: mock_families patches
    # get_resource_families, mock_groups patches get_workcenter_groups.
    mock_groups.return_value = [
        {'name': '焊接_DB', 'sequence': 1},
        {'name': '焊接_WB', 'sequence': 2},
        {'name': '成型', 'sequence': 4},
    ]
    mock_families.return_value = ['FAM001', 'FAM002']

    response = client.get('/api/resource/history/options')

    assert response.status_code == 200
    payload = json.loads(response.data)
    assert payload['success'] is True
    assert 'workcenter_groups' in payload['data']
    assert 'families' in payload['data']
|
||||
|
||||
@patch('mes_dashboard.services.resource_history_service.read_sql_df')
def test_complete_query_workflow(self, mock_read_sql, client):
    """Complete query workflow should return all data sections."""
    # Canned frames for the 4 queries issued by query_summary.
    kpi_df = pd.DataFrame([{
        'PRD_HOURS': 8000, 'SBY_HOURS': 1000, 'UDT_HOURS': 500,
        'SDT_HOURS': 300, 'EGT_HOURS': 200, 'NST_HOURS': 1000,
        'MACHINE_COUNT': 100
    }])

    trend_df = pd.DataFrame([
        {'DATA_DATE': datetime(2024, 1, 1), 'PRD_HOURS': 1000, 'SBY_HOURS': 100,
         'UDT_HOURS': 50, 'SDT_HOURS': 30, 'EGT_HOURS': 20, 'NST_HOURS': 100, 'MACHINE_COUNT': 100},
        {'DATA_DATE': datetime(2024, 1, 2), 'PRD_HOURS': 1100, 'SBY_HOURS': 90,
         'UDT_HOURS': 40, 'SDT_HOURS': 25, 'EGT_HOURS': 15, 'NST_HOURS': 100, 'MACHINE_COUNT': 100},
    ])

    heatmap_df = pd.DataFrame([
        {'WORKCENTERNAME': '焊接_DB', 'DATA_DATE': datetime(2024, 1, 1),
         'PRD_HOURS': 400, 'SBY_HOURS': 50, 'UDT_HOURS': 25, 'SDT_HOURS': 15, 'EGT_HOURS': 10},
        {'WORKCENTERNAME': '成型', 'DATA_DATE': datetime(2024, 1, 1),
         'PRD_HOURS': 600, 'SBY_HOURS': 50, 'UDT_HOURS': 25, 'SDT_HOURS': 15, 'EGT_HOURS': 10},
    ])

    comparison_df = pd.DataFrame([
        {'WORKCENTERNAME': '焊接_DB', 'PRD_HOURS': 4000, 'SBY_HOURS': 500,
         'UDT_HOURS': 250, 'SDT_HOURS': 150, 'EGT_HOURS': 100, 'MACHINE_COUNT': 50},
        {'WORKCENTERNAME': '成型', 'PRD_HOURS': 4000, 'SBY_HOURS': 500,
         'UDT_HOURS': 250, 'SDT_HOURS': 150, 'EGT_HOURS': 100, 'MACHINE_COUNT': 50},
    ])

    # The queries run through a ThreadPoolExecutor, so a positional
    # side_effect list is unreliable; dispatch on the SQL text instead.
    def dispatch(sql):
        normalized = sql.upper()
        if 'DATA_DATE' in normalized and 'WORKCENTERNAME' in normalized:
            return heatmap_df
        if 'DATA_DATE' in normalized:
            return trend_df
        if 'WORKCENTERNAME' in normalized:
            return comparison_df
        return kpi_df

    mock_read_sql.side_effect = dispatch

    response = client.get(
        '/api/resource/history/summary'
        '?start_date=2024-01-01'
        '&end_date=2024-01-07'
        '&granularity=day'
    )

    assert response.status_code == 200
    payload = json.loads(response.data)
    assert payload['success'] is True

    # Verify KPI.
    assert payload['data']['kpi']['ou_pct'] == 80.0
    # Availability% = (8000+1000+200) / (8000+1000+200+300+500+1000) * 100 = 9200/11000 = 83.6%
    assert payload['data']['kpi']['availability_pct'] == 83.6
    assert payload['data']['kpi']['machine_count'] == 100

    # Verify trend: two dated rows, each augmented with availability_pct.
    assert len(payload['data']['trend']) == 2
    assert 'availability_pct' in payload['data']['trend'][0]

    # Verify heatmap.
    assert len(payload['data']['heatmap']) == 2

    # Verify comparison.
    assert len(payload['data']['workcenter_comparison']) == 2
|
||||
|
||||
@patch('mes_dashboard.services.resource_history_service.read_sql_df')
def test_detail_query_workflow(self, mock_read_sql, client):
    """Detail query workflow should return hierarchical data."""
    # Two resources sharing the same workcenter/family; only the resource
    # name and PRD/SBY split differ between the rows.
    def make_row(resource, prd, sby):
        return {'WORKCENTERNAME': '焊接_DB', 'RESOURCEFAMILYNAME': 'FAM001',
                'RESOURCENAME': resource, 'PRD_HOURS': prd, 'SBY_HOURS': sby,
                'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2,
                'NST_HOURS': 10, 'TOTAL_HOURS': 110}

    mock_read_sql.return_value = pd.DataFrame([
        make_row('RES001', 80, 10),
        make_row('RES002', 75, 15),
    ])

    response = client.get(
        '/api/resource/history/detail'
        '?start_date=2024-01-01'
        '&end_date=2024-01-07'
    )

    assert response.status_code == 200
    payload = json.loads(response.data)
    assert payload['success'] is True
    assert payload['total'] == 2
    assert len(payload['data']) == 2
    assert payload['truncated'] is False

    # Verify data structure: each row carries the hierarchy columns plus
    # the computed percentage/hour metrics.
    first_row = payload['data'][0]
    for key in ('workcenter', 'family', 'resource', 'ou_pct',
                'availability_pct', 'prd_hours', 'prd_pct'):
        assert key in first_row
|
||||
|
||||
@patch('mes_dashboard.services.resource_history_service.read_sql_df')
def test_export_workflow(self, mock_read_sql, client):
    """Export workflow should return valid CSV."""
    mock_read_sql.return_value = pd.DataFrame([{
        'WORKCENTERNAME': '焊接_DB', 'RESOURCEFAMILYNAME': 'FAM001',
        'RESOURCENAME': 'RES001', 'PRD_HOURS': 80, 'SBY_HOURS': 10,
        'UDT_HOURS': 5, 'SDT_HOURS': 3, 'EGT_HOURS': 2,
        'NST_HOURS': 10, 'TOTAL_HOURS': 110,
    }])

    response = client.get(
        '/api/resource/history/export'
        '?start_date=2024-01-01'
        '&end_date=2024-01-07'
    )

    assert response.status_code == 200
    assert 'text/csv' in response.content_type

    # utf-8-sig strips the Excel-friendly BOM the export prepends.
    rows = response.data.decode('utf-8-sig').strip().split('\n')

    # Should have header + data rows
    assert len(rows) >= 2

    # Verify header carries the localized/metric column names.
    header_line = rows[0]
    for column in ('站點', 'OU%', 'Availability%'):
        assert column in header_line
|
||||
|
||||
|
||||
class TestResourceHistoryValidation:
    """E2E tests for input validation."""

    def test_date_range_validation(self, client):
        """Date range exceeding 730 days should be rejected."""
        url = ('/api/resource/history/summary'
               '?start_date=2024-01-01'
               '&end_date=2026-01-02')
        response = client.get(url)

        assert response.status_code == 400
        payload = json.loads(response.data)
        assert payload['success'] is False
        # The error message should mention the 730-day limit.
        assert '730' in payload['error']

    def test_missing_required_params(self, client):
        """Missing required parameters should return error."""
        response = client.get('/api/resource/history/summary')

        assert response.status_code == 400
        payload = json.loads(response.data)
        assert payload['success'] is False

    @patch('mes_dashboard.services.resource_history_service.read_sql_df')
    def test_granularity_options(self, mock_read_sql, client):
        """Different granularity options should work."""
        mock_df = pd.DataFrame([{
            'PRD_HOURS': 100, 'SBY_HOURS': 10, 'UDT_HOURS': 5,
            'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10, 'MACHINE_COUNT': 5
        }])
        mock_read_sql.return_value = mock_df

        for granularity in ('day', 'week', 'month', 'year'):
            # The summary endpoint issues four queries per call, so the
            # side_effect queue is refilled before each request.
            mock_read_sql.side_effect = [mock_df, pd.DataFrame(), pd.DataFrame(), pd.DataFrame()]

            response = client.get(
                f'/api/resource/history/summary'
                f'?start_date=2024-01-01'
                f'&end_date=2024-01-31'
                f'&granularity={granularity}'
            )

            assert response.status_code == 200, f"Failed for granularity={granularity}"
|
||||
|
||||
|
||||
class TestResourceHistoryNavigation:
    """E2E tests for navigation integration."""

    def test_portal_includes_history_tab(self, client):
        """Portal should include resource history tab."""
        html = client.get('/').data.decode('utf-8')

        # Tab label and the iframe hosting the history page must both render.
        for marker in ('設備歷史績效', 'resourceHistoryFrame'):
            assert marker in html
|
||||
|
||||
|
||||
# Allow running this test module directly (python test_file.py) instead of
# going through the pytest CLI.
if __name__ == '__main__':
    pytest.main([__file__, '-v'])
|
||||
46
tests/fixtures/frontend_compute_parity.json
vendored
Normal file
46
tests/fixtures/frontend_compute_parity.json
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"metric_tolerance": {
|
||||
"ou_pct": 0.1,
|
||||
"availability_pct": 0.1,
|
||||
"prd_pct": 0.1,
|
||||
"sby_pct": 0.1,
|
||||
"udt_pct": 0.1,
|
||||
"sdt_pct": 0.1,
|
||||
"egt_pct": 0.1,
|
||||
"nst_pct": 0.1
|
||||
},
|
||||
"cases": [
|
||||
{
|
||||
"prd_hours": 10,
|
||||
"sby_hours": 2,
|
||||
"udt_hours": 1,
|
||||
"sdt_hours": 1,
|
||||
"egt_hours": 1,
|
||||
"nst_hours": 1
|
||||
},
|
||||
{
|
||||
"prd_hours": 0,
|
||||
"sby_hours": 0,
|
||||
"udt_hours": 0,
|
||||
"sdt_hours": 0,
|
||||
"egt_hours": 0,
|
||||
"nst_hours": 0
|
||||
},
|
||||
{
|
||||
"prd_hours": 85.5,
|
||||
"sby_hours": 10.2,
|
||||
"udt_hours": 1.1,
|
||||
"sdt_hours": 0.8,
|
||||
"egt_hours": 2.4,
|
||||
"nst_hours": 3.0
|
||||
},
|
||||
{
|
||||
"prd_hours": 5,
|
||||
"sby_hours": 3,
|
||||
"udt_hours": 4,
|
||||
"sdt_hours": 2,
|
||||
"egt_hours": 1,
|
||||
"nst_hours": 5
|
||||
}
|
||||
]
|
||||
}
|
||||
2
tests/stress/__init__.py
Normal file
2
tests/stress/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Stress tests for MES Dashboard."""
|
||||
118
tests/stress/conftest.py
Normal file
118
tests/stress/conftest.py
Normal file
@@ -0,0 +1,118 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Pytest configuration for stress tests."""
|
||||
|
||||
import pytest
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Dict, Any
|
||||
|
||||
# Add src to path
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src'))
|
||||
|
||||
|
||||
@dataclass
class StressTestResult:
    """Mutable accumulator for the outcome of one stress-test run.

    Counters are updated through add_success()/add_failure(); the caller is
    responsible for setting total_duration (wall-clock seconds) once the
    run finishes, after which the throughput property becomes meaningful.
    """

    test_name: str
    total_requests: int = 0
    successful_requests: int = 0
    failed_requests: int = 0
    total_duration: float = 0.0
    # Stays at +inf until the first success is recorded; report() renders
    # it as "N/A" in that case.
    min_response_time: float = float('inf')
    max_response_time: float = 0.0
    response_times: List[float] = field(default_factory=list)
    errors: List[str] = field(default_factory=list)

    @property
    def avg_response_time(self) -> float:
        """Mean of the recorded response times; 0.0 when none recorded."""
        if not self.response_times:
            return 0.0
        return sum(self.response_times) / len(self.response_times)

    @property
    def success_rate(self) -> float:
        """Successful requests as a percentage of all requests (0.0 if none)."""
        if self.total_requests == 0:
            return 0.0
        return (self.successful_requests / self.total_requests) * 100

    @property
    def requests_per_second(self) -> float:
        """Overall throughput; 0.0 until total_duration has been set."""
        if self.total_duration == 0:
            return 0.0
        return self.total_requests / self.total_duration

    def add_success(self, response_time: float) -> None:
        """Record one successful request and fold its latency into the stats."""
        self.total_requests += 1
        self.successful_requests += 1
        self.response_times.append(response_time)
        self.min_response_time = min(self.min_response_time, response_time)
        self.max_response_time = max(self.max_response_time, response_time)

    def add_failure(self, error: str, response_time: float = 0) -> None:
        """Record one failed request.

        The latency is only recorded when positive, so instantaneous
        failures (e.g. connection refused) do not skew the latency stats.
        Failures never update min/max response time.
        """
        self.total_requests += 1
        self.failed_requests += 1
        self.errors.append(error)
        if response_time > 0:
            self.response_times.append(response_time)

    def report(self) -> str:
        """Generate human-readable report."""
        lines = [
            f"\n{'='*60}",
            f"Stress Test Report: {self.test_name}",
            f"{'='*60}",
            f"Total Requests: {self.total_requests}",
            f"Successful: {self.successful_requests}",
            f"Failed: {self.failed_requests}",
            f"Success Rate: {self.success_rate:.2f}%",
            f"{'─'*60}",
            f"Total Duration: {self.total_duration:.2f}s",
            f"Requests/Second: {self.requests_per_second:.2f}",
            f"{'─'*60}",
            # min_response_time is only meaningful once a success arrived.
            f"Min Response Time: {self.min_response_time*1000:.2f}ms" if self.min_response_time != float('inf') else "Min Response Time: N/A",
            f"Max Response Time: {self.max_response_time*1000:.2f}ms",
            f"Avg Response Time: {self.avg_response_time*1000:.2f}ms",
            f"{'='*60}",
        ]
        if self.errors:
            # Fixed: was an f-string with no placeholders.
            lines.append("Errors (first 5):")
            for err in self.errors[:5]:
                lines.append(f" - {err[:100]}")
        return "\n".join(lines)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def base_url() -> str:
    """Base URL of the server under stress test (STRESS_TEST_URL overrides)."""
    default_url = 'http://127.0.0.1:8080'
    return os.environ.get('STRESS_TEST_URL', default_url)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def stress_config() -> Dict[str, Any]:
    """Stress-test knobs, each overridable through an environment variable."""
    env = os.environ.get
    return {
        'concurrent_users': int(env('STRESS_CONCURRENT_USERS', '10')),
        'requests_per_user': int(env('STRESS_REQUESTS_PER_USER', '20')),
        'ramp_up_time': float(env('STRESS_RAMP_UP_TIME', '2.0')),
        'timeout': float(env('STRESS_TIMEOUT', '30.0')),
    }
|
||||
|
||||
|
||||
@pytest.fixture
def stress_result():
    """Factory fixture producing named StressTestResult containers."""
    def _factory(test_name: str) -> StressTestResult:
        return StressTestResult(test_name=test_name)
    return _factory
|
||||
|
||||
|
||||
def pytest_configure(config):
    """Register the custom markers used by the stress-test suite."""
    for marker in (
        "stress: mark test as stress test (may take longer)",
        "load: mark test as load test (concurrent requests)",
    ):
        config.addinivalue_line("markers", marker)
|
||||
327
tests/stress/test_api_load.py
Normal file
327
tests/stress/test_api_load.py
Normal file
@@ -0,0 +1,327 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Backend API load tests.
|
||||
|
||||
Tests API endpoints under concurrent load to verify:
|
||||
- Connection pool stability
|
||||
- Timeout handling
|
||||
- Response consistency under pressure
|
||||
|
||||
Run with: pytest tests/stress/test_api_load.py -v -s
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import time
|
||||
import requests
|
||||
import concurrent.futures
|
||||
from typing import List, Tuple
|
||||
|
||||
# Import from local conftest via pytest fixtures
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
@pytest.mark.load
|
||||
class TestAPILoadConcurrent:
|
||||
"""Load tests with concurrent requests."""
|
||||
|
||||
def _make_request(self, url: str, timeout: float) -> Tuple[bool, float, str]:
|
||||
"""Make a single request and return (success, duration, error)."""
|
||||
start = time.time()
|
||||
try:
|
||||
response = requests.get(url, timeout=timeout)
|
||||
duration = time.time() - start
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
if data.get('success'):
|
||||
return (True, duration, '')
|
||||
return (False, duration, f"API returned success=false: {data.get('error', 'unknown')}")
|
||||
return (False, duration, f"HTTP {response.status_code}")
|
||||
except requests.exceptions.Timeout:
|
||||
duration = time.time() - start
|
||||
return (False, duration, "Request timeout")
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
duration = time.time() - start
|
||||
return (False, duration, f"Connection error: {str(e)[:50]}")
|
||||
except Exception as e:
|
||||
duration = time.time() - start
|
||||
return (False, duration, f"Error: {str(e)[:50]}")
|
||||
|
||||
def test_wip_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
"""Test WIP summary API under concurrent load."""
|
||||
result = stress_result("WIP Summary Concurrent Load")
|
||||
url = f"{base_url}/api/wip/overview/summary"
|
||||
concurrent_users = stress_config['concurrent_users']
|
||||
requests_per_user = stress_config['requests_per_user']
|
||||
timeout = stress_config['timeout']
|
||||
|
||||
total_requests = concurrent_users * requests_per_user
|
||||
|
||||
start_time = time.time()
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor:
|
||||
futures = [
|
||||
executor.submit(self._make_request, url, timeout)
|
||||
for _ in range(total_requests)
|
||||
]
|
||||
|
||||
for future in concurrent.futures.as_completed(futures):
|
||||
success, duration, error = future.result()
|
||||
if success:
|
||||
result.add_success(duration)
|
||||
else:
|
||||
result.add_failure(error, duration)
|
||||
|
||||
result.total_duration = time.time() - start_time
|
||||
|
||||
print(result.report())
|
||||
|
||||
# Assertions
|
||||
assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%"
|
||||
assert result.avg_response_time < 10.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 10s"
|
||||
|
||||
def test_wip_matrix_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
"""Test WIP matrix API under concurrent load."""
|
||||
result = stress_result("WIP Matrix Concurrent Load")
|
||||
url = f"{base_url}/api/wip/overview/matrix"
|
||||
concurrent_users = stress_config['concurrent_users']
|
||||
requests_per_user = stress_config['requests_per_user']
|
||||
timeout = stress_config['timeout']
|
||||
|
||||
total_requests = concurrent_users * requests_per_user
|
||||
|
||||
start_time = time.time()
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor:
|
||||
futures = [
|
||||
executor.submit(self._make_request, url, timeout)
|
||||
for _ in range(total_requests)
|
||||
]
|
||||
|
||||
for future in concurrent.futures.as_completed(futures):
|
||||
success, duration, error = future.result()
|
||||
if success:
|
||||
result.add_success(duration)
|
||||
else:
|
||||
result.add_failure(error, duration)
|
||||
|
||||
result.total_duration = time.time() - start_time
|
||||
|
||||
print(result.report())
|
||||
|
||||
assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%"
|
||||
assert result.avg_response_time < 15.0, f"Avg response time {result.avg_response_time:.2f}s exceeds 15s"
|
||||
|
||||
def test_resource_summary_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
"""Test resource status summary API under concurrent load."""
|
||||
result = stress_result("Resource Status Summary Concurrent Load")
|
||||
url = f"{base_url}/api/resource/status/summary"
|
||||
concurrent_users = stress_config['concurrent_users']
|
||||
requests_per_user = stress_config['requests_per_user']
|
||||
timeout = stress_config['timeout']
|
||||
|
||||
total_requests = concurrent_users * requests_per_user
|
||||
|
||||
start_time = time.time()
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor:
|
||||
futures = [
|
||||
executor.submit(self._make_request, url, timeout)
|
||||
for _ in range(total_requests)
|
||||
]
|
||||
|
||||
for future in concurrent.futures.as_completed(futures):
|
||||
success, duration, error = future.result()
|
||||
if success:
|
||||
result.add_success(duration)
|
||||
else:
|
||||
result.add_failure(error, duration)
|
||||
|
||||
result.total_duration = time.time() - start_time
|
||||
|
||||
print(result.report())
|
||||
|
||||
assert result.success_rate >= 90.0, f"Success rate {result.success_rate:.1f}% is below 90%"
|
||||
|
||||
def test_mixed_endpoints_concurrent_load(self, base_url: str, stress_config: dict, stress_result):
|
||||
"""Test multiple API endpoints simultaneously."""
|
||||
result = stress_result("Mixed Endpoints Concurrent Load")
|
||||
endpoints = [
|
||||
f"{base_url}/api/wip/overview/summary",
|
||||
f"{base_url}/api/wip/overview/matrix",
|
||||
f"{base_url}/api/wip/overview/hold",
|
||||
f"{base_url}/api/wip/meta/workcenters",
|
||||
f"{base_url}/api/resource/status/summary",
|
||||
]
|
||||
concurrent_users = stress_config['concurrent_users']
|
||||
timeout = stress_config['timeout']
|
||||
|
||||
# 5 requests per endpoint per user
|
||||
requests_per_endpoint = 5
|
||||
total_requests = concurrent_users * len(endpoints) * requests_per_endpoint
|
||||
|
||||
start_time = time.time()
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor:
|
||||
futures = []
|
||||
for _ in range(concurrent_users):
|
||||
for endpoint in endpoints:
|
||||
for _ in range(requests_per_endpoint):
|
||||
futures.append(executor.submit(self._make_request, endpoint, timeout))
|
||||
|
||||
for future in concurrent.futures.as_completed(futures):
|
||||
success, duration, error = future.result()
|
||||
if success:
|
||||
result.add_success(duration)
|
||||
else:
|
||||
result.add_failure(error, duration)
|
||||
|
||||
result.total_duration = time.time() - start_time
|
||||
|
||||
print(result.report())
|
||||
|
||||
assert result.success_rate >= 85.0, f"Success rate {result.success_rate:.1f}% is below 85%"
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
@pytest.mark.load
|
||||
class TestAPILoadRampUp:
|
||||
"""Load tests with gradual ramp-up."""
|
||||
|
||||
def _make_request(self, url: str, timeout: float) -> Tuple[bool, float, str]:
|
||||
"""Make a single request and return (success, duration, error)."""
|
||||
start = time.time()
|
||||
try:
|
||||
response = requests.get(url, timeout=timeout)
|
||||
duration = time.time() - start
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
if data.get('success'):
|
||||
return (True, duration, '')
|
||||
return (False, duration, f"API error: {data.get('error', 'unknown')}")
|
||||
return (False, duration, f"HTTP {response.status_code}")
|
||||
except Exception as e:
|
||||
duration = time.time() - start
|
||||
return (False, duration, str(e)[:50])
|
||||
|
||||
def test_gradual_load_increase(self, base_url: str, stress_result):
|
||||
"""Test API stability as load gradually increases."""
|
||||
result = stress_result("Gradual Load Increase")
|
||||
url = f"{base_url}/api/wip/overview/summary"
|
||||
|
||||
# Start with 2 concurrent users, increase to 20
|
||||
load_levels = [2, 5, 10, 15, 20]
|
||||
requests_per_level = 10
|
||||
timeout = 30.0
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
for concurrent_users in load_levels:
|
||||
print(f"\n Testing with {concurrent_users} concurrent users...")
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor:
|
||||
futures = [
|
||||
executor.submit(self._make_request, url, timeout)
|
||||
for _ in range(requests_per_level)
|
||||
]
|
||||
|
||||
for future in concurrent.futures.as_completed(futures):
|
||||
success, duration, error = future.result()
|
||||
if success:
|
||||
result.add_success(duration)
|
||||
else:
|
||||
result.add_failure(error, duration)
|
||||
|
||||
time.sleep(0.5) # Brief pause between levels
|
||||
|
||||
result.total_duration = time.time() - start_time
|
||||
|
||||
print(result.report())
|
||||
|
||||
assert result.success_rate >= 80.0, f"Success rate {result.success_rate:.1f}% is below 80%"
|
||||
|
||||
|
||||
@pytest.mark.stress
class TestAPITimeoutHandling:
    """Tests for timeout handling under load."""

    def test_connection_recovery_after_timeout(self, base_url: str, stress_result):
        """Test that API recovers after timeout scenarios.

        Phase 1 deliberately provokes client-side timeouts with a 0.1s
        budget; phase 2 retries with a generous timeout until one request
        succeeds or 10 attempts are exhausted.
        """
        result = stress_result("Connection Recovery After Timeout")

        # First, make requests with very short timeout to trigger timeouts
        short_timeout_url = f"{base_url}/api/wip/overview/matrix"

        print("\n Phase 1: Triggering timeouts with 0.1s timeout...")
        for _ in range(5):
            start = time.time()
            try:
                requests.get(short_timeout_url, timeout=0.1)
                # A fast server may still answer within 0.1s — count it.
                result.add_success(time.time() - start)
            except requests.exceptions.Timeout:
                result.add_failure("Expected timeout", time.time() - start)
            except Exception as e:
                result.add_failure(str(e)[:50], time.time() - start)

        # Now verify system recovers with normal timeout
        print(" Phase 2: Verifying recovery with 30s timeout...")
        recovery_url = f"{base_url}/api/wip/overview/summary"
        recovered = False
        for i in range(10):
            start = time.time()
            try:
                response = requests.get(recovery_url, timeout=30.0)
                duration = time.time() - start
                if response.status_code == 200 and response.json().get('success'):
                    result.add_success(duration)
                    recovered = True
                    print(f" Recovered on attempt {i+1}")
                    break
                # NOTE(review): a 200 with success=false records nothing and
                # simply retries after the sleep below — intentional?
            except Exception as e:
                result.add_failure(str(e)[:50], time.time() - start)
            time.sleep(0.5)

        # Unlike the load tests, total_duration here is the sum of recorded
        # latencies rather than wall-clock time of the whole test.
        result.total_duration = sum(result.response_times)

        print(result.report())

        assert recovered, "System did not recover after timeout scenarios"
|
||||
|
||||
|
||||
@pytest.mark.stress
class TestAPIResponseConsistency:
    """Tests for response consistency under load."""

    def test_response_data_consistency(self, base_url: str, stress_config: dict):
        """Verify API returns consistent data structure under load.

        Fires 50 concurrent GETs at the WIP summary endpoint and checks
        that every successful JSON body carries the required top-level
        fields. (Removed the unused ``first_response`` local.)
        """
        url = f"{base_url}/api/wip/overview/summary"
        concurrent_users = 5
        requests_per_user = 10
        timeout = 30.0

        responses = []

        def make_request():
            # Best-effort fetch: network errors and non-200s yield None and
            # are excluded from the consistency check below.
            try:
                response = requests.get(url, timeout=timeout)
                if response.status_code == 200:
                    return response.json()
            except Exception:
                pass
            return None

        with concurrent.futures.ThreadPoolExecutor(max_workers=concurrent_users) as executor:
            futures = [
                executor.submit(make_request)
                for _ in range(concurrent_users * requests_per_user)
            ]

            for future in concurrent.futures.as_completed(futures):
                result = future.result()
                if result:
                    responses.append(result)

        # Verify all successful responses have consistent structure
        assert len(responses) > 0, "No successful responses received"

        required_fields = {'success'}

        for i, response in enumerate(responses):
            for field in required_fields:
                assert field in response, f"Response {i} missing field '{field}'"

        print(f"\n Received {len(responses)} consistent responses")
|
||||
367
tests/stress/test_frontend_stress.py
Normal file
367
tests/stress/test_frontend_stress.py
Normal file
@@ -0,0 +1,367 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Frontend stress tests using Playwright.
|
||||
|
||||
Tests frontend stability under high-frequency operations:
|
||||
- Toast notification system under rapid fire
|
||||
- MesApi client under rapid requests
|
||||
- AbortController behavior
|
||||
- Page navigation stress
|
||||
|
||||
Run with: pytest tests/stress/test_frontend_stress.py -v -s
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import time
|
||||
import re
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def app_server() -> str:
    """Base URL of the app under stress test (STRESS_TEST_URL overrides)."""
    import os
    default_url = 'http://127.0.0.1:8080'
    return os.environ.get('STRESS_TEST_URL', default_url)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def browser_context_args(browser_context_args):
    """Configure browser context for stress tests."""
    # Overlay our viewport/locale on top of the plugin-provided defaults.
    overrides = {
        "viewport": {"width": 1280, "height": 720},
        "locale": "zh-TW",
    }
    return {**browser_context_args, **overrides}
|
||||
|
||||
|
||||
def load_page_with_js(page: Page, url: str, timeout: int = 60000):
    """Load *url* and give its JavaScript time to initialize.

    Waits only for DOMContentLoaded (not the full 'load' event), then
    pauses 1s so the page's own scripts (Toast, MesApi, ...) can finish
    setting up before the test starts evaluating against them.
    """
    page.goto(url, wait_until='domcontentloaded', timeout=timeout)
    page.wait_for_timeout(1000)  # Allow JS initialization
|
||||
|
||||
|
||||
@pytest.mark.stress
class TestToastStress:
    """Stress tests for Toast notification system.

    All tests drive the page's global ``Toast`` object directly via
    ``page.evaluate`` rather than through UI interactions.
    """

    def test_rapid_toast_creation(self, page: Page, app_server: str):
        """Test Toast system under rapid creation - should enforce max limit."""
        load_page_with_js(page, f"{app_server}/tables")

        # Create 50 toasts rapidly
        start_time = time.time()
        for i in range(50):
            page.evaluate(f"Toast.info('Rapid toast {i}')")

        creation_time = time.time() - start_time
        print(f"\n Created 50 toasts in {creation_time:.3f}s")

        page.wait_for_timeout(500)

        # Should only have max 5 toasts visible
        toast_count = page.locator('.mes-toast').count()
        assert toast_count <= 5, f"Toast count {toast_count} exceeds max limit of 5"
        print(f" Toast count enforced: {toast_count} (max 5)")

    def test_toast_type_cycling(self, page: Page, app_server: str):
        """Test rapid cycling through all toast types - system remains stable."""
        load_page_with_js(page, f"{app_server}/tables")

        toast_types = ['info', 'success', 'warning', 'error']

        start_time = time.time()
        for i in range(100):
            toast_type = toast_types[i % len(toast_types)]
            page.evaluate(f"Toast.{toast_type}('Type cycle {i}')")

        cycle_time = time.time() - start_time
        print(f"\n Cycled 100 toasts in {cycle_time:.3f}s")

        # Wait for animations to complete
        page.wait_for_timeout(1000)

        # Dismiss all and verify system can recover
        page.evaluate("Toast.dismissAll()")
        page.wait_for_timeout(500)

        toast_count = page.locator('.mes-toast').count()
        assert toast_count <= 5, f"Toast overflow after dismissAll: {toast_count}"
        print(f" System stable after cleanup, toast count: {toast_count}")

    def test_toast_dismiss_stress(self, page: Page, app_server: str):
        """Test rapid toast creation and dismissal."""
        load_page_with_js(page, f"{app_server}/tables")

        start_time = time.time()

        # Create and immediately dismiss; Toast.info returns the toast id.
        for i in range(30):
            toast_id = page.evaluate(f"Toast.info('Dismiss test {i}')")
            page.evaluate(f"Toast.dismiss({toast_id})")

        dismiss_time = time.time() - start_time
        print(f"\n Created and dismissed 30 toasts in {dismiss_time:.3f}s")

        page.wait_for_timeout(500)

        # Should have no or few toasts
        toast_count = page.locator('.mes-toast').count()
        assert toast_count <= 2, f"Undismissed toasts remain: {toast_count}"
        print(f" Remaining toasts: {toast_count}")

    def test_loading_toast_stress(self, page: Page, app_server: str):
        """Test loading toasts can be created and properly dismissed."""
        load_page_with_js(page, f"{app_server}/tables")

        toast_ids = []

        # Create 10 loading toasts
        for i in range(10):
            toast_id = page.evaluate(f"Toast.loading('Loading {i}...')")
            toast_ids.append(toast_id)

        page.wait_for_timeout(200)

        # Loading toasts are created
        loading_count = page.locator('.mes-toast-loading').count()
        print(f"\n Created {len(toast_ids)} loading toasts, visible: {loading_count}")

        # Dismiss all using dismissAll
        page.evaluate("Toast.dismissAll()")
        page.wait_for_timeout(500)

        # All should be gone after dismissAll
        loading_count = page.locator('.mes-toast-loading').count()
        assert loading_count == 0, f"Loading toasts not dismissed: {loading_count}"
        # Fixed: was an f-string with no placeholders.
        print(" Loading toast dismiss test passed")
|
||||
|
||||
|
||||
@pytest.mark.stress
|
||||
class TestMesApiStress:
|
||||
"""Stress tests for MesApi client."""
|
||||
|
||||
def test_rapid_api_requests(self, page: Page, app_server: str):
|
||||
"""Test MesApi under rapid sequential requests."""
|
||||
load_page_with_js(page, f"{app_server}/tables")
|
||||
|
||||
# Make 20 rapid API requests
|
||||
results = page.evaluate("""
|
||||
async () => {
|
||||
const results = [];
|
||||
const startTime = Date.now();
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
try {
|
||||
const response = await MesApi.get('/api/wip/meta/workcenters');
|
||||
results.push({ success: true, status: response?.status || 'ok' });
|
||||
} catch (e) {
|
||||
results.push({ success: false, error: e.message });
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
results,
|
||||
duration: Date.now() - startTime,
|
||||
successCount: results.filter(r => r.success).length
|
||||
};
|
||||
}
|
||||
""")
|
||||
|
||||
print(f"\n 20 requests in {results['duration']}ms")
|
||||
print(f" Success: {results['successCount']}/20")
|
||||
|
||||
assert results['successCount'] >= 15, f"Too many failures: {20 - results['successCount']}"
|
||||
|
||||
def test_concurrent_api_requests(self, page: Page, app_server: str):
|
||||
"""Test MesApi with concurrent requests using Promise.all."""
|
||||
load_page_with_js(page, f"{app_server}/tables")
|
||||
|
||||
# Make 10 concurrent requests
|
||||
results = page.evaluate("""
|
||||
async () => {
|
||||
const endpoints = [
|
||||
'/api/wip/overview/summary',
|
||||
'/api/wip/overview/matrix',
|
||||
'/api/wip/meta/workcenters',
|
||||
'/api/wip/meta/packages',
|
||||
];
|
||||
|
||||
const startTime = Date.now();
|
||||
const promises = [];
|
||||
|
||||
// 2 requests per endpoint = 8 total concurrent
|
||||
for (const endpoint of endpoints) {
|
||||
promises.push(MesApi.get(endpoint).catch(e => ({ error: e.message })));
|
||||
promises.push(MesApi.get(endpoint).catch(e => ({ error: e.message })));
|
||||
}
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
const successCount = results.filter(r => !r.error).length;
|
||||
|
||||
return {
|
||||
duration: Date.now() - startTime,
|
||||
total: results.length,
|
||||
successCount
|
||||
};
|
||||
}
|
||||
""")
|
||||
|
||||
print(f"\n {results['total']} concurrent requests in {results['duration']}ms")
|
||||
print(f" Success: {results['successCount']}/{results['total']}")
|
||||
|
||||
assert results['successCount'] >= 6, f"Too many concurrent failures"
|
||||
|
||||
    def test_abort_controller_stress(self, page: Page, app_server: str):
        """Test AbortController under rapid request cancellation.

        Starts 10 sequential fetches in the browser, scheduling an abort
        ~50ms after each one begins, and tallies how every request
        settles (completed / aborted / other error).
        """
        load_page_with_js(page, f"{app_server}/tables")

        # Start requests and cancel them rapidly.  All counting happens in
        # the browser; Python only receives the final counters object.
        results = page.evaluate("""
            async () => {
                const results = { started: 0, aborted: 0, completed: 0, errors: 0 };

                for (let i = 0; i < 10; i++) {
                    results.started++;

                    const controller = new AbortController();

                    const request = fetch('/api/wip/overview/summary', {
                        signal: controller.signal
                    }).then(() => {
                        results.completed++;
                    }).catch(e => {
                        if (e.name === 'AbortError') {
                            results.aborted++;
                        } else {
                            results.errors++;
                        }
                    });

                    // Cancel after 50ms
                    setTimeout(() => controller.abort(), 50);

                    await new Promise(resolve => setTimeout(resolve, 100));
                }

                return results;
            }
        """)

        print(f"\n Started: {results['started']}")
        print(f" Aborted: {results['aborted']}")
        print(f" Completed: {results['completed']}")
        print(f" Errors: {results['errors']}")

        # Most should either abort or complete; at most half may land in
        # the generic-error bucket.
        total_resolved = results['aborted'] + results['completed']
        assert total_resolved >= 5, f"Too many unresolved requests"
|
||||
|
||||
|
||||
@pytest.mark.stress
class TestPageNavigationStress:
    """Stress tests for rapid page navigation."""

    def test_rapid_tab_switching(self, page: Page, app_server: str):
        """Test rapid tab switching in portal.

        Clicks through the four released portal tabs 20 times in quick
        succession, then checks the portal heading is still rendered.
        """
        page.goto(app_server, wait_until='domcontentloaded', timeout=30000)
        page.wait_for_timeout(500)

        # Only use released pages that are visible without admin login.
        # (Labels are zh-TW UI strings; keep them in sync with the portal.)
        tabs = [
            '.tab:has-text("WIP 即時概況")',
            '.tab:has-text("設備即時概況")',
            '.tab:has-text("設備歷史績效")',
            '.tab:has-text("設備維修查詢")',
        ]

        start_time = time.time()

        # Rapidly switch tabs 20 times, cycling through the list.
        for i in range(20):
            tab = tabs[i % len(tabs)]
            page.locator(tab).click()
            page.wait_for_timeout(50)  # brief pause so each click registers

        switch_time = time.time() - start_time
        print(f"\n 20 tab switches in {switch_time:.3f}s")

        # Page should still be responsive: the portal heading must survive.
        expect(page.locator('h1')).to_contain_text('MES 報表入口')
        print(" Portal remained stable")

    def test_portal_iframe_stress(self, page: Page, app_server: str):
        """Test portal remains responsive with iframe loading.

        Visits each released tab once and asserts that the clicked tab
        gains the 'active' CSS class.
        """
        page.goto(app_server, wait_until='domcontentloaded', timeout=30000)
        page.wait_for_timeout(500)

        # Switch through released tabs (dev tabs hidden without admin login)
        tabs = [
            'WIP 即時概況',
            '設備即時概況',
            '設備歷史績效',
            '設備維修查詢',
        ]

        for tab_name in tabs:
            page.locator(f'.tab:has-text("{tab_name}")').click()
            page.wait_for_timeout(200)  # give the tab's iframe a moment to load

            # Verify tab is active
            tab = page.locator(f'.tab:has-text("{tab_name}")')
            expect(tab).to_have_class(re.compile(r'active'))

        print(f"\n All {len(tabs)} tabs clickable and responsive")
|
||||
|
||||
|
||||
@pytest.mark.stress
class TestMemoryStress:
    """Tests for memory leak detection."""

    def test_toast_memory_cleanup(self, page: Page, app_server: str):
        """Check Toast system cleans up properly.

        Creates 100 toasts in five batches, dismissing all after each
        batch, then asserts almost no toast nodes remain in the DOM.
        """
        load_page_with_js(page, f"{app_server}/tables")

        # Create and dismiss many toasts, batch by batch.
        for batch in range(5):
            for i in range(20):
                page.evaluate(f"Toast.info('Memory test {batch}-{i}')")
            page.evaluate("Toast.dismissAll()")
            page.wait_for_timeout(100)  # let the dismissal settle

        page.wait_for_timeout(500)  # final settle time before counting

        # Check DOM is clean.  Tolerance of <= 5 presumably allows for
        # toasts still animating out — TODO confirm against Toast impl.
        toast_count = page.locator('.mes-toast').count()
        assert toast_count <= 5, f"Toast elements not cleaned up: {toast_count}"
        print(f"\n Toast memory cleanup test passed (remaining: {toast_count})")
|
||||
|
||||
|
||||
@pytest.mark.stress
class TestConsoleErrorMonitoring:
    """Monitor for JavaScript errors under stress."""

    def test_no_js_errors_under_stress(self, page: Page, app_server: str):
        """Verify no JavaScript errors occur under stress conditions.

        Collects uncaught page errors while hammering the Toast system
        and the MesApi client, then asserts none were recorded.
        """
        js_errors = []

        # Capture every uncaught error raised in the page context.
        page.on("pageerror", lambda error: js_errors.append(str(error)))

        load_page_with_js(page, f"{app_server}/tables")

        # Perform stress operations: 30 toasts ...
        for i in range(30):
            page.evaluate(f"Toast.info('Error check {i}')")

        # ... and 10 fire-and-forget API calls (failures swallowed in JS,
        # so only *uncaught* errors can trip the pageerror hook).
        for i in range(10):
            page.evaluate("""
                MesApi.get('/api/wip/overview/summary').catch(() => {})
            """)

        page.wait_for_timeout(2000)  # let async work finish and errors surface

        # Print a short sample of errors before failing, for diagnosis.
        if js_errors:
            print(f"\n JavaScript errors detected:")
            for err in js_errors[:5]:
                print(f" - {err[:100]}")

        assert len(js_errors) == 0, f"Found {len(js_errors)} JavaScript errors"
        print("\n No JavaScript errors under stress")
|
||||
288
tests/test_api_integration.py
Normal file
288
tests/test_api_integration.py
Normal file
@@ -0,0 +1,288 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for API endpoints.
|
||||
|
||||
Tests API endpoints for proper response format, error handling,
|
||||
and timeout behavior compatible with the MesApi client.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import json
|
||||
|
||||
from mes_dashboard.app import create_app
|
||||
import mes_dashboard.core.database as db
|
||||
|
||||
|
||||
class TestTableQueryAPIIntegration(unittest.TestCase):
    """Integration tests for table query APIs."""

    def setUp(self):
        """Build a fresh app and client so every test runs isolated."""
        db._ENGINE = None  # force the DB engine to be rebuilt per test
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    def _post_json(self, url, payload):
        """POST *payload* to *url* as JSON and return the raw response."""
        return self.client.post(url, json=payload, content_type='application/json')

    @patch('mes_dashboard.app.get_table_columns')
    def test_get_table_columns_success(self, mock_get_columns):
        """GET table columns should return JSON with columns array."""
        mock_get_columns.return_value = ['ID', 'NAME', 'STATUS', 'CREATED_AT']

        response = self._post_json('/api/get_table_columns', {'table_name': 'TEST_TABLE'})

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.data)
        self.assertIn('columns', body)
        self.assertEqual(len(body['columns']), 4)

    def test_get_table_columns_missing_table_name(self):
        """GET table columns without table_name should return 400."""
        response = self._post_json('/api/get_table_columns', {})

        self.assertEqual(response.status_code, 400)
        body = json.loads(response.data)
        self.assertIn('error', body)

    @patch('mes_dashboard.app.get_table_data')
    def test_query_table_success(self, mock_get_data):
        """Query table should return JSON with data array."""
        mock_get_data.return_value = {
            'data': [{'ID': 1, 'NAME': 'Test'}],
            'row_count': 1
        }

        response = self._post_json('/api/query_table', {'table_name': 'TEST_TABLE', 'limit': 100})

        self.assertEqual(response.status_code, 200)
        body = json.loads(response.data)
        self.assertIn('data', body)
        self.assertEqual(body['row_count'], 1)

    def test_query_table_missing_table_name(self):
        """Query table without table_name should return 400."""
        response = self._post_json('/api/query_table', {'limit': 100})

        self.assertEqual(response.status_code, 400)
        body = json.loads(response.data)
        self.assertIn('error', body)

    @patch('mes_dashboard.app.get_table_data')
    def test_query_table_with_filters(self, mock_get_data):
        """Query table should pass filters to the service."""
        mock_get_data.return_value = {
            'data': [],
            'row_count': 0
        }

        response = self._post_json('/api/query_table', {
            'table_name': 'TEST_TABLE',
            'limit': 100,
            'filters': {'STATUS': 'ACTIVE'}
        })

        self.assertEqual(response.status_code, 200)
        mock_get_data.assert_called_once()
        # The filters dict is expected as the 4th positional argument.
        call_args = mock_get_data.call_args
        self.assertEqual(call_args[0][3], {'STATUS': 'ACTIVE'})
|
||||
|
||||
|
||||
class TestWIPAPIIntegration(unittest.TestCase):
    """Integration tests for WIP API endpoints."""

    def setUp(self):
        """Build a fresh app and client so every test runs isolated."""
        db._ENGINE = None  # force the DB engine to be rebuilt per test
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    @patch('mes_dashboard.routes.wip_routes.get_wip_summary')
    def test_wip_summary_response_format(self, mock_summary):
        """WIP summary should return consistent JSON structure."""
        mock_summary.return_value = {
            'totalLots': 1000,
            'totalQtyPcs': 100000,
            'byWipStatus': {
                'run': {'lots': 800, 'qtyPcs': 80000},
                'queue': {'lots': 150, 'qtyPcs': 15000},
                'hold': {'lots': 50, 'qtyPcs': 5000}
            },
            'dataUpdateDate': '2026-01-28 10:00:00'
        }

        response = self.client.get('/api/wip/overview/summary')
        self.assertEqual(response.status_code, 200)

        body = json.loads(response.data)
        # The MesApi client expects a {success, data} envelope.
        self.assertIn('success', body)
        self.assertTrue(body['success'])
        self.assertIn('data', body)

    @patch('mes_dashboard.routes.wip_routes.get_wip_summary')
    def test_wip_summary_error_response(self, mock_summary):
        """WIP summary error should return proper error structure."""
        mock_summary.return_value = None

        response = self.client.get('/api/wip/overview/summary')
        self.assertEqual(response.status_code, 500)

        body = json.loads(response.data)
        # Failures reuse the envelope with success=False plus an error.
        self.assertIn('success', body)
        self.assertFalse(body['success'])
        self.assertIn('error', body)

    @patch('mes_dashboard.routes.wip_routes.get_wip_matrix')
    def test_wip_matrix_response_format(self, mock_matrix):
        """WIP matrix should return consistent JSON structure."""
        mock_matrix.return_value = {
            'workcenters': ['WC1', 'WC2'],
            'packages': ['PKG1'],
            'matrix': {'WC1': {'PKG1': 100}},
            'workcenter_totals': {'WC1': 100},
            'package_totals': {'PKG1': 100},
            'grand_total': 100
        }

        response = self.client.get('/api/wip/overview/matrix')
        self.assertEqual(response.status_code, 200)

        body = json.loads(response.data)
        self.assertIn('success', body)
        self.assertTrue(body['success'])
        self.assertIn('data', body)
        self.assertIn('workcenters', body['data'])
        self.assertIn('matrix', body['data'])

    @patch('mes_dashboard.routes.wip_routes.get_wip_detail')
    def test_wip_detail_response_format(self, mock_detail):
        """WIP detail should return consistent JSON structure."""
        mock_detail.return_value = {
            'workcenter': 'TestWC',
            'summary': {
                'total_lots': 100,
                'on_equipment_lots': 50,
                'waiting_lots': 40,
                'hold_lots': 10
            },
            'specs': ['Spec1'],
            'lots': [{'lot_id': 'LOT001', 'status': 'ACTIVE'}],
            'pagination': {
                'page': 1,
                'page_size': 100,
                'total_count': 100,
                'total_pages': 1
            },
            'sys_date': '2026-01-28 10:00:00'
        }

        response = self.client.get('/api/wip/detail/TestWC')
        self.assertEqual(response.status_code, 200)

        body = json.loads(response.data)
        self.assertIn('success', body)
        self.assertTrue(body['success'])
        self.assertIn('data', body)
        self.assertIn('pagination', body['data'])
|
||||
|
||||
|
||||
class TestResourceAPIIntegration(unittest.TestCase):
    """Integration tests for Resource API endpoints."""

    def setUp(self):
        """Build a fresh app and client so every test runs isolated."""
        db._ENGINE = None  # force the DB engine to be rebuilt per test
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    @patch('mes_dashboard.routes.resource_routes.get_resource_status_summary')
    def test_resource_status_summary_response_format(self, mock_summary):
        """Resource status summary should return consistent JSON structure."""
        mock_summary.return_value = {
            'total_count': 100,
            'by_status_category': {'PRODUCTIVE': 60, 'STANDBY': 30, 'DOWN': 10},
            'by_status': {'PRD': 60, 'SBY': 30, 'UDT': 5, 'SDT': 5, 'EGT': 0, 'NST': 0, 'OTHER': 0},
            'by_workcenter_group': {'焊接': 50, '成型': 50},
            'with_active_job': 40,
            'with_wip': 35,
            'ou_pct': 63.2,
            'availability_pct': 90.0,
        }

        response = self.client.get('/api/resource/status/summary')
        self.assertEqual(response.status_code, 200)

        body = json.loads(response.data)
        # Same {success, data} envelope as the other APIs.
        self.assertIn('success', body)
        self.assertTrue(body['success'])
        self.assertIn('data', body)
        self.assertIn('total_count', body['data'])
|
||||
|
||||
|
||||
class TestAPIContentType(unittest.TestCase):
    """Test that APIs return proper content types."""

    def setUp(self):
        """Build a fresh app and client so every test runs isolated."""
        db._ENGINE = None  # force the DB engine to be rebuilt per test
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    @patch('mes_dashboard.routes.wip_routes.get_wip_summary')
    def test_api_returns_json_content_type(self, mock_summary):
        """API endpoints should return application/json content type."""
        mock_summary.return_value = {
            'totalLots': 0, 'totalQtyPcs': 0,
            'byWipStatus': {'run': {}, 'queue': {}, 'hold': {}},
            'dataUpdateDate': None
        }

        response = self.client.get('/api/wip/overview/summary')

        self.assertIn('application/json', response.content_type)

    @patch('mes_dashboard.app.get_table_columns')
    def test_table_api_returns_json_content_type(self, mock_columns):
        """Table API should return application/json content type."""
        mock_columns.return_value = ['COL1', 'COL2']

        response = self.client.post(
            '/api/get_table_columns',
            json={'table_name': 'TEST'},
            content_type='application/json'
        )

        self.assertIn('application/json', response.content_type)
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
56
tests/test_app_factory.py
Normal file
56
tests/test_app_factory.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import unittest
|
||||
|
||||
from mes_dashboard.app import create_app
|
||||
import mes_dashboard.core.database as db
|
||||
|
||||
|
||||
class AppFactoryTests(unittest.TestCase):
    """Tests for the create_app application factory."""

    def setUp(self):
        # Reset the module-level engine so each test builds its own.
        db._ENGINE = None

    def test_create_app_default_config(self):
        """Default config is development with a working cache extension."""
        flask_app = create_app()
        self.assertTrue(flask_app.config.get("DEBUG"))
        self.assertEqual(flask_app.config.get("ENV"), "development")

        cache = flask_app.extensions.get("cache")
        self.assertIsNotNone(cache)
        # Round-trip a value to prove the cache actually works.
        cache.set("app_factory_probe", {"ok": True}, 30)
        self.assertEqual(cache.get("app_factory_probe"), {"ok": True})

    def test_create_app_production_config(self):
        """Production config disables debug mode."""
        flask_app = create_app("production")
        self.assertFalse(flask_app.config.get("DEBUG"))
        self.assertEqual(flask_app.config.get("ENV"), "production")

    def test_create_app_independent_instances(self):
        """Each factory call yields a distinct application object."""
        first = create_app()
        db._ENGINE = None
        second = create_app()
        self.assertIsNot(first, second)

    def test_routes_registered(self):
        """All expected routes must be present on the URL map."""
        flask_app = create_app()
        registered = {rule.rule for rule in flask_app.url_map.iter_rules()}
        expected = {
            "/",
            "/tables",
            "/resource",
            "/wip-overview",
            "/wip-detail",
            "/excel-query",
            "/api/wip/overview/summary",
            "/api/wip/overview/matrix",
            "/api/wip/overview/hold",
            "/api/wip/detail/<workcenter>",
            "/api/wip/meta/workcenters",
            "/api/wip/meta/packages",
            "/api/resource/status/summary",
            "/api/dashboard/kpi",
            "/api/excel-query/upload",
        }
        missing = expected - registered
        self.assertFalse(missing, f"Missing routes: {sorted(missing)}")
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
301
tests/test_auth_integration.py
Normal file
301
tests/test_auth_integration.py
Normal file
@@ -0,0 +1,301 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for authentication routes and permission middleware."""
|
||||
|
||||
import json
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
from mes_dashboard.services import page_registry
|
||||
|
||||
|
||||
@pytest.fixture
def temp_page_status(tmp_path):
    """Write a seed page-status JSON file into pytest's tmp dir and return its path."""
    status_path = tmp_path / "page_status.json"
    # Two released routes plus one dev route so both visibility branches
    # of the permission middleware can be exercised.
    seed = {
        "pages": [
            {"route": "/", "name": "Portal", "status": "released"},
            {"route": "/wip-overview", "name": "WIP Overview", "status": "released"},
            {"route": "/dev-feature", "name": "Dev Feature", "status": "dev"},
        ],
        "api_public": True
    }
    status_path.write_text(json.dumps(seed), encoding="utf-8")
    return status_path
|
||||
|
||||
|
||||
@pytest.fixture
def app(temp_page_status):
    """Create application for testing.

    Points the page registry at the temporary page-status file, builds
    a 'testing' app, and restores the registry globals afterwards.

    Yields:
        Flask app configured for testing (CSRF disabled).
    """
    db._ENGINE = None

    # Redirect the page registry to the temp file and drop cached data.
    original_data_file = page_registry.DATA_FILE
    original_cache = page_registry._cache
    page_registry.DATA_FILE = temp_page_status
    page_registry._cache = None

    # try/finally so the registry globals are restored even when
    # create_app or the consuming test raises — otherwise the patched
    # DATA_FILE would leak into every later test.
    try:
        app = create_app('testing')
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        yield app
    finally:
        page_registry.DATA_FILE = original_data_file
        page_registry._cache = original_cache
|
||||
|
||||
|
||||
@pytest.fixture
def client(app):
    """Create test client.

    Returns a Flask test client bound to the ``app`` fixture, so requests
    hit the temp-file-backed page registry rather than real state.
    """
    return app.test_client()
|
||||
|
||||
|
||||
class TestLoginRoute:
    """Tests for login route."""

    def test_login_page_renders(self, client):
        """Test login page is accessible."""
        response = client.get("/admin/login")
        assert response.status_code == 200
        page_text = response.data.decode("utf-8")
        assert "管理員登入" in page_text or "login" in page_text.lower()

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_login_success(self, mock_post, client):
        """Test successful login via LDAP."""
        # Fake a positive LDAP reply for an admin account.
        ldap_reply = MagicMock()
        ldap_reply.json.return_value = {
            "success": True,
            "user": {
                "username": "92367",
                "displayName": "Admin User",
                "mail": "ymirliu@panjit.com.tw",
                "department": "Test Dept"
            }
        }
        mock_post.return_value = ldap_reply

        response = client.post("/admin/login", data={
            "username": "92367",
            "password": "password123"
        }, follow_redirects=False)

        # A successful login redirects away from the form ...
        assert response.status_code == 302

        # ... and stores the authenticated admin in the session.
        with client.session_transaction() as sess:
            assert "admin" in sess
            assert sess["admin"]["username"] == "92367"

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_login_invalid_credentials(self, mock_post, client):
        """Test login with invalid credentials via LDAP."""
        ldap_reply = MagicMock()
        ldap_reply.json.return_value = {"success": False}
        mock_post.return_value = ldap_reply

        response = client.post("/admin/login", data={
            "username": "wrong",
            "password": "wrong"
        })

        assert response.status_code == 200
        # The form is re-rendered with an error message.
        page_text = response.data.decode("utf-8")
        assert "錯誤" in page_text or "error" in page_text.lower()

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_login_non_admin_user(self, mock_post, client):
        """Test login with non-admin user via LDAP."""
        ldap_reply = MagicMock()
        ldap_reply.json.return_value = {
            "success": True,
            "user": {
                "username": "99999",
                "displayName": "Regular User",
                "mail": "regular@panjit.com.tw",
                "department": "Test Dept"
            }
        }
        mock_post.return_value = ldap_reply

        response = client.post("/admin/login", data={
            "username": "99999",
            "password": "password123"
        })

        assert response.status_code == 200
        # Authenticated but not an admin: the page explains the rejection.
        page_text = response.data.decode("utf-8")
        assert "管理員" in page_text or "admin" in page_text.lower()

    def test_login_empty_credentials(self, client):
        """Test login with empty credentials."""
        response = client.post("/admin/login", data={
            "username": "",
            "password": ""
        })

        # The form should simply re-render rather than error out.
        assert response.status_code == 200
|
||||
|
||||
|
||||
class TestLogoutRoute:
    """Tests for logout route."""

    def test_logout(self, client):
        """Test logout clears session."""
        # Simulate an already-authenticated admin by seeding the session.
        with client.session_transaction() as sess:
            sess["admin"] = {"username": "admin"}

        response = client.get("/admin/logout", follow_redirects=False)
        assert response.status_code == 302

        # The admin entry must be gone after logging out.
        with client.session_transaction() as sess:
            assert "admin" not in sess
|
||||
|
||||
|
||||
class TestPermissionMiddleware:
    """Tests for permission middleware."""

    @staticmethod
    def _mark_tables_as_dev(status_file):
        """Flag the existing /tables route as 'dev' and drop the registry cache."""
        data = json.loads(status_file.read_text())
        data["pages"].append({"route": "/tables", "name": "Tables", "status": "dev"})
        status_file.write_text(json.dumps(data))
        page_registry._cache = None

    def test_released_page_accessible_without_login(self, client):
        """Test released pages are accessible without login."""
        response = client.get("/wip-overview")
        # Anything except 403 is acceptable here (200, redirect, ...).
        assert response.status_code != 403

    def test_dev_page_returns_403_without_login(self, client, temp_page_status):
        """Test dev pages return 403 for non-admin."""
        # Use a route the app actually serves, re-flagged as dev.
        self._mark_tables_as_dev(temp_page_status)

        response = client.get("/tables")
        assert response.status_code == 403

    def test_dev_page_accessible_with_admin_login(self, client, temp_page_status):
        """Test dev pages are accessible for admin."""
        self._mark_tables_as_dev(temp_page_status)

        # Log in as an admin by seeding the session directly.
        with client.session_transaction() as sess:
            sess["admin"] = {"username": "admin", "mail": "admin@test.com"}

        response = client.get("/tables")
        assert response.status_code != 403

    def test_admin_pages_redirect_without_login(self, client):
        """Test admin pages redirect to login without authentication."""
        response = client.get("/admin/pages", follow_redirects=False)
        assert response.status_code == 302
        assert "/admin/login" in response.location

    def test_admin_pages_accessible_with_login(self, client):
        """Test admin pages are accessible with login."""
        with client.session_transaction() as sess:
            sess["admin"] = {"username": "admin", "mail": "admin@test.com"}

        response = client.get("/admin/pages")
        assert response.status_code == 200
|
||||
|
||||
|
||||
class TestAdminAPI:
    """Tests for admin API endpoints."""

    @staticmethod
    def _login_as_admin(client):
        """Seed the session so the client is treated as a logged-in admin."""
        with client.session_transaction() as sess:
            sess["admin"] = {"username": "admin"}

    def test_get_pages_without_login(self, client):
        """Test get pages API requires login."""
        response = client.get("/admin/api/pages")
        # Unauthenticated callers are redirected (to the login page).
        assert response.status_code == 302

    def test_get_pages_with_login(self, client):
        """Test get pages API with login."""
        self._login_as_admin(client)

        response = client.get("/admin/api/pages")
        assert response.status_code == 200

        body = json.loads(response.data)
        assert body["success"] is True
        assert "pages" in body

    def test_update_page_status(self, client, temp_page_status):
        """Test updating page status via API."""
        self._login_as_admin(client)

        response = client.put(
            "/admin/api/pages/wip-overview",
            data=json.dumps({"status": "dev"}),
            content_type="application/json"
        )

        assert response.status_code == 200
        body = json.loads(response.data)
        assert body["success"] is True

        # The registry must report the new status once its cache is dropped.
        page_registry._cache = None
        assert page_registry.get_page_status("/wip-overview") == "dev"

    def test_update_page_invalid_status(self, client):
        """Test updating page with invalid status."""
        self._login_as_admin(client)

        response = client.put(
            "/admin/api/pages/wip-overview",
            data=json.dumps({"status": "invalid"}),
            content_type="application/json"
        )

        assert response.status_code == 400
|
||||
|
||||
|
||||
class TestContextProcessor:
    """Tests for template context processor."""

    def test_is_admin_in_context_when_logged_in(self, client):
        """Test is_admin is True in context when logged in."""
        with client.session_transaction() as sess:
            sess["admin"] = {"username": "admin", "displayName": "Admin"}

        page_text = client.get("/").data.decode("utf-8")

        # Logged-in admins see admin-only chrome (logout link, name, ...).
        assert "登出" in page_text or "logout" in page_text.lower() or "Admin" in page_text

    def test_is_admin_in_context_when_not_logged_in(self, client):
        """Test is_admin is False in context when not logged in."""
        page_text = client.get("/").data.decode("utf-8")

        # Anonymous visitors get the login link instead of logout.
        assert "管理員登入" in page_text or "login" in page_text.lower()
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>` (verbose).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])
|
||||
159
tests/test_auth_service.py
Normal file
159
tests/test_auth_service.py
Normal file
@@ -0,0 +1,159 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for auth_service module."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
|
||||
|
||||
from mes_dashboard.services import auth_service
|
||||
|
||||
|
||||
class TestAuthenticate:
    """Tests for authenticate function via LDAP."""

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_authenticate_success(self, mock_post):
        """Test successful authentication via LDAP."""
        ldap_reply = MagicMock()
        ldap_reply.json.return_value = {
            "success": True,
            "user": {
                "username": "92367",
                "displayName": "Test User",
                "mail": "test@panjit.com.tw",
                "department": "Test Dept"
            }
        }
        mock_post.return_value = ldap_reply

        user = auth_service.authenticate("92367", "password123")

        # The service hands back the user dict from the LDAP payload.
        assert user is not None
        assert user["username"] == "92367"
        assert user["mail"] == "test@panjit.com.tw"
        mock_post.assert_called_once()

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_authenticate_invalid_credentials(self, mock_post):
        """Test authentication with invalid credentials via LDAP."""
        ldap_reply = MagicMock()
        ldap_reply.json.return_value = {"success": False}
        mock_post.return_value = ldap_reply

        assert auth_service.authenticate("wrong", "wrong") is None

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_authenticate_timeout(self, mock_post):
        """Test authentication timeout handling."""
        import requests
        mock_post.side_effect = requests.Timeout()

        # A slow LDAP server must not raise out of authenticate().
        assert auth_service.authenticate("user", "pass") is None

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_authenticate_connection_error(self, mock_post):
        """Test authentication connection error handling."""
        import requests
        mock_post.side_effect = requests.ConnectionError()

        # An unreachable LDAP server also degrades to a None result.
        assert auth_service.authenticate("user", "pass") is None

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', False)
    @patch('mes_dashboard.services.auth_service.requests.post')
    def test_authenticate_invalid_json(self, mock_post):
        """Test authentication with invalid JSON response."""
        ldap_reply = MagicMock()
        ldap_reply.json.side_effect = ValueError("Invalid JSON")
        mock_post.return_value = ldap_reply

        assert auth_service.authenticate("user", "pass") is None
|
||||
|
||||
|
||||
class TestLocalAuthenticate:
    """Tests for local authentication."""

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', True)
    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_USERNAME', 'testuser')
    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_PASSWORD', 'testpass')
    def test_local_auth_success(self):
        """Test successful local authentication."""
        user = auth_service.authenticate("testuser", "testpass")

        assert user is not None
        assert user["username"] == "testuser"

    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_ENABLED', True)
    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_USERNAME', 'testuser')
    @patch('mes_dashboard.services.auth_service.LOCAL_AUTH_PASSWORD', 'testpass')
    def test_local_auth_wrong_password(self):
        """Test local authentication with wrong password."""
        assert auth_service.authenticate("testuser", "wrongpass") is None
|
||||
|
||||
|
||||
class TestIsAdmin:
    """Tests for is_admin function.

    ADMIN_EMAILS is swapped with patch.object instead of the manual
    save / try / finally / restore that was repeated in three tests:
    the context manager restores the module attribute automatically,
    even when the assertion fails, and matches the mocking style used
    in the rest of this file.
    """

    def test_is_admin_with_admin_email(self):
        """An email listed in ADMIN_EMAILS is recognized as admin."""
        with patch.object(auth_service, 'ADMIN_EMAILS', ["admin@panjit.com.tw"]):
            user = {"mail": "admin@panjit.com.tw"}
            assert auth_service.is_admin(user) is True

    def test_is_admin_with_non_admin_email(self):
        """An email absent from ADMIN_EMAILS is not admin."""
        with patch.object(auth_service, 'ADMIN_EMAILS', ["admin@panjit.com.tw"]):
            user = {"mail": "user@panjit.com.tw"}
            assert auth_service.is_admin(user) is False

    def test_is_admin_case_insensitive(self):
        """The admin check must ignore the case of the mail address."""
        with patch.object(auth_service, 'ADMIN_EMAILS', ["admin@panjit.com.tw"]):
            user = {"mail": "ADMIN@PANJIT.COM.TW"}
            assert auth_service.is_admin(user) is True

    def test_is_admin_with_missing_mail(self):
        """A user dict without a mail field is not admin (no KeyError)."""
        user = {}
        assert auth_service.is_admin(user) is False

    def test_is_admin_with_empty_mail(self):
        """An empty mail string is not admin."""
        user = {"mail": ""}
        assert auth_service.is_admin(user) is False
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # pytest.main returns an exit code; the original discarded it, so the
    # script exited 0 even when tests failed. Propagate it for CI.
    raise SystemExit(pytest.main([__file__, "-v"]))
|
||||
313
tests/test_cache.py
Normal file
313
tests/test_cache.py
Normal file
@@ -0,0 +1,313 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for cache module.
|
||||
|
||||
Tests cache read/write functionality and fallback mechanism.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
import json
|
||||
|
||||
|
||||
class TestGetCachedWipData:
    """Test get_cached_wip_data function."""

    @pytest.fixture(autouse=True)
    def reset_redis(self):
        """Reset Redis client state and process-level cache.

        Clears the module-level Redis client singleton and the in-process
        WIP DataFrame cache both before and after each test so cached
        state cannot leak between tests.
        """
        import mes_dashboard.core.redis_client as rc
        import mes_dashboard.core.cache as cache
        rc._REDIS_CLIENT = None
        # Clear process-level cache to avoid test interference
        cache._wip_df_cache.clear()
        yield
        rc._REDIS_CLIENT = None
        cache._wip_df_cache.clear()

    def test_returns_none_when_redis_disabled(self):
        """Test returns None when Redis is disabled."""
        import mes_dashboard.core.cache as cache

        with patch.object(cache, 'REDIS_ENABLED', False):
            result = cache.get_cached_wip_data()
            assert result is None

    def test_returns_none_when_client_unavailable(self):
        """Test returns None when Redis client is unavailable."""
        import mes_dashboard.core.cache as cache

        with patch.object(cache, 'REDIS_ENABLED', True):
            with patch.object(cache, 'get_redis_client', return_value=None):
                result = cache.get_cached_wip_data()
                assert result is None

    def test_returns_none_when_cache_miss(self, reset_redis):
        """Test returns None when cache key doesn't exist."""
        import mes_dashboard.core.cache as cache

        mock_client = MagicMock()
        mock_client.get.return_value = None

        with patch.object(cache, 'REDIS_ENABLED', True):
            with patch.object(cache, 'get_redis_client', return_value=mock_client):
                result = cache.get_cached_wip_data()
                assert result is None

    def test_returns_dataframe_from_cache(self, reset_redis):
        """Test returns DataFrame when cache hit."""
        import mes_dashboard.core.cache as cache

        # Create test data as JSON string (what Redis returns with decode_responses=True)
        test_data = [
            {'LOTID': 'LOT001', 'QTY': 100, 'WORKORDER': 'WO001'},
            {'LOTID': 'LOT002', 'QTY': 200, 'WORKORDER': 'WO002'}
        ]
        cached_json = json.dumps(test_data)

        mock_client = MagicMock()
        mock_client.get.return_value = cached_json  # String, not bytes

        with patch.object(cache, 'REDIS_ENABLED', True):
            with patch.object(cache, 'get_redis_client', return_value=mock_client):
                with patch.object(cache, 'get_key', return_value='mes_wip:data'):
                    result = cache.get_cached_wip_data()

        assert result is not None
        assert isinstance(result, pd.DataFrame)
        assert len(result) == 2
        assert 'LOTID' in result.columns

    def test_handles_invalid_json(self, reset_redis):
        """Test handles invalid JSON gracefully."""
        import mes_dashboard.core.cache as cache

        mock_client = MagicMock()
        mock_client.get.return_value = 'invalid json {'

        with patch.object(cache, 'REDIS_ENABLED', True):
            with patch.object(cache, 'get_redis_client', return_value=mock_client):
                with patch.object(cache, 'get_key', return_value='mes_wip:data'):
                    result = cache.get_cached_wip_data()
                    assert result is None
|
||||
|
||||
|
||||
class TestGetCachedSysDate:
    """Test get_cached_sys_date function."""

    def test_returns_none_when_redis_disabled(self):
        """Redis disabled -> no cached SYS_DATE."""
        import mes_dashboard.core.cache as cache

        with patch.object(cache, 'REDIS_ENABLED', False):
            assert cache.get_cached_sys_date() is None

    def test_returns_sys_date_from_cache(self):
        """Cache hit returns the stored SYS_DATE string."""
        import mes_dashboard.core.cache as cache

        fake_redis = MagicMock()
        # Redis configured with decode_responses=True hands back str, not bytes.
        fake_redis.get.return_value = '2024-01-15 10:30:00'

        with patch.object(cache, 'REDIS_ENABLED', True), \
                patch.object(cache, 'get_redis_client', return_value=fake_redis), \
                patch.object(cache, 'get_key', return_value='mes_wip:meta:sys_date'):
            assert cache.get_cached_sys_date() == '2024-01-15 10:30:00'
|
||||
|
||||
|
||||
class TestGetCacheUpdatedAt:
    """Test get_cache_updated_at function."""

    def test_returns_none_when_redis_disabled(self):
        """Redis disabled -> no cached updated_at timestamp."""
        import mes_dashboard.core.cache as cache

        with patch.object(cache, 'REDIS_ENABLED', False):
            assert cache.get_cache_updated_at() is None

    def test_returns_updated_at_from_cache(self):
        """Cache hit returns the stored updated_at timestamp."""
        import mes_dashboard.core.cache as cache

        fake_redis = MagicMock()
        # Redis configured with decode_responses=True hands back str, not bytes.
        fake_redis.get.return_value = '2024-01-15T10:30:00'

        with patch.object(cache, 'REDIS_ENABLED', True), \
                patch.object(cache, 'get_redis_client', return_value=fake_redis), \
                patch.object(cache, 'get_key', return_value='mes_wip:meta:updated_at'):
            assert cache.get_cache_updated_at() == '2024-01-15T10:30:00'
|
||||
|
||||
|
||||
class TestWipDataWithFallback:
    """Test get_wip_data_with_fallback function."""

    def test_uses_cache_when_available(self):
        """When the cache yields a DataFrame, the fallback is never invoked."""
        import mes_dashboard.core.cache as cache

        df_from_cache = pd.DataFrame({'LOTID': ['LOT001'], 'QTY': [100]})
        fallback = MagicMock()

        with patch.object(cache, 'get_cached_wip_data', return_value=df_from_cache):
            result = cache.get_wip_data_with_fallback(fallback)

        assert result is not None
        assert len(result) == 1
        # Fallback should NOT be called
        fallback.assert_not_called()

    def test_fallback_when_cache_unavailable(self):
        """On a cache miss the supplied fallback provides the data."""
        import mes_dashboard.core.cache as cache

        df_from_oracle = pd.DataFrame({
            'LOTID': ['LOT001', 'LOT002'],
            'QTY': [100, 200],
        })
        fallback = MagicMock(return_value=df_from_oracle)

        with patch.object(cache, 'get_cached_wip_data', return_value=None):
            result = cache.get_wip_data_with_fallback(fallback)

        assert result is not None
        assert len(result) == 2
        fallback.assert_called_once()
|
||||
|
||||
|
||||
class TestNoOpCache:
    """Test NoOpCache fallback class."""

    def test_noop_cache_get(self):
        """NoOpCache.get always reports a miss (None)."""
        from mes_dashboard.core.cache import NoOpCache

        assert NoOpCache().get('any_key') is None

    def test_noop_cache_set(self):
        """NoOpCache.set stores nothing and returns None."""
        from mes_dashboard.core.cache import NoOpCache

        assert NoOpCache().set('any_key', 'any_value', 300) is None
|
||||
|
||||
|
||||
class TestMemoryTTLCache:
    """Test in-memory TTL cache backend."""

    def test_set_and_get_value(self):
        """A freshly stored value is returned by get."""
        from mes_dashboard.core.cache import MemoryTTLCache

        backend = MemoryTTLCache()
        backend.set('k1', {'v': 1}, 10)
        assert backend.get('k1') == {'v': 1}

    def test_expired_value_returns_none(self):
        """An entry whose deadline has passed is treated as a miss."""
        from mes_dashboard.core.cache import MemoryTTLCache

        backend = MemoryTTLCache()
        backend.set('k2', {'v': 2}, 1)

        # Pin "now" at 10_000 and plant an entry that expired at 9_999,
        # so get('k2') must see it as stale.
        with patch('mes_dashboard.core.cache.time.time', return_value=10_000):
            backend._store['k2'] = ({'v': 2}, 9_999)
            assert backend.get('k2') is None
|
||||
|
||||
|
||||
class TestCreateDefaultCacheBackend:
    """Test default cache backend factory."""

    def test_returns_layered_cache_without_redis(self):
        """Even with Redis unavailable the factory must yield a usable
        backend (set followed by get round-trips the value)."""
        import mes_dashboard.core.cache as cache

        with patch.object(cache, 'redis_available', return_value=False):
            backend = cache.create_default_cache_backend()
            backend.set('factory-key', {'x': 1}, 30)
            assert backend.get('factory-key') == {'x': 1}
|
||||
|
||||
|
||||
class TestLayeredCacheTelemetry:
    """Telemetry behavior for layered route cache."""

    def test_l1_only_degraded_mode_visibility(self):
        """redis_expected=True with l2=None: telemetry must report
        l1-only, degraded mode while L1 hits/misses are still counted."""
        from mes_dashboard.core.cache import MemoryTTLCache, LayeredCache

        backend = LayeredCache(l1=MemoryTTLCache(), l2=None, redis_expected=True)
        backend.set('k1', {'v': 1}, 30)
        assert backend.get('k1') == {'v': 1}  # L1 hit
        assert backend.get('missing') is None  # miss

        telemetry = backend.telemetry()
        assert telemetry['mode'] == 'l1-only'
        assert telemetry['degraded'] is True
        assert telemetry['l1_hits'] >= 1
        assert telemetry['misses'] >= 1

    def test_l1_l2_hit_and_rates(self):
        """An L2 hit warms L1; the second read is an L1 hit and the
        counters reflect both reads."""
        from mes_dashboard.core.cache import MemoryTTLCache, LayeredCache

        class FakeL2:
            # Minimal stand-in for the L2 layer: dict-backed get/set plus
            # the telemetry() hook LayeredCache reads.
            def __init__(self):
                self.store = {'cold': {'from': 'l2'}}

            def get(self, key):
                return self.store.get(key)

            def set(self, key, value, ttl):
                self.store[key] = value

            def telemetry(self):
                return {'error_count': 0}

        backend = LayeredCache(l1=MemoryTTLCache(), l2=FakeL2(), redis_expected=True)
        assert backend.get('cold') == {'from': 'l2'}  # L2 hit then warm L1
        assert backend.get('cold') == {'from': 'l2'}  # L1 hit

        telemetry = backend.telemetry()
        assert telemetry['mode'] == 'l1+l2'
        assert telemetry['degraded'] is False
        assert telemetry['l2_hits'] >= 1
        assert telemetry['l1_hits'] >= 1
        assert telemetry['reads_total'] >= 2
|
||||
|
||||
|
||||
class TestIsCacheAvailable:
    """Test is_cache_available function."""

    def test_returns_false_when_disabled(self):
        """Redis disabled -> cache reported unavailable."""
        import mes_dashboard.core.cache as cache

        with patch.object(cache, 'REDIS_ENABLED', False):
            assert cache.is_cache_available() is False

    def test_returns_false_when_no_client(self):
        """No Redis client -> cache reported unavailable."""
        import mes_dashboard.core.cache as cache

        with patch.object(cache, 'REDIS_ENABLED', True), \
                patch.object(cache, 'get_redis_client', return_value=None):
            assert cache.is_cache_available() is False

    def test_returns_true_when_data_exists(self):
        """Client present and the data key exists -> cache available."""
        import mes_dashboard.core.cache as cache

        fake_redis = MagicMock()
        fake_redis.exists.return_value = 1

        with patch.object(cache, 'REDIS_ENABLED', True), \
                patch.object(cache, 'get_redis_client', return_value=fake_redis), \
                patch.object(cache, 'get_key', return_value='mes_wip:data'):
            assert cache.is_cache_available() is True
|
||||
400
tests/test_cache_integration.py
Normal file
400
tests/test_cache_integration.py
Normal file
@@ -0,0 +1,400 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for cache functionality.
|
||||
|
||||
Tests API endpoints with cache enabled/disabled scenarios.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
import json
|
||||
|
||||
|
||||
@pytest.fixture
def app_with_mock_cache():
    """Create app with mocked cache.

    Resets the module-level engine singleton first so each test builds
    a fresh engine instead of reusing one from a previous test.
    """
    import mes_dashboard.core.database as db
    db._ENGINE = None

    from mes_dashboard.app import create_app
    app = create_app('testing')
    app.config['TESTING'] = True
    return app
|
||||
|
||||
|
||||
class TestHealthEndpoint:
    """Test /health endpoint.

    NOTE: @patch decorators apply bottom-up, so the mock parameters
    arrive in reverse order of the decorator list.
    """

    @patch('mes_dashboard.routes.health_routes.check_database')
    @patch('mes_dashboard.routes.health_routes.check_redis')
    @patch('mes_dashboard.routes.health_routes.get_cache_status')
    def test_health_all_ok(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache):
        """Test health endpoint returns 200 when all services are healthy."""
        mock_check_db.return_value = ('ok', None)
        mock_check_redis.return_value = ('ok', None)
        mock_cache_status.return_value = {
            'enabled': True,
            'sys_date': '2024-01-15 10:30:00',
            'updated_at': '2024-01-15T10:30:00'
        }

        with app_with_mock_cache.test_client() as client:
            response = client.get('/health')

        assert response.status_code == 200
        data = response.get_json()
        assert data['status'] == 'healthy'
        assert data['services']['database'] == 'ok'
        assert data['services']['redis'] == 'ok'

    @patch('mes_dashboard.routes.health_routes.check_database')
    @patch('mes_dashboard.routes.health_routes.check_redis')
    @patch('mes_dashboard.routes.health_routes.get_cache_status')
    def test_health_redis_down_degraded(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache):
        """Test health endpoint returns 200 degraded when Redis is down."""
        mock_check_db.return_value = ('ok', None)
        mock_check_redis.return_value = ('error', 'Connection refused')
        mock_cache_status.return_value = {'enabled': True, 'sys_date': None, 'updated_at': None}

        with app_with_mock_cache.test_client() as client:
            response = client.get('/health')

        # Redis down is not fatal: still 200, but status degrades and
        # a warning is surfaced.
        assert response.status_code == 200
        data = response.get_json()
        assert data['status'] == 'degraded'
        assert 'warnings' in data

    @patch('mes_dashboard.routes.health_routes.check_database')
    @patch('mes_dashboard.routes.health_routes.check_redis')
    @patch('mes_dashboard.routes.health_routes.get_cache_status')
    def test_health_db_down_unhealthy(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache):
        """Test health endpoint returns 503 when database is down."""
        mock_check_db.return_value = ('error', 'Connection refused')
        mock_check_redis.return_value = ('ok', None)
        mock_cache_status.return_value = {'enabled': True, 'sys_date': None, 'updated_at': None}

        with app_with_mock_cache.test_client() as client:
            response = client.get('/health')

        # Database down IS fatal: 503 unhealthy with errors listed.
        assert response.status_code == 503
        data = response.get_json()
        assert data['status'] == 'unhealthy'
        assert 'errors' in data

    @patch('mes_dashboard.routes.health_routes.check_database')
    @patch('mes_dashboard.routes.health_routes.check_redis')
    @patch('mes_dashboard.routes.health_routes.get_cache_status')
    def test_health_redis_disabled(self, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache):
        """Test health endpoint shows Redis disabled status."""
        mock_check_db.return_value = ('ok', None)
        mock_check_redis.return_value = ('disabled', None)
        mock_cache_status.return_value = {'enabled': False, 'sys_date': None, 'updated_at': None}

        with app_with_mock_cache.test_client() as client:
            response = client.get('/health')

        # Deliberately disabled Redis is healthy, not degraded.
        assert response.status_code == 200
        data = response.get_json()
        assert data['status'] == 'healthy'
        assert data['services']['redis'] == 'disabled'
|
||||
|
||||
|
||||
class TestWipApiWithCache:
    """Test WIP API endpoints with cache."""

    @pytest.fixture
    def mock_wip_cache_data(self):
        """Create mock WIP data for cache.

        Columns mirror the cached WIP payload the service layer reads
        (lot identity, quantities, workcenter/package grouping, hold
        state, spec, age and SYS_DATE).
        """
        return pd.DataFrame({
            'LOTID': ['LOT001', 'LOT002', 'LOT003'],
            'QTY': [100, 200, 150],
            'WORKORDER': ['WO001', 'WO002', 'WO003'],
            'WORKCENTER_GROUP': ['WC1', 'WC1', 'WC2'],
            'WORKCENTERSEQUENCE_GROUP': [1, 1, 2],
            'PRODUCTLINENAME': ['PKG1', 'PKG2', 'PKG1'],
            'EQUIPMENTCOUNT': [1, 0, 0],
            'CURRENTHOLDCOUNT': [0, 1, 0],
            'HOLDREASONNAME': [None, 'Quality Issue', None],
            'STATUS': ['ACTIVE', 'HOLD', 'ACTIVE'],
            'SPECNAME': ['SPEC1', 'SPEC1', 'SPEC2'],
            'SPECSEQUENCE': [1, 1, 2],
            'AGEBYDAYS': [1.5, 3.2, 0.5],
            'EQUIPMENTS': ['EQ001', None, None],
            'SYS_DATE': ['2024-01-15 10:30:00'] * 3
        })

    @patch('mes_dashboard.services.wip_service._get_wip_dataframe')
    @patch('mes_dashboard.services.wip_service.get_cached_sys_date')
    def test_wip_summary_uses_cache(self, mock_sys_date, mock_get_df, app_with_mock_cache, mock_wip_cache_data):
        """Test /api/wip/overview/summary uses cache when available."""
        mock_get_df.return_value = mock_wip_cache_data
        mock_sys_date.return_value = '2024-01-15 10:30:00'

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/wip/overview/summary')

        assert response.status_code == 200
        resp = response.get_json()
        # API returns wrapped response: {success: true, data: {...}}
        data = resp.get('data', resp)  # Handle both wrapped and unwrapped
        assert data['totalLots'] == 3
        assert data['dataUpdateDate'] == '2024-01-15 10:30:00'

    @patch('mes_dashboard.services.wip_service._get_wip_dataframe')
    @patch('mes_dashboard.services.wip_service.get_cached_sys_date')
    def test_wip_matrix_uses_cache(self, mock_sys_date, mock_get_df, app_with_mock_cache, mock_wip_cache_data):
        """Test /api/wip/overview/matrix uses cache when available."""
        mock_get_df.return_value = mock_wip_cache_data
        mock_sys_date.return_value = '2024-01-15 10:30:00'

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/wip/overview/matrix')

        assert response.status_code == 200
        resp = response.get_json()
        # API returns wrapped response: {success: true, data: {...}}
        data = resp.get('data', resp)
        assert 'workcenters' in data
        assert 'packages' in data
        assert 'matrix' in data

    @patch('mes_dashboard.services.wip_service._get_wip_dataframe')
    def test_workcenters_uses_cache(self, mock_get_df, app_with_mock_cache, mock_wip_cache_data):
        """Test /api/wip/meta/workcenters uses cache when available."""
        mock_get_df.return_value = mock_wip_cache_data

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/wip/meta/workcenters')

        assert response.status_code == 200
        resp = response.get_json()
        # API returns wrapped response: {success: true, data: [...]}
        data = resp.get('data', resp) if isinstance(resp, dict) and 'data' in resp else resp
        assert isinstance(data, list)
        assert len(data) == 2  # WC1 and WC2

    @patch('mes_dashboard.services.wip_service._get_wip_dataframe')
    def test_packages_uses_cache(self, mock_get_df, app_with_mock_cache, mock_wip_cache_data):
        """Test /api/wip/meta/packages uses cache when available."""
        mock_get_df.return_value = mock_wip_cache_data

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/wip/meta/packages')

        assert response.status_code == 200
        resp = response.get_json()
        # API returns wrapped response: {success: true, data: [...]}
        data = resp.get('data', resp) if isinstance(resp, dict) and 'data' in resp else resp
        assert isinstance(data, list)
        assert len(data) == 2  # PKG1 and PKG2
|
||||
|
||||
|
||||
class TestHealthEndpointResourceCache:
    """Test /health endpoint resource cache status.

    NOTE: @patch decorators apply bottom-up, so the mock parameters
    arrive in reverse order of the decorator list.
    """

    @patch('mes_dashboard.routes.health_routes.check_database')
    @patch('mes_dashboard.routes.health_routes.check_redis')
    @patch('mes_dashboard.routes.health_routes.get_cache_status')
    @patch('mes_dashboard.routes.health_routes.get_resource_cache_status')
    def test_health_includes_resource_cache(
        self, mock_res_cache_status, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache
    ):
        """Test health endpoint includes resource_cache field."""
        mock_check_db.return_value = ('ok', None)
        mock_check_redis.return_value = ('ok', None)
        mock_cache_status.return_value = {
            'enabled': True,
            'sys_date': '2024-01-15 10:30:00',
            'updated_at': '2024-01-15T10:30:00'
        }
        mock_res_cache_status.return_value = {
            'enabled': True,
            'loaded': True,
            'count': 1500,
            'version': '2024-01-15T10:00:00',
            'updated_at': '2024-01-15T10:30:00'
        }

        with app_with_mock_cache.test_client() as client:
            response = client.get('/health')

        assert response.status_code == 200
        data = response.get_json()
        assert 'resource_cache' in data
        assert data['resource_cache']['enabled'] is True
        assert data['resource_cache']['loaded'] is True
        assert data['resource_cache']['count'] == 1500

    @patch('mes_dashboard.routes.health_routes.check_database')
    @patch('mes_dashboard.routes.health_routes.check_redis')
    @patch('mes_dashboard.routes.health_routes.get_cache_status')
    @patch('mes_dashboard.routes.health_routes.get_resource_cache_status')
    def test_health_warning_when_resource_cache_not_loaded(
        self, mock_res_cache_status, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache
    ):
        """Test health endpoint shows warning when resource cache enabled but not loaded."""
        mock_check_db.return_value = ('ok', None)
        mock_check_redis.return_value = ('ok', None)
        mock_cache_status.return_value = {
            'enabled': True,
            'sys_date': '2024-01-15 10:30:00',
            'updated_at': '2024-01-15T10:30:00'
        }
        mock_res_cache_status.return_value = {
            'enabled': True,
            'loaded': False,
            'count': 0,
            'version': None,
            'updated_at': None
        }

        with app_with_mock_cache.test_client() as client:
            response = client.get('/health')

        # Enabled-but-unloaded is a warning, not a failure: still 200.
        assert response.status_code == 200
        data = response.get_json()
        assert 'warnings' in data
        assert any('Resource cache not loaded' in w for w in data['warnings'])

    @patch('mes_dashboard.routes.health_routes.check_database')
    @patch('mes_dashboard.routes.health_routes.check_redis')
    @patch('mes_dashboard.routes.health_routes.get_cache_status')
    @patch('mes_dashboard.routes.health_routes.get_resource_cache_status')
    def test_health_no_warning_when_resource_cache_disabled(
        self, mock_res_cache_status, mock_cache_status, mock_check_redis, mock_check_db, app_with_mock_cache
    ):
        """Test health endpoint no warning when resource cache is disabled."""
        mock_check_db.return_value = ('ok', None)
        mock_check_redis.return_value = ('ok', None)
        mock_cache_status.return_value = {
            'enabled': True,
            'sys_date': '2024-01-15 10:30:00',
            'updated_at': '2024-01-15T10:30:00'
        }
        mock_res_cache_status.return_value = {'enabled': False}

        with app_with_mock_cache.test_client() as client:
            response = client.get('/health')

        assert response.status_code == 200
        data = response.get_json()
        # No warnings about resource cache
        warnings = data.get('warnings', [])
        assert not any('Resource cache' in w for w in warnings)
|
||||
|
||||
|
||||
class TestResourceFilterOptionsWithCache:
    """Test resource filter options with cache."""

    @patch('mes_dashboard.services.resource_cache.get_all_resources')
    @patch('mes_dashboard.services.resource_service.read_sql_df')
    def test_filter_options_uses_resource_cache(
        self, mock_read_sql, mock_get_all, app_with_mock_cache
    ):
        """Test resource filter options uses resource_cache for static data."""
        # Mock resource cache data
        mock_get_all.return_value = [
            {'WORKCENTERNAME': 'WC1', 'RESOURCEFAMILYNAME': 'F1', 'PJ_DEPARTMENT': 'Dept1',
             'LOCATIONNAME': 'Loc1', 'PJ_ASSETSSTATUS': 'Active'},
            {'WORKCENTERNAME': 'WC2', 'RESOURCEFAMILYNAME': 'F2', 'PJ_DEPARTMENT': 'Dept1',
             'LOCATIONNAME': 'Loc1', 'PJ_ASSETSSTATUS': 'Active'},
        ]
        # Statuses still come from SQL even when resources are cached.
        mock_read_sql.return_value = pd.DataFrame({'NEWSTATUSNAME': ['PRD', 'SBY']})

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/resource/filter_options')

        assert response.status_code == 200
        data = response.get_json()

        # Options are only asserted on the success path; a failure
        # payload would not carry them.
        if data.get('success'):
            options = data.get('data', {})
            assert 'WC1' in options['workcenters']
            assert 'WC2' in options['workcenters']
            assert 'F1' in options['families']
            assert 'F2' in options['families']
|
||||
|
||||
|
||||
class TestResourceHistoryOptionsWithCache:
    """Test resource history filter options with cache."""

    @patch('mes_dashboard.services.filter_cache.get_workcenter_groups')
    @patch('mes_dashboard.services.resource_cache.get_all_resources')
    def test_history_options_uses_resource_cache(
        self, mock_get_all, mock_groups, app_with_mock_cache
    ):
        """Test resource history options uses resource_cache for families."""
        mock_groups.return_value = [
            {'name': 'Group1', 'sequence': 1},
            {'name': 'Group2', 'sequence': 2}
        ]
        # Mock resource cache data for families; the duplicate checks
        # that families are de-duplicated downstream.
        mock_get_all.return_value = [
            {'RESOURCEFAMILYNAME': 'Family1'},
            {'RESOURCEFAMILYNAME': 'Family2'},
            {'RESOURCEFAMILYNAME': 'Family1'},  # duplicate
        ]

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/resource/history/options')

        assert response.status_code == 200
        data = response.get_json()

        # Options are only asserted on the success path.
        if data.get('success'):
            options = data.get('data', {})
            assert 'families' in options
            assert 'Family1' in options['families']
            assert 'Family2' in options['families']
|
||||
|
||||
|
||||
class TestFallbackToOracle:
    """Test fallback to Oracle when cache is unavailable."""

    @patch('mes_dashboard.services.wip_service._get_wip_dataframe')
    @patch('mes_dashboard.services.wip_service._get_wip_summary_from_oracle')
    def test_summary_falls_back_to_oracle(self, mock_oracle, mock_get_df, app_with_mock_cache):
        """Test summary falls back to Oracle when cache unavailable."""
        mock_get_df.return_value = None  # Cache miss
        mock_oracle.return_value = {
            'totalLots': 100,
            'totalQtyPcs': 10000,
            'byWipStatus': {
                'run': {'lots': 30, 'qtyPcs': 3000},
                'queue': {'lots': 50, 'qtyPcs': 5000},
                'hold': {'lots': 20, 'qtyPcs': 2000},
                'qualityHold': {'lots': 15, 'qtyPcs': 1500},
                'nonQualityHold': {'lots': 5, 'qtyPcs': 500}
            },
            'dataUpdateDate': '2024-01-15 10:30:00'
        }

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/wip/overview/summary')

        assert response.status_code == 200
        resp = response.get_json()
        # API returns wrapped response: {success: true, data: {...}}
        data = resp.get('data', resp)
        assert data['totalLots'] == 100
        mock_oracle.assert_called_once()

    @patch('mes_dashboard.services.wip_service._get_wip_dataframe')
    @patch('mes_dashboard.services.wip_service._get_workcenters_from_oracle')
    def test_workcenters_falls_back_to_oracle(self, mock_oracle, mock_get_df, app_with_mock_cache):
        """Test workcenters falls back to Oracle when cache unavailable."""
        mock_get_df.return_value = None  # Cache miss
        mock_oracle.return_value = [
            {'name': 'WC1', 'lot_count': 50},
            {'name': 'WC2', 'lot_count': 30}
        ]

        with app_with_mock_cache.test_client() as client:
            response = client.get('/api/wip/meta/workcenters')

        assert response.status_code == 200
        resp = response.get_json()
        # API returns wrapped response: {success: true, data: [...]}
        data = resp.get('data', resp) if isinstance(resp, dict) and 'data' in resp else resp
        assert len(data) == 2
        mock_oracle.assert_called_once()
|
||||
222
tests/test_cache_updater.py
Normal file
222
tests/test_cache_updater.py
Normal file
@@ -0,0 +1,222 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for cache updater module.
|
||||
|
||||
Tests background cache update logic.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
import time
|
||||
|
||||
|
||||
class TestCacheUpdater:
    """Test CacheUpdater class.

    These tests patch redis_available/read_sql_df on the cache_updater
    module, so no real Redis or database is touched. The unused
    MagicMock Redis clients the originals built were dead code and have
    been removed.
    """

    @pytest.fixture(autouse=True)
    def reset_state(self):
        """Reset the module-level Redis client singleton around each test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_updater_starts_when_redis_enabled(self, reset_state):
        """Test updater starts when Redis is enabled."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'REDIS_ENABLED', True):
            with patch.object(cu, 'redis_available', return_value=True):
                with patch.object(cu, 'read_sql_df', return_value=None):
                    updater = cu.CacheUpdater(interval=1)
                    try:
                        updater.start()
                        assert updater._is_running is True
                        assert updater._thread is not None
                    finally:
                        # Always stop the background thread, even if the
                        # assertions fail.
                        updater.stop()
                        time.sleep(0.2)

    def test_updater_does_not_start_when_redis_disabled(self, reset_state):
        """Test updater does not start when Redis is disabled."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'REDIS_ENABLED', False):
            updater = cu.CacheUpdater(interval=1)
            updater.start()
            assert updater._is_running is False

    def test_updater_stops_gracefully(self, reset_state):
        """Test updater stops gracefully."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'REDIS_ENABLED', True):
            with patch.object(cu, 'redis_available', return_value=True):
                with patch.object(cu, 'read_sql_df', return_value=None):
                    updater = cu.CacheUpdater(interval=1)
                    updater.start()
                    assert updater._is_running is True

                    updater.stop()
                    time.sleep(0.2)  # Give thread time to stop
                    assert updater._is_running is False
|
||||
|
||||
|
||||
class TestCheckSysDate:
    """Test SYS_DATE checking logic."""

    def test_check_sys_date_returns_value(self):
        """_check_sys_date returns the value from the query result."""
        import mes_dashboard.core.cache_updater as cu

        frame = pd.DataFrame({'SYS_DATE': ['2024-01-15 10:30:00']})
        with patch.object(cu, 'read_sql_df', return_value=frame):
            assert cu.CacheUpdater()._check_sys_date() == '2024-01-15 10:30:00'

    def test_check_sys_date_handles_empty_result(self):
        """_check_sys_date returns None for an empty DataFrame."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'read_sql_df', return_value=pd.DataFrame()):
            assert cu.CacheUpdater()._check_sys_date() is None

    def test_check_sys_date_handles_none_result(self):
        """_check_sys_date returns None when the query yields None."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'read_sql_df', return_value=None):
            assert cu.CacheUpdater()._check_sys_date() is None

    def test_check_sys_date_handles_exception(self):
        """_check_sys_date swallows database errors and returns None."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'read_sql_df', side_effect=Exception("Database error")):
            assert cu.CacheUpdater()._check_sys_date() is None
|
||||
|
||||
|
||||
class TestLoadFullTable:
    """Test full table loading logic."""

    def test_load_full_table_success(self):
        """_load_full_table returns the queried rows unchanged."""
        import mes_dashboard.core.cache_updater as cu

        frame = pd.DataFrame({
            'LOTID': ['LOT001', 'LOT002'],
            'QTY': [100, 200],
            'WORKORDER': ['WO001', 'WO002'],
        })

        with patch.object(cu, 'read_sql_df', return_value=frame):
            loaded = cu.CacheUpdater()._load_full_table()

        assert loaded is not None
        assert len(loaded) == 2

    def test_load_full_table_handles_none(self):
        """_load_full_table propagates a None query result."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'read_sql_df', return_value=None):
            assert cu.CacheUpdater()._load_full_table() is None

    def test_load_full_table_handles_exception(self):
        """_load_full_table returns None when the query raises."""
        import mes_dashboard.core.cache_updater as cu

        with patch.object(cu, 'read_sql_df', side_effect=Exception("Database error")):
            assert cu.CacheUpdater()._load_full_table() is None
|
||||
|
||||
|
||||
class TestUpdateRedisCache:
    """Test Redis cache update logic."""

    def test_update_redis_cache_success(self):
        """_update_redis_cache writes via a pipeline and reports success."""
        import mes_dashboard.core.cache_updater as cu

        pipeline = MagicMock()
        redis_client = MagicMock()
        redis_client.pipeline.return_value = pipeline

        frame = pd.DataFrame({'LOTID': ['LOT001'], 'QTY': [100]})

        with patch.object(cu, 'get_redis_client', return_value=redis_client), \
                patch.object(cu, 'get_key', side_effect=lambda k: f'mes_wip:{k}'):
            ok = cu.CacheUpdater()._update_redis_cache(frame, '2024-01-15 10:30:00')

        assert ok is True
        pipeline.execute.assert_called_once()

    def test_update_redis_cache_no_client(self):
        """_update_redis_cache fails fast when no Redis client is available."""
        import mes_dashboard.core.cache_updater as cu

        frame = pd.DataFrame({'LOTID': ['LOT001']})

        with patch.object(cu, 'get_redis_client', return_value=None):
            assert cu.CacheUpdater()._update_redis_cache(frame, '2024-01-15') is False
|
||||
|
||||
|
||||
class TestCacheUpdateFlow:
    """Test complete cache update flow."""

    def test_no_update_when_sys_date_unchanged(self):
        """No refresh happens when SYS_DATE matches the cached value."""
        import mes_dashboard.core.cache_updater as cu

        frame = pd.DataFrame({'SYS_DATE': ['2024-01-15 10:30:00']})
        redis_client = MagicMock()
        redis_client.get.return_value = '2024-01-15 10:30:00'

        with patch.object(cu, 'read_sql_df', return_value=frame), \
                patch.object(cu, 'redis_available', return_value=True), \
                patch.object(cu, 'get_redis_client', return_value=redis_client), \
                patch.object(cu, 'get_key', side_effect=lambda k: f'mes_wip:{k}'):
            updater = cu.CacheUpdater()
            # The cached date equals the freshly queried one, so no refresh.
            assert updater._check_and_update(force=False) is False

    def test_update_when_sys_date_changes(self):
        """A changed SYS_DATE is detected as requiring an update."""
        import mes_dashboard.core.cache_updater as cu

        updater = cu.CacheUpdater()
        frame = pd.DataFrame({'SYS_DATE': ['2024-01-15 11:00:00']})

        with patch.object(cu, 'read_sql_df', return_value=frame):
            fresh_date = updater._check_sys_date()

        previously_cached = '2024-01-15 10:30:00'
        assert (fresh_date != previously_cached) is True
|
||||
223
tests/test_circuit_breaker.py
Normal file
223
tests/test_circuit_breaker.py
Normal file
@@ -0,0 +1,223 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for circuit breaker module."""
|
||||
|
||||
import os
|
||||
import pytest
|
||||
import time
|
||||
from unittest.mock import patch
|
||||
|
||||
# Set circuit breaker enabled for tests
|
||||
os.environ['CIRCUIT_BREAKER_ENABLED'] = 'true'
|
||||
|
||||
from mes_dashboard.core.circuit_breaker import (
|
||||
CircuitBreaker,
|
||||
CircuitState,
|
||||
get_database_circuit_breaker,
|
||||
get_circuit_breaker_status,
|
||||
CIRCUIT_BREAKER_ENABLED
|
||||
)
|
||||
|
||||
|
||||
class TestCircuitBreakerStates:
    """Test circuit breaker state transitions."""

    @staticmethod
    def _tripped(recovery_timeout=None):
        """Build a small-window breaker and drive it into the OPEN state."""
        kwargs = {
            'failure_threshold': 2,
            'failure_rate_threshold': 0.5,
            'window_size': 4,
        }
        if recovery_timeout is not None:
            kwargs['recovery_timeout'] = recovery_timeout
        breaker = CircuitBreaker("test", **kwargs)
        for _ in range(4):
            breaker.record_failure()
        return breaker

    def test_initial_state_is_closed(self):
        """Circuit breaker starts in CLOSED state."""
        assert CircuitBreaker("test").state == CircuitState.CLOSED

    def test_allow_request_when_closed(self):
        """Requests are allowed when circuit is CLOSED."""
        assert CircuitBreaker("test").allow_request() is True

    def test_record_success_keeps_closed(self):
        """Recording success keeps circuit CLOSED."""
        breaker = CircuitBreaker("test")
        breaker.record_success()
        assert breaker.state == CircuitState.CLOSED

    def test_opens_after_failure_threshold(self):
        """Circuit opens after reaching failure threshold."""
        breaker = CircuitBreaker(
            "test",
            failure_threshold=3,
            failure_rate_threshold=0.5,
            window_size=5,
        )
        # Enough failures to cross both the count and rate thresholds.
        for _ in range(5):
            breaker.record_failure()
        assert breaker.state == CircuitState.OPEN

    def test_deny_request_when_open(self):
        """Requests are denied when circuit is OPEN."""
        assert self._tripped().allow_request() is False

    def test_transition_to_half_open_after_timeout(self):
        """Circuit transitions to HALF_OPEN after recovery timeout."""
        breaker = self._tripped(recovery_timeout=1)  # 1 second for a fast test
        assert breaker.state == CircuitState.OPEN

        time.sleep(1.1)  # let the recovery window elapse

        # Reading the state performs the OPEN -> HALF_OPEN transition.
        assert breaker.state == CircuitState.HALF_OPEN

    def test_half_open_allows_request(self):
        """Requests are allowed in HALF_OPEN state for testing."""
        breaker = self._tripped(recovery_timeout=1)
        time.sleep(1.1)
        assert breaker.allow_request() is True

    def test_success_in_half_open_closes(self):
        """Success in HALF_OPEN state closes the circuit."""
        breaker = self._tripped(recovery_timeout=1)
        time.sleep(1.1)
        _ = breaker.state  # force the HALF_OPEN transition
        breaker.record_success()
        assert breaker.state == CircuitState.CLOSED

    def test_failure_in_half_open_reopens(self):
        """Failure in HALF_OPEN state reopens the circuit."""
        breaker = self._tripped(recovery_timeout=1)
        time.sleep(1.1)
        _ = breaker.state  # force the HALF_OPEN transition
        breaker.record_failure()
        assert breaker.state == CircuitState.OPEN

    def test_reset_clears_state(self):
        """Reset returns circuit to initial state."""
        breaker = self._tripped()
        breaker.reset()
        assert breaker.state == CircuitState.CLOSED
        assert breaker.get_status().total_count == 0
|
||||
|
||||
|
||||
class TestCircuitBreakerStatus:
    """Test circuit breaker status reporting."""

    def test_get_status_returns_correct_info(self):
        """Status includes all expected fields."""
        breaker = CircuitBreaker("test")
        # Two successes and one failure -> failure rate of 1/3.
        breaker.record_success()
        breaker.record_success()
        breaker.record_failure()

        status = breaker.get_status()

        assert status.state == "CLOSED"
        assert status.success_count == 2
        assert status.failure_count == 1
        assert status.total_count == 3
        assert 0.3 <= status.failure_rate <= 0.34

    def test_get_circuit_breaker_status_dict(self):
        """Global function returns status as dictionary."""
        snapshot = get_circuit_breaker_status()
        for field in ("state", "failure_count", "success_count", "enabled"):
            assert field in snapshot
|
||||
|
||||
|
||||
class TestCircuitBreakerDisabled:
    """Test circuit breaker when disabled."""

    def test_allow_request_when_disabled(self):
        """Requests always allowed when circuit breaker is disabled."""
        with patch('mes_dashboard.core.circuit_breaker.CIRCUIT_BREAKER_ENABLED', False):
            breaker = CircuitBreaker("test", failure_threshold=1, window_size=1)

            # These failures exceed the threshold but must be ignored
            # while the feature flag is off.
            breaker.record_failure()
            breaker.record_failure()

            assert breaker.allow_request() is True
|
||||
186
tests/test_common_filters.py
Normal file
186
tests/test_common_filters.py
Normal file
@@ -0,0 +1,186 @@
|
||||
"""Tests for Common Filters."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
|
||||
from mes_dashboard.sql.builder import QueryBuilder
|
||||
from mes_dashboard.sql.filters import CommonFilters, NON_QUALITY_HOLD_REASONS
|
||||
|
||||
|
||||
class TestCommonFilters:
    """Test CommonFilters class."""

    def test_add_location_exclusion(self):
        """Location exclusion adds a NULL-safe NOT IN condition."""
        qb = QueryBuilder()
        with patch("mes_dashboard.sql.filters.EXCLUDED_LOCATIONS", ["ATEC", "F區"]):
            CommonFilters.add_location_exclusion(qb)

        assert len(qb.conditions) == 1
        assert "LOCATIONNAME IS NULL OR LOCATIONNAME NOT IN" in qb.conditions[0]
        assert qb.params["p0"] == "ATEC"
        assert qb.params["p1"] == "F區"

    def test_add_location_exclusion_empty(self):
        """No condition is added when the exclusion list is empty."""
        qb = QueryBuilder()
        with patch("mes_dashboard.sql.filters.EXCLUDED_LOCATIONS", []):
            CommonFilters.add_location_exclusion(qb)
        assert len(qb.conditions) == 0

    def test_add_location_exclusion_custom_column(self):
        """The excluded-location column name can be overridden."""
        qb = QueryBuilder()
        with patch("mes_dashboard.sql.filters.EXCLUDED_LOCATIONS", ["TEST"]):
            CommonFilters.add_location_exclusion(qb, column="LOC_NAME")
        assert "LOC_NAME IS NULL OR LOC_NAME NOT IN" in qb.conditions[0]

    def test_add_asset_status_exclusion(self):
        """Asset status exclusion adds a NULL-safe NOT IN condition."""
        qb = QueryBuilder()
        with patch("mes_dashboard.sql.filters.EXCLUDED_ASSET_STATUSES", ["報廢", "閒置"]):
            CommonFilters.add_asset_status_exclusion(qb)

        assert len(qb.conditions) == 1
        assert "PJ_ASSETSSTATUS IS NULL OR PJ_ASSETSSTATUS NOT IN" in qb.conditions[0]

    def test_add_asset_status_exclusion_empty(self):
        """No condition is added for an empty asset-status list."""
        qb = QueryBuilder()
        with patch("mes_dashboard.sql.filters.EXCLUDED_ASSET_STATUSES", []):
            CommonFilters.add_asset_status_exclusion(qb)
        assert len(qb.conditions) == 0

    def test_add_wip_base_filters_workorder(self):
        """Workorder filter becomes a LIKE condition with wildcards."""
        qb = QueryBuilder()
        CommonFilters.add_wip_base_filters(qb, workorder="WO123")

        assert len(qb.conditions) == 1
        assert "WORKORDER LIKE" in qb.conditions[0]
        assert "%WO123%" in qb.params["p0"]

    def test_add_wip_base_filters_lotid(self):
        """Lot-ID filter becomes a LIKE condition."""
        qb = QueryBuilder()
        CommonFilters.add_wip_base_filters(qb, lotid="LOT001")

        assert len(qb.conditions) == 1
        assert "LOTID LIKE" in qb.conditions[0]

    def test_add_wip_base_filters_multiple(self):
        """Each supplied base filter contributes one condition."""
        qb = QueryBuilder()
        CommonFilters.add_wip_base_filters(
            qb, workorder="WO", package="PKG", pj_type="TYPE"
        )

        assert len(qb.conditions) == 3
        for fragment in ("WORKORDER LIKE", "PACKAGE_LEF LIKE", "PJ_TYPE LIKE"):
            assert any(fragment in cond for cond in qb.conditions)

    def test_add_status_filter_single(self):
        """A single status becomes an equality condition."""
        qb = QueryBuilder()
        CommonFilters.add_status_filter(qb, status="HOLD")

        assert len(qb.conditions) == 1
        assert "STATUS = :p0" in qb.conditions[0]
        assert qb.params["p0"] == "HOLD"

    def test_add_status_filter_multiple(self):
        """Multiple statuses become an IN condition with bound params."""
        qb = QueryBuilder()
        CommonFilters.add_status_filter(qb, statuses=["RUN", "QUEUE"])

        assert len(qb.conditions) == 1
        assert "STATUS IN (:p0, :p1)" in qb.conditions[0]
        assert qb.params["p0"] == "RUN"
        assert qb.params["p1"] == "QUEUE"

    def test_add_hold_type_filter_quality(self):
        """Quality holds exclude the non-quality reason list."""
        qb = QueryBuilder()
        CommonFilters.add_hold_type_filter(qb, hold_type="quality")

        assert len(qb.conditions) == 1
        assert "HOLDREASONNAME NOT IN" in qb.conditions[0]

    def test_add_hold_type_filter_non_quality(self):
        """Non-quality holds restrict to the non-quality reason list."""
        qb = QueryBuilder()
        CommonFilters.add_hold_type_filter(qb, hold_type="non_quality")

        assert len(qb.conditions) == 1
        assert "HOLDREASONNAME IN" in qb.conditions[0]

    def test_is_quality_hold(self):
        """is_quality_hold is True unless the reason is a known non-quality one."""
        # A reason absent from the non-quality list counts as a quality hold.
        assert CommonFilters.is_quality_hold("品質異常") is True

        # Any reason from the non-quality list does not.
        some_non_quality = next(iter(NON_QUALITY_HOLD_REASONS))
        assert CommonFilters.is_quality_hold(some_non_quality) is False

    def test_add_equipment_filter_resource_ids(self):
        """Resource IDs become a RESOURCEID IN condition."""
        qb = QueryBuilder()
        CommonFilters.add_equipment_filter(qb, resource_ids=["R001", "R002"])

        assert len(qb.conditions) == 1
        assert "RESOURCEID IN" in qb.conditions[0]

    def test_add_equipment_filter_workcenters(self):
        """Workcenters become a WORKCENTERNAME IN condition."""
        qb = QueryBuilder()
        CommonFilters.add_equipment_filter(qb, workcenters=["WC1", "WC2"])

        assert len(qb.conditions) == 1
        assert "WORKCENTERNAME IN" in qb.conditions[0]

    def test_build_location_filter_legacy(self):
        """Legacy builder inlines both include and exclude lists."""
        sql = CommonFilters.build_location_filter_legacy(
            locations=["LOC1", "LOC2"],
            excluded_locations=["EXC1"],
        )

        assert "LOCATIONNAME IN ('LOC1', 'LOC2')" in sql
        assert "LOCATIONNAME NOT IN ('EXC1')" in sql

    def test_build_asset_status_filter_legacy(self):
        """Legacy asset-status builder inlines the excluded statuses."""
        sql = CommonFilters.build_asset_status_filter_legacy(
            excluded_statuses=["報廢", "閒置"]
        )

        assert "PJ_ASSETSSTATUS NOT IN" in sql
        assert "'報廢'" in sql
        assert "'閒置'" in sql

    def test_build_asset_status_filter_legacy_empty(self):
        """Legacy asset-status builder yields an empty string for no exclusions."""
        assert CommonFilters.build_asset_status_filter_legacy(excluded_statuses=[]) == ""

    def test_non_quality_hold_reasons_exists(self):
        """NON_QUALITY_HOLD_REASONS is a non-empty set."""
        assert len(NON_QUALITY_HOLD_REASONS) > 0
        assert isinstance(NON_QUALITY_HOLD_REASONS, set)
|
||||
86
tests/test_degraded_responses.py
Normal file
86
tests/test_degraded_responses.py
Normal file
@@ -0,0 +1,86 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Degraded response contract tests."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
from mes_dashboard.core.database import (
|
||||
DatabasePoolExhaustedError,
|
||||
DatabaseCircuitOpenError,
|
||||
)
|
||||
|
||||
|
||||
def _client():
    """Build a testing app with fault-injection routes and return its client."""
    db._ENGINE = None
    application = create_app("testing")
    application.config["TESTING"] = True

    # Routes that deliberately raise the degraded-mode exceptions so the
    # app-level error handlers can be exercised end to end.
    @application.route("/api/__test__/pool")
    def _pool_error():
        raise DatabasePoolExhaustedError("pool exhausted", retry_after_seconds=7)

    @application.route("/api/__test__/circuit")
    def _circuit_error():
        raise DatabaseCircuitOpenError("circuit open", retry_after_seconds=11)

    return application.test_client()
|
||||
|
||||
|
||||
def test_pool_exhausted_error_handler_contract():
    """Pool-exhausted errors map to 503 with a Retry-After header and JSON body."""
    response = _client().get("/api/__test__/pool")

    assert response.status_code == 503
    assert response.headers.get("Retry-After") == "7"

    body = response.get_json()
    assert body["success"] is False
    assert body["error"]["code"] == "DB_POOL_EXHAUSTED"
    assert body["meta"]["retry_after_seconds"] == 7
|
||||
|
||||
|
||||
def test_circuit_open_error_handler_contract():
    """Circuit-open errors map to 503 with a Retry-After header and JSON body."""
    response = _client().get("/api/__test__/circuit")

    assert response.status_code == 503
    assert response.headers.get("Retry-After") == "11"

    body = response.get_json()
    assert body["success"] is False
    assert body["error"]["code"] == "CIRCUIT_BREAKER_OPEN"
    assert body["meta"]["retry_after_seconds"] == 11
|
||||
|
||||
|
||||
@patch(
    "mes_dashboard.routes.wip_routes.get_wip_summary",
    side_effect=DatabasePoolExhaustedError("pool exhausted", retry_after_seconds=5),
)
def test_wip_route_propagates_degraded_contract(_mock_summary):
    """WIP routes surface the degraded-response contract unchanged."""
    response = _client().get("/api/wip/overview/summary")

    assert response.status_code == 503
    body = response.get_json()
    assert body["error"]["code"] == "DB_POOL_EXHAUSTED"
|
||||
|
||||
|
||||
@patch(
    "mes_dashboard.routes.resource_routes.get_resource_status_summary",
    side_effect=DatabasePoolExhaustedError("pool exhausted", retry_after_seconds=9),
)
def test_resource_route_propagates_degraded_contract(_mock_summary):
    """Resource routes surface the degraded-response contract unchanged."""
    response = _client().get("/api/resource/status/summary")

    assert response.status_code == 503
    body = response.get_json()
    assert body["error"]["code"] == "DB_POOL_EXHAUSTED"
    assert body["meta"]["retry_after_seconds"] == 9
|
||||
|
||||
|
||||
@patch(
    "mes_dashboard.routes.dashboard_routes.query_dashboard_kpi",
    side_effect=DatabaseCircuitOpenError("circuit open", retry_after_seconds=13),
)
def test_dashboard_route_propagates_degraded_contract(_mock_kpi):
    """Dashboard routes surface the circuit-open contract unchanged."""
    response = _client().post("/api/dashboard/kpi", json={})

    assert response.status_code == 503
    body = response.get_json()
    assert body["error"]["code"] == "CIRCUIT_BREAKER_OPEN"
    assert body["meta"]["retry_after_seconds"] == 13
|
||||
506
tests/test_excel_query_e2e.py
Normal file
506
tests/test_excel_query_e2e.py
Normal file
@@ -0,0 +1,506 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""End-to-end tests for Excel query workflow.
|
||||
|
||||
Tests the complete workflow from Excel upload to query execution and export.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import io
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from mes_dashboard import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def app():
    """Create the Flask application configured for testing."""
    application = create_app()
    application.config['TESTING'] = True
    return application
|
||||
|
||||
|
||||
@pytest.fixture
def client(app):
    """Return a Flask test client bound to the test application."""
    test_client = app.test_client()
    return test_client
|
||||
|
||||
|
||||
def create_test_excel(data):
    """Build an in-memory .xlsx workbook from *data*.

    Args:
        data: List of rows where the first row is the header, e.g.
            [['COL1', 'COL2'], ['val1', 'val2'], ...]

    Returns:
        io.BytesIO positioned at the start of the serialized workbook.
    """
    import openpyxl
    workbook = openpyxl.Workbook()
    sheet = workbook.active

    # openpyxl cells are 1-indexed in both dimensions.
    for r, row in enumerate(data, start=1):
        for c, cell_value in enumerate(row, start=1):
            sheet.cell(row=r, column=c, value=cell_value)

    stream = io.BytesIO()
    workbook.save(stream)
    stream.seek(0)
    return stream
|
||||
|
||||
|
||||
class TestBasicQueryWorkflow:
    """E2E tests for basic query workflow."""

    @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query')
    def test_complete_basic_workflow(self, mock_execute, client):
        """Test complete workflow: upload → get values → execute → export."""
        # Step 1: upload an Excel file with three lots.
        workbook = create_test_excel([
            ['LOT_ID', 'PRODUCT', 'QTY'],
            ['LOT001', 'PROD_A', 100],
            ['LOT002', 'PROD_B', 200],
            ['LOT003', 'PROD_A', 150],
        ])
        upload_response = client.post(
            '/api/excel-query/upload',
            data={'file': (workbook, 'batch_query.xlsx')},
            content_type='multipart/form-data'
        )
        assert upload_response.status_code == 200
        upload_payload = json.loads(upload_response.data)
        assert 'columns' in upload_payload
        assert 'LOT_ID' in upload_payload['columns']
        assert 'preview' in upload_payload

        # Step 2: fetch the distinct values of the search column.
        values_response = client.post(
            '/api/excel-query/column-values',
            json={'column_name': 'LOT_ID'}
        )
        assert values_response.status_code == 200
        values_payload = json.loads(values_response.data)
        assert 'values' in values_payload
        assert set(values_payload['values']) == {'LOT001', 'LOT002', 'LOT003'}

        # Step 3: execute the batch query with the DB layer mocked out.
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'SPEC', 'STATUS'],
            'data': [
                ['LOT001', 'SPEC_001', 'ACTIVE'],
                ['LOT002', 'SPEC_002', 'HOLD'],
                ['LOT003', 'SPEC_001', 'ACTIVE'],
            ],
            'total': 3
        }
        execute_response = client.post(
            '/api/excel-query/execute',
            json={
                'table_name': 'DWH.DW_MES_WIP',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID', 'SPEC', 'STATUS'],
                'search_values': ['LOT001', 'LOT002', 'LOT003']
            }
        )
        assert execute_response.status_code == 200
        assert json.loads(execute_response.data)['total'] == 3
|
||||
|
||||
|
||||
class TestAdvancedQueryWorkflow:
    """E2E tests for advanced query workflow with date range and LIKE."""

    @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query')
    def test_like_contains_workflow(self, mock_execute, client):
        """Test workflow with LIKE contains query."""
        # Upload an Excel file containing the search patterns.
        workbook = create_test_excel([
            ['SEARCH_PATTERN'],
            ['LOT'],
            ['WIP'],
        ])
        upload_response = client.post(
            '/api/excel-query/upload',
            data={'file': (workbook, 'patterns.xlsx')},
            content_type='multipart/form-data'
        )
        assert upload_response.status_code == 200

        # Read the uploaded patterns back out to use as search values.
        values_response = client.post(
            '/api/excel-query/column-values',
            json={'column_name': 'SEARCH_PATTERN'}
        )
        assert values_response.status_code == 200
        search_values = json.loads(values_response.data)['values']

        # Run a LIKE-contains query with the DB layer mocked out.
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'STATUS'],
            'data': [
                ['LOT001', 'ACTIVE'],
                ['LOT002', 'ACTIVE'],
                ['WIP001', 'HOLD'],
                ['WIP002', 'ACTIVE'],
            ],
            'total': 4
        }
        response = client.post(
            '/api/excel-query/execute-advanced',
            json={
                'table_name': 'DWH.DW_MES_WIP',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID', 'STATUS'],
                'search_values': search_values,
                'query_type': 'like_contains'
            }
        )
        assert response.status_code == 200
        assert json.loads(response.data)['total'] == 4

    @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query')
    def test_date_range_workflow(self, mock_execute, client):
        """Test workflow with date range filter."""
        workbook = create_test_excel([
            ['LOT_ID'],
            ['LOT001'],
            ['LOT002'],
        ])
        client.post(
            '/api/excel-query/upload',
            data={'file': (workbook, 'lots.xlsx')},
            content_type='multipart/form-data'
        )

        # Execute an IN query restricted to a date window.
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'TXNDATE'],
            'data': [['LOT001', '2024-01-15']],
            'total': 1
        }
        response = client.post(
            '/api/excel-query/execute-advanced',
            json={
                'table_name': 'DWH.DW_MES_WIP',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID', 'TXNDATE'],
                'search_values': ['LOT001', 'LOT002'],
                'query_type': 'in',
                'date_column': 'TXNDATE',
                'date_from': '2024-01-01',
                'date_to': '2024-01-31'
            }
        )
        assert response.status_code == 200
        assert json.loads(response.data)['total'] == 1

    @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query')
    def test_combined_like_and_date_workflow(self, mock_execute, client):
        """Test workflow combining LIKE and date range."""
        workbook = create_test_excel([
            ['PREFIX'],
            ['LOT'],
        ])
        client.post(
            '/api/excel-query/upload',
            data={'file': (workbook, 'prefixes.xlsx')},
            content_type='multipart/form-data'
        )

        # Execute with both a LIKE prefix and a date window.
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'TXNDATE', 'STATUS'],
            'data': [
                ['LOT001', '2024-01-15', 'ACTIVE'],
                ['LOT002', '2024-01-20', 'ACTIVE'],
            ],
            'total': 2
        }
        response = client.post(
            '/api/excel-query/execute-advanced',
            json={
                'table_name': 'DWH.DW_MES_WIP',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID', 'TXNDATE', 'STATUS'],
                'search_values': ['LOT'],
                'query_type': 'like_prefix',
                'date_column': 'TXNDATE',
                'date_from': '2024-01-01',
                'date_to': '2024-01-31'
            }
        )
        assert response.status_code == 200
|
||||
|
||||
|
||||
class TestColumnTypeDetection:
    """End-to-end coverage of the column-type detection workflow."""

    def _upload_and_detect(self, client, rows, filename, column_name):
        """Upload *rows* as an Excel file, then ask the API to type *column_name*."""
        client.post(
            '/api/excel-query/upload',
            data={'file': (create_test_excel(rows), filename)},
            content_type='multipart/form-data',
        )
        return client.post(
            '/api/excel-query/column-type',
            json={'column_name': column_name},
        )

    def test_detect_date_column(self, client):
        """A column of ISO dates should be reported as 'date'."""
        rows = [['DATE_COL']] + [
            [d] for d in ('2024-01-01', '2024-01-02', '2024-01-03', '2024-01-04')
        ]
        resp = self._upload_and_detect(client, rows, 'dates.xlsx', 'DATE_COL')
        assert resp.status_code == 200
        assert json.loads(resp.data)['detected_type'] == 'date'

    def test_detect_number_column(self, client):
        """A column of numeric strings should be reported as 'number'."""
        rows = [['QTY']] + [[v] for v in ('100', '200', '350.5', '-50')]
        resp = self._upload_and_detect(client, rows, 'numbers.xlsx', 'QTY')
        assert resp.status_code == 200
        assert json.loads(resp.data)['detected_type'] == 'number'

    def test_detect_id_column(self, client):
        """A column of uppercase identifiers should be reported as 'id'."""
        rows = [['LOT_ID']] + [
            [v] for v in ('LOT001', 'LOT002', 'WIP-2024-001', 'PROD_ABC')
        ]
        resp = self._upload_and_detect(client, rows, 'ids.xlsx', 'LOT_ID')
        assert resp.status_code == 200
        assert json.loads(resp.data)['detected_type'] == 'id'
|
||||
|
||||
|
||||
class TestTableMetadataWorkflow:
    """End-to-end coverage of the table-metadata retrieval workflow."""

    @patch('mes_dashboard.routes.excel_query_routes.get_table_column_metadata')
    def test_metadata_with_type_matching(self, mock_metadata, client):
        """Check an Excel column's detected type against table metadata."""
        # Step 1: upload an Excel file with an ID-like column.
        rows = [['LOT_ID'], ['LOT001'], ['LOT002']]
        client.post(
            '/api/excel-query/upload',
            data={'file': (create_test_excel(rows), 'lots.xlsx')},
            content_type='multipart/form-data',
        )

        # Step 2: ask the API which type it detected for that column.
        # NOTE(review): excel_type is fetched but never compared below —
        # consider asserting compatibility against the table metadata.
        type_resp = client.post(
            '/api/excel-query/column-type',
            json={'column_name': 'LOT_ID'},
        )
        excel_type = json.loads(type_resp.data)['detected_type']

        # Step 3: fetch (mocked) metadata for the target table.
        mock_metadata.return_value = {
            'columns': [
                {'name': 'LOT_ID', 'data_type': 'VARCHAR2', 'is_date': False, 'is_number': False},
                {'name': 'QTY', 'data_type': 'NUMBER', 'is_date': False, 'is_number': True},
                {'name': 'TXNDATE', 'data_type': 'DATE', 'is_date': True, 'is_number': False},
            ]
        }
        meta_resp = client.post(
            '/api/excel-query/table-metadata',
            json={'table_name': 'DWH.DW_MES_WIP'},
        )
        assert meta_resp.status_code == 200
        metadata = json.loads(meta_resp.data)

        # All three columns come back with their declared database types.
        assert len(metadata['columns']) == 3
        lot_col = next(c for c in metadata['columns'] if c['name'] == 'LOT_ID')
        assert lot_col['data_type'] == 'VARCHAR2'
|
||||
|
||||
|
||||
class TestValidationWorkflow:
    """End-to-end coverage of input validation across the query workflow."""

    def test_like_keyword_limit_enforcement(self, client):
        """LIKE queries with more keywords than the limit must be handled."""
        from mes_dashboard.services.excel_query_service import LIKE_KEYWORD_LIMIT

        # Build an Excel column holding more values than the LIKE limit allows.
        rows = [['VALUE']] + [[f'VAL{i}'] for i in range(LIKE_KEYWORD_LIMIT + 10)]
        client.post(
            '/api/excel-query/upload',
            data={'file': (create_test_excel(rows), 'many_values.xlsx')},
            content_type='multipart/form-data',
        )

        # Fetch every uploaded value back from the API.
        values_resp = client.post(
            '/api/excel-query/column-values',
            json={'column_name': 'VALUE'},
        )
        all_values = json.loads(values_resp.data)['values']

        # Attempt a LIKE query using the oversized keyword list.
        resp = client.post(
            '/api/excel-query/execute-advanced',
            json={
                'table_name': 'TEST_TABLE',
                'search_column': 'COL',
                'return_columns': ['COL'],
                'search_values': all_values,
                'query_type': 'like_contains',
            },
        )
        # The limit may be enforced at validation or at the service layer;
        # either a rejection (400) or a handled success (200) is acceptable.
        assert resp.status_code in [200, 400]

    def test_date_range_boundary_validation(self, client):
        """A full-year 2024 range spans 366 days (leap year) and is rejected."""
        rows = [['LOT_ID'], ['LOT001']]
        client.post(
            '/api/excel-query/upload',
            data={'file': (create_test_excel(rows), 'lots.xlsx')},
            content_type='multipart/form-data',
        )

        resp = client.post(
            '/api/excel-query/execute-advanced',
            json={
                'table_name': 'TEST_TABLE',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID'],
                'search_values': ['LOT001'],
                'date_from': '2024-01-01',
                'date_to': '2024-12-31',  # 366 days: 2024 is a leap year
            },
        )
        # One day over the 365-day limit, so the API rejects the range.
        assert resp.status_code == 400

    def test_empty_search_values_rejected(self, client):
        """An empty search_values list is rejected outright."""
        resp = client.post(
            '/api/excel-query/execute-advanced',
            json={
                'table_name': 'TEST_TABLE',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID'],
                'search_values': [],
                'query_type': 'in',
            },
        )
        assert resp.status_code == 400
|
||||
|
||||
|
||||
class TestBackwardCompatibility:
    """End-to-end checks that the original (pre-advanced) API still behaves."""

    @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query')
    def test_original_execute_endpoint_works(self, mock_execute, client):
        """The legacy /execute endpoint accepts a basic query unchanged."""
        mock_execute.return_value = {
            'columns': ['LOT_ID'],
            'data': [['LOT001']],
            'total': 1,
        }

        # No advanced features in the payload — just the original fields.
        resp = client.post(
            '/api/excel-query/execute',
            json={
                'table_name': 'DWH.DW_MES_WIP',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID'],
                'search_values': ['LOT001'],
            },
        )
        assert resp.status_code == 200
        assert json.loads(resp.data)['total'] == 1

    @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query')
    @patch('mes_dashboard.routes.excel_query_routes.generate_csv_content')
    def test_csv_export_still_works(self, mock_csv, mock_execute, client):
        """CSV export remains functional for a basic (non-advanced) query."""
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'STATUS'],
            'data': [['LOT001', 'ACTIVE']],
            'total': 1,
        }
        mock_csv.return_value = 'LOT_ID,STATUS\nLOT001,ACTIVE\n'

        resp = client.post(
            '/api/excel-query/export-csv',
            json={
                'table_name': 'DWH.DW_MES_WIP',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID', 'STATUS'],
                'search_values': ['LOT001'],
            },
        )
        assert resp.status_code == 200
        assert resp.content_type.startswith('text/csv')
|
||||
474
tests/test_excel_query_routes.py
Normal file
474
tests/test_excel_query_routes.py
Normal file
@@ -0,0 +1,474 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for Excel query API routes.
|
||||
|
||||
Tests the API endpoints with mocked database dependencies.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import io
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from mes_dashboard import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def app():
    """Build a Flask application configured for testing.

    NOTE(review): conftest.py also defines an ``app`` fixture (which
    additionally resets the DB engine and passes a 'testing' config);
    this local fixture shadows it — confirm the duplication is intended.
    """
    flask_app = create_app()
    flask_app.config['TESTING'] = True
    return flask_app
|
||||
|
||||
|
||||
@pytest.fixture
def client(app):
    """Flask test client bound to the testing application."""
    test_client = app.test_client()
    return test_client
|
||||
|
||||
|
||||
@pytest.fixture
def mock_excel_file():
    """In-memory .xlsx workbook with a header row and three data rows."""
    import openpyxl

    rows = [
        ('LOT_ID', 'PRODUCT', 'DATE'),
        ('LOT001', 'PROD_A', '2024-01-15'),
        ('LOT002', 'PROD_B', '2024-01-16'),
        ('LOT003', 'PROD_A', '2024-01-17'),
    ]
    workbook = openpyxl.Workbook()
    sheet = workbook.active
    # Appending row-by-row fills A1..C4 exactly as per-cell assignment would.
    for row in rows:
        sheet.append(row)

    buffer = io.BytesIO()
    workbook.save(buffer)
    buffer.seek(0)
    return buffer
|
||||
|
||||
|
||||
class TestUploadExcel:
    """Coverage for the /api/excel-query/upload endpoint."""

    def test_upload_no_file(self, client):
        """A POST with no file part is rejected with an error payload."""
        resp = client.post('/api/excel-query/upload')
        assert resp.status_code == 400
        assert 'error' in json.loads(resp.data)

    def test_upload_empty_filename(self, client):
        """A file part with an empty filename is rejected."""
        resp = client.post(
            '/api/excel-query/upload',
            data={'file': (io.BytesIO(b''), '')},
            content_type='multipart/form-data',
        )
        assert resp.status_code == 400

    def test_upload_invalid_extension(self, client):
        """Files that are not .xlsx/.xls are rejected with a helpful message."""
        resp = client.post(
            '/api/excel-query/upload',
            data={'file': (io.BytesIO(b'test'), 'test.txt')},
            content_type='multipart/form-data',
        )
        assert resp.status_code == 400
        body = json.loads(resp.data)
        assert '.xlsx' in body['error'] or '.xls' in body['error']

    def test_upload_valid_excel(self, client, mock_excel_file):
        """A well-formed workbook is parsed into columns plus a preview."""
        resp = client.post(
            '/api/excel-query/upload',
            data={'file': (mock_excel_file, 'test.xlsx')},
            content_type='multipart/form-data',
        )
        assert resp.status_code == 200
        body = json.loads(resp.data)
        assert 'columns' in body
        assert 'LOT_ID' in body['columns']
        assert 'preview' in body
|
||||
|
||||
|
||||
class TestGetColumnValues:
    """Coverage for the /api/excel-query/column-values endpoint."""

    def test_no_column_name(self, client):
        """Omitting the column name yields a 400."""
        resp = client.post('/api/excel-query/column-values', json={})
        assert resp.status_code == 400

    def test_no_excel_uploaded(self, client):
        """Requesting values before any upload yields a 400."""
        # Make sure no earlier test left a parsed workbook in the cache.
        from mes_dashboard.routes.excel_query_routes import _uploaded_excel_cache
        _uploaded_excel_cache.clear()

        resp = client.post(
            '/api/excel-query/column-values',
            json={'column_name': 'LOT_ID'},
        )
        assert resp.status_code == 400

    def test_get_values_after_upload(self, client, mock_excel_file):
        """After an upload, the endpoint returns that column's values."""
        client.post(
            '/api/excel-query/upload',
            data={'file': (mock_excel_file, 'test.xlsx')},
            content_type='multipart/form-data',
        )

        resp = client.post(
            '/api/excel-query/column-values',
            json={'column_name': 'LOT_ID'},
        )
        assert resp.status_code == 200
        body = json.loads(resp.data)
        assert 'values' in body
        assert 'LOT001' in body['values']
|
||||
|
||||
|
||||
class TestGetTables:
    """Coverage for the /api/excel-query/tables endpoint."""

    def test_get_tables(self, client):
        """The endpoint lists the queryable tables as a JSON array."""
        resp = client.get('/api/excel-query/tables')
        assert resp.status_code == 200
        body = json.loads(resp.data)
        assert 'tables' in body
        assert isinstance(body['tables'], list)
|
||||
|
||||
|
||||
class TestTableMetadata:
    """Coverage for the /api/excel-query/table-metadata endpoint."""

    def test_no_table_name(self, client):
        """Omitting the table name yields a 400."""
        resp = client.post('/api/excel-query/table-metadata', json={})
        assert resp.status_code == 400

    @patch('mes_dashboard.routes.excel_query_routes.get_table_column_metadata')
    def test_get_metadata_success(self, mock_metadata, client):
        """Column metadata from the service is passed through to the caller."""
        mock_metadata.return_value = {
            'columns': [
                {'name': 'LOT_ID', 'data_type': 'VARCHAR2', 'is_date': False, 'is_number': False},
                {'name': 'TXNDATE', 'data_type': 'DATE', 'is_date': True, 'is_number': False},
            ]
        }

        resp = client.post(
            '/api/excel-query/table-metadata',
            json={'table_name': 'TEST_TABLE'},
        )
        assert resp.status_code == 200
        body = json.loads(resp.data)
        assert 'columns' in body
        assert len(body['columns']) == 2

    @patch('mes_dashboard.routes.excel_query_routes.get_table_column_metadata')
    def test_metadata_not_found(self, mock_metadata, client):
        """A service-level 'table not found' error surfaces as a 400."""
        mock_metadata.return_value = {'error': 'Table not found', 'columns': []}

        resp = client.post(
            '/api/excel-query/table-metadata',
            json={'table_name': 'NONEXISTENT'},
        )
        assert resp.status_code == 400
|
||||
|
||||
|
||||
class TestExecuteAdvancedQuery:
    """Coverage for the /api/excel-query/execute-advanced endpoint."""

    @staticmethod
    def _post_advanced(client, payload):
        """POST *payload* to the execute-advanced endpoint."""
        return client.post('/api/excel-query/execute-advanced', json=payload)

    def test_missing_table_name(self, client):
        """A request without table_name is rejected."""
        resp = self._post_advanced(client, {
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID'],
            'search_values': ['LOT001'],
        })
        assert resp.status_code == 400

    def test_missing_search_column(self, client):
        """A request without search_column is rejected."""
        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'return_columns': ['LOT_ID'],
            'search_values': ['LOT001'],
        })
        assert resp.status_code == 400

    def test_invalid_query_type(self, client):
        """Unknown query_type values are rejected with an explanatory error."""
        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID'],
            'search_values': ['LOT001'],
            'query_type': 'invalid_type',
        })
        assert resp.status_code == 400
        body = json.loads(resp.data)
        assert 'invalid' in body['error'].lower() or '無效' in body['error']

    def test_invalid_date_format(self, client):
        """Dates not in YYYY-MM-DD format are rejected."""
        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID'],
            'search_values': ['LOT001'],
            'date_from': '01-01-2024',
            'date_to': '12-31-2024',
        })
        assert resp.status_code == 400
        body = json.loads(resp.data)
        assert '格式' in body['error'] or 'format' in body['error'].lower()

    def test_date_range_reversed(self, client):
        """A start date after the end date is rejected."""
        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID'],
            'search_values': ['LOT001'],
            'date_from': '2024-12-31',
            'date_to': '2024-01-01',
        })
        assert resp.status_code == 400
        body = json.loads(resp.data)
        assert '起始' in body['error'] or 'start' in body['error'].lower()

    def test_date_range_exceeds_limit(self, client):
        """Ranges longer than 365 days are rejected and mention the limit."""
        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID'],
            'search_values': ['LOT001'],
            'date_from': '2023-01-01',
            'date_to': '2024-12-31',
        })
        assert resp.status_code == 400
        assert '365' in json.loads(resp.data)['error']

    @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query')
    def test_execute_in_query(self, mock_execute, client):
        """A plain IN query executes and returns the mocked rows."""
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'PRODUCT'],
            'data': [['LOT001', 'PROD_A']],
            'total': 1,
        }

        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID', 'PRODUCT'],
            'search_values': ['LOT001'],
            'query_type': 'in',
        })
        assert resp.status_code == 200
        assert json.loads(resp.data)['total'] == 1

    @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query')
    def test_execute_like_contains(self, mock_execute, client):
        """A LIKE-contains query executes and returns the mocked rows."""
        mock_execute.return_value = {
            'columns': ['LOT_ID'],
            'data': [['LOT001'], ['LOT002']],
            'total': 2,
        }

        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID'],
            'search_values': ['LOT'],
            'query_type': 'like_contains',
        })
        assert resp.status_code == 200
        assert json.loads(resp.data)['total'] == 2

    @patch('mes_dashboard.routes.excel_query_routes.execute_advanced_batch_query')
    def test_execute_with_date_range(self, mock_execute, client):
        """Date-range parameters are forwarded to the service layer intact."""
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'TXNDATE'],
            'data': [['LOT001', '2024-01-15']],
            'total': 1,
        }

        resp = self._post_advanced(client, {
            'table_name': 'TEST_TABLE',
            'search_column': 'LOT_ID',
            'return_columns': ['LOT_ID', 'TXNDATE'],
            'search_values': ['LOT001'],
            'query_type': 'in',
            'date_column': 'TXNDATE',
            'date_from': '2024-01-01',
            'date_to': '2024-01-31',
        })
        assert resp.status_code == 200
        mock_execute.assert_called_once()
        call_kwargs = mock_execute.call_args[1]
        assert call_kwargs['date_column'] == 'TXNDATE'
        assert call_kwargs['date_from'] == '2024-01-01'
        assert call_kwargs['date_to'] == '2024-01-31'
|
||||
|
||||
|
||||
class TestExecuteQuery:
    """Coverage for the legacy /api/excel-query/execute endpoint."""

    def test_missing_parameters(self, client):
        """An incomplete request body is rejected."""
        resp = client.post('/api/excel-query/execute', json={'table_name': 'TEST'})
        assert resp.status_code == 400

    @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query')
    def test_execute_success(self, mock_execute, client):
        """A complete basic query returns the mocked result set."""
        mock_execute.return_value = {
            'columns': ['LOT_ID'],
            'data': [['LOT001']],
            'total': 1,
        }

        resp = client.post(
            '/api/excel-query/execute',
            json={
                'table_name': 'TEST_TABLE',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID'],
                'search_values': ['LOT001'],
            },
        )
        assert resp.status_code == 200
        assert json.loads(resp.data)['total'] == 1
|
||||
|
||||
|
||||
class TestExportCSV:
    """Coverage for the /api/excel-query/export-csv endpoint."""

    def test_missing_parameters(self, client):
        """An empty request body is rejected."""
        resp = client.post('/api/excel-query/export-csv', json={})
        assert resp.status_code == 400

    @patch('mes_dashboard.routes.excel_query_routes.execute_batch_query')
    @patch('mes_dashboard.routes.excel_query_routes.generate_csv_content')
    def test_export_success(self, mock_csv, mock_execute, client):
        """A valid query streams back a CSV document."""
        mock_execute.return_value = {
            'columns': ['LOT_ID', 'PRODUCT'],
            'data': [['LOT001', 'PROD_A']],
            'total': 1,
        }
        mock_csv.return_value = 'LOT_ID,PRODUCT\nLOT001,PROD_A\n'

        resp = client.post(
            '/api/excel-query/export-csv',
            json={
                'table_name': 'TEST_TABLE',
                'search_column': 'LOT_ID',
                'return_columns': ['LOT_ID', 'PRODUCT'],
                'search_values': ['LOT001'],
            },
        )
        assert resp.status_code == 200
        assert resp.content_type.startswith('text/csv')
        assert b'LOT_ID' in resp.data
|
||||
|
||||
|
||||
class TestGetExcelColumnType:
    """Coverage for the /api/excel-query/column-type endpoint."""

    def test_no_column_name(self, client):
        """Omitting the column name yields a 400."""
        resp = client.post('/api/excel-query/column-type', json={})
        assert resp.status_code == 400

    def test_no_excel_uploaded(self, client):
        """Asking for a type before any upload yields a 400."""
        # Clear any workbook cached by earlier tests.
        from mes_dashboard.routes.excel_query_routes import _uploaded_excel_cache
        _uploaded_excel_cache.clear()

        resp = client.post(
            '/api/excel-query/column-type',
            json={'column_name': 'LOT_ID'},
        )
        assert resp.status_code == 400

    def test_detect_type_after_upload(self, client, mock_excel_file):
        """After an upload, the endpoint reports a type and its label."""
        client.post(
            '/api/excel-query/upload',
            data={'file': (mock_excel_file, 'test.xlsx')},
            content_type='multipart/form-data',
        )

        resp = client.post(
            '/api/excel-query/column-type',
            json={'column_name': 'LOT_ID'},
        )
        assert resp.status_code == 200
        body = json.loads(resp.data)
        assert 'detected_type' in body
        assert 'type_label' in body
|
||||
261
tests/test_excel_query_service.py
Normal file
261
tests/test_excel_query_service.py
Normal file
@@ -0,0 +1,261 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for Excel query service functions.
|
||||
|
||||
Tests the core service functions without database dependencies.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from mes_dashboard.services.excel_query_service import (
|
||||
detect_excel_column_type,
|
||||
escape_like_pattern,
|
||||
build_like_condition,
|
||||
build_date_range_condition,
|
||||
validate_like_keywords,
|
||||
sanitize_column_name,
|
||||
validate_table_name,
|
||||
LIKE_KEYWORD_LIMIT,
|
||||
)
|
||||
|
||||
|
||||
class TestDetectExcelColumnType:
    """Unit coverage for detect_excel_column_type."""

    def _assert_type(self, values, expected_type, expected_label):
        """Run detection on *values* and check the type and its label."""
        result = detect_excel_column_type(values)
        assert result['detected_type'] == expected_type
        assert result['type_label'] == expected_label

    def test_empty_values_returns_text(self):
        """An empty input defaults to the text type."""
        self._assert_type([], 'text', '文字')

    def test_detect_date_type(self):
        """YYYY-MM-DD strings are classified as dates."""
        self._assert_type(
            ['2024-01-15', '2024-02-20', '2024-03-25', '2024-04-30'],
            'date', '日期')

    def test_detect_date_with_slash(self):
        """YYYY/MM/DD strings are classified as dates too."""
        self._assert_type(
            ['2024/01/15', '2024/02/20', '2024/03/25', '2024/04/30'],
            'date', '日期')

    def test_detect_datetime_type(self):
        """Strings carrying a time component are classified as datetimes."""
        self._assert_type(
            [
                '2024-01-15 10:30:00',
                '2024-02-20 14:45:30',
                '2024-03-25T08:00:00',
                '2024-04-30 23:59:59',
            ],
            'datetime', '日期時間')

    def test_detect_number_type(self):
        """Integer, float, and negative strings are classified as numbers."""
        self._assert_type(['123', '456.78', '-99', '0', '1000000'], 'number', '數值')

    def test_detect_id_type(self):
        """Uppercase alphanumeric codes are classified as identifiers."""
        self._assert_type(
            ['LOT001', 'WIP-2024-001', 'ABC_123', 'PROD001', 'TEST_ID'],
            'id', '識別碼')

    def test_mixed_values_returns_text(self):
        """Heterogeneous values fall back to the text type."""
        self._assert_type(['abc', '123', '2024-01-01', 'xyz', 'test'], 'text', '文字')

    def test_sample_values_included(self):
        """The result carries at most five sample values."""
        result = detect_excel_column_type(['A', 'B', 'C', 'D', 'E', 'F'])
        assert 'sample_values' in result
        assert len(result['sample_values']) <= 5
|
||||
|
||||
|
||||
class TestEscapeLikePattern:
    """Unit coverage for escape_like_pattern."""

    def test_escape_percent(self):
        """'%' is escaped with a backslash."""
        assert escape_like_pattern('100%') == '100\\%'

    def test_escape_underscore(self):
        """'_' is escaped with a backslash."""
        assert escape_like_pattern('test_value') == 'test\\_value'

    def test_escape_backslash(self):
        """A literal backslash is doubled."""
        assert escape_like_pattern('path\\file') == 'path\\\\file'

    def test_escape_multiple_specials(self):
        """Every special character in the input gets escaped."""
        assert escape_like_pattern('50%_off') == '50\\%\\_off'

    def test_no_escape_needed(self):
        """Inputs without LIKE metacharacters pass through unchanged."""
        assert escape_like_pattern('normalvalue') == 'normalvalue'
|
||||
|
||||
|
||||
class TestBuildLikeCondition:
    """Unit coverage for build_like_condition."""

    def test_contains_mode(self):
        """'contains' wraps the value in %...%."""
        condition, params = build_like_condition('COL', ['abc'], 'contains')
        assert 'LIKE :like_0' in condition
        assert params['like_0'] == '%abc%'

    def test_prefix_mode(self):
        """'prefix' appends a trailing %."""
        condition, params = build_like_condition('COL', ['abc'], 'prefix')
        assert 'LIKE :like_0' in condition
        assert params['like_0'] == 'abc%'

    def test_suffix_mode(self):
        """'suffix' prepends a leading %."""
        condition, params = build_like_condition('COL', ['abc'], 'suffix')
        assert 'LIKE :like_0' in condition
        assert params['like_0'] == '%abc'

    def test_multiple_values(self):
        """Several values combine into OR'ed conditions with numbered binds."""
        condition, params = build_like_condition('COL', ['a', 'b', 'c'], 'contains')
        assert 'OR' in condition
        assert len(params) == 3
        assert params['like_0'] == '%a%'
        assert params['like_1'] == '%b%'
        assert params['like_2'] == '%c%'

    def test_empty_values(self):
        """No values produces an empty condition and no bind params."""
        condition, params = build_like_condition('COL', [], 'contains')
        assert condition == ''
        assert params == {}

    def test_escape_clause_included(self):
        """Each LIKE carries an ESCAPE clause for the backslash escapes."""
        condition, params = build_like_condition('COL', ['test'], 'contains')
        assert "ESCAPE '\\')" in condition
|
||||
|
||||
|
||||
class TestBuildDateRangeCondition:
    """Unit coverage for build_date_range_condition."""

    def test_both_dates(self):
        """Both bounds produce TO_DATE binds for from and to."""
        condition, params = build_date_range_condition(
            'TXNDATE', '2024-01-01', '2024-12-31'
        )
        assert 'TO_DATE(:date_from' in condition
        assert 'TO_DATE(:date_to' in condition
        assert params['date_from'] == '2024-01-01'
        assert params['date_to'] == '2024-12-31'

    def test_only_from_date(self):
        """A lone start date produces only the >= half of the range."""
        condition, params = build_date_range_condition(
            'TXNDATE', date_from='2024-01-01'
        )
        assert '>=' in condition
        assert 'date_from' in params
        assert 'date_to' not in params

    def test_only_to_date(self):
        """A lone end date produces only the < half of the range."""
        condition, params = build_date_range_condition(
            'TXNDATE', date_to='2024-12-31'
        )
        assert '<' in condition
        assert 'date_to' in params
        assert 'date_from' not in params

    def test_no_dates(self):
        """No bounds yields an empty condition and no params."""
        condition, params = build_date_range_condition('TXNDATE')
        assert condition == ''
        assert params == {}

    def test_end_date_includes_full_day(self):
        """The end bound uses '+ 1' so the final day is fully included."""
        condition, params = build_date_range_condition(
            'TXNDATE', date_to='2024-12-31'
        )
        assert '+ 1' in condition
|
||||
|
||||
|
||||
class TestValidateLikeKeywords:
    """Unit coverage for validate_like_keywords."""

    def test_within_limit(self):
        """A list well under the limit validates cleanly."""
        assert validate_like_keywords(['a'] * 50)['valid'] is True

    def test_at_limit(self):
        """A list of exactly LIKE_KEYWORD_LIMIT items still validates."""
        assert validate_like_keywords(['a'] * LIKE_KEYWORD_LIMIT)['valid'] is True

    def test_exceeds_limit(self):
        """One item over the limit fails validation with an error message."""
        outcome = validate_like_keywords(['a'] * (LIKE_KEYWORD_LIMIT + 1))
        assert outcome['valid'] is False
        assert 'error' in outcome
|
||||
|
||||
|
||||
class TestSanitizeColumnName:
    """Unit tests covering sanitize_column_name."""

    def test_valid_name(self):
        """An already-clean identifier passes through untouched."""
        assert sanitize_column_name('LOT_ID') == 'LOT_ID'

    def test_removes_special_chars(self):
        """Dashes and embedded spaces are stripped from the identifier."""
        for raw, cleaned in (('LOT-ID', 'LOTID'), ('LOT ID', 'LOTID')):
            assert sanitize_column_name(raw) == cleaned

    def test_allows_underscore(self):
        """Underscores are legal and preserved."""
        assert sanitize_column_name('MY_COLUMN_NAME') == 'MY_COLUMN_NAME'

    def test_prevents_sql_injection(self):
        """Injection payload punctuation is stripped out entirely."""
        assert sanitize_column_name("COL; DROP TABLE--") == 'COLDROPTABLE'
class TestValidateTableName:
    """Unit tests covering validate_table_name."""

    def test_simple_name(self):
        """A bare identifier is accepted."""
        assert validate_table_name('MY_TABLE') is True

    def test_schema_qualified(self):
        """A SCHEMA.TABLE qualified name is accepted."""
        assert validate_table_name('DWH.DW_MES_WIP') is True

    def test_invalid_starts_with_number(self):
        """Identifiers may not begin with a digit."""
        assert validate_table_name('123TABLE') is False

    def test_invalid_special_chars(self):
        """Dashes and embedded spaces are rejected."""
        for bad in ('TABLE-NAME', 'TABLE NAME'):
            assert validate_table_name(bad) is False

    def test_sql_injection_prevention(self):
        """Obvious injection payloads are rejected outright."""
        assert validate_table_name('TABLE; DROP--') is False
# ==== new file: tests/test_field_contracts.py (127 lines) ====
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Field contract governance tests."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import csv
|
||||
import io
|
||||
from unittest.mock import patch
|
||||
|
||||
import pandas as pd
|
||||
|
||||
from mes_dashboard.config.field_contracts import (
|
||||
get_page_contract,
|
||||
get_export_api_keys,
|
||||
get_export_headers,
|
||||
)
|
||||
from mes_dashboard.services.job_query_service import export_jobs_with_history
|
||||
from mes_dashboard.services.resource_history_service import export_csv as export_resource_history_csv
|
||||
|
||||
|
||||
def test_contract_sections_exist_for_primary_pages():
    """Every primary page/section pair must resolve to a non-empty contract."""
    required = (
        ('job_query', 'jobs_table'),
        ('job_query', 'txn_table'),
        ('job_query', 'export'),
        ('resource_history', 'detail_table'),
        ('resource_history', 'export'),
        ('tables', 'result_table'),
        ('excel_query', 'result_table'),
        ('resource_status', 'matrix_summary'),
    )
    for page, section in required:
        assert get_page_contract(page, section), f"missing contract for {page}:{section}"
def test_export_contracts_have_no_duplicate_api_keys():
    """Within each export contract, api_key values must be unique."""
    for page in ('job_query', 'resource_history'):
        api_keys = [entry.get('api_key') for entry in get_page_contract(page, 'export')]
        assert len(set(api_keys)) == len(api_keys)
def test_export_headers_and_keys_have_same_length():
    """Header list and api-key list must be non-empty and parallel."""
    for page in ('job_query', 'resource_history'):
        header_row = get_export_headers(page)
        key_row = get_export_api_keys(page)
        assert header_row
        assert key_row
        assert len(header_row) == len(key_row)
def test_all_contract_fields_define_semantic_type():
    """Every field in every governed section must declare a semantic_type."""
    governed = (
        ('job_query', 'jobs_table'),
        ('job_query', 'txn_table'),
        ('job_query', 'export'),
        ('resource_history', 'detail_table'),
        ('resource_history', 'kpi'),
        ('resource_history', 'export'),
        ('tables', 'result_table'),
        ('excel_query', 'result_table'),
        ('resource_status', 'matrix_summary'),
    )
    for page, section in governed:
        for field in get_page_contract(page, section):
            assert field.get('semantic_type'), f"missing semantic_type in {page}:{section}:{field}"
@patch('mes_dashboard.services.job_query_service.SQLLoader.load', return_value='SELECT 1')
def test_job_query_export_uses_contract_headers(_mock_sql):
    """The CSV header row emitted by the export must equal the contract headers."""
    keys = get_export_api_keys('job_query')
    headers = get_export_headers('job_query')

    # One synthetic record with a distinct value per contract key, plus
    # real timestamps in the date columns the exporter formats.
    record = {key: f'v_{idx}' for idx, key in enumerate(keys)}
    record['JOB_CREATEDATE'] = pd.Timestamp('2024-01-01 10:00:00')
    record['JOB_COMPLETEDATE'] = pd.Timestamp('2024-01-02 10:00:00')
    record['TXNDATE'] = pd.Timestamp('2024-01-02 11:00:00')
    frame = pd.DataFrame([record], columns=keys)

    with patch('mes_dashboard.services.job_query_service.read_sql_df', return_value=frame):
        exported = list(export_jobs_with_history(['R1'], '2024-01-01', '2024-01-10'))

    assert exported
    first_chunk = exported[0].lstrip('\ufeff')  # drop the UTF-8 BOM if present
    parsed = next(csv.reader(io.StringIO(first_chunk)))
    assert parsed == headers
@patch('mes_dashboard.services.resource_history_service.SQLLoader.load', return_value='SELECT 1')
@patch('mes_dashboard.services.resource_history_service.read_sql_df')
@patch('mes_dashboard.services.filter_cache.get_workcenter_mapping')
def test_resource_history_export_uses_contract_headers(
    mock_wc_mapping,
    mock_read_sql,
    _mock_sql_loader,
):
    """The resource-history CSV header row must equal the contract headers."""
    headers = get_export_headers('resource_history')

    mock_wc_mapping.return_value = {
        'WC-A': {'group': '站點-A', 'sequence': 1}
    }
    mock_read_sql.return_value = pd.DataFrame([
        {
            'HISTORYID': 'RES-A',
            'PRD_HOURS': 10,
            'SBY_HOURS': 2,
            'UDT_HOURS': 1,
            'SDT_HOURS': 1,
            'EGT_HOURS': 1,
            'NST_HOURS': 1,
            'TOTAL_HOURS': 16,
        }
    ])

    # A single matching resource so the service joins one row of hours data.
    filtered = [
        {
            'RESOURCEID': 'RES-A',
            'WORKCENTERNAME': 'WC-A',
            'RESOURCEFAMILYNAME': 'FAM-A',
            'RESOURCENAME': 'EQ-A',
        }
    ]
    with patch('mes_dashboard.services.resource_history_service._get_filtered_resources',
               return_value=filtered):
        exported = list(export_resource_history_csv('2024-01-01', '2024-01-10'))

    assert exported
    assert next(csv.reader(io.StringIO(exported[0]))) == headers
# ==== new file: tests/test_frontend_compute_parity.py (74 lines) ====
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Parity checks between backend formulas and frontend compute helpers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
from mes_dashboard.services.resource_history_service import (
|
||||
_calc_ou_pct,
|
||||
_calc_availability_pct,
|
||||
_calc_status_pct,
|
||||
)
|
||||
|
||||
|
||||
def _load_fixture() -> dict:
    """Read and parse the shared frontend/backend parity fixture."""
    fixture = (
        Path(__file__).resolve().parents[1]
        / "tests" / "fixtures" / "frontend_compute_parity.json"
    )
    return json.loads(fixture.read_text(encoding="utf-8"))
def _backend_expected(case: dict[str, float]) -> dict[str, float]:
    """Compute the reference metric values for one case via the backend formulas."""
    hours = [
        case[name]
        for name in ('prd_hours', 'sby_hours', 'udt_hours',
                     'sdt_hours', 'egt_hours', 'nst_hours')
    ]
    prd, sby, udt, sdt, egt, nst = hours
    total = sum(hours)

    expected = {
        'ou_pct': _calc_ou_pct(prd, sby, udt, sdt, egt),
        'availability_pct': _calc_availability_pct(prd, sby, udt, sdt, egt, nst),
    }
    # Per-status share of the grand total, one key per status bucket.
    for label, value in (('prd', prd), ('sby', sby), ('udt', udt),
                         ('sdt', sdt), ('egt', egt), ('nst', nst)):
        expected[f'{label}_pct'] = _calc_status_pct(value, total)
    return expected
def test_frontend_compute_matches_backend_formulas():
    """Run the frontend compute helper under node and compare to backend math."""
    repo_root = Path(__file__).resolve().parents[1]
    compute_module = repo_root / 'frontend' / 'src' / 'core' / 'compute.js'
    fixture = _load_fixture()
    cases = fixture["cases"]
    tolerance = fixture["metric_tolerance"]

    # Tiny ESM driver: feed the fixture cases through the JS helper and
    # print the resulting KPI objects as JSON on stdout.
    node_code = (
        "import { buildResourceKpiFromHours } from '" + compute_module.as_posix() + "';"
        "const cases = JSON.parse(process.argv[1]);"
        "const result = cases.map((c) => buildResourceKpiFromHours(c));"
        "console.log(JSON.stringify(result));"
    )
    completed = subprocess.run(
        ['node', '--input-type=module', '-e', node_code, json.dumps(cases)],
        cwd=repo_root,
        check=True,
        capture_output=True,
        text=True,
    )

    frontend_values = json.loads(completed.stdout)
    assert len(frontend_values) == len(cases)

    for case, actual in zip(cases, frontend_values):
        for key, expected in _backend_expected(case).items():
            delta = abs(float(actual[key]) - float(expected))
            assert delta <= float(tolerance.get(key, 0.0))
# ==== new file: tests/test_health_routes.py (80 lines) ====
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Health route telemetry tests."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
|
||||
|
||||
def _client():
    """Build a fresh testing app (with a reset DB engine) and return its client."""
    db._ENGINE = None  # force engine re-creation under the testing config
    test_app = create_app('testing')
    test_app.config['TESTING'] = True
    return test_app.test_client()
@patch('mes_dashboard.routes.health_routes.check_database', return_value=('ok', None))
@patch('mes_dashboard.routes.health_routes.check_redis', return_value=('error', 'redis-down'))
@patch('mes_dashboard.routes.health_routes.get_route_cache_status')
def test_health_includes_route_cache_and_degraded_warning(
    mock_route_cache,
    _mock_redis,
    _mock_db,
):
    """A degraded route cache must surface in /health status and warnings."""
    mock_route_cache.return_value = {
        'mode': 'l1-only',
        'degraded': True,
        'reads_total': 10,
        'l1_hits': 9,
        'misses': 1,
    }

    response = _client().get('/health')
    assert response.status_code == 200
    payload = response.get_json()

    assert payload['status'] == 'degraded'

    cache_info = payload['route_cache']
    assert cache_info['mode'] == 'l1-only'
    assert cache_info['degraded'] is True

    assert 'resilience' in payload
    resilience = payload['resilience']
    assert resilience['thresholds']['restart_churn_threshold'] >= 1
    assert resilience['recovery_recommendation']['action'] == 'continue_degraded_mode'

    warnings = payload.get('warnings', [])
    assert any('degraded' in text.lower() for text in warnings)
@patch('mes_dashboard.core.permissions.is_admin_logged_in', return_value=True)
@patch('mes_dashboard.core.metrics.get_metrics_summary', return_value={'p50_ms': 1, 'p95_ms': 2, 'p99_ms': 3, 'count': 10, 'slow_count': 0, 'slow_rate': 0.0, 'worker_pid': 123})
@patch('mes_dashboard.core.circuit_breaker.get_circuit_breaker_status', return_value={'state': 'CLOSED'})
@patch('mes_dashboard.routes.health_routes.check_database', return_value=('ok', None))
@patch('mes_dashboard.routes.health_routes.check_redis', return_value=('ok', None))
@patch('mes_dashboard.routes.health_routes.get_route_cache_status')
def test_deep_health_exposes_route_cache_telemetry(
    mock_route_cache,
    _mock_redis,
    _mock_db,
    _mock_cb,
    _mock_metrics,
    _mock_admin,
):
    """/health/deep must expose route-cache counters and a clean recommendation."""
    mock_route_cache.return_value = {
        'mode': 'l1+l2',
        'degraded': False,
        'reads_total': 20,
        'l1_hits': 8,
        'l2_hits': 11,
        'misses': 1,
    }

    response = _client().get('/health/deep')
    assert response.status_code == 200
    payload = response.get_json()

    cache_info = payload['checks']['route_cache']
    assert cache_info['mode'] == 'l1+l2'
    assert cache_info['reads_total'] == 20
    assert cache_info['degraded'] is False

    resilience = payload['resilience']
    assert resilience['recovery_recommendation']['action'] == 'none'
    assert resilience['thresholds']['pool_saturation_warning'] >= 0.5
# ==== new file: tests/test_hold_routes.py (317 lines) ====
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for Hold Detail API routes.
|
||||
|
||||
Tests the Hold Detail API endpoints in hold_routes.py.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
import json
|
||||
|
||||
from mes_dashboard.app import create_app
|
||||
import mes_dashboard.core.database as db
|
||||
|
||||
|
||||
class TestHoldRoutesBase(unittest.TestCase):
    """Shared setup for Hold route tests: fresh testing app and client per test."""

    def setUp(self):
        """Reset the cached DB engine and build a testing-mode client."""
        db._ENGINE = None  # ensure a clean engine for every test
        flask_app = create_app('testing')
        flask_app.config['TESTING'] = True
        self.app = flask_app
        self.client = flask_app.test_client()
class TestHoldDetailPageRoute(TestHoldRoutesBase):
    """GET /hold-detail page behaviour."""

    def test_hold_detail_page_requires_reason(self):
        """Without a reason the page redirects back to the WIP overview."""
        response = self.client.get('/hold-detail')
        self.assertEqual(response.status_code, 302)
        self.assertIn('/wip-overview', response.location)

    def test_hold_detail_page_with_reason(self):
        """A reason query-string renders the page successfully."""
        self.assertEqual(
            self.client.get('/hold-detail?reason=YieldLimit').status_code, 200
        )

    def test_hold_detail_page_contains_reason_in_html(self):
        """The rendered HTML echoes the requested hold reason."""
        self.assertIn(
            b'YieldLimit', self.client.get('/hold-detail?reason=YieldLimit').data
        )
class TestHoldDetailSummaryRoute(TestHoldRoutesBase):
    """GET /api/wip/hold-detail/summary behaviour."""

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
    def test_returns_success_with_data(self, mock_get_summary):
        """Service data is passed through verbatim under success=True."""
        summary = {
            'totalLots': 128,
            'totalQty': 25600,
            'avgAge': 2.3,
            'maxAge': 15.0,
            'workcenterCount': 8
        }
        mock_get_summary.return_value = summary

        response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 200)
        self.assertTrue(payload['success'])
        for field, expected in summary.items():
            self.assertEqual(payload['data'][field], expected)

    def test_returns_error_without_reason(self):
        """Missing reason parameter yields a 400 with an explanatory error."""
        response = self.client.get('/api/wip/hold-detail/summary')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])
        self.assertIn('reason', payload['error'])

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_summary')
    def test_returns_error_on_failure(self, mock_get_summary):
        """A None from the service maps to a 500 with success=False."""
        mock_get_summary.return_value = None

        response = self.client.get('/api/wip/hold-detail/summary?reason=YieldLimit')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 500)
        self.assertFalse(payload['success'])
        self.assertIn('error', payload)
class TestHoldDetailDistributionRoute(TestHoldRoutesBase):
    """GET /api/wip/hold-detail/distribution behaviour."""

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
    def test_returns_success_with_distribution(self, mock_get_dist):
        """All three distribution groupings are passed through."""
        by_workcenter = [
            {'name': 'DA', 'lots': 45, 'qty': 9000, 'percentage': 35.2},
            {'name': 'WB', 'lots': 38, 'qty': 7600, 'percentage': 29.7}
        ]
        by_package = [
            {'name': 'DIP-B', 'lots': 50, 'qty': 10000, 'percentage': 39.1},
            {'name': 'QFN', 'lots': 35, 'qty': 7000, 'percentage': 27.3}
        ]
        by_age = [
            {'range': '0-1', 'label': '0-1天', 'lots': 45, 'qty': 9000, 'percentage': 35.2},
            {'range': '1-3', 'label': '1-3天', 'lots': 38, 'qty': 7600, 'percentage': 29.7},
            {'range': '3-7', 'label': '3-7天', 'lots': 30, 'qty': 6000, 'percentage': 23.4},
            {'range': '7+', 'label': '7+天', 'lots': 15, 'qty': 3000, 'percentage': 11.7}
        ]
        mock_get_dist.return_value = {
            'byWorkcenter': by_workcenter,
            'byPackage': by_package,
            'byAge': by_age
        }

        response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 200)
        self.assertTrue(payload['success'])
        for grouping in ('byWorkcenter', 'byPackage', 'byAge'):
            self.assertIn(grouping, payload['data'])
        self.assertEqual(len(payload['data']['byWorkcenter']), 2)
        self.assertEqual(len(payload['data']['byAge']), 4)

    def test_returns_error_without_reason(self):
        """Missing reason parameter yields a 400."""
        response = self.client.get('/api/wip/hold-detail/distribution')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_distribution')
    def test_returns_error_on_failure(self, mock_get_dist):
        """A None from the service maps to a 500 with success=False."""
        mock_get_dist.return_value = None

        response = self.client.get('/api/wip/hold-detail/distribution?reason=YieldLimit')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 500)
        self.assertFalse(payload['success'])
class TestHoldDetailLotsRoute(TestHoldRoutesBase):
    """GET /api/wip/hold-detail/lots behaviour."""

    @staticmethod
    def _empty_payload(page=1, per_page=50, filters=None):
        """Build a lots payload with no rows, for use as a mock return value."""
        return {
            'lots': [],
            'pagination': {'page': page, 'perPage': per_page, 'total': 0,
                           'totalPages': 1},
            'filters': filters or {'workcenter': None, 'package': None,
                                   'ageRange': None}
        }

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_returns_success_with_lots(self, mock_get_lots):
        """Lots, pagination and filters from the service are passed through."""
        lot = {
            'lotId': 'L001',
            'workorder': 'WO123',
            'qty': 200,
            'package': 'DIP-B',
            'workcenter': 'DA',
            'spec': 'S01',
            'age': 2.3,
            'holdBy': 'EMP01',
            'dept': 'QC',
            'holdComment': 'Yield below threshold'
        }
        mock_get_lots.return_value = {
            'lots': [lot],
            'pagination': {'page': 1, 'perPage': 50, 'total': 128, 'totalPages': 3},
            'filters': {'workcenter': None, 'package': None, 'ageRange': None}
        }

        response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 200)
        self.assertTrue(payload['success'])
        for section in ('lots', 'pagination', 'filters'):
            self.assertIn(section, payload['data'])
        self.assertEqual(len(payload['data']['lots']), 1)
        self.assertEqual(payload['data']['pagination']['total'], 128)

    def test_returns_error_without_reason(self):
        """Missing reason parameter yields a 400."""
        response = self.client.get('/api/wip/hold-detail/lots')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_passes_filter_parameters(self, mock_get_lots):
        """Query-string filters are forwarded verbatim to the service call."""
        mock_get_lots.return_value = self._empty_payload(
            page=2,
            filters={'workcenter': 'DA', 'package': 'DIP-B', 'ageRange': '1-3'}
        )

        self.client.get(
            '/api/wip/hold-detail/lots?reason=YieldLimit&workcenter=DA&package=DIP-B&age_range=1-3&page=2'
        )

        mock_get_lots.assert_called_once_with(
            reason='YieldLimit',
            workcenter='DA',
            package='DIP-B',
            age_range='1-3',
            include_dummy=False,
            page=2,
            page_size=50
        )

    def test_validates_age_range_parameter(self):
        """An unknown age_range token yields a 400 mentioning the parameter."""
        response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&age_range=invalid')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 400)
        self.assertFalse(payload['success'])
        self.assertIn('age_range', payload['error'])

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_limits_per_page_to_200(self, mock_get_lots):
        """Requested per_page values above 200 are clamped to 200."""
        mock_get_lots.return_value = self._empty_payload(per_page=200)

        self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&per_page=500')

        self.assertEqual(mock_get_lots.call_args.kwargs['page_size'], 200)

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_handles_page_less_than_one(self, mock_get_lots):
        """Page numbers below 1 are normalised to 1."""
        mock_get_lots.return_value = self._empty_payload()

        self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit&page=0')

        self.assertEqual(mock_get_lots.call_args.kwargs['page'], 1)

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_returns_error_on_failure(self, mock_get_lots):
        """A None from the service maps to a 500 with success=False."""
        mock_get_lots.return_value = None

        response = self.client.get('/api/wip/hold-detail/lots?reason=YieldLimit')
        payload = json.loads(response.data)

        self.assertEqual(response.status_code, 500)
        self.assertFalse(payload['success'])
class TestHoldDetailAgeRangeFilters(TestHoldRoutesBase):
    """Validation of the age_range filter tokens."""

    def _assert_age_range_accepted(self, mock_get_lots, age_range, encoded=None):
        """Drive the lots endpoint with the given age_range and expect a 200."""
        mock_get_lots.return_value = {
            'lots': [], 'pagination': {'page': 1, 'perPage': 50, 'total': 0, 'totalPages': 1},
            'filters': {'workcenter': None, 'package': None, 'ageRange': age_range}
        }
        url = '/api/wip/hold-detail/lots?reason=Test&age_range=' + (encoded or age_range)
        self.assertEqual(self.client.get(url).status_code, 200)

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_valid_age_range_0_1(self, mock_get_lots):
        """'0-1' is a valid token."""
        self._assert_age_range_accepted(mock_get_lots, '0-1')

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_valid_age_range_1_3(self, mock_get_lots):
        """'1-3' is a valid token."""
        self._assert_age_range_accepted(mock_get_lots, '1-3')

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_valid_age_range_3_7(self, mock_get_lots):
        """'3-7' is a valid token."""
        self._assert_age_range_accepted(mock_get_lots, '3-7')

    @patch('mes_dashboard.routes.hold_routes.get_hold_detail_lots')
    def test_valid_age_range_7_plus(self, mock_get_lots):
        """'7+' is a valid token (URL-encoded as 7%2B)."""
        self._assert_age_range_accepted(mock_get_lots, '7+', encoded='7%2B')
# Allow running this test module directly (outside the pytest runner).
if __name__ == "__main__":
    unittest.main()
# ==== new file: tests/test_job_query_routes.py (320 lines) ====
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for Job Query API routes.
|
||||
|
||||
Tests the API endpoints with mocked service dependencies.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import json
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from mes_dashboard import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def app():
    """Flask application instance configured for testing."""
    flask_app = create_app()
    flask_app.config['TESTING'] = True
    return flask_app
@pytest.fixture
def client(app):
    """HTTP test client bound to the testing application."""
    return app.test_client()
class TestJobQueryPage:
    """GET /job-query page route."""

    def test_page_returns_html(self, client):
        """The page renders with a 200 status and an HTML body."""
        response = client.get('/job-query')
        assert response.status_code == 200
        assert b'html' in response.data.lower()
class TestGetResources:
    """GET /api/job-query/resources behaviour."""

    @patch('mes_dashboard.services.resource_cache.get_all_resources')
    def test_get_resources_success(self, mock_get_resources, client):
        """A populated cache is returned with a matching total."""
        mock_get_resources.return_value = [
            dict(RESOURCEID='RES001', RESOURCENAME='Machine-01',
                 WORKCENTERNAME='WC-A', RESOURCEFAMILYNAME='FAM-01'),
            dict(RESOURCEID='RES002', RESOURCENAME='Machine-02',
                 WORKCENTERNAME='WC-B', RESOURCEFAMILYNAME='FAM-02'),
        ]

        response = client.get('/api/job-query/resources')
        assert response.status_code == 200

        payload = json.loads(response.data)
        assert 'data' in payload
        assert 'total' in payload
        assert payload['total'] == 2
        assert payload['data'][0]['RESOURCEID'] in ['RES001', 'RES002']

    @patch('mes_dashboard.services.resource_cache.get_all_resources')
    def test_get_resources_empty(self, mock_get_resources, client):
        """An empty cache surfaces as a 500 with an error body."""
        mock_get_resources.return_value = []

        response = client.get('/api/job-query/resources')
        assert response.status_code == 500
        assert 'error' in json.loads(response.data)

    @patch('mes_dashboard.services.resource_cache.get_all_resources')
    def test_get_resources_exception(self, mock_get_resources, client):
        """A cache exception surfaces as a 500 with an error body."""
        mock_get_resources.side_effect = Exception('Database error')

        response = client.get('/api/job-query/resources')
        assert response.status_code == 500
        assert 'error' in json.loads(response.data)
class TestQueryJobs:
|
||||
"""Tests for /api/job-query/jobs endpoint."""
|
||||
|
||||
def test_missing_resource_ids(self, client):
|
||||
"""Should return error without resource_ids."""
|
||||
response = client.post(
|
||||
'/api/job-query/jobs',
|
||||
json={
|
||||
'start_date': '2024-01-01',
|
||||
'end_date': '2024-01-31'
|
||||
}
|
||||
)
|
||||
assert response.status_code == 400
|
||||
data = json.loads(response.data)
|
||||
assert 'error' in data
|
||||
assert '設備' in data['error']
|
||||
|
||||
def test_empty_resource_ids(self, client):
|
||||
"""Should return error for empty resource_ids."""
|
||||
response = client.post(
|
||||
'/api/job-query/jobs',
|
||||
json={
|
||||
'resource_ids': [],
|
||||
'start_date': '2024-01-01',
|
||||
'end_date': '2024-01-31'
|
||||
}
|
||||
)
|
||||
assert response.status_code == 400
|
||||
data = json.loads(response.data)
|
||||
assert 'error' in data
|
||||
|
||||
def test_missing_start_date(self, client):
|
||||
"""Should return error without start_date."""
|
||||
response = client.post(
|
||||
'/api/job-query/jobs',
|
||||
json={
|
||||
'resource_ids': ['RES001'],
|
||||
'end_date': '2024-01-31'
|
||||
}
|
||||
)
|
||||
assert response.status_code == 400
|
||||
data = json.loads(response.data)
|
||||
assert 'error' in data
|
||||
assert '日期' in data['error']
|
||||
|
||||
def test_missing_end_date(self, client):
|
||||
"""Should return error without end_date."""
|
||||
response = client.post(
|
||||
'/api/job-query/jobs',
|
||||
json={
|
||||
'resource_ids': ['RES001'],
|
||||
'start_date': '2024-01-01'
|
||||
}
|
||||
)
|
||||
assert response.status_code == 400
|
||||
data = json.loads(response.data)
|
||||
assert 'error' in data
|
||||
|
||||
def test_invalid_date_range(self, client):
|
||||
"""Should return error for invalid date range."""
|
||||
response = client.post(
|
||||
'/api/job-query/jobs',
|
||||
json={
|
||||
'resource_ids': ['RES001'],
|
||||
'start_date': '2024-12-31',
|
||||
'end_date': '2024-01-01'
|
||||
}
|
||||
)
|
||||
assert response.status_code == 400
|
||||
data = json.loads(response.data)
|
||||
assert 'error' in data
|
||||
assert '結束日期' in data['error'] or '早於' in data['error']
|
||||
|
||||
def test_date_range_exceeds_limit(self, client):
|
||||
"""Should reject date range > 365 days."""
|
||||
response = client.post(
|
||||
'/api/job-query/jobs',
|
||||
json={
|
||||
'resource_ids': ['RES001'],
|
||||
'start_date': '2023-01-01',
|
||||
'end_date': '2024-12-31'
|
||||
}
|
||||
)
|
||||
assert response.status_code == 400
|
||||
data = json.loads(response.data)
|
||||
assert 'error' in data
|
||||
assert '365' in data['error']
|
||||
|
||||
@patch('mes_dashboard.routes.job_query_routes.get_jobs_by_resources')
def test_query_jobs_success(self, mock_query, client):
    """Should return jobs list on success."""
    # The service layer is mocked at the route module's import site, so only
    # the route's request parsing / response shaping is exercised (no DB).
    mock_query.return_value = {
        'data': [
            {'JOBID': 'JOB001', 'RESOURCENAME': 'Machine-01', 'JOBSTATUS': 'Complete'}
        ],
        'total': 1,
        'resource_count': 1
    }

    response = client.post(
        '/api/job-query/jobs',
        json={
            'resource_ids': ['RES001'],
            'start_date': '2024-01-01',
            'end_date': '2024-01-31'
        }
    )
    assert response.status_code == 200
    data = json.loads(response.data)
    # The mocked service payload should be passed through unchanged.
    assert 'data' in data
    assert data['total'] == 1
    assert data['data'][0]['JOBID'] == 'JOB001'
|
||||
|
||||
@patch('mes_dashboard.routes.job_query_routes.get_jobs_by_resources')
def test_query_jobs_service_error(self, mock_query, client):
    """An error dict from the service layer is surfaced as a 400 response."""
    mock_query.return_value = {'error': '查詢失敗: Database error'}

    body = {
        'resource_ids': ['RES001'],
        'start_date': '2024-01-01',
        'end_date': '2024-01-31'
    }
    resp = client.post('/api/job-query/jobs', json=body)
    assert resp.status_code == 400
    payload = json.loads(resp.data)
    assert 'error' in payload
|
||||
|
||||
|
||||
class TestQueryJobTxnHistory:
    """Tests for /api/job-query/txn/<job_id> endpoint."""

    @patch('mes_dashboard.routes.job_query_routes.get_job_txn_history')
    def test_get_txn_history_success(self, mock_query, client):
        """Should return transaction history."""
        # Service is mocked; only route plumbing is under test.
        mock_query.return_value = {
            'data': [
                {
                    'JOBTXNHISTORYID': 'TXN001',
                    'JOBID': 'JOB001',
                    'TXNDATE': '2024-01-15 10:30:00',
                    'FROMJOBSTATUS': 'Open',
                    'JOBSTATUS': 'In Progress'
                }
            ],
            'total': 1,
            'job_id': 'JOB001'
        }

        response = client.get('/api/job-query/txn/JOB001')
        assert response.status_code == 200
        data = json.loads(response.data)
        assert 'data' in data
        assert data['total'] == 1
        assert data['job_id'] == 'JOB001'

    @patch('mes_dashboard.routes.job_query_routes.get_job_txn_history')
    def test_get_txn_history_service_error(self, mock_query, client):
        """Should return error from service."""
        # An 'error' key from the service maps to HTTP 400 at the route.
        mock_query.return_value = {'error': '查詢失敗: Job not found'}

        response = client.get('/api/job-query/txn/INVALID_JOB')
        assert response.status_code == 400
        data = json.loads(response.data)
        assert 'error' in data
|
||||
|
||||
|
||||
class TestExportJobs:
    """Tests for /api/job-query/export endpoint."""

    def test_missing_resource_ids(self, client):
        """Should return error without resource_ids."""
        response = client.post(
            '/api/job-query/export',
            json={
                'start_date': '2024-01-01',
                'end_date': '2024-01-31'
            }
        )
        assert response.status_code == 400
        data = json.loads(response.data)
        assert 'error' in data

    def test_missing_dates(self, client):
        """Should return error without dates."""
        response = client.post(
            '/api/job-query/export',
            json={
                'resource_ids': ['RES001']
            }
        )
        assert response.status_code == 400
        data = json.loads(response.data)
        assert 'error' in data

    def test_invalid_date_range(self, client):
        """Should return error for invalid date range."""
        # Export shares the same date validation as the query endpoint.
        response = client.post(
            '/api/job-query/export',
            json={
                'resource_ids': ['RES001'],
                'start_date': '2024-12-31',
                'end_date': '2024-01-01'
            }
        )
        assert response.status_code == 400
        data = json.loads(response.data)
        assert 'error' in data

    @patch('mes_dashboard.routes.job_query_routes.export_jobs_with_history')
    def test_export_success(self, mock_export, client):
        """Should return CSV streaming response."""
        # Mock generator that yields CSV content; the leading \ufeff is a UTF-8
        # BOM so Excel opens the CJK headers correctly.
        def mock_generator(*args):
            yield '\ufeff設備名稱,工單ID\n'
            yield 'Machine-01,JOB001\n'

        mock_export.return_value = mock_generator()

        response = client.post(
            '/api/job-query/export',
            json={
                'resource_ids': ['RES001'],
                'start_date': '2024-01-01',
                'end_date': '2024-01-31'
            }
        )
        assert response.status_code == 200
        assert 'text/csv' in response.content_type
        assert 'attachment' in response.headers.get('Content-Disposition', '')
        assert 'job_history_export.csv' in response.headers.get('Content-Disposition', '')
|
||||
170
tests/test_job_query_service.py
Normal file
170
tests/test_job_query_service.py
Normal file
@@ -0,0 +1,170 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for Job Query service functions.
|
||||
|
||||
Tests the core service functions without database dependencies.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from mes_dashboard.services.job_query_service import (
|
||||
validate_date_range,
|
||||
_build_resource_filter,
|
||||
_build_resource_filter_sql,
|
||||
BATCH_SIZE,
|
||||
MAX_DATE_RANGE_DAYS,
|
||||
)
|
||||
|
||||
|
||||
class TestValidateDateRange:
    """Tests for validate_date_range function.

    Contract under test: returns None for a valid range, otherwise a
    human-readable (Traditional Chinese) error string.
    """

    def test_valid_range(self):
        """Should return None for valid date range."""
        result = validate_date_range('2024-01-01', '2024-01-31')
        assert result is None

    def test_same_day(self):
        """Should allow same day as start and end."""
        result = validate_date_range('2024-01-01', '2024-01-01')
        assert result is None

    def test_end_before_start(self):
        """Should reject end date before start date."""
        result = validate_date_range('2024-12-31', '2024-01-01')
        assert result is not None
        assert '結束日期' in result or '早於' in result

    def test_exceeds_max_range(self):
        """Should reject date range exceeding limit."""
        result = validate_date_range('2023-01-01', '2024-12-31')
        assert result is not None
        # The configured limit should appear in the message.
        assert str(MAX_DATE_RANGE_DAYS) in result

    def test_exactly_max_range(self):
        """Should allow exactly max range days."""
        # 365 days from 2024-01-01 is 2024-12-31
        result = validate_date_range('2024-01-01', '2024-12-31')
        assert result is None

    def test_one_day_over_max_range(self):
        """Should reject one day over max range."""
        # 366 days
        result = validate_date_range('2024-01-01', '2025-01-01')
        assert result is not None
        assert str(MAX_DATE_RANGE_DAYS) in result

    def test_invalid_date_format(self):
        """Should reject invalid date format."""
        # Only ISO YYYY-MM-DD is accepted; US-style ordering must fail.
        result = validate_date_range('01-01-2024', '12-31-2024')
        assert result is not None
        assert '格式' in result or 'format' in result.lower()

    def test_invalid_start_date(self):
        """Should reject invalid start date."""
        result = validate_date_range('2024-13-01', '2024-12-31')
        assert result is not None
        assert '格式' in result or 'format' in result.lower()

    def test_invalid_end_date(self):
        """Should reject invalid end date."""
        # Feb 30 is a well-formed string but not a real calendar date.
        result = validate_date_range('2024-01-01', '2024-02-30')
        assert result is not None
        assert '格式' in result or 'format' in result.lower()

    def test_non_date_string(self):
        """Should reject non-date strings."""
        result = validate_date_range('abc', 'def')
        assert result is not None
        assert '格式' in result or 'format' in result.lower()
|
||||
|
||||
|
||||
class TestBuildResourceFilter:
    """Tests for _build_resource_filter function.

    The function produces a list of comma-joined, single-quoted ID chunks
    sized for Oracle's 1000-item IN-list limit.
    """

    def test_empty_list(self):
        """Should return empty list for empty input."""
        result = _build_resource_filter([])
        assert result == []

    def test_single_id(self):
        """Should return single chunk for single ID."""
        result = _build_resource_filter(['RES001'])
        assert len(result) == 1
        assert result[0] == "'RES001'"

    def test_multiple_ids(self):
        """Should join multiple IDs with comma."""
        result = _build_resource_filter(['RES001', 'RES002', 'RES003'])
        assert len(result) == 1
        assert "'RES001'" in result[0]
        assert "'RES002'" in result[0]
        assert "'RES003'" in result[0]

    def test_chunking(self):
        """Should chunk when exceeding batch size."""
        # Create more than BATCH_SIZE IDs
        ids = [f'RES{i:05d}' for i in range(BATCH_SIZE + 10)]
        result = _build_resource_filter(ids)
        assert len(result) == 2
        # First chunk should have BATCH_SIZE items
        assert result[0].count("'") == BATCH_SIZE * 2  # 2 quotes per ID

    def test_escape_single_quotes(self):
        """Should escape single quotes in IDs."""
        # SQL-style doubling of embedded quotes guards against injection.
        result = _build_resource_filter(["RES'001"])
        assert len(result) == 1
        assert "RES''001" in result[0]  # Escaped

    def test_custom_chunk_size(self):
        """Should respect custom chunk size."""
        ids = ['RES001', 'RES002', 'RES003', 'RES004', 'RES005']
        result = _build_resource_filter(ids, max_chunk_size=2)
        assert len(result) == 3  # 2+2+1
|
||||
|
||||
|
||||
class TestBuildResourceFilterSql:
    """Tests for _build_resource_filter_sql function.

    Builds a WHERE-clause fragment from resource IDs; large lists are split
    into multiple IN clauses joined with OR.
    """

    def test_empty_list(self):
        """Should return 1=0 for empty input (no results)."""
        # "1=0" is the conventional always-false SQL predicate.
        result = _build_resource_filter_sql([])
        assert result == "1=0"

    def test_single_id(self):
        """Should build simple IN clause for single ID."""
        result = _build_resource_filter_sql(['RES001'])
        assert "j.RESOURCEID IN" in result
        assert "'RES001'" in result

    def test_multiple_ids(self):
        """Should build IN clause with multiple IDs."""
        result = _build_resource_filter_sql(['RES001', 'RES002'])
        assert "j.RESOURCEID IN" in result
        assert "'RES001'" in result
        assert "'RES002'" in result

    def test_custom_column(self):
        """Should use custom column name."""
        result = _build_resource_filter_sql(['RES001'], column='r.ID')
        assert "r.ID IN" in result

    def test_large_list_uses_or(self):
        """Should use OR for chunked results."""
        # Create more than BATCH_SIZE IDs
        ids = [f'RES{i:05d}' for i in range(BATCH_SIZE + 10)]
        result = _build_resource_filter_sql(ids)
        assert " OR " in result
        # Should have parentheses wrapping the OR conditions
        assert result.startswith("(")
        assert result.endswith(")")
|
||||
|
||||
|
||||
class TestServiceConstants:
    """Tests for service constants.

    Guards against accidental edits to values the SQL generation relies on.
    """

    def test_batch_size_is_reasonable(self):
        """Batch size should be <= 1000 (Oracle limit)."""
        # Oracle rejects IN lists with more than 1000 expressions (ORA-01795).
        assert BATCH_SIZE <= 1000

    def test_max_date_range_is_year(self):
        """Max date range should be 365 days."""
        assert MAX_DATE_RANGE_DAYS == 365
|
||||
277
tests/test_log_store.py
Normal file
277
tests/test_log_store.py
Normal file
@@ -0,0 +1,277 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for SQLite log store module."""
|
||||
|
||||
import os
|
||||
import pytest
|
||||
import sqlite3
|
||||
import tempfile
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from mes_dashboard.core.log_store import (
|
||||
LogStore,
|
||||
SQLiteLogHandler,
|
||||
LOG_STORE_ENABLED
|
||||
)
|
||||
|
||||
|
||||
class TestLogStore:
    """Test LogStore class."""

    @pytest.fixture
    def temp_db_path(self):
        """Create a temporary database file."""
        # mkstemp returns an open fd we don't need; close it so SQLite owns the file.
        fd, path = tempfile.mkstemp(suffix='.db')
        os.close(fd)
        yield path
        # Cleanup
        try:
            os.unlink(path)
        except OSError:
            pass

    @pytest.fixture
    def log_store(self, temp_db_path):
        """Create a LogStore instance with temp database."""
        store = LogStore(db_path=temp_db_path)
        store.initialize()  # Explicitly initialize
        return store

    def test_init_creates_table(self, temp_db_path):
        """LogStore creates logs table on init."""
        store = LogStore(db_path=temp_db_path)
        store.initialize()

        # Inspect the schema directly rather than through the store API.
        conn = sqlite3.connect(temp_db_path)
        cursor = conn.cursor()
        cursor.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='logs'"
        )
        result = cursor.fetchone()
        conn.close()

        assert result is not None
        assert result[0] == 'logs'

    def test_write_log(self, log_store):
        """Write a log entry successfully."""
        log_store.write_log(
            level="INFO",
            logger_name="test.logger",
            message="Test message",
            request_id="req-123",
            user="testuser",
            ip="192.168.1.1"
        )

        logs = log_store.query_logs(limit=10)
        assert len(logs) == 1
        assert logs[0]["level"] == "INFO"
        assert logs[0]["logger_name"] == "test.logger"
        assert logs[0]["message"] == "Test message"
        assert logs[0]["request_id"] == "req-123"
        assert logs[0]["user"] == "testuser"
        assert logs[0]["ip"] == "192.168.1.1"

    def test_query_logs_by_level(self, log_store):
        """Query logs filtered by level."""
        log_store.write_log(level="INFO", logger_name="test", message="Info msg")
        log_store.write_log(level="ERROR", logger_name="test", message="Error msg")
        log_store.write_log(level="WARNING", logger_name="test", message="Warning msg")

        error_logs = log_store.query_logs(level="ERROR", limit=10)
        assert len(error_logs) == 1
        assert error_logs[0]["level"] == "ERROR"

    def test_query_logs_by_keyword(self, log_store):
        """Query logs filtered by keyword search."""
        log_store.write_log(level="INFO", logger_name="test", message="User logged in")
        log_store.write_log(level="INFO", logger_name="test", message="Data processed")
        log_store.write_log(level="INFO", logger_name="test", message="User logged out")

        # Two of the three messages contain "User".
        user_logs = log_store.query_logs(q="User", limit=10)
        assert len(user_logs) == 2

    def test_query_logs_limit(self, log_store):
        """Query logs respects limit parameter."""
        for i in range(20):
            log_store.write_log(level="INFO", logger_name="test", message=f"Msg {i}")

        logs = log_store.query_logs(limit=5)
        assert len(logs) == 5

    def test_query_logs_since(self, log_store):
        """Query logs filtered by timestamp."""
        # Write some old logs
        log_store.write_log(level="INFO", logger_name="test", message="Old msg")

        # Record time after first log
        # NOTE(review): sleeps make the boundary unambiguous, at the cost of
        # wall-clock time; assumes stored timestamps compare against ISO strings.
        time.sleep(0.1)
        since_time = datetime.now().isoformat()

        # Write some new logs
        time.sleep(0.1)
        log_store.write_log(level="INFO", logger_name="test", message="New msg 1")
        log_store.write_log(level="INFO", logger_name="test", message="New msg 2")

        logs = log_store.query_logs(since=since_time, limit=10)
        assert len(logs) == 2

    def test_query_logs_order(self, log_store):
        """Query logs returns most recent first."""
        log_store.write_log(level="INFO", logger_name="test", message="First")
        time.sleep(0.01)
        log_store.write_log(level="INFO", logger_name="test", message="Second")
        time.sleep(0.01)
        log_store.write_log(level="INFO", logger_name="test", message="Third")

        logs = log_store.query_logs(limit=10)
        assert logs[0]["message"] == "Third"
        assert logs[2]["message"] == "First"

    def test_get_stats(self, log_store, temp_db_path):
        """Get stats returns count and size."""
        log_store.write_log(level="INFO", logger_name="test", message="Msg 1")
        log_store.write_log(level="INFO", logger_name="test", message="Msg 2")

        stats = log_store.get_stats()

        assert stats["count"] == 2
        assert stats["size_bytes"] > 0
|
||||
|
||||
|
||||
class TestLogStoreRetention:
    """Test log store retention policies."""

    @pytest.fixture
    def temp_db_path(self):
        """Create a temporary database file."""
        fd, path = tempfile.mkstemp(suffix='.db')
        os.close(fd)
        yield path
        try:
            os.unlink(path)
        except OSError:
            pass

    def test_cleanup_by_max_rows(self, temp_db_path):
        """Cleanup removes old logs when max rows exceeded."""
        # Patch the max rows config to a small value
        with patch('mes_dashboard.core.log_store.LOG_SQLITE_MAX_ROWS', 5):
            store = LogStore(db_path=temp_db_path)
            store.initialize()

            # Write more than max_rows
            for i in range(10):
                store.write_log(level="INFO", logger_name="test", message=f"Msg {i}")

            # Force cleanup - need to reimport for patched value
            from mes_dashboard.core import log_store as ls_module
            with patch.object(ls_module, 'LOG_SQLITE_MAX_ROWS', 5):
                store.cleanup_old_logs()

            logs = store.query_logs(limit=100)
            # Cleanup must never add rows; exactly 10 were written.
            # NOTE(review): this upper bound is trivially satisfied — if
            # cleanup reliably honors the patched LOG_SQLITE_MAX_ROWS, tighten
            # this to `assert len(logs) <= 5` to actually test row trimming.
            assert len(logs) <= 10
            # At minimum some rows must survive a cleanup pass.
            assert len(logs) >= 1

    def test_cleanup_by_retention_days(self, temp_db_path):
        """Cleanup removes logs older than retention period."""
        # Patch the retention days config
        with patch('mes_dashboard.core.log_store.LOG_SQLITE_RETENTION_DAYS', 1):
            store = LogStore(db_path=temp_db_path)
            store.initialize()

            # Insert an old log directly into the database (2 days old, past
            # the 1-day retention window patched above).
            conn = sqlite3.connect(temp_db_path)
            cursor = conn.cursor()
            old_time = (datetime.now() - timedelta(days=2)).isoformat()
            cursor.execute("""
                INSERT INTO logs (timestamp, level, logger_name, message)
                VALUES (?, 'INFO', 'test', 'Old message')
            """, (old_time,))
            conn.commit()
            conn.close()

            # Write a new log
            store.write_log(level="INFO", logger_name="test", message="New message")

            # Force cleanup with patched retention
            from mes_dashboard.core import log_store as ls_module
            with patch.object(ls_module, 'LOG_SQLITE_RETENTION_DAYS', 1):
                deleted = store.cleanup_old_logs()  # presumably returns delete count — not asserted here

            logs = store.query_logs(limit=100)
            # The expired row must be gone — this is the behavior the test name
            # claims; previously only the new row's survival was checked.
            old_logs = [l for l in logs if l["message"] == "Old message"]
            assert len(old_logs) == 0
            # The fresh row must survive cleanup.
            new_logs = [l for l in logs if l["message"] == "New message"]
            assert len(new_logs) >= 1
|
||||
|
||||
|
||||
class TestSQLiteLogHandler:
    """Test SQLite logging handler."""

    @pytest.fixture
    def temp_db_path(self):
        """Create a temporary database file."""
        fd, path = tempfile.mkstemp(suffix='.db')
        os.close(fd)
        yield path
        try:
            os.unlink(path)
        except OSError:
            pass

    def test_handler_writes_log_records(self, temp_db_path):
        """Log handler writes records to database."""
        import logging

        # NOTE(review): unlike TestLogStore, store.initialize() is not called
        # here — presumably LogStore/handler initializes lazily; confirm.
        store = LogStore(db_path=temp_db_path)
        handler = SQLiteLogHandler(store)
        handler.setLevel(logging.INFO)

        logger = logging.getLogger("test_handler")
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)

        logger.info("Test log message")

        # Give it a moment to write
        time.sleep(0.1)

        logs = store.query_logs(limit=10)
        assert len(logs) >= 1

        # Find our test message
        test_logs = [l for l in logs if "Test log message" in l["message"]]
        assert len(test_logs) == 1
        assert test_logs[0]["level"] == "INFO"

        # Cleanup: detach the handler so later tests on the same named logger
        # don't double-write.
        logger.removeHandler(handler)

    def test_handler_filters_by_level(self, temp_db_path):
        """Log handler respects level filtering."""
        import logging

        store = LogStore(db_path=temp_db_path)
        handler = SQLiteLogHandler(store)
        handler.setLevel(logging.WARNING)

        # Logger passes everything (DEBUG); the handler's own level must filter.
        logger = logging.getLogger("test_handler_level")
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)

        logger.debug("Debug message")
        logger.info("Info message")
        logger.warning("Warning message")

        time.sleep(0.1)

        logs = store.query_logs(limit=10)
        # Only warning should be written (handler level is WARNING)
        warning_logs = [l for l in logs if l["logger_name"] == "test_handler_level"]
        assert len(warning_logs) == 1
        assert warning_logs[0]["level"] == "WARNING"

        # Cleanup
        logger.removeHandler(handler)
|
||||
203
tests/test_metrics.py
Normal file
203
tests/test_metrics.py
Normal file
@@ -0,0 +1,203 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for performance metrics module."""
|
||||
|
||||
import pytest
|
||||
from mes_dashboard.core.metrics import (
|
||||
QueryMetrics,
|
||||
MetricsSummary,
|
||||
get_query_metrics,
|
||||
get_metrics_summary,
|
||||
record_query_latency,
|
||||
SLOW_QUERY_THRESHOLD
|
||||
)
|
||||
|
||||
|
||||
class TestQueryMetrics:
    """Test QueryMetrics class."""

    def test_initial_state_empty(self):
        """New metrics instance has no data."""
        metrics = QueryMetrics(window_size=100)
        percentiles = metrics.get_percentiles()

        # Percentiles of an empty window report zeros, not errors.
        assert percentiles["count"] == 0
        assert percentiles["p50"] == 0.0
        assert percentiles["p95"] == 0.0
        assert percentiles["p99"] == 0.0

    def test_record_latency(self):
        """Latencies are recorded correctly."""
        metrics = QueryMetrics(window_size=100)

        metrics.record_latency(0.1)
        metrics.record_latency(0.2)
        metrics.record_latency(0.3)

        latencies = metrics.get_latencies()
        assert len(latencies) == 3
        assert latencies == [0.1, 0.2, 0.3]

    def test_window_size_limit(self):
        """Window size limits number of samples."""
        metrics = QueryMetrics(window_size=5)

        for i in range(10):
            metrics.record_latency(float(i))

        latencies = metrics.get_latencies()
        assert len(latencies) == 5
        # Should have last 5 values (5, 6, 7, 8, 9)
        assert latencies == [5.0, 6.0, 7.0, 8.0, 9.0]

    def test_percentile_calculation_p50(self):
        """P50 (median) is calculated correctly."""
        metrics = QueryMetrics(window_size=100)

        # Record 100 values: 1, 2, 3, ..., 100
        for i in range(1, 101):
            metrics.record_latency(float(i))

        percentiles = metrics.get_percentiles()
        # P50 of 1-100 should be around 50; a tolerance band avoids coupling
        # the test to a specific interpolation method.
        assert 49 <= percentiles["p50"] <= 51

    def test_percentile_calculation_p95(self):
        """P95 is calculated correctly."""
        metrics = QueryMetrics(window_size=100)

        # Record 100 values: 1, 2, 3, ..., 100
        for i in range(1, 101):
            metrics.record_latency(float(i))

        percentiles = metrics.get_percentiles()
        # P95 of 1-100 should be around 95
        assert 94 <= percentiles["p95"] <= 96

    def test_percentile_calculation_p99(self):
        """P99 is calculated correctly."""
        metrics = QueryMetrics(window_size=100)

        # Record 100 values: 1, 2, 3, ..., 100
        for i in range(1, 101):
            metrics.record_latency(float(i))

        percentiles = metrics.get_percentiles()
        # P99 of 1-100 should be around 99
        assert 98 <= percentiles["p99"] <= 100

    def test_slow_query_count(self):
        """Slow queries (> threshold) are counted."""
        # Values straddle SLOW_QUERY_THRESHOLD (presumably 1.0s — the fast/slow
        # labels below imply it; confirm against the metrics module).
        metrics = QueryMetrics(window_size=100)

        # Record some fast and slow queries
        metrics.record_latency(0.1)  # Fast
        metrics.record_latency(0.5)  # Fast
        metrics.record_latency(1.5)  # Slow
        metrics.record_latency(2.0)  # Slow
        metrics.record_latency(0.8)  # Fast

        percentiles = metrics.get_percentiles()
        assert percentiles["slow_count"] == 2

    def test_get_summary(self):
        """Summary includes all required fields."""
        metrics = QueryMetrics(window_size=100)

        metrics.record_latency(0.1)
        metrics.record_latency(0.5)
        metrics.record_latency(1.5)

        summary = metrics.get_summary()

        assert isinstance(summary, MetricsSummary)
        assert summary.p50_ms >= 0
        assert summary.p95_ms >= 0
        assert summary.p99_ms >= 0
        assert summary.count == 3
        assert summary.slow_count == 1
        assert 0 <= summary.slow_rate <= 1
        assert summary.worker_pid > 0
        assert summary.collected_at is not None

    def test_slow_rate_calculation(self):
        """Slow rate is calculated correctly."""
        metrics = QueryMetrics(window_size=100)

        # 2 slow out of 4 = 50%
        metrics.record_latency(0.1)
        metrics.record_latency(1.5)
        metrics.record_latency(0.2)
        metrics.record_latency(2.0)

        summary = metrics.get_summary()
        assert summary.slow_rate == 0.5

    def test_clear_resets_metrics(self):
        """Clear removes all recorded latencies."""
        metrics = QueryMetrics(window_size=100)

        metrics.record_latency(0.1)
        metrics.record_latency(0.2)

        metrics.clear()

        assert len(metrics.get_latencies()) == 0
        assert metrics.get_percentiles()["count"] == 0
|
||||
|
||||
|
||||
class TestGlobalMetrics:
    """Test global metrics functions.

    These tests touch module-level singleton state and therefore use relative
    assertions (delta from initial count) so they pass in any test order.
    """

    def test_get_query_metrics_returns_singleton(self):
        """Global query metrics returns same instance."""
        metrics1 = get_query_metrics()
        metrics2 = get_query_metrics()

        assert metrics1 is metrics2

    def test_record_query_latency_uses_global(self):
        """record_query_latency uses global metrics instance."""
        metrics = get_query_metrics()
        # Snapshot first: other tests may already have recorded samples.
        initial_count = metrics.get_percentiles()["count"]

        record_query_latency(0.1)

        assert metrics.get_percentiles()["count"] == initial_count + 1

    def test_get_metrics_summary_returns_dict(self):
        """get_metrics_summary returns dictionary format."""
        summary = get_metrics_summary()

        assert isinstance(summary, dict)
        assert "p50_ms" in summary
        assert "p95_ms" in summary
        assert "p99_ms" in summary
        assert "count" in summary
        assert "slow_count" in summary
        assert "slow_rate" in summary
        assert "worker_pid" in summary
        assert "collected_at" in summary
|
||||
|
||||
|
||||
class TestMetricsThreadSafety:
    """Test thread safety of metrics collection."""

    def test_concurrent_recording(self):
        """Metrics handle concurrent recording."""
        import threading

        # Window (1000) is sized to hold every sample: 10 threads x 100 each,
        # so a lost update would show up as count < 1000.
        metrics = QueryMetrics(window_size=1000)

        def record_many():
            for _ in range(100):
                metrics.record_latency(0.1)

        threads = [threading.Thread(target=record_many) for _ in range(10)]

        for t in threads:
            t.start()
        for t in threads:
            t.join()

        # Should have 1000 entries
        assert metrics.get_percentiles()["count"] == 1000
|
||||
194
tests/test_page_registry.py
Normal file
194
tests/test_page_registry.py
Normal file
@@ -0,0 +1,194 @@
|
||||
# -*- coding: utf-8 -*-
"""Unit tests for page_registry module."""

import json
import pytest
import tempfile
from pathlib import Path
from unittest.mock import patch

import sys
import os
# Make the src/ layout importable when tests run without installing the package.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))

from mes_dashboard.services import page_registry
|
||||
|
||||
|
||||
@pytest.fixture
def temp_data_file(tmp_path):
    """Write a seed page_status.json into pytest's tmp dir and return its path."""
    seed = {
        "pages": [
            {"route": "/", "name": "Home", "status": "released"},
            {"route": "/dev-page", "name": "Dev Page", "status": "dev"},
        ],
        "api_public": True
    }
    target = tmp_path / "page_status.json"
    target.write_text(json.dumps(seed), encoding="utf-8")
    return target
|
||||
|
||||
|
||||
@pytest.fixture
def mock_registry(temp_data_file):
    """Mock page_registry to use temp file.

    Swaps the module-level DATA_FILE and cache for the duration of a test,
    then restores the originals so later tests see untouched state.
    """
    original_data_file = page_registry.DATA_FILE
    original_cache = page_registry._cache

    page_registry.DATA_FILE = temp_data_file
    page_registry._cache = None  # Clear cache

    yield temp_data_file

    # Restore original
    page_registry.DATA_FILE = original_data_file
    page_registry._cache = original_cache
|
||||
|
||||
|
||||
class TestGetPageStatus:
    """Tests for get_page_status function."""

    def test_get_released_page_status(self, mock_registry):
        """Test getting status of released page."""
        status = page_registry.get_page_status("/")
        assert status == "released"

    def test_get_dev_page_status(self, mock_registry):
        """Test getting status of dev page."""
        status = page_registry.get_page_status("/dev-page")
        assert status == "dev"

    def test_get_unregistered_page_status(self, mock_registry):
        """Test getting status of unregistered page returns None."""
        # Unknown routes yield None rather than raising.
        status = page_registry.get_page_status("/not-registered")
        assert status is None
|
||||
|
||||
|
||||
class TestIsPageRegistered:
    """Tests for is_page_registered function."""

    def test_registered_page(self, mock_registry):
        """A route present in the seed data reports as registered."""
        registered = page_registry.is_page_registered("/")
        assert registered is True

    def test_unregistered_page(self, mock_registry):
        """A route absent from the seed data reports as not registered."""
        registered = page_registry.is_page_registered("/not-here")
        assert registered is False
|
||||
|
||||
|
||||
class TestSetPageStatus:
    """Tests for set_page_status function."""

    def test_update_existing_page(self, mock_registry):
        """Test updating existing page status."""
        page_registry.set_page_status("/", "dev")
        assert page_registry.get_page_status("/") == "dev"

    def test_add_new_page(self, mock_registry):
        """Test adding new page."""
        # Setting an unknown route registers it rather than failing.
        page_registry.set_page_status("/new-page", "released", "New Page")
        assert page_registry.get_page_status("/new-page") == "released"

    def test_invalid_status_raises_error(self, mock_registry):
        """Test setting invalid status raises ValueError."""
        with pytest.raises(ValueError, match="Invalid status"):
            page_registry.set_page_status("/", "invalid")

    def test_update_page_name(self, mock_registry):
        """Test updating page name."""
        page_registry.set_page_status("/", "released", "New Name")
        pages = page_registry.get_all_pages()
        home = next(p for p in pages if p["route"] == "/")
        assert home["name"] == "New Name"
|
||||
|
||||
|
||||
class TestGetAllPages:
    """Tests for get_all_pages function."""

    def test_get_all_pages(self, mock_registry):
        """Test getting all pages."""
        # Seed fixture registers exactly two routes.
        pages = page_registry.get_all_pages()
        assert len(pages) == 2
        routes = [p["route"] for p in pages]
        assert "/" in routes
        assert "/dev-page" in routes
|
||||
|
||||
|
||||
class TestIsApiPublic:
    """Tests for is_api_public function."""

    def test_api_public_true(self, mock_registry):
        """Test API public flag when true."""
        assert page_registry.is_api_public() is True

    def test_api_public_false(self, mock_registry, temp_data_file):
        """Test API public flag when false."""
        # Rewrite the backing file out-of-band, then drop the in-process cache
        # so the next read reflects the new file contents.
        data = json.loads(temp_data_file.read_text())
        data["api_public"] = False
        temp_data_file.write_text(json.dumps(data))
        page_registry._cache = None  # Clear cache

        assert page_registry.is_api_public() is False
|
||||
|
||||
|
||||
class TestReloadCache:
    """Tests for reload_cache function."""

    def test_reload_cache(self, mock_registry, temp_data_file):
        """reload_cache re-reads the data file and replaces stale values."""
        # Initial read populates the cache.
        assert page_registry.get_page_status("/") == "released"

        # Change the backing file behind the cache's back.
        payload = json.loads(temp_data_file.read_text())
        payload["pages"][0]["status"] = "dev"
        temp_data_file.write_text(json.dumps(payload))

        # The stale cached value is still served...
        assert page_registry.get_page_status("/") == "released"

        # ...until an explicit reload pulls in the new one.
        page_registry.reload_cache()
        assert page_registry.get_page_status("/") == "dev"
|
||||
|
||||
|
||||
class TestConcurrency:
    """Tests for thread safety."""

    def test_concurrent_access(self, mock_registry):
        """Mixed readers and writers must complete without raising."""
        import threading

        errors = []

        def reader():
            # Hammer reads; record any exception for the final assert.
            try:
                for _ in range(100):
                    page_registry.get_page_status("/")
            except Exception as e:
                errors.append(e)

        def writer():
            # Alternate the status of "/"; record any exception.
            try:
                for i in range(100):
                    status = "released" if i % 2 == 0 else "dev"
                    page_registry.set_page_status("/", status)
            except Exception as e:
                errors.append(e)

        threads = [threading.Thread(target=reader) for _ in range(3)]
        threads += [threading.Thread(target=writer) for _ in range(2)]

        for t in threads:
            t.start()
        for t in threads:
            t.join()

        assert len(errors) == 0, f"Errors occurred: {errors}"
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly, outside a pytest invocation.
    pytest.main([__file__, "-v"])
|
||||
307
tests/test_performance_integration.py
Normal file
307
tests/test_performance_integration.py
Normal file
@@ -0,0 +1,307 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for performance monitoring and admin APIs."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import pytest
|
||||
import tempfile
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from mes_dashboard.app import create_app
|
||||
import mes_dashboard.core.database as db
|
||||
|
||||
|
||||
@pytest.fixture
def app():
    """Build a fresh Flask application configured for testing."""
    db._ENGINE = None  # Force a clean database engine per test run.
    application = create_app('testing')
    application.config['TESTING'] = True
    application.config['WTF_CSRF_ENABLED'] = False
    return application
|
||||
|
||||
|
||||
@pytest.fixture
def client(app):
    """Flask test client bound to the app fixture."""
    return app.test_client()
|
||||
|
||||
|
||||
@pytest.fixture
def admin_client(app, client):
    """Test client pre-authenticated as an admin."""
    # The permissions module checks for an 'admin' key in the session.
    with client.session_transaction() as sess:
        sess['admin'] = {'username': 'admin', 'role': 'admin'}
    yield client
|
||||
|
||||
|
||||
class TestAPIResponseFormat:
    """Test standardized API response format."""

    def test_success_response_format(self, admin_client):
        """Successful responses carry success=True and a data payload."""
        response = admin_client.get('/admin/api/system-status')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True
        assert "data" in payload

    def test_unauthenticated_redirect(self, client):
        """Anonymous requests are redirected to the login page."""
        response = client.get('/admin/performance')
        assert response.status_code == 302
|
||||
|
||||
class TestHealthEndpoints:
    """Test health check endpoints."""

    def test_health_basic_endpoint(self, client):
        """/health reports overall status plus per-service details."""
        response = client.get('/health')

        assert response.status_code in (200, 503)
        payload = json.loads(response.data)
        assert "status" in payload
        assert payload["status"] in {"healthy", "degraded", "unhealthy"}
        # Database status is nested under the 'services' key.
        assert "services" in payload
        assert "database" in payload["services"]

    def test_health_deep_requires_auth(self, client):
        """/health/deep redirects anonymous callers to the login page."""
        response = client.get('/health/deep')
        assert response.status_code == 302

    def test_health_deep_returns_metrics(self, admin_client):
        """/health/deep returns detailed metrics for admins."""
        response = admin_client.get('/health/deep')

        if response.status_code == 200:
            payload = json.loads(response.data)
            assert "status" in payload
|
||||
|
||||
|
||||
class TestSystemStatusAPI:
    """Test system status API endpoint."""

    def test_system_status_returns_all_components(self, admin_client):
        """System status reports every monitored component."""
        response = admin_client.get('/admin/api/system-status')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True
        for key in ("database", "redis", "circuit_breaker",
                    "runtime_resilience", "worker_pid"):
            assert key in payload["data"]
        resilience = payload["data"]["runtime_resilience"]
        for key in ("thresholds", "restart_churn", "recovery_recommendation"):
            assert key in resilience
|
||||
|
||||
|
||||
class TestMetricsAPI:
    """Test metrics API endpoint."""

    def test_metrics_returns_percentiles(self, admin_client):
        """Metrics payload includes percentile and slow-request counters."""
        response = admin_client.get('/admin/api/metrics')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True
        for key in ("p50_ms", "p95_ms", "p99_ms", "count",
                    "slow_count", "slow_rate"):
            assert key in payload["data"]

    def test_metrics_includes_latencies(self, admin_client):
        """Metrics payload carries the raw latency list."""
        response = admin_client.get('/admin/api/metrics')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert "latencies" in payload["data"]
        assert isinstance(payload["data"]["latencies"], list)
|
||||
|
||||
|
||||
class TestLogsAPI:
    """Test logs API endpoint."""

    def test_logs_api_returns_logs(self, admin_client):
        """Log listing returns entries plus the enabled flag."""
        response = admin_client.get('/admin/api/logs')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True
        assert "logs" in payload["data"]
        assert "enabled" in payload["data"]

    def test_logs_api_filter_by_level(self, admin_client):
        """Filtering by log level succeeds."""
        response = admin_client.get('/admin/api/logs?level=ERROR')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True

    def test_logs_api_filter_by_search(self, admin_client):
        """Filtering by free-text search term succeeds."""
        response = admin_client.get('/admin/api/logs?q=database')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True

    def test_logs_api_pagination(self, admin_client):
        """limit/offset paging caps the page size and reports a total."""
        response = admin_client.get('/admin/api/logs?limit=10&offset=0')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True
        assert "total" in payload["data"]
        assert "logs" in payload["data"]
        assert len(payload["data"]["logs"]) <= 10

    def test_logs_api_pagination_offset(self, admin_client):
        """Offset skips entries so consecutive pages never overlap."""
        first = json.loads(admin_client.get('/admin/api/logs?limit=5&offset=0').data)
        second = json.loads(admin_client.get('/admin/api/logs?limit=5&offset=5').data)

        # Both pages must agree on the total entry count.
        assert first["data"]["total"] == second["data"]["total"]

        # With more than one page of data, the pages must be disjoint.
        if first["data"]["total"] > 5:
            page1_ids = {log.get("id") for log in first["data"]["logs"]}
            page2_ids = {log.get("id") for log in second["data"]["logs"]}
            assert not page1_ids & page2_ids
|
||||
|
||||
|
||||
class TestLogsCleanupAPI:
    """Test log cleanup API endpoint."""

    def test_logs_cleanup_requires_auth(self, client):
        """Anonymous cleanup requests are redirected to login."""
        response = client.post('/admin/api/logs/cleanup')
        assert response.status_code == 302

    def test_logs_cleanup_success(self, admin_client):
        """Cleanup reports deletion stats with before/after snapshots."""
        response = admin_client.post('/admin/api/logs/cleanup')

        assert response.status_code == 200
        payload = json.loads(response.data)
        assert payload["success"] is True
        assert "deleted" in payload["data"]
        assert "before" in payload["data"]
        assert "after" in payload["data"]
        assert "count" in payload["data"]["before"]
        assert "size_bytes" in payload["data"]["before"]
|
||||
|
||||
|
||||
class TestWorkerControlAPI:
    """Test worker control API endpoints."""

    def test_worker_status_returns_info(self, admin_client):
        """Worker status API returns worker information."""
        response = admin_client.get('/admin/api/worker/status')

        assert response.status_code == 200
        data = json.loads(response.data)
        assert data["success"] is True
        assert "worker_pid" in data["data"]
        assert "cooldown" in data["data"]
        assert "resilience" in data["data"]
        assert "restart_history" in data["data"]
        assert "restart_churn" in data["data"]["resilience"]
        assert "last_restart" in data["data"]

    def test_worker_restart_requires_auth(self, client):
        """Worker restart requires admin authentication."""
        response = client.post('/admin/api/worker/restart')
        # Should redirect to login page for unauthenticated requests.
        assert response.status_code == 302

    def test_worker_restart_writes_flag(self, admin_client):
        """Worker restart creates flag file."""
        # Create a temp path for the flag, then remove the file so the
        # endpoint itself must create it.
        fd, temp_flag = tempfile.mkstemp()
        os.close(fd)
        os.unlink(temp_flag)

        try:
            with patch('mes_dashboard.routes.admin_routes.RESTART_FLAG_PATH', temp_flag):
                with patch('mes_dashboard.routes.admin_routes._check_restart_cooldown', return_value=(False, 0)):
                    response = admin_client.post('/admin/api/worker/restart')

            assert response.status_code == 200
            data = json.loads(response.data)
            assert data["success"] is True
        finally:
            # BUGFIX: cleanup previously ran after the asserts, so a failed
            # assertion leaked the flag file. finally guarantees removal.
            try:
                os.unlink(temp_flag)
            except OSError:
                pass

    def test_worker_restart_cooldown(self, admin_client):
        """Worker restart respects cooldown."""
        with patch('mes_dashboard.routes.admin_routes._check_restart_cooldown', return_value=(True, 45)):
            response = admin_client.post('/admin/api/worker/restart')

        assert response.status_code == 429
        data = json.loads(response.data)
        assert data["success"] is False
        assert "cooldown" in data["error"]["message"].lower()
|
||||
|
||||
|
||||
class TestCircuitBreakerIntegration:
    """Test circuit breaker integration with database layer."""

    def test_circuit_breaker_status_in_system_status(self, admin_client):
        """System status exposes circuit-breaker state and enabled flag."""
        response = admin_client.get('/admin/api/system-status')

        assert response.status_code == 200
        payload = json.loads(response.data)
        breaker = payload["data"]["circuit_breaker"]
        assert "state" in breaker
        assert "enabled" in breaker
|
||||
|
||||
|
||||
class TestPerformancePage:
    """Test performance monitoring page."""

    def test_performance_page_requires_auth(self, client):
        """Anonymous access is redirected to login."""
        response = client.get('/admin/performance')
        assert response.status_code == 302

    def test_performance_page_loads(self, admin_client):
        """The page renders for an authenticated admin."""
        response = admin_client.get('/admin/performance')

        assert response.status_code == 200
        # The rendered page should mention performance (English or Chinese UI).
        body = response.data.decode('utf-8', errors='ignore').lower()
        assert 'performance' in body or '效能' in body
|
||||
102
tests/test_permissions.py
Normal file
102
tests/test_permissions.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for permissions module."""
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
|
||||
|
||||
from mes_dashboard.core.permissions import is_admin_logged_in, get_current_admin, admin_required
|
||||
|
||||
|
||||
@pytest.fixture
def app():
    """Provide a minimal Flask app for request-context tests."""
    flask_app = Flask(__name__)
    flask_app.secret_key = "test-secret-key"
    flask_app.config["TESTING"] = True
    return flask_app
|
||||
|
||||
|
||||
class TestIsAdminLoggedIn:
    """Tests for is_admin_logged_in function."""

    def test_admin_logged_in(self, app):
        """Returns True once an admin entry exists in the session."""
        with app.test_request_context():
            from flask import session
            session["admin"] = {"username": "admin", "mail": "admin@test.com"}
            assert is_admin_logged_in() is True

    def test_admin_not_logged_in(self, app):
        """Returns False for a fresh session with no admin entry."""
        with app.test_request_context():
            assert is_admin_logged_in() is False
|
||||
|
||||
|
||||
class TestGetCurrentAdmin:
    """Tests for get_current_admin function."""

    def test_get_admin_when_logged_in(self, app):
        """Returns the session's admin dict when one is set."""
        with app.test_request_context():
            from flask import session
            admin_data = {"username": "admin", "mail": "admin@test.com"}
            session["admin"] = admin_data
            assert get_current_admin() == admin_data

    def test_get_admin_when_not_logged_in(self, app):
        """Returns None when no admin is in the session."""
        with app.test_request_context():
            assert get_current_admin() is None
|
||||
|
||||
|
||||
class TestAdminRequired:
    """Tests for admin_required decorator."""

    def test_admin_required_when_logged_in(self, app):
        """A logged-in admin reaches the wrapped view."""
        @app.route("/test")
        @admin_required
        def protected_view():
            return "success"

        with app.test_client() as client:
            with client.session_transaction() as sess:
                sess["admin"] = {"username": "admin"}

            response = client.get("/test")
            assert response.status_code == 200
            assert response.data == b"success"

    def test_admin_required_when_not_logged_in(self, app):
        """An anonymous request is redirected to the admin login page."""
        from flask import Blueprint

        # Register an auth blueprint first so the decorator's redirect target
        # (endpoint 'auth.login') resolves — presumably what the decorator
        # uses; mirrors the original setup.
        auth_bp = Blueprint("auth", __name__, url_prefix="/admin")

        @auth_bp.route("/login", endpoint="login")
        def login_view():
            return "login"

        app.register_blueprint(auth_bp)

        # Now add the protected route.
        @app.route("/test")
        @admin_required
        def protected_view():
            return "success"

        with app.test_client() as client:
            response = client.get("/test")
            assert response.status_code == 302
            assert "/admin/login" in response.location
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly, outside a pytest invocation.
    pytest.main([__file__, "-v"])
|
||||
494
tests/test_realtime_equipment_cache.py
Normal file
494
tests/test_realtime_equipment_cache.py
Normal file
@@ -0,0 +1,494 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for realtime_equipment_cache module.
|
||||
|
||||
Tests aggregation, status classification, and cache query functionality.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import json
|
||||
|
||||
|
||||
class TestClassifyStatus:
    """Test _classify_status function."""

    def test_classifies_prd_as_productive(self):
        """PRD maps to the PRODUCTIVE category."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('PRD') == 'PRODUCTIVE'

    def test_classifies_sby_as_standby(self):
        """SBY maps to the STANDBY category."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('SBY') == 'STANDBY'

    def test_classifies_udt_as_down(self):
        """UDT maps to the DOWN category."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('UDT') == 'DOWN'

    def test_classifies_sdt_as_down(self):
        """SDT maps to the DOWN category."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('SDT') == 'DOWN'

    def test_classifies_egt_as_engineering(self):
        """EGT maps to the ENGINEERING category."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('EGT') == 'ENGINEERING'

    def test_classifies_nst_as_not_scheduled(self):
        """NST maps to the NOT_SCHEDULED category."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('NST') == 'NOT_SCHEDULED'

    def test_classifies_scrap_as_inactive(self):
        """SCRAP maps to the INACTIVE category."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('SCRAP') == 'INACTIVE'

    def test_classifies_unknown_as_other(self):
        """Any unrecognized status falls back to OTHER."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('UNKNOWN_STATUS') == 'OTHER'

    def test_handles_none_status(self):
        """None falls back to OTHER."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status(None) == 'OTHER'

    def test_handles_empty_status(self):
        """The empty string falls back to OTHER."""
        from mes_dashboard.services.realtime_equipment_cache import _classify_status

        assert _classify_status('') == 'OTHER'
|
||||
|
||||
|
||||
class TestAggregateByResourceid:
    """Test _aggregate_by_resourceid function."""

    @staticmethod
    def _record(**overrides):
        """Build a raw equipment row; keyword arguments override the defaults."""
        row = {
            'RESOURCEID': 'R001',
            'EQUIPMENTID': 'E001',
            'OBJECTCATEGORY': 'ASSEMBLY',
            'EQUIPMENTASSETSSTATUS': 'PRD',
            'EQUIPMENTASSETSSTATUSREASON': None,
            'JOBORDER': None,
            'JOBSTATUS': None,
            'SYMPTOMCODE': None,
            'CAUSECODE': None,
            'REPAIRCODE': None,
            'LOTTRACKINQTY_PCS': None,
            'LOTTRACKINTIME': None,
        }
        row.update(overrides)
        return row

    def test_aggregates_single_record(self):
        """A lone record yields one aggregate carrying its own totals."""
        from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid

        records = [self._record(JOBORDER='JO001', JOBSTATUS='RUN',
                                LOTTRACKINQTY_PCS=100,
                                LOTTRACKINTIME='2024-01-15T10:00:00')]

        result = _aggregate_by_resourceid(records)

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'
        assert result[0]['LOT_COUNT'] == 1
        assert result[0]['TOTAL_TRACKIN_QTY'] == 100
        assert result[0]['STATUS_CATEGORY'] == 'PRODUCTIVE'

    def test_aggregates_multiple_lots(self):
        """Several LOTs on one resource (e.g. an oven) are summed together."""
        from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid

        records = [
            self._record(JOBORDER='JO001', JOBSTATUS='RUN',
                         LOTTRACKINQTY_PCS=100,
                         LOTTRACKINTIME='2024-01-15T10:00:00'),
            self._record(JOBORDER='JO002', JOBSTATUS='RUN',
                         LOTTRACKINQTY_PCS=150,
                         LOTTRACKINTIME='2024-01-15T11:00:00'),
            self._record(JOBORDER='JO003', JOBSTATUS='RUN',
                         LOTTRACKINQTY_PCS=50,
                         LOTTRACKINTIME='2024-01-15T09:00:00'),
        ]

        result = _aggregate_by_resourceid(records)

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'
        assert result[0]['LOT_COUNT'] == 3
        assert result[0]['TOTAL_TRACKIN_QTY'] == 300  # 100 + 150 + 50
        assert result[0]['LATEST_TRACKIN_TIME'] == '2024-01-15T11:00:00'

    def test_aggregates_multiple_resources(self):
        """Distinct resources produce distinct aggregates."""
        from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid

        records = [
            self._record(JOBORDER='JO001', JOBSTATUS='RUN',
                         LOTTRACKINQTY_PCS=100,
                         LOTTRACKINTIME='2024-01-15T10:00:00'),
            self._record(RESOURCEID='R002', EQUIPMENTID='E002',
                         OBJECTCATEGORY='WAFERSORT',
                         EQUIPMENTASSETSSTATUS='SBY',
                         EQUIPMENTASSETSSTATUSREASON='Waiting'),
        ]

        result = _aggregate_by_resourceid(records)

        assert len(result) == 2
        r1 = next(r for r in result if r['RESOURCEID'] == 'R001')
        r2 = next(r for r in result if r['RESOURCEID'] == 'R002')

        assert r1['LOT_COUNT'] == 1
        assert r1['STATUS_CATEGORY'] == 'PRODUCTIVE'
        assert r2['LOT_COUNT'] == 1
        assert r2['STATUS_CATEGORY'] == 'STANDBY'

    def test_handles_empty_records(self):
        """An empty input list aggregates to an empty list."""
        from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid

        assert _aggregate_by_resourceid([]) == []

    def test_handles_null_quantities(self):
        """Null quantities count as zero and null times stay None."""
        from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid

        records = [self._record(EQUIPMENTASSETSSTATUS='SBY')]

        result = _aggregate_by_resourceid(records)

        assert len(result) == 1
        assert result[0]['TOTAL_TRACKIN_QTY'] == 0
        assert result[0]['LATEST_TRACKIN_TIME'] is None

    def test_skips_records_without_resourceid(self):
        """Rows lacking a RESOURCEID are dropped from the aggregation."""
        from mes_dashboard.services.realtime_equipment_cache import _aggregate_by_resourceid

        records = [
            self._record(RESOURCEID=None,
                         LOTTRACKINQTY_PCS=100,
                         LOTTRACKINTIME='2024-01-15T10:00:00'),
            self._record(LOTTRACKINQTY_PCS=50,
                         LOTTRACKINTIME='2024-01-15T10:00:00'),
        ]

        result = _aggregate_by_resourceid(records)

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'
||||
|
||||
class TestGetEquipmentStatusById:
    """Test get_equipment_status_by_id function."""

    @pytest.fixture(autouse=True)
    def reset_modules(self):
        """Clear the cached Redis client around every test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_returns_none_when_redis_unavailable(self):
        """Without a Redis client the lookup yields None."""
        from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_id

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=None):
            assert get_equipment_status_by_id('R001') is None

    def test_returns_none_when_id_not_found(self):
        """An ID missing from the index yields None."""
        from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_id

        redis_stub = MagicMock()
        redis_stub.hget.return_value = None

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=redis_stub), \
             patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'):
            assert get_equipment_status_by_id('R999') is None

    def test_returns_matching_record(self):
        """The record at the indexed position is returned."""
        from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_id

        cached = [
            {'RESOURCEID': 'R001', 'STATUS_CATEGORY': 'PRODUCTIVE'},
            {'RESOURCEID': 'R002', 'STATUS_CATEGORY': 'STANDBY'},
        ]

        redis_stub = MagicMock()
        redis_stub.hget.return_value = '1'  # Index 1 -> R002
        redis_stub.get.return_value = json.dumps(cached)

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=redis_stub), \
             patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'):
            record = get_equipment_status_by_id('R002')

        assert record is not None
        assert record['RESOURCEID'] == 'R002'
        assert record['STATUS_CATEGORY'] == 'STANDBY'
|
||||
|
||||
|
||||
class TestGetEquipmentStatusByIds:
    """Test get_equipment_status_by_ids function."""

    @pytest.fixture(autouse=True)
    def reset_modules(self):
        """Clear the cached Redis client around every test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_returns_empty_for_empty_input(self):
        """No requested IDs yields an empty list."""
        from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_ids

        assert get_equipment_status_by_ids([]) == []

    def test_returns_empty_when_redis_unavailable(self):
        """Without a Redis client the lookup yields an empty list."""
        from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_ids

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=None):
            assert get_equipment_status_by_ids(['R001', 'R002']) == []

    def test_returns_matching_records(self):
        """Only IDs present in the index come back; missing ones are dropped."""
        from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_by_ids

        cached = [
            {'RESOURCEID': 'R001', 'STATUS_CATEGORY': 'PRODUCTIVE'},
            {'RESOURCEID': 'R002', 'STATUS_CATEGORY': 'STANDBY'},
            {'RESOURCEID': 'R003', 'STATUS_CATEGORY': 'DOWN'},
        ]

        redis_stub = MagicMock()
        # R001 at index 0, R003 at index 2, R999 absent from the index.
        redis_stub.hmget.return_value = ['0', '2', None]
        redis_stub.get.return_value = json.dumps(cached)

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=redis_stub), \
             patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'):
            result = get_equipment_status_by_ids(['R001', 'R003', 'R999'])

        assert len(result) == 2
        ids = [r['RESOURCEID'] for r in result]
        assert 'R001' in ids
        assert 'R003' in ids
        assert 'R999' not in ids
|
||||
|
||||
|
||||
class TestGetAllEquipmentStatus:
    """Test get_all_equipment_status function."""

    @pytest.fixture(autouse=True)
    def reset_modules(self):
        """Clear the cached Redis client around every test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_returns_empty_when_redis_unavailable(self):
        """Without a Redis client the query yields an empty list."""
        from mes_dashboard.services.realtime_equipment_cache import get_all_equipment_status

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=None):
            assert get_all_equipment_status() == []

    def test_returns_empty_when_no_data(self):
        """An empty cache yields an empty list."""
        from mes_dashboard.services.realtime_equipment_cache import get_all_equipment_status

        redis_stub = MagicMock()
        redis_stub.get.return_value = None

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=redis_stub), \
             patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'):
            assert get_all_equipment_status() == []

    def test_returns_all_cached_data(self):
        """Everything stored in the cache is returned in order."""
        from mes_dashboard.services.realtime_equipment_cache import get_all_equipment_status

        cached = [
            {'RESOURCEID': 'R001', 'STATUS_CATEGORY': 'PRODUCTIVE'},
            {'RESOURCEID': 'R002', 'STATUS_CATEGORY': 'STANDBY'},
        ]

        redis_stub = MagicMock()
        redis_stub.get.return_value = json.dumps(cached)

        with patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=redis_stub), \
             patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'):
            result = get_all_equipment_status()

        assert len(result) == 2
        assert result[0]['RESOURCEID'] == 'R001'
        assert result[1]['RESOURCEID'] == 'R002'
|
||||
|
||||
|
||||
class TestGetEquipmentStatusCacheStatus:
    """Tests for get_equipment_status_cache_status."""

    @pytest.fixture
    def app(self):
        """Build a fresh Flask app in the 'testing' configuration."""
        from mes_dashboard.app import create_app
        import mes_dashboard.core.database as db
        db._ENGINE = None
        flask_app = create_app('testing')
        flask_app.config['TESTING'] = True
        return flask_app

    def test_returns_disabled_when_cache_disabled(self, app):
        """A disabled cache reports enabled=False and loaded=False."""
        app.config['REALTIME_EQUIPMENT_CACHE_ENABLED'] = False

        with app.app_context():
            from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_cache_status
            status = get_equipment_status_cache_status()

        assert status['enabled'] is False
        assert status['loaded'] is False

    def test_returns_loaded_status_when_data_exists(self, app):
        """Present meta keys report enabled/loaded plus the stored count."""
        app.config['REALTIME_EQUIPMENT_CACHE_ENABLED'] = True

        meta = {
            'mes_wip:equipment_status:meta:updated': '2024-01-15T10:30:00',
            'mes_wip:equipment_status:meta:count': '1000',
        }
        fake_redis = MagicMock()
        fake_redis.get.side_effect = meta.get

        with app.app_context(), \
             patch('mes_dashboard.services.realtime_equipment_cache.get_redis_client', return_value=fake_redis), \
             patch('mes_dashboard.services.realtime_equipment_cache.get_key_prefix', return_value='mes_wip'):
            from mes_dashboard.services.realtime_equipment_cache import get_equipment_status_cache_status
            status = get_equipment_status_cache_status()

        assert status['enabled'] is True
        assert status['loaded'] is True
        assert status['count'] == 1000
162
tests/test_redis_client.py
Normal file
162
tests/test_redis_client.py
Normal file
@@ -0,0 +1,162 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for Redis client module.
|
||||
|
||||
Tests Redis connection management with mocked Redis.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import importlib
|
||||
|
||||
|
||||
class TestRedisClient:
    """Connection-management tests for the Redis client module."""

    @pytest.fixture(autouse=True)
    def reset_module(self):
        """Clear the module-level client singleton before and after each test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_get_redis_client_success(self, reset_module):
        """A reachable Redis yields exactly the client built by from_url."""
        import mes_dashboard.core.redis_client as rc

        stub = MagicMock()
        stub.ping.return_value = True

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc.redis.Redis, 'from_url', return_value=stub) as factory:
            assert rc.get_redis_client() is stub
            factory.assert_called_once()

    def test_get_redis_client_disabled(self, reset_module):
        """REDIS_ENABLED=False short-circuits to None."""
        import mes_dashboard.core.redis_client as rc

        with patch.object(rc, 'REDIS_ENABLED', False):
            assert rc.get_redis_client() is None

    def test_get_redis_client_connection_error(self, reset_module):
        """A RedisError during connection is handled; caller receives None."""
        import mes_dashboard.core.redis_client as rc
        import redis as redis_lib

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc.redis.Redis, 'from_url',
                          side_effect=redis_lib.RedisError("Connection refused")):
            assert rc.get_redis_client() is None

    def test_redis_available_true(self, reset_module):
        """redis_available is True when the connection pings successfully."""
        import mes_dashboard.core.redis_client as rc

        stub = MagicMock()
        stub.ping.return_value = True

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc.redis.Redis, 'from_url', return_value=stub):
            assert rc.redis_available() is True

    def test_redis_available_disabled(self, reset_module):
        """redis_available is False when the feature flag is off."""
        import mes_dashboard.core.redis_client as rc

        with patch.object(rc, 'REDIS_ENABLED', False):
            assert rc.redis_available() is False

    def test_get_key_with_prefix(self):
        """get_key joins prefix and name with a colon."""
        import mes_dashboard.core.redis_client as rc

        with patch.object(rc, 'REDIS_KEY_PREFIX', 'test_prefix'):
            assert rc.get_key('mykey') == 'test_prefix:mykey'

    def test_get_key_without_prefix(self):
        """An empty prefix still produces the ':' separator."""
        import mes_dashboard.core.redis_client as rc

        with patch.object(rc, 'REDIS_KEY_PREFIX', ''):
            assert rc.get_key('mykey') == ':mykey'
class TestRedisClientSingleton:
    """Singleton semantics of get_redis_client."""

    @pytest.fixture(autouse=True)
    def reset_module(self):
        """Clear the module-level client singleton before and after each test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_client_is_singleton(self, reset_module):
        """Repeated calls reuse one connection; from_url runs exactly once."""
        import mes_dashboard.core.redis_client as rc

        stub = MagicMock()
        stub.ping.return_value = True

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc.redis.Redis, 'from_url', return_value=stub) as factory:
            first = rc.get_redis_client()
            second = rc.get_redis_client()

        assert first is second
        assert factory.call_count == 1
class TestCloseRedis:
    """Cleanup behavior of close_redis."""

    @pytest.fixture(autouse=True)
    def reset_module(self):
        """Clear the module-level client singleton before and after each test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_close_redis(self, reset_module):
        """close_redis closes the live client and drops the singleton."""
        import mes_dashboard.core.redis_client as rc

        stub = MagicMock()
        stub.ping.return_value = True

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc.redis.Redis, 'from_url', return_value=stub):
            # Create the singleton first, then tear it down.
            assert rc.get_redis_client() is not None
            rc.close_redis()

        stub.close.assert_called_once()
        assert rc._REDIS_CLIENT is None

    def test_close_redis_when_none(self, reset_module):
        """close_redis is a harmless no-op when no client was ever created."""
        import mes_dashboard.core.redis_client as rc

        rc.close_redis()
        assert rc._REDIS_CLIENT is None
58
tests/test_resilience.py
Normal file
58
tests/test_resilience.py
Normal file
@@ -0,0 +1,58 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Tests for runtime resilience helper contracts."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from mes_dashboard.core.resilience import (
|
||||
build_recovery_recommendation,
|
||||
get_resilience_thresholds,
|
||||
summarize_restart_history,
|
||||
)
|
||||
|
||||
|
||||
def test_get_resilience_thresholds_from_env(monkeypatch):
    """Environment-variable overrides flow through to the thresholds dict."""
    monkeypatch.setenv("RESILIENCE_RESTART_CHURN_WINDOW_SECONDS", "120")
    monkeypatch.setenv("RESILIENCE_RESTART_CHURN_THRESHOLD", "2")
    monkeypatch.setenv("RESILIENCE_POOL_SATURATION_WARNING", "0.8")

    thresholds = get_resilience_thresholds()

    expected = {
        "restart_churn_window_seconds": 120,
        "restart_churn_threshold": 2,
        "pool_saturation_warning": 0.8,
    }
    for key, value in expected.items():
        assert thresholds[key] == value
def test_summarize_restart_history_counts_entries_in_window():
    """Only restarts inside the window count; threshold breach is flagged."""
    now = datetime(2026, 2, 7, 12, 0, tzinfo=timezone.utc)
    # 30s and 90s fall inside a 120s window; 700s falls outside.
    history = [
        {"completed_at": (now - timedelta(seconds=offset)).isoformat()}
        for offset in (30, 90, 700)
    ]

    summary = summarize_restart_history(history, now=now, window_seconds=120, threshold=2)

    assert summary["count"] == 2
    assert summary["exceeded"] is True
    assert summary["window_seconds"] == 120
    assert summary["threshold"] == 2
def test_build_recovery_recommendation_for_pool_churn_and_cooldown():
    """Restart churn maps to throttling; an active cooldown maps to waiting."""
    common = dict(
        degraded_reason="db_pool_saturated",
        pool_saturation=1.0,
        circuit_state="CLOSED",
    )

    churn = build_recovery_recommendation(
        restart_churn_exceeded=True,
        cooldown_active=False,
        **common,
    )
    assert churn["action"] == "throttle_and_investigate_queries"

    cooling = build_recovery_recommendation(
        restart_churn_exceeded=False,
        cooldown_active=True,
        **common,
    )
    assert cooling["action"] == "wait_for_restart_cooldown"
579
tests/test_resource_cache.py
Normal file
579
tests/test_resource_cache.py
Normal file
@@ -0,0 +1,579 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for resource_cache module.
|
||||
|
||||
Tests cache read/write functionality, fallback mechanism, and distinct values API.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
import json
|
||||
|
||||
|
||||
class TestGetDistinctValues:
    """Tests for get_distinct_values."""

    @pytest.fixture(autouse=True)
    def reset_modules(self):
        """Clear the Redis client singleton around every test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_returns_sorted_unique_values(self):
        """Values come back sorted and de-duplicated; Nones are dropped."""
        import mes_dashboard.services.resource_cache as rc

        rows = [
            {'WORKCENTERNAME': 'Station_B', 'RESOURCEFAMILYNAME': 'Family1'},
            {'WORKCENTERNAME': 'Station_A', 'RESOURCEFAMILYNAME': 'Family2'},
            {'WORKCENTERNAME': 'Station_B', 'RESOURCEFAMILYNAME': 'Family1'},  # duplicate row
            {'WORKCENTERNAME': 'Station_C', 'RESOURCEFAMILYNAME': None},  # None value
        ]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            assert rc.get_distinct_values('WORKCENTERNAME') == ['Station_A', 'Station_B', 'Station_C']

    def test_excludes_none_and_empty_strings(self):
        """Neither None nor '' ever appears in the distinct list."""
        import mes_dashboard.services.resource_cache as rc

        rows = [
            {'RESOURCEFAMILYNAME': name}
            for name in ('Family1', None, '', 'Family2')
        ]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            assert rc.get_distinct_values('RESOURCEFAMILYNAME') == ['Family1', 'Family2']

    def test_handles_nan_values(self):
        """Float NaN entries (plain and NumPy) are filtered out."""
        import mes_dashboard.services.resource_cache as rc
        import numpy as np

        rows = [
            {'WORKCENTERNAME': 'Station_A'},
            {'WORKCENTERNAME': float('nan')},
            {'WORKCENTERNAME': np.nan},
            {'WORKCENTERNAME': 'Station_B'},
        ]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            assert rc.get_distinct_values('WORKCENTERNAME') == ['Station_A', 'Station_B']

    def test_handles_mixed_types(self):
        """Non-string values are stringified before inclusion."""
        import mes_dashboard.services.resource_cache as rc

        rows = [
            {'PJ_DEPARTMENT': 'Dept_A'},
            {'PJ_DEPARTMENT': 123},  # int value
            {'PJ_DEPARTMENT': 'Dept_B'},
        ]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            values = rc.get_distinct_values('PJ_DEPARTMENT')

        for expected in ('123', 'Dept_A', 'Dept_B'):
            assert expected in values

    def test_returns_empty_list_when_no_resources(self):
        """No cached resources -> empty distinct list."""
        import mes_dashboard.services.resource_cache as rc

        with patch.object(rc, 'get_all_resources', return_value=[]):
            assert rc.get_distinct_values('WORKCENTERNAME') == []
class TestConvenienceMethods:
    """Each convenience wrapper delegates to get_distinct_values."""

    def _assert_delegates(self, func_name, column, values):
        """Stub get_distinct_values and verify the wrapper forwards the column."""
        import mes_dashboard.services.resource_cache as rc

        with patch.object(rc, 'get_distinct_values', return_value=values) as stub:
            assert getattr(rc, func_name)() == values
        stub.assert_called_once_with(column)

    def test_get_resource_families_calls_get_distinct_values(self):
        """get_resource_families queries RESOURCEFAMILYNAME."""
        self._assert_delegates('get_resource_families', 'RESOURCEFAMILYNAME', ['Family1', 'Family2'])

    def test_get_workcenters_calls_get_distinct_values(self):
        """get_workcenters queries WORKCENTERNAME."""
        self._assert_delegates('get_workcenters', 'WORKCENTERNAME', ['WC1', 'WC2'])

    def test_get_departments_calls_get_distinct_values(self):
        """get_departments queries PJ_DEPARTMENT."""
        self._assert_delegates('get_departments', 'PJ_DEPARTMENT', ['Dept1', 'Dept2'])
class TestGetAllResources:
    """Cache-first behavior of get_all_resources."""

    @pytest.fixture(autouse=True)
    def reset_modules(self):
        """Clear the Redis client singleton around every test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    @staticmethod
    def _redis_returning(payload):
        """Build a MagicMock Redis whose get() always returns *payload*."""
        client = MagicMock()
        client.get.return_value = payload
        return client

    def test_returns_cached_data_when_available(self):
        """A Redis hit is decoded and returned without touching Oracle."""
        import mes_dashboard.services.resource_cache as rc

        rows = [
            {'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'},
            {'RESOURCEID': 'R002', 'RESOURCENAME': 'Machine2'},
        ]
        client = self._redis_returning(json.dumps(rows))

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc, 'RESOURCE_CACHE_ENABLED', True), \
             patch.object(rc, 'get_redis_client', return_value=client):
            result = rc.get_all_resources()

        assert len(result) == 2
        assert result[0]['RESOURCEID'] == 'R001'

    def test_falls_back_to_oracle_when_cache_miss(self):
        """An empty cache falls back to the Oracle loader."""
        import mes_dashboard.services.resource_cache as rc

        client = self._redis_returning(None)
        oracle_df = pd.DataFrame({'RESOURCEID': ['R001'], 'RESOURCENAME': ['Machine1']})

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc, 'RESOURCE_CACHE_ENABLED', True), \
             patch.object(rc, 'get_redis_client', return_value=client), \
             patch.object(rc, '_load_from_oracle', return_value=oracle_df):
            result = rc.get_all_resources()

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'

    def test_returns_empty_when_both_unavailable(self):
        """Cache miss plus Oracle failure degrades to an empty list."""
        import mes_dashboard.services.resource_cache as rc

        client = self._redis_returning(None)

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc, 'RESOURCE_CACHE_ENABLED', True), \
             patch.object(rc, 'get_redis_client', return_value=client), \
             patch.object(rc, '_load_from_oracle', return_value=None):
            assert rc.get_all_resources() == []
class TestGetResourceById:
    """Lookup behavior of get_resource_by_id."""

    def test_returns_matching_resource(self):
        """A known ID returns its full record."""
        import mes_dashboard.services.resource_cache as rc

        rows = [
            {'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'},
            {'RESOURCEID': 'R002', 'RESOURCENAME': 'Machine2'},
        ]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            found = rc.get_resource_by_id('R002')

        assert found is not None
        assert found['RESOURCEID'] == 'R002'
        assert found['RESOURCENAME'] == 'Machine2'

    def test_returns_none_when_not_found(self):
        """An unknown ID yields None rather than raising."""
        import mes_dashboard.services.resource_cache as rc

        rows = [{'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'}]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            assert rc.get_resource_by_id('R999') is None
class TestGetResourcesByIds:
    """Batch lookup behavior of get_resources_by_ids."""

    def test_returns_matching_resources(self):
        """All requested IDs that exist are returned."""
        import mes_dashboard.services.resource_cache as rc

        rows = [
            {'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'},
            {'RESOURCEID': 'R002', 'RESOURCENAME': 'Machine2'},
            {'RESOURCEID': 'R003', 'RESOURCENAME': 'Machine3'},
        ]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            result = rc.get_resources_by_ids(['R001', 'R003'])

        assert len(result) == 2
        returned = [row['RESOURCEID'] for row in result]
        assert 'R001' in returned
        assert 'R003' in returned

    def test_ignores_missing_ids(self):
        """IDs with no matching record are silently skipped."""
        import mes_dashboard.services.resource_cache as rc

        rows = [{'RESOURCEID': 'R001', 'RESOURCENAME': 'Machine1'}]

        with patch.object(rc, 'get_all_resources', return_value=rows):
            result = rc.get_resources_by_ids(['R001', 'R999'])

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'
class TestGetResourcesByFilter:
    """Filtering behavior of get_resources_by_filter."""

    @staticmethod
    def _filtered(rows, **criteria):
        """Run get_resources_by_filter over stubbed resource rows."""
        import mes_dashboard.services.resource_cache as rc

        with patch.object(rc, 'get_all_resources', return_value=rows):
            return rc.get_resources_by_filter(**criteria)

    def test_filters_by_workcenter(self):
        """Only rows in the requested workcenters survive."""
        rows = [
            {'RESOURCEID': 'R001', 'WORKCENTERNAME': 'WC1'},
            {'RESOURCEID': 'R002', 'WORKCENTERNAME': 'WC2'},
            {'RESOURCEID': 'R003', 'WORKCENTERNAME': 'WC1'},
        ]
        assert len(self._filtered(rows, workcenters=['WC1'])) == 2

    def test_filters_by_family(self):
        """Only rows in the requested families survive."""
        rows = [
            {'RESOURCEID': 'R001', 'RESOURCEFAMILYNAME': 'F1'},
            {'RESOURCEID': 'R002', 'RESOURCEFAMILYNAME': 'F2'},
        ]
        result = self._filtered(rows, families=['F1'])
        assert len(result) == 1
        assert result[0]['RESOURCEFAMILYNAME'] == 'F1'

    def test_filters_by_production_flag(self):
        """is_production=True keeps only PJ_ISPRODUCTION == 1 rows."""
        rows = [
            {'RESOURCEID': 'R001', 'PJ_ISPRODUCTION': 1},
            {'RESOURCEID': 'R002', 'PJ_ISPRODUCTION': 0},
            {'RESOURCEID': 'R003', 'PJ_ISPRODUCTION': 1},
        ]
        assert len(self._filtered(rows, is_production=True)) == 2

    def test_combines_multiple_filters(self):
        """Multiple criteria combine with AND semantics."""
        rows = [
            {'RESOURCEID': 'R001', 'WORKCENTERNAME': 'WC1', 'RESOURCEFAMILYNAME': 'F1'},
            {'RESOURCEID': 'R002', 'WORKCENTERNAME': 'WC1', 'RESOURCEFAMILYNAME': 'F2'},
            {'RESOURCEID': 'R003', 'WORKCENTERNAME': 'WC2', 'RESOURCEFAMILYNAME': 'F1'},
        ]
        result = self._filtered(rows, workcenters=['WC1'], families=['F1'])
        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'
class TestGetCacheStatus:
    """Status reporting of get_cache_status."""

    @pytest.fixture(autouse=True)
    def reset_modules(self):
        """Clear the Redis client singleton around every test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_returns_disabled_when_cache_disabled(self):
        """REDIS_ENABLED=False reports enabled=False and loaded=False."""
        import mes_dashboard.services.resource_cache as rc

        with patch.object(rc, 'REDIS_ENABLED', False):
            status = rc.get_cache_status()

        assert status['enabled'] is False
        assert status['loaded'] is False

    def test_returns_loaded_status_when_data_exists(self):
        """An existing data key plus meta keys report enabled/loaded."""
        import mes_dashboard.services.resource_cache as rc

        meta = {
            'mes_wip:resource:meta:count': '1000',
            'mes_wip:resource:meta:version': '2024-01-15T10:00:00',
            'mes_wip:resource:meta:updated': '2024-01-15T10:30:00',
        }
        client = MagicMock()
        client.exists.return_value = 1
        client.get.side_effect = meta.get

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc, 'RESOURCE_CACHE_ENABLED', True), \
             patch.object(rc, 'get_redis_client', return_value=client):
            status = rc.get_cache_status()

        assert status['enabled'] is True
        assert status['loaded'] is True
class TestRefreshCache:
    """Version-gated sync behavior of refresh_cache."""

    @pytest.fixture(autouse=True)
    def reset_modules(self):
        """Clear the Redis client singleton around every test."""
        import mes_dashboard.core.redis_client as rc
        rc._REDIS_CLIENT = None
        yield
        rc._REDIS_CLIENT = None

    def test_returns_false_when_disabled(self):
        """With Redis disabled, refresh_cache is a no-op returning False."""
        import mes_dashboard.services.resource_cache as rc

        with patch.object(rc, 'REDIS_ENABLED', False):
            assert rc.refresh_cache() is False

    def test_skips_sync_when_version_unchanged(self):
        """Matching Oracle/Redis versions skip the sync entirely."""
        import mes_dashboard.services.resource_cache as rc

        client = MagicMock()
        client.get.return_value = '2024-01-15T10:00:00'
        client.ping.return_value = True

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc, 'RESOURCE_CACHE_ENABLED', True), \
             patch.object(rc, 'redis_available', return_value=True), \
             patch.object(rc, '_get_version_from_oracle', return_value='2024-01-15T10:00:00'), \
             patch.object(rc, '_get_version_from_redis', return_value='2024-01-15T10:00:00'):
            assert rc.refresh_cache(force=False) is False

    def test_syncs_when_version_changed(self):
        """A newer Oracle version triggers a load-and-sync cycle."""
        import mes_dashboard.services.resource_cache as rc

        frame = pd.DataFrame({'RESOURCEID': ['R001'], 'RESOURCENAME': ['Machine1']})

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc, 'RESOURCE_CACHE_ENABLED', True), \
             patch.object(rc, 'redis_available', return_value=True), \
             patch.object(rc, '_get_version_from_oracle', return_value='2024-01-15T11:00:00'), \
             patch.object(rc, '_get_version_from_redis', return_value='2024-01-15T10:00:00'), \
             patch.object(rc, '_load_from_oracle', return_value=frame), \
             patch.object(rc, '_sync_to_redis', return_value=True) as sync_stub:
            assert rc.refresh_cache(force=False) is True

        sync_stub.assert_called_once()

    def test_force_sync_ignores_version(self):
        """force=True syncs even when the versions already match."""
        import mes_dashboard.services.resource_cache as rc

        frame = pd.DataFrame({'RESOURCEID': ['R001'], 'RESOURCENAME': ['Machine1']})

        with patch.object(rc, 'REDIS_ENABLED', True), \
             patch.object(rc, 'RESOURCE_CACHE_ENABLED', True), \
             patch.object(rc, 'redis_available', return_value=True), \
             patch.object(rc, '_get_version_from_oracle', return_value='2024-01-15T10:00:00'), \
             patch.object(rc, '_get_version_from_redis', return_value='2024-01-15T10:00:00'), \
             patch.object(rc, '_load_from_oracle', return_value=frame), \
             patch.object(rc, '_sync_to_redis', return_value=True) as sync_stub:
            assert rc.refresh_cache(force=True) is True

        sync_stub.assert_called_once()
class TestBuildFilterBuilder:
    """SQL filter construction via _build_filter_builder."""

    @staticmethod
    def _built():
        """Attach a minimal base query and return the built (sql, params) pair."""
        import mes_dashboard.services.resource_cache as rc

        builder = rc._build_filter_builder()
        builder.base_sql = "SELECT * FROM DWH.DW_MES_RESOURCE {{ WHERE_CLAUSE }}"
        return builder.build()

    def test_includes_equipment_type_filter(self):
        """The WHERE clause restricts OBJECTCATEGORY to known equipment types."""
        sql, params = self._built()

        assert 'OBJECTCATEGORY' in sql
        assert 'ASSEMBLY' in sql or 'WAFERSORT' in sql

    def test_includes_location_filter(self):
        """Location exclusion is present and uses bind variables."""
        sql, params = self._built()

        assert 'LOCATIONNAME' in sql
        assert len(params) > 0

    def test_includes_asset_status_filter(self):
        """Asset-status exclusion is present and uses bind variables."""
        sql, params = self._built()

        assert 'PJ_ASSETSSTATUS' in sql
        assert len(params) > 0
class TestResourceDerivedIndex:
    """Tests for the derived resource index and its freshness telemetry."""

    @pytest.fixture(autouse=True)
    def reset_state(self):
        """Reset module-level index/cache state before and after each test."""
        import mes_dashboard.services.resource_cache as rc
        rc._resource_index = rc._new_empty_index()
        rc._resource_df_cache.invalidate("resource_data")
        yield
        rc._resource_index = rc._new_empty_index()
        rc._resource_df_cache.invalidate("resource_data")

    def test_get_resource_by_id_uses_index_snapshot(self):
        """Lookups by id are served from the published index snapshot."""
        import mes_dashboard.services.resource_cache as rc

        fake_snapshot = {
            "records": [{"RESOURCEID": "R001", "RESOURCENAME": "Machine1"}],
            "by_resource_id": {"R001": {"RESOURCEID": "R001", "RESOURCENAME": "Machine1"}},
        }
        with patch.object(rc, "get_resource_index_snapshot", return_value=fake_snapshot):
            found = rc.get_resource_by_id("R001")
            assert found is not None
            assert found["RESOURCENAME"] == "Machine1"

    def test_get_cache_status_includes_derived_index_freshness(self):
        """get_cache_status() reports the derived index as ready and fresh."""
        import mes_dashboard.services.resource_cache as rc

        rc._resource_index = {
            **rc._new_empty_index(),
            "ready": True,
            "source": "redis",
            "version": "v1",
            "updated_at": "2026-02-07T10:00:00",
            "built_at": "2026-02-07T10:00:05",
            "count": 2,
        }

        # Redis metadata agrees with the in-memory index, so it is "fresh".
        redis_meta = {
            'mes_wip:resource:meta:count': '2',
            'mes_wip:resource:meta:version': 'v1',
            'mes_wip:resource:meta:updated': '2026-02-07T10:00:00',
        }
        fake_redis = MagicMock()
        fake_redis.exists.return_value = 1
        fake_redis.get.side_effect = redis_meta.get

        with patch.object(rc, "REDIS_ENABLED", True), \
                patch.object(rc, "RESOURCE_CACHE_ENABLED", True), \
                patch.object(rc, "get_redis_client", return_value=fake_redis):
            status = rc.get_cache_status()
            assert status["derived_index"]["ready"] is True
            assert status["derived_index"]["is_fresh"] is True

    def test_index_rebuilds_when_redis_version_changes(self):
        """A version bump in Redis forces the in-memory index to rebuild."""
        import mes_dashboard.services.resource_cache as rc

        # Stale in-memory index: built from version v1 with a single record.
        rc._resource_index = {
            **rc._new_empty_index(),
            "ready": True,
            "source": "redis",
            "version": "v1",
            "updated_at": "2026-02-07T10:00:00",
            "built_at": "2026-02-07T10:00:05",
            "version_checked_at": 0.0,
            "count": 1,
            "records": [{"RESOURCEID": "OLD"}],
            "by_resource_id": {"OLD": {"RESOURCEID": "OLD"}},
        }

        fresh_df = pd.DataFrame([
            {"RESOURCEID": "R002", "RESOURCENAME": "Machine2"}
        ])

        # Interval 0 makes the version check run immediately; Redis reports v2.
        with patch.object(rc, "RESOURCE_INDEX_VERSION_CHECK_INTERVAL", 0), \
                patch.object(rc, "_get_version_from_redis", return_value="v2"), \
                patch.object(rc, "_get_cached_data", return_value=fresh_df), \
                patch.object(rc, "_get_cache_meta", return_value=("v2", "2026-02-07T10:10:00")):
            snapshot = rc.get_resource_index_snapshot()
            assert snapshot["version"] == "v2"
            assert snapshot["count"] == 1
            assert snapshot["by_resource_id"]["R002"]["RESOURCENAME"] == "Machine2"
|
||||
297
tests/test_resource_history_routes.py
Normal file
297
tests/test_resource_history_routes.py
Normal file
@@ -0,0 +1,297 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Integration tests for resource history API endpoints.
|
||||
|
||||
Tests API endpoints for proper response format, error handling,
|
||||
and parameter validation.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import json
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
|
||||
|
||||
import mes_dashboard.core.database as db
|
||||
from mes_dashboard.app import create_app
|
||||
|
||||
|
||||
class TestResourceHistoryOptionsAPI(unittest.TestCase):
    """Integration tests for the /api/resource/history/options endpoint."""

    def setUp(self):
        """Build a fresh test client against a clean database engine."""
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    @patch('mes_dashboard.routes.resource_history_routes.get_filter_options')
    def test_options_success(self, mock_get_options):
        """A successful request returns workcenter groups and families."""
        mock_get_options.return_value = {
            'workcenter_groups': [
                {'name': '焊接_DB', 'sequence': 1},
                {'name': '成型', 'sequence': 4},
            ],
            'families': ['FAM01', 'FAM02'],
        }

        resp = self.client.get('/api/resource/history/options')
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 200)
        self.assertTrue(payload['success'])
        self.assertIn('data', payload)
        groups = payload['data']['workcenter_groups']
        self.assertEqual(len(groups), 2)
        self.assertEqual(groups[0]['name'], '焊接_DB')
        self.assertEqual(payload['data']['families'], ['FAM01', 'FAM02'])

    @patch('mes_dashboard.routes.resource_history_routes.get_filter_options')
    def test_options_failure(self, mock_get_options):
        """A None result from the service layer maps to an HTTP 500 error."""
        mock_get_options.return_value = None

        resp = self.client.get('/api/resource/history/options')
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 500)
        self.assertFalse(payload['success'])
        self.assertIn('error', payload)
|
||||
|
||||
|
||||
class TestResourceHistorySummaryAPI(unittest.TestCase):
    """Integration tests for the /api/resource/history/summary endpoint."""

    def setUp(self):
        """Build a fresh test client against a clean database engine."""
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    def test_missing_start_date(self):
        """Omitting start_date is rejected with HTTP 400."""
        resp = self.client.get('/api/resource/history/summary?end_date=2024-01-31')
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 400)
        self.assertFalse(payload['success'])
        self.assertIn('start_date', payload['error'])

    def test_missing_end_date(self):
        """Omitting end_date is rejected with HTTP 400."""
        resp = self.client.get('/api/resource/history/summary?start_date=2024-01-01')
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 400)
        self.assertFalse(payload['success'])
        self.assertIn('end_date', payload['error'])

    @patch('mes_dashboard.routes.resource_history_routes.query_summary')
    def test_date_range_exceeds_limit(self, mock_query):
        """A service-side range error (over 730 days) propagates as HTTP 400."""
        mock_query.return_value = {'error': '查詢範圍不可超過 730 天(兩年)'}

        resp = self.client.get(
            '/api/resource/history/summary?start_date=2024-01-01&end_date=2026-01-02'
        )
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 400)
        self.assertFalse(payload['success'])
        self.assertIn('730', payload['error'])

    @patch('mes_dashboard.routes.resource_history_routes.query_summary')
    def test_successful_summary(self, mock_query):
        """A successful request returns every data section."""
        mock_query.return_value = {
            'kpi': {
                'ou_pct': 80.0,
                'prd_hours': 800,
                'sby_hours': 100,
                'udt_hours': 50,
                'sdt_hours': 30,
                'egt_hours': 20,
                'nst_hours': 100,
                'machine_count': 10,
            },
            'trend': [{'date': '2024-01-01', 'ou_pct': 80.0}],
            'heatmap': [{'workcenter': 'WC01', 'date': '2024-01-01', 'ou_pct': 80.0}],
            'workcenter_comparison': [{'workcenter': 'WC01', 'ou_pct': 80.0}],
        }

        resp = self.client.get(
            '/api/resource/history/summary?start_date=2024-01-01&end_date=2024-01-07'
        )
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 200)
        self.assertTrue(payload['success'])
        for section in ('kpi', 'trend', 'heatmap', 'workcenter_comparison'):
            self.assertIn(section, payload['data'])

    @patch('mes_dashboard.routes.resource_history_routes.query_summary')
    def test_summary_with_filters(self, mock_query):
        """Filter query parameters are forwarded to the service layer."""
        mock_query.return_value = {
            'kpi': {}, 'trend': [], 'heatmap': [], 'workcenter_comparison': []
        }

        resp = self.client.get(
            '/api/resource/history/summary'
            '?start_date=2024-01-01'
            '&end_date=2024-01-07'
            '&granularity=week'
            '&workcenter_groups=焊接_DB'
            '&workcenter_groups=成型'
            '&families=FAM01'
            '&families=FAM02'
            '&is_production=1'
            '&is_key=1'
        )

        self.assertEqual(resp.status_code, 200)
        mock_query.assert_called_once()
        kwargs = mock_query.call_args[1]
        self.assertEqual(kwargs['granularity'], 'week')
        self.assertEqual(kwargs['workcenter_groups'], ['焊接_DB', '成型'])
        self.assertEqual(kwargs['families'], ['FAM01', 'FAM02'])
        self.assertTrue(kwargs['is_production'])
        self.assertTrue(kwargs['is_key'])
|
||||
|
||||
|
||||
class TestResourceHistoryDetailAPI(unittest.TestCase):
    """Integration tests for the /api/resource/history/detail endpoint."""

    def setUp(self):
        """Build a fresh test client against a clean database engine."""
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    def test_missing_dates(self):
        """A request without dates is rejected with HTTP 400."""
        resp = self.client.get('/api/resource/history/detail')
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 400)
        self.assertFalse(payload['success'])

    @patch('mes_dashboard.routes.resource_history_routes.query_detail')
    def test_successful_detail(self, mock_query):
        """A successful request returns rows plus total and truncated flag."""
        mock_query.return_value = {
            'data': [
                {'workcenter': 'WC01', 'family': 'FAM01', 'resource': 'RES01', 'ou_pct': 80.0}
            ],
            'total': 100,
            'truncated': False,
            'max_records': None,
        }

        resp = self.client.get(
            '/api/resource/history/detail?start_date=2024-01-01&end_date=2024-01-07'
        )
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 200)
        self.assertTrue(payload['success'])
        for key in ('data', 'total', 'truncated'):
            self.assertIn(key, payload)
        self.assertFalse(payload['truncated'])

    @patch('mes_dashboard.routes.resource_history_routes.query_detail')
    def test_detail_truncated_warning(self, mock_query):
        """Truncated results surface the truncated flag and max_records."""
        mock_query.return_value = {
            'data': [{'workcenter': 'WC01', 'family': 'FAM01', 'resource': 'RES01', 'ou_pct': 80.0}],
            'total': 6000,
            'truncated': True,
            'max_records': 5000,
        }

        resp = self.client.get(
            '/api/resource/history/detail'
            '?start_date=2024-01-01'
            '&end_date=2024-01-07'
        )
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 200)
        self.assertTrue(payload['success'])
        self.assertTrue(payload['truncated'])
        self.assertEqual(payload['max_records'], 5000)
        self.assertEqual(payload['total'], 6000)
|
||||
|
||||
|
||||
class TestResourceHistoryExportAPI(unittest.TestCase):
    """Integration tests for the /api/resource/history/export endpoint."""

    def setUp(self):
        """Build a fresh test client against a clean database engine."""
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    def test_missing_dates(self):
        """A request without dates is rejected with HTTP 400."""
        resp = self.client.get('/api/resource/history/export')
        payload = json.loads(resp.data)

        self.assertEqual(resp.status_code, 400)
        self.assertFalse(payload['success'])

    @patch('mes_dashboard.routes.resource_history_routes.export_csv')
    def test_successful_export(self, mock_export):
        """A successful export streams CSV with attachment headers."""
        mock_export.return_value = iter(['站點,型號,機台,OU%\n', 'WC01,FAM01,RES01,80%\n'])

        resp = self.client.get(
            '/api/resource/history/export?start_date=2024-01-01&end_date=2024-01-07'
        )

        self.assertEqual(resp.status_code, 200)
        self.assertIn('text/csv', resp.content_type)
        disposition = resp.headers['Content-Disposition']
        self.assertIn('attachment', disposition)
        self.assertIn('resource_history', disposition)

    @patch('mes_dashboard.routes.resource_history_routes.export_csv')
    def test_export_filename_includes_dates(self, mock_export):
        """The download filename embeds the requested date range."""
        mock_export.return_value = iter(['header\n'])

        resp = self.client.get(
            '/api/resource/history/export?start_date=2024-01-01&end_date=2024-01-07'
        )

        disposition = resp.headers['Content-Disposition']
        self.assertIn('2024-01-01', disposition)
        self.assertIn('2024-01-07', disposition)
|
||||
|
||||
|
||||
class TestAPIContentType(unittest.TestCase):
    """Verify that JSON endpoints declare the proper content type."""

    def setUp(self):
        """Build a fresh test client against a clean database engine."""
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()

    @patch('mes_dashboard.routes.resource_history_routes.get_filter_options')
    def test_json_content_type(self, mock_get_options):
        """API endpoints respond with an application/json content type."""
        mock_get_options.return_value = {'workcenter_groups': [], 'families': []}

        resp = self.client.get('/api/resource/history/options')

        self.assertIn('application/json', resp.content_type)
|
||||
|
||||
|
||||
# Allow running this test module directly (e.g. `python tests/test_resource_history_routes.py`).
if __name__ == '__main__':
    unittest.main()
|
||||
446
tests/test_resource_history_service.py
Normal file
446
tests/test_resource_history_service.py
Normal file
@@ -0,0 +1,446 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for resource_history_service.py.
|
||||
|
||||
Tests the service layer functions for resource history analysis.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import pandas as pd
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
|
||||
|
||||
from mes_dashboard.services.resource_history_service import (
|
||||
get_filter_options,
|
||||
query_summary,
|
||||
query_detail,
|
||||
export_csv,
|
||||
_validate_date_range,
|
||||
_get_date_trunc,
|
||||
_calc_ou_pct,
|
||||
_calc_availability_pct,
|
||||
_build_kpi_from_df,
|
||||
_build_detail_from_raw_df,
|
||||
MAX_QUERY_DAYS,
|
||||
)
|
||||
|
||||
|
||||
class TestValidateDateRange(unittest.TestCase):
    """Tests for _validate_date_range input validation."""

    def test_valid_date_range(self):
        """A well-formed, in-limit range produces no error (None)."""
        error = _validate_date_range('2024-01-01', '2024-01-31')
        self.assertIsNone(error)

    def test_date_range_exceeds_max(self):
        """A range longer than MAX_QUERY_DAYS yields an error mentioning 730."""
        error = _validate_date_range('2024-01-01', '2026-01-02')
        self.assertIsNotNone(error)
        self.assertIn('730', error)

    def test_end_date_before_start_date(self):
        """A reversed range yields an error about the start date."""
        error = _validate_date_range('2024-01-31', '2024-01-01')
        self.assertIsNotNone(error)
        self.assertIn('起始日期', error)

    def test_invalid_date_format(self):
        """An unparseable date yields a format error message."""
        error = _validate_date_range('invalid', '2024-01-01')
        self.assertIsNotNone(error)
        self.assertIn('日期格式錯誤', error)
|
||||
|
||||
|
||||
class TestGetDateTrunc(unittest.TestCase):
    """Tests for _get_date_trunc Oracle TRUNC SQL generation."""

    def test_day_granularity(self):
        """Day granularity truncates without a format argument."""
        sql = _get_date_trunc('day')
        self.assertIn('TRUNC(TXNDATE)', sql)
        self.assertNotIn('IW', sql)

    def test_week_granularity(self):
        """Week granularity uses the ISO-week 'IW' format."""
        sql = _get_date_trunc('week')
        self.assertIn("'IW'", sql)

    def test_month_granularity(self):
        """Month granularity uses the 'MM' format."""
        sql = _get_date_trunc('month')
        self.assertIn("'MM'", sql)

    def test_year_granularity(self):
        """Year granularity uses the 'YYYY' format."""
        sql = _get_date_trunc('year')
        self.assertIn("'YYYY'", sql)

    def test_unknown_granularity(self):
        """An unrecognized granularity falls back to day truncation."""
        sql = _get_date_trunc('unknown')
        self.assertIn('TRUNC(TXNDATE)', sql)
        self.assertNotIn("'IW'", sql)
|
||||
|
||||
|
||||
class TestCalcOuPct(unittest.TestCase):
    """Tests for the OU% (overall utilization) calculation."""

    def test_normal_calculation(self):
        """OU% = PRD / (PRD + SBY + UDT + SDT + EGT) * 100."""
        # 800 / (800 + 100 + 50 + 30 + 20) * 100 = 80%
        self.assertEqual(_calc_ou_pct(800, 100, 50, 30, 20), 80.0)

    def test_zero_denominator(self):
        """All-zero hours return 0 instead of raising ZeroDivisionError."""
        self.assertEqual(_calc_ou_pct(0, 0, 0, 0, 0), 0)

    def test_all_prd(self):
        """Pure production time yields 100% OU."""
        self.assertEqual(_calc_ou_pct(100, 0, 0, 0, 0), 100.0)

    def test_no_prd(self):
        """Zero production time yields 0% OU regardless of other buckets."""
        self.assertEqual(_calc_ou_pct(0, 100, 50, 30, 20), 0)
|
||||
|
||||
|
||||
class TestCalcAvailabilityPct(unittest.TestCase):
    """Tests for the Availability% calculation.

    Argument order is (prd, sby, udt, sdt, egt, nst);
    Availability% = (PRD + SBY + EGT) / (PRD + SBY + EGT + SDT + UDT + NST) * 100.
    """

    def test_normal_calculation(self):
        """Mixed buckets produce the expected rounded percentage."""
        # (800 + 100 + 20) / (800 + 100 + 20 + 30 + 50 + 100) * 100 = 920/1100 = 83.6%
        self.assertEqual(_calc_availability_pct(800, 100, 50, 30, 20, 100), 83.6)

    def test_zero_denominator(self):
        """All-zero hours return 0 instead of raising ZeroDivisionError."""
        self.assertEqual(_calc_availability_pct(0, 0, 0, 0, 0, 0), 0)

    def test_all_available(self):
        """No downtime buckets (SDT/UDT/NST all zero) yields 100%."""
        # prd=100, sby=50, egt=50 -> (100+50+50)/(100+50+50) * 100 = 100%
        self.assertEqual(_calc_availability_pct(100, 50, 0, 0, 50, 0), 100.0)

    def test_no_available_time(self):
        """Only downtime buckets yields 0%."""
        # prd=0, sby=0, egt=0 -> numerator 0; udt=50, sdt=30, nst=20 fill the denominator.
        self.assertEqual(_calc_availability_pct(0, 0, 50, 30, 0, 20), 0)

    def test_mixed_scenario(self):
        """Partial availability computes the expected ratio."""
        # numerator = 500 + 200 + 50 = 750; denominator = 1000 -> 75%
        self.assertEqual(_calc_availability_pct(500, 200, 100, 100, 50, 50), 75.0)
|
||||
|
||||
|
||||
class TestBuildKpiFromDf(unittest.TestCase):
    """Tests for building the KPI dict from an aggregate DataFrame."""

    def test_empty_dataframe(self):
        """An empty frame yields zeroed-out KPI defaults."""
        kpi = _build_kpi_from_df(pd.DataFrame())

        self.assertEqual(kpi['ou_pct'], 0)
        self.assertEqual(kpi['availability_pct'], 0)
        self.assertEqual(kpi['prd_hours'], 0)
        self.assertEqual(kpi['machine_count'], 0)

    def test_normal_dataframe(self):
        """A populated frame produces the expected derived percentages."""
        frame = pd.DataFrame([{
            'PRD_HOURS': 800,
            'SBY_HOURS': 100,
            'UDT_HOURS': 50,
            'SDT_HOURS': 30,
            'EGT_HOURS': 20,
            'NST_HOURS': 100,
            'MACHINE_COUNT': 10,
        }])
        kpi = _build_kpi_from_df(frame)

        self.assertEqual(kpi['ou_pct'], 80.0)
        # Availability% = (800+100+20) / (800+100+20+30+50+100) * 100 = 920/1100 = 83.6%
        self.assertEqual(kpi['availability_pct'], 83.6)
        self.assertEqual(kpi['prd_hours'], 800)
        self.assertEqual(kpi['machine_count'], 10)

    def test_none_values_in_dataframe(self):
        """Null/NaN inputs are coerced to 0 in every KPI field."""
        frame = pd.DataFrame([{
            'PRD_HOURS': None,
            'SBY_HOURS': None,
            'UDT_HOURS': None,
            'SDT_HOURS': None,
            'EGT_HOURS': None,
            'NST_HOURS': None,
            'MACHINE_COUNT': None,
        }])
        kpi = _build_kpi_from_df(frame)

        self.assertEqual(kpi['ou_pct'], 0)
        self.assertEqual(kpi['availability_pct'], 0)
        self.assertEqual(kpi['prd_hours'], 0)
        self.assertEqual(kpi['machine_count'], 0)
|
||||
|
||||
|
||||
class TestBuildDetailFromDf(unittest.TestCase):
    """Tests for building per-resource detail rows from a raw DataFrame."""

    def test_empty_dataframe(self):
        """An empty frame yields an empty list of rows."""
        rows = _build_detail_from_raw_df(pd.DataFrame(), {})
        self.assertEqual(rows, [])

    @patch('mes_dashboard.services.filter_cache.get_workcenter_mapping')
    def test_normal_dataframe(self, mock_wc_mapping):
        """Rows are enriched with the workcenter group from the mapping."""
        mock_wc_mapping.return_value = {
            'WC01': {'group': 'Group01', 'sequence': 1}
        }
        frame = pd.DataFrame([{
            'HISTORYID': 'RES01',
            'PRD_HOURS': 80,
            'SBY_HOURS': 10,
            'UDT_HOURS': 5,
            'SDT_HOURS': 3,
            'EGT_HOURS': 2,
            'NST_HOURS': 10,
            'TOTAL_HOURS': 110,
        }])
        lookup = {
            'RES01': {
                'RESOURCEID': 'RES01',
                'WORKCENTERNAME': 'WC01',
                'RESOURCEFAMILYNAME': 'FAM01',
                'RESOURCENAME': 'RES01',
            }
        }

        rows = _build_detail_from_raw_df(frame, lookup)

        self.assertEqual(len(rows), 1)
        first = rows[0]
        self.assertEqual(first['workcenter'], 'Group01')
        self.assertEqual(first['family'], 'FAM01')
        self.assertEqual(first['resource'], 'RES01')
        self.assertEqual(first['machine_count'], 1)
        # OU% = 80 / (80 + 10 + 5 + 3 + 2) * 100 = 80%
        self.assertEqual(first['ou_pct'], 80.0)
|
||||
|
||||
|
||||
class TestGetFilterOptions(unittest.TestCase):
    """Tests for filter-option retrieval from the caches."""

    # Note: decorators apply bottom-up, so the first mock parameter is the
    # innermost (families) patch and the second is the groups patch.
    @patch('mes_dashboard.services.filter_cache.get_workcenter_groups')
    @patch('mes_dashboard.services.resource_cache.get_resource_families')
    def test_cache_failure(self, mock_families, mock_groups):
        """When both caches miss, the function returns None."""
        mock_groups.return_value = None
        mock_families.return_value = None

        self.assertIsNone(get_filter_options())

    @patch('mes_dashboard.services.filter_cache.get_workcenter_groups')
    @patch('mes_dashboard.services.resource_cache.get_resource_families')
    def test_successful_query(self, mock_families, mock_groups):
        """Cached groups and families are returned in the options payload."""
        mock_groups.return_value = [
            {'name': '焊接_DB', 'sequence': 1},
            {'name': '成型', 'sequence': 4},
        ]
        mock_families.return_value = ['FAM01', 'FAM02']

        options = get_filter_options()

        self.assertIsNotNone(options)
        self.assertEqual(len(options['workcenter_groups']), 2)
        self.assertEqual(options['workcenter_groups'][0]['name'], '焊接_DB')
        self.assertEqual(options['families'], ['FAM01', 'FAM02'])
|
||||
|
||||
|
||||
class TestQuerySummary(unittest.TestCase):
    """Tests for the summary query function."""

    def test_invalid_date_range(self):
        """A range beyond the 730-day limit returns an error dict."""
        outcome = query_summary(
            start_date='2024-01-01',
            end_date='2026-01-02',  # exceeds 730 days
            granularity='day'
        )
        self.assertIsNotNone(outcome)
        self.assertIn('error', outcome)

    @patch('mes_dashboard.services.resource_history_service.read_sql_df')
    def test_successful_query(self, mock_read_sql):
        """A successful query returns kpi, trend, heatmap and comparison."""
        kpi_df = pd.DataFrame([{
            'PRD_HOURS': 800, 'SBY_HOURS': 100, 'UDT_HOURS': 50,
            'SDT_HOURS': 30, 'EGT_HOURS': 20, 'NST_HOURS': 100,
            'MACHINE_COUNT': 10
        }])
        trend_df = pd.DataFrame([{
            'DATA_DATE': datetime(2024, 1, 1),
            'PRD_HOURS': 100, 'SBY_HOURS': 10, 'UDT_HOURS': 5,
            'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10,
            'MACHINE_COUNT': 5
        }])
        heatmap_df = pd.DataFrame([{
            'WORKCENTERNAME': 'WC01', 'DATA_DATE': datetime(2024, 1, 1),
            'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5,
            'SDT_HOURS': 3, 'EGT_HOURS': 2
        }])
        comparison_df = pd.DataFrame([{
            'WORKCENTERNAME': 'WC01',
            'PRD_HOURS': 800, 'SBY_HOURS': 100, 'UDT_HOURS': 50,
            'SDT_HOURS': 30, 'EGT_HOURS': 20, 'MACHINE_COUNT': 10
        }])

        # The service fans queries out on a ThreadPoolExecutor, so a
        # side_effect list would match in an unpredictable order; dispatch
        # on the SQL text instead.
        def fake_read_sql(sql):
            text = sql.upper()
            has_date = 'DATA_DATE' in text
            has_workcenter = 'WORKCENTERNAME' in text
            if has_date and has_workcenter:
                return heatmap_df      # heatmap groups by both
            if has_date:
                return trend_df        # trend groups by date only
            if has_workcenter:
                return comparison_df   # comparison groups by workcenter only
            return kpi_df              # overall KPI has neither

        mock_read_sql.side_effect = fake_read_sql

        outcome = query_summary(
            start_date='2024-01-01',
            end_date='2024-01-07',
            granularity='day'
        )

        self.assertIsNotNone(outcome)
        for section in ('kpi', 'trend', 'heatmap', 'workcenter_comparison'):
            self.assertIn(section, outcome)
|
||||
|
||||
|
||||
class TestQueryDetail(unittest.TestCase):
    """Tests for the detail query function."""

    def test_invalid_date_range(self):
        """A range beyond the 730-day limit returns an error dict."""
        outcome = query_detail(
            start_date='2024-01-01',
            end_date='2026-01-02',  # exceeds 730 days
            granularity='day'
        )
        self.assertIsNotNone(outcome)
        self.assertIn('error', outcome)

    # Decorators apply bottom-up: first parameter is read_sql_df (innermost).
    @patch('mes_dashboard.services.filter_cache.get_workcenter_mapping')
    @patch('mes_dashboard.services.resource_history_service._get_filtered_resources')
    @patch('mes_dashboard.services.resource_history_service.read_sql_df')
    def test_successful_query(self, mock_read_sql, mock_get_resources, mock_wc_mapping):
        """A successful query returns rows, total count, and truncated flag."""
        mock_get_resources.return_value = [
            {'RESOURCEID': 'RES01', 'WORKCENTERNAME': 'WC01',
             'RESOURCEFAMILYNAME': 'FAM01', 'RESOURCENAME': 'RES01'}
        ]
        mock_wc_mapping.return_value = {
            'WC01': {'group': 'Group01', 'sequence': 1}
        }
        mock_read_sql.return_value = pd.DataFrame([{
            'HISTORYID': 'RES01',
            'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5,
            'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10,
            'TOTAL_HOURS': 110
        }])

        outcome = query_detail(
            start_date='2024-01-01',
            end_date='2024-01-07',
            granularity='day',
        )

        self.assertIsNotNone(outcome)
        for key in ('data', 'total', 'truncated'):
            self.assertIn(key, outcome)
        self.assertEqual(outcome['total'], 1)
        self.assertFalse(outcome['truncated'])
|
||||
|
||||
|
||||
class TestExportCsv(unittest.TestCase):
    """Tests for the CSV export generator."""

    def test_invalid_date_range(self):
        """A range beyond the 730-day limit yields an error row."""
        rows = list(export_csv(
            start_date='2024-01-01',
            end_date='2026-01-02',  # exceeds 730 days
        ))
        self.assertTrue(any('Error' in row for row in rows))

    # Decorators apply bottom-up: first parameter is read_sql_df (innermost).
    @patch('mes_dashboard.services.filter_cache.get_workcenter_mapping')
    @patch('mes_dashboard.services.resource_history_service._get_filtered_resources')
    @patch('mes_dashboard.services.resource_history_service.read_sql_df')
    def test_successful_export(self, mock_read_sql, mock_get_filtered_resources, mock_wc_mapping):
        """A successful export yields a header row followed by data rows."""
        mock_get_filtered_resources.return_value = [{
            'RESOURCEID': 'RES01',
            'WORKCENTERNAME': 'WC01',
            'RESOURCEFAMILYNAME': 'FAM01',
            'RESOURCENAME': 'RES01',
        }]
        mock_wc_mapping.return_value = {'WC01': {'group': 'WC01', 'sequence': 1}}
        mock_read_sql.return_value = pd.DataFrame([{
            'HISTORYID': 'RES01',
            'PRD_HOURS': 80, 'SBY_HOURS': 10, 'UDT_HOURS': 5,
            'SDT_HOURS': 3, 'EGT_HOURS': 2, 'NST_HOURS': 10,
            'TOTAL_HOURS': 110
        }])

        rows = list(export_csv(
            start_date='2024-01-01',
            end_date='2024-01-07',
        ))

        # At minimum a header row plus one data row.
        self.assertGreaterEqual(len(rows), 2)
        # The header carries the localized column names.
        self.assertIn('站點', rows[0])
        self.assertIn('OU%', rows[0])
|
||||
|
||||
|
||||
# Allow running this test module directly (e.g. `python tests/test_resource_history_service.py`).
if __name__ == '__main__':
    unittest.main()
|
||||
396
tests/test_resource_service.py
Normal file
396
tests/test_resource_service.py
Normal file
@@ -0,0 +1,396 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for resource_service module.
|
||||
|
||||
Tests merged resource status queries and summary functions.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
|
||||
class TestGetMergedResourceStatus:
    """Tests for get_merged_resource_status."""

    def test_returns_empty_when_no_resources(self):
        """An empty resource cache must yield an empty merged list."""
        from mes_dashboard.services.resource_service import get_merged_resource_status

        with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=[]):
            assert get_merged_resource_status() == []

    def test_merges_resource_and_status_data(self):
        """Resource-cache rows are merged with realtime equipment status."""
        from mes_dashboard.services.resource_service import get_merged_resource_status

        resource_rows = [{
            'RESOURCEID': 'R001',
            'RESOURCENAME': 'Machine1',
            'WORKCENTERNAME': 'WC-01',
            'RESOURCEFAMILYNAME': 'Family1',
            'PJ_DEPARTMENT': 'Dept1',
            'PJ_ASSETSSTATUS': 'Active',
            'PJ_ISPRODUCTION': 1,
            'PJ_ISKEY': 0,
            'PJ_ISMONITOR': 0,
            'VENDORNAME': 'Vendor1',
            'VENDORMODEL': 'Model1',
            'LOCATIONNAME': 'Loc1',
        }]
        status_rows = [{
            'RESOURCEID': 'R001',
            'EQUIPMENTASSETSSTATUS': 'PRD',
            'EQUIPMENTASSETSSTATUSREASON': None,
            'STATUS_CATEGORY': 'PRODUCTIVE',
            'JOBORDER': 'JO001',
            'JOBSTATUS': 'RUN',
            'SYMPTOMCODE': None,
            'CAUSECODE': None,
            'REPAIRCODE': None,
            'LOT_COUNT': 2,
            'TOTAL_TRACKIN_QTY': 150,
            'LATEST_TRACKIN_TIME': '2024-01-15T10:00:00',
        }]

        with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=resource_rows), \
             patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=status_rows), \
             patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value='焊接'), \
             patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value='DB'):
            result = get_merged_resource_status()

        assert len(result) == 1
        merged = result[0]
        # Fields copied from the resource cache.
        assert merged['RESOURCEID'] == 'R001'
        assert merged['RESOURCENAME'] == 'Machine1'
        assert merged['WORKCENTERNAME'] == 'WC-01'
        # Fields derived from the workcenter mapping stubs.
        assert merged['WORKCENTER_GROUP'] == '焊接'
        assert merged['WORKCENTER_SHORT'] == 'DB'
        # Fields copied from the realtime status cache.
        assert merged['EQUIPMENTASSETSSTATUS'] == 'PRD'
        assert merged['STATUS_CATEGORY'] == 'PRODUCTIVE'
        assert merged['LOT_COUNT'] == 2

    def test_handles_resources_without_status(self):
        """A resource with no realtime row keeps None in every status field."""
        from mes_dashboard.services.resource_service import get_merged_resource_status

        resource_rows = [{
            'RESOURCEID': 'R001',
            'RESOURCENAME': 'Machine1',
            'WORKCENTERNAME': 'WC-01',
            'RESOURCEFAMILYNAME': 'Family1',
            'PJ_DEPARTMENT': 'Dept1',
            'PJ_ASSETSSTATUS': 'Active',
            'PJ_ISPRODUCTION': 1,
            'PJ_ISKEY': 0,
            'PJ_ISMONITOR': 0,
            'VENDORNAME': 'Vendor1',
            'VENDORMODEL': 'Model1',
            'LOCATIONNAME': 'Loc1',
        }]

        with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=resource_rows), \
             patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=[]), \
             patch('mes_dashboard.services.resource_service.get_workcenter_group', return_value=None), \
             patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None):
            result = get_merged_resource_status()

        assert len(result) == 1
        merged = result[0]
        assert merged['RESOURCEID'] == 'R001'
        # With no realtime row the status fields fall back to None.
        assert merged['EQUIPMENTASSETSSTATUS'] is None
        assert merged['STATUS_CATEGORY'] is None
        assert merged['LOT_COUNT'] is None
|
||||
|
||||
|
||||
class TestGetMergedResourceStatusWithFilters:
    """Filtering behaviour of get_merged_resource_status."""

    def _get_mock_data(self):
        """Return (resources, equipment_status) fixtures.

        R001: key production machine, currently producing (PRODUCTIVE).
        R002: monitored non-production machine, on standby (STANDBY).
        """
        mock_resources = [
            {
                'RESOURCEID': 'R001',
                'RESOURCENAME': 'Machine1',
                'WORKCENTERNAME': 'WC-01',
                'RESOURCEFAMILYNAME': 'Family1',
                'PJ_DEPARTMENT': 'Dept1',
                'PJ_ASSETSSTATUS': 'Active',
                'PJ_ISPRODUCTION': 1,
                'PJ_ISKEY': 1,
                'PJ_ISMONITOR': 0,
                'VENDORNAME': 'Vendor1',
                'VENDORMODEL': 'Model1',
                'LOCATIONNAME': 'Loc1',
            },
            {
                'RESOURCEID': 'R002',
                'RESOURCENAME': 'Machine2',
                'WORKCENTERNAME': 'WC-02',
                'RESOURCEFAMILYNAME': 'Family2',
                'PJ_DEPARTMENT': 'Dept2',
                'PJ_ASSETSSTATUS': 'Active',
                'PJ_ISPRODUCTION': 0,
                'PJ_ISKEY': 0,
                'PJ_ISMONITOR': 1,
                'VENDORNAME': 'Vendor2',
                'VENDORMODEL': 'Model2',
                'LOCATIONNAME': 'Loc2',
            },
        ]
        mock_equipment_status = [
            {
                'RESOURCEID': 'R001',
                'EQUIPMENTASSETSSTATUS': 'PRD',
                'EQUIPMENTASSETSSTATUSREASON': None,
                'STATUS_CATEGORY': 'PRODUCTIVE',
                'JOBORDER': 'JO001',
                'JOBSTATUS': 'RUN',
                'SYMPTOMCODE': None,
                'CAUSECODE': None,
                'REPAIRCODE': None,
                'LOT_COUNT': 1,
                'TOTAL_TRACKIN_QTY': 100,
                'LATEST_TRACKIN_TIME': '2024-01-15T10:00:00',
            },
            {
                'RESOURCEID': 'R002',
                'EQUIPMENTASSETSSTATUS': 'SBY',
                'EQUIPMENTASSETSSTATUSREASON': 'Waiting',
                'STATUS_CATEGORY': 'STANDBY',
                'JOBORDER': None,
                'JOBSTATUS': None,
                'SYMPTOMCODE': None,
                'CAUSECODE': None,
                'REPAIRCODE': None,
                'LOT_COUNT': 0,
                'TOTAL_TRACKIN_QTY': 0,
                'LATEST_TRACKIN_TIME': None,
            },
        ]
        return mock_resources, mock_equipment_status

    def _run(self, group_lookup=None, **filters):
        """Call get_merged_resource_status with all caches patched to fixtures.

        *group_lookup* optionally stubs get_workcenter_group with a callable;
        by default the mapping returns None. Remaining kwargs are forwarded
        as filter parameters.
        """
        from mes_dashboard.services.resource_service import get_merged_resource_status

        resources, statuses = self._get_mock_data()
        if group_lookup is None:
            group_patch = patch(
                'mes_dashboard.services.resource_service.get_workcenter_group',
                return_value=None)
        else:
            group_patch = patch(
                'mes_dashboard.services.resource_service.get_workcenter_group',
                side_effect=group_lookup)

        with patch('mes_dashboard.services.resource_service.get_all_resources', return_value=resources), \
             patch('mes_dashboard.services.resource_service.get_all_equipment_status', return_value=statuses), \
             group_patch, \
             patch('mes_dashboard.services.resource_service.get_workcenter_short', return_value=None):
            return get_merged_resource_status(**filters)

    def test_filters_by_workcenter_groups(self):
        """Only resources whose mapped group was requested are kept."""
        def lookup(wc_name):
            return '焊接' if wc_name == 'WC-01' else '成型'

        result = self._run(group_lookup=lookup, workcenter_groups=['焊接'])

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'

    def test_filters_by_is_production(self):
        """is_production=True keeps only PJ_ISPRODUCTION machines."""
        result = self._run(is_production=True)

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'

    def test_filters_by_is_key(self):
        """is_key=True keeps only PJ_ISKEY machines."""
        result = self._run(is_key=True)

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'

    def test_filters_by_is_monitor(self):
        """is_monitor=True keeps only PJ_ISMONITOR machines."""
        result = self._run(is_monitor=True)

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R002'

    def test_filters_by_status_categories(self):
        """status_categories keeps only matching realtime categories."""
        result = self._run(status_categories=['PRODUCTIVE'])

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'
        assert result[0]['STATUS_CATEGORY'] == 'PRODUCTIVE'

    def test_combines_multiple_filters(self):
        """Multiple filters combine with AND semantics."""
        result = self._run(is_production=True, is_key=True)

        assert len(result) == 1
        assert result[0]['RESOURCEID'] == 'R001'
|
||||
|
||||
|
||||
class TestGetResourceStatusSummary:
    """Tests for get_resource_status_summary."""

    def test_returns_empty_summary_when_no_data(self):
        """With no merged rows every counter in the summary is empty."""
        from mes_dashboard.services.resource_service import get_resource_status_summary

        with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=[]):
            summary = get_resource_status_summary()

        assert summary['total_count'] == 0
        assert summary['by_status_category'] == {}
        assert summary['by_workcenter_group'] == {}

    def test_calculates_summary_statistics(self):
        """Counts aggregate per status category, per group, and per WIP/job."""
        from mes_dashboard.services.resource_service import get_resource_status_summary

        merged_rows = [
            {
                'RESOURCEID': 'R001',
                'STATUS_CATEGORY': 'PRODUCTIVE',
                'WORKCENTER_GROUP': '焊接',
                'JOBORDER': 'JO001',
                'LOT_COUNT': 2,
            },
            {
                'RESOURCEID': 'R002',
                'STATUS_CATEGORY': 'PRODUCTIVE',
                'WORKCENTER_GROUP': '焊接',
                'JOBORDER': 'JO002',
                'LOT_COUNT': 1,
            },
            {
                'RESOURCEID': 'R003',
                'STATUS_CATEGORY': 'STANDBY',
                'WORKCENTER_GROUP': '成型',
                'JOBORDER': None,
                'LOT_COUNT': 0,
            },
        ]

        with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=merged_rows):
            summary = get_resource_status_summary()

        assert summary['total_count'] == 3
        assert summary['by_status_category']['PRODUCTIVE'] == 2
        assert summary['by_status_category']['STANDBY'] == 1
        assert summary['by_workcenter_group']['焊接'] == 2
        assert summary['by_workcenter_group']['成型'] == 1
        # Two rows have a JOBORDER and two have LOT_COUNT > 0.
        assert summary['with_active_job'] == 2
        assert summary['with_wip'] == 2
|
||||
|
||||
|
||||
class TestGetWorkcenterStatusMatrix:
    """Tests for get_workcenter_status_matrix."""

    def test_returns_empty_when_no_data(self):
        """With no merged rows the matrix is an empty list."""
        from mes_dashboard.services.resource_service import get_workcenter_status_matrix

        with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=[]):
            assert get_workcenter_status_matrix() == []

    def test_builds_matrix_by_workcenter_and_status(self):
        """Rows are counted per workcenter group and per status code."""
        from mes_dashboard.services.resource_service import get_workcenter_status_matrix

        merged_rows = [
            {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'PRD'},
            {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'PRD'},
            {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'SBY'},
            {'WORKCENTER_GROUP': '成型', 'EQUIPMENTASSETSSTATUS': 'UDT'},
        ]
        group_defs = [
            {'name': '焊接', 'sequence': 1},
            {'name': '成型', 'sequence': 2},
        ]

        with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=merged_rows), \
             patch('mes_dashboard.services.resource_service.get_workcenter_groups', return_value=group_defs):
            matrix = get_workcenter_status_matrix()

        assert len(matrix) == 2
        first, second = matrix

        # Rows come back ordered by the group's configured sequence.
        assert first['workcenter_group'] == '焊接'
        assert first['total'] == 3
        assert first['PRD'] == 2
        assert first['SBY'] == 1

        assert second['workcenter_group'] == '成型'
        assert second['total'] == 1
        assert second['UDT'] == 1

    def test_handles_unknown_status(self):
        """Unrecognised status codes are bucketed under OTHER."""
        from mes_dashboard.services.resource_service import get_workcenter_status_matrix

        merged_rows = [
            {'WORKCENTER_GROUP': '焊接', 'EQUIPMENTASSETSSTATUS': 'CUSTOM_STATUS'},
        ]
        group_defs = [{'name': '焊接', 'sequence': 1}]

        with patch('mes_dashboard.services.resource_service.get_merged_resource_status', return_value=merged_rows), \
             patch('mes_dashboard.services.resource_service.get_workcenter_groups', return_value=group_defs):
            matrix = get_workcenter_status_matrix()

        assert len(matrix) == 1
        assert matrix[0]['OTHER'] == 1
|
||||
238
tests/test_sql_builder.py
Normal file
238
tests/test_sql_builder.py
Normal file
@@ -0,0 +1,238 @@
|
||||
"""Tests for Query Builder."""
|
||||
|
||||
import pytest
|
||||
|
||||
from mes_dashboard.sql.builder import QueryBuilder
|
||||
|
||||
|
||||
class TestQueryBuilder:
    """Behavioural tests for the QueryBuilder SQL condition builder."""

    def test_add_param_condition(self):
        """A simple equality condition binds one named parameter."""
        qb = QueryBuilder()
        qb.add_param_condition("status", "RUN")

        assert len(qb.conditions) == 1
        assert "status = :p0" in qb.conditions[0]
        assert qb.params["p0"] == "RUN"

    def test_add_param_condition_with_operator(self):
        """The comparison operator can be overridden."""
        qb = QueryBuilder()
        qb.add_param_condition("count", 10, operator=">=")

        assert "count >= :p0" in qb.conditions[0]
        assert qb.params["p0"] == 10

    def test_add_in_condition(self):
        """IN conditions bind one parameter per value."""
        qb = QueryBuilder()
        qb.add_in_condition("status", ["RUN", "QUEUE", "HOLD"])

        assert len(qb.conditions) == 1
        assert "status IN (:p0, :p1, :p2)" in qb.conditions[0]
        assert qb.params["p0"] == "RUN"
        assert qb.params["p1"] == "QUEUE"
        assert qb.params["p2"] == "HOLD"

    def test_add_in_condition_empty_list(self):
        """An empty value list adds nothing at all."""
        qb = QueryBuilder()
        qb.add_in_condition("status", [])

        assert len(qb.conditions) == 0
        assert len(qb.params) == 0

    def test_add_not_in_condition(self):
        """NOT IN conditions bind one parameter per excluded value."""
        qb = QueryBuilder()
        qb.add_not_in_condition("location", ["ATEC", "F區"])

        assert len(qb.conditions) == 1
        assert "location NOT IN (:p0, :p1)" in qb.conditions[0]
        assert qb.params["p0"] == "ATEC"
        assert qb.params["p1"] == "F區"

    def test_add_not_in_condition_with_null(self):
        """allow_null=True also accepts rows where the column IS NULL."""
        qb = QueryBuilder()
        qb.add_not_in_condition("location", ["ATEC"], allow_null=True)

        assert len(qb.conditions) == 1
        assert "(location IS NULL OR location NOT IN (:p0))" in qb.conditions[0]

    def test_add_like_condition_both(self):
        """The default LIKE wraps the term in wildcards on both sides."""
        qb = QueryBuilder()
        qb.add_like_condition("name", "test")

        assert "name LIKE :p0 ESCAPE '\\'" in qb.conditions[0]
        assert qb.params["p0"] == "%test%"

    def test_add_like_condition_start(self):
        """position='start' produces a prefix match."""
        qb = QueryBuilder()
        qb.add_like_condition("name", "prefix", position="start")

        assert qb.params["p0"] == "prefix%"

    def test_add_like_condition_end(self):
        """position='end' produces a suffix match."""
        qb = QueryBuilder()
        qb.add_like_condition("name", "suffix", position="end")

        assert qb.params["p0"] == "%suffix"

    def test_add_like_condition_escapes_wildcards(self):
        """A literal '%' inside the search term gets escaped."""
        qb = QueryBuilder()
        qb.add_like_condition("name", "test%value")

        assert qb.params["p0"] == "%test\\%value%"

    def test_add_like_condition_escapes_underscore(self):
        """A literal '_' inside the search term gets escaped."""
        qb = QueryBuilder()
        qb.add_like_condition("name", "test_value")

        assert qb.params["p0"] == "%test\\_value%"

    def test_build_with_conditions(self):
        """build() joins all conditions with AND inside a WHERE clause."""
        qb = QueryBuilder("SELECT * FROM t {{ WHERE_CLAUSE }}")
        qb.add_param_condition("status", "RUN")
        qb.add_in_condition("type", ["A", "B"])

        sql, params = qb.build()

        for fragment in ("WHERE", "status = :p0", "type IN (:p1, :p2)", "AND"):
            assert fragment in sql
        assert params["p0"] == "RUN"
        assert params["p1"] == "A"
        assert params["p2"] == "B"

    def test_build_without_conditions(self):
        """With no conditions the WHERE placeholder is removed entirely."""
        qb = QueryBuilder("SELECT * FROM t {{ WHERE_CLAUSE }}")
        sql, params = qb.build()

        assert "WHERE" not in sql
        assert "{{ WHERE_CLAUSE }}" not in sql
        assert params == {}

    def test_build_where_only(self):
        """build_where_only() returns just the WHERE clause and its params."""
        qb = QueryBuilder()
        qb.add_param_condition("status", "RUN")

        where_sql, _params = qb.build_where_only()

        assert where_sql.startswith("WHERE")
        assert "status = :p0" in where_sql

    def test_get_conditions_sql(self):
        """get_conditions_sql() joins conditions without the WHERE keyword."""
        qb = QueryBuilder()
        qb.add_param_condition("a", 1)
        qb.add_param_condition("b", 2)

        assert qb.get_conditions_sql() == "a = :p0 AND b = :p1"

    def test_reset(self):
        """reset() clears conditions/params/counter but keeps the base SQL."""
        qb = QueryBuilder("SELECT * FROM t")
        qb.add_param_condition("status", "RUN")
        qb.reset()

        assert len(qb.conditions) == 0
        assert len(qb.params) == 0
        assert qb._param_counter == 0
        assert qb.base_sql == "SELECT * FROM t"

    def test_method_chaining(self):
        """Builder methods return self so calls can be chained."""
        qb = (
            QueryBuilder("SELECT * FROM t {{ WHERE_CLAUSE }}")
            .add_param_condition("status", "RUN")
            .add_in_condition("type", ["A", "B"])
            .add_like_condition("name", "test")
        )

        assert len(qb.conditions) == 3

    def test_add_is_null(self):
        """IS NULL conditions take no bind parameters."""
        qb = QueryBuilder()
        qb.add_is_null("deleted_at")

        assert "deleted_at IS NULL" in qb.conditions[0]

    def test_add_is_not_null(self):
        """IS NOT NULL conditions take no bind parameters."""
        qb = QueryBuilder()
        qb.add_is_not_null("updated_at")

        assert "updated_at IS NOT NULL" in qb.conditions[0]

    def test_add_condition_fixed(self):
        """Raw conditions are stored verbatim and bind nothing."""
        qb = QueryBuilder()
        qb.add_condition("1=1")

        assert "1=1" in qb.conditions[0]
        assert len(qb.params) == 0

    def test_add_or_like_conditions(self):
        """Multiple LIKE terms combine into one parenthesised OR group."""
        qb = QueryBuilder()
        qb.add_or_like_conditions("name", ["foo", "bar", "baz"])

        assert len(qb.conditions) == 1
        clause = qb.conditions[0]
        assert "name LIKE :p0 ESCAPE '\\'" in clause
        assert "name LIKE :p1 ESCAPE '\\'" in clause
        assert "name LIKE :p2 ESCAPE '\\'" in clause
        assert " OR " in clause
        assert clause.startswith("(")
        assert clause.endswith(")")
        assert qb.params["p0"] == "%foo%"
        assert qb.params["p1"] == "%bar%"
        assert qb.params["p2"] == "%baz%"

    def test_add_or_like_conditions_case_insensitive(self):
        """case_insensitive=True uppercases both the column and the terms."""
        qb = QueryBuilder()
        qb.add_or_like_conditions("name", ["Foo", "BAR"], case_insensitive=True)

        clause = qb.conditions[0]
        assert "UPPER(name)" in clause
        assert qb.params["p0"] == "%FOO%"
        assert qb.params["p1"] == "%BAR%"

    def test_add_or_like_conditions_escapes_wildcards(self):
        """SQL wildcards inside OR LIKE terms are escaped."""
        qb = QueryBuilder()
        qb.add_or_like_conditions("name", ["test%val", "foo_bar"])

        assert qb.params["p0"] == "%test\\%val%"
        assert qb.params["p1"] == "%foo\\_bar%"

    def test_add_or_like_conditions_empty_list(self):
        """An empty term list adds nothing at all."""
        qb = QueryBuilder()
        qb.add_or_like_conditions("name", [])

        assert len(qb.conditions) == 0
        assert len(qb.params) == 0

    def test_add_or_like_conditions_position(self):
        """The position argument is honoured for OR LIKE terms."""
        qb = QueryBuilder()
        qb.add_or_like_conditions("name", ["test"], position="start")

        assert qb.params["p0"] == "test%"
|
||||
109
tests/test_sql_loader.py
Normal file
109
tests/test_sql_loader.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""Tests for SQL Loader."""
|
||||
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from mes_dashboard.sql.loader import SQLLoader
|
||||
|
||||
|
||||
class TestSQLLoader:
    """Behavioural tests for the file-backed SQLLoader cache."""

    def setup_method(self):
        """Start every test from an empty loader cache."""
        SQLLoader.clear_cache()

    def test_load_existing_file(self, tmp_path):
        """load() returns the contents of an existing .sql file."""
        (tmp_path / "wip").mkdir()
        (tmp_path / "wip" / "summary.sql").write_text("SELECT * FROM DWH.DW_MES_LOT_V")

        # Redirect the loader's root at our temporary directory.
        with patch.object(SQLLoader, "_sql_dir", tmp_path):
            assert SQLLoader.load("wip/summary") == "SELECT * FROM DWH.DW_MES_LOT_V"

    def test_load_nonexistent_file(self):
        """A missing file raises FileNotFoundError with a helpful message."""
        with pytest.raises(FileNotFoundError) as exc_info:
            SQLLoader.load("nonexistent/query")
        assert "SQL file not found" in str(exc_info.value)

    def test_load_uses_cache(self, tmp_path):
        """A second load of the same key is served from the cache."""
        (tmp_path / "test").mkdir()
        (tmp_path / "test" / "cached.sql").write_text("SELECT 1")

        with patch.object(SQLLoader, "_sql_dir", tmp_path):
            SQLLoader.clear_cache()

            first = SQLLoader.load("test/cached")
            stats_after_first = SQLLoader.cache_info()

            second = SQLLoader.load("test/cached")
            stats_after_second = SQLLoader.cache_info()

        assert first == second
        # One miss on the first read, one hit on the repeat.
        assert stats_after_first.misses == 1
        assert stats_after_second.hits == 1

    def test_load_with_params_substitutes_values(self, tmp_path):
        """{{ name }} placeholders are replaced by matching kwargs."""
        (tmp_path / "query.sql").write_text("SELECT * FROM {{ table_name }}")

        with patch.object(SQLLoader, "_sql_dir", tmp_path):
            rendered = SQLLoader.load_with_params("query", table_name="DWH.MY_TABLE")
        assert rendered == "SELECT * FROM DWH.MY_TABLE"

    def test_load_with_params_preserves_unsubstituted(self, tmp_path):
        """Placeholders without a matching kwarg are left untouched."""
        (tmp_path / "query.sql").write_text("SELECT * FROM {{ table_name }} {{ WHERE_CLAUSE }}")

        with patch.object(SQLLoader, "_sql_dir", tmp_path):
            rendered = SQLLoader.load_with_params("query", table_name="T")
        assert rendered == "SELECT * FROM T {{ WHERE_CLAUSE }}"

    def test_clear_cache(self, tmp_path):
        """clear_cache() drops every cached entry."""
        (tmp_path / "test.sql").write_text("SELECT 1")

        with patch.object(SQLLoader, "_sql_dir", tmp_path):
            SQLLoader.load("test")
            assert SQLLoader.cache_info().currsize > 0

            SQLLoader.clear_cache()
            assert SQLLoader.cache_info().currsize == 0

    def test_cache_info(self, tmp_path):
        """cache_info() exposes lru_cache-style statistics."""
        (tmp_path / "test.sql").write_text("SELECT 1")

        with patch.object(SQLLoader, "_sql_dir", tmp_path):
            SQLLoader.clear_cache()
            SQLLoader.load("test")
            stats = SQLLoader.cache_info()

        for attr in ("hits", "misses", "maxsize", "currsize"):
            assert hasattr(stats, attr)
        assert stats.maxsize == 100
|
||||
249
tests/test_template_integration.py
Normal file
249
tests/test_template_integration.py
Normal file
@@ -0,0 +1,249 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for template integration with _base.html.
|
||||
|
||||
Verifies that all templates properly extend _base.html and include
|
||||
required core JavaScript resources.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from mes_dashboard.app import create_app
|
||||
import mes_dashboard.core.database as db
|
||||
|
||||
|
||||
def _login_as_admin(client):
    """Mark the test client's session as an authenticated admin user."""
    admin_profile = {'displayName': 'Test Admin', 'employeeNo': 'A001'}
    with client.session_transaction() as sess:
        sess['admin'] = admin_profile
|
||||
|
||||
|
||||
class TestTemplateIntegration(unittest.TestCase):
    """Test that all templates properly extend _base.html.

    Every page must load the shared core scripts (toast.js, mes-api.js)
    and render the global toast container element supplied by _base.html.
    """

    def setUp(self):
        # Reset the module-level engine so each test builds a fresh
        # 'testing' app instead of reusing state from a previous test.
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()
        _login_as_admin(self.client)

    def _assert_includes_base_scripts(self, path):
        """GET *path* and assert the shared base-template assets render.

        Factored out: the six page tests previously duplicated this exact
        GET + three-assertion body verbatim.
        """
        response = self.client.get(path)
        self.assertEqual(response.status_code, 200)
        html = response.data.decode('utf-8')

        self.assertIn('toast.js', html)
        self.assertIn('mes-api.js', html)
        self.assertIn('mes-toast-container', html)

    def test_portal_includes_base_scripts(self):
        self._assert_includes_base_scripts('/')

    def test_wip_overview_includes_base_scripts(self):
        self._assert_includes_base_scripts('/wip-overview')

    def test_wip_detail_includes_base_scripts(self):
        self._assert_includes_base_scripts('/wip-detail')

    def test_tables_page_includes_base_scripts(self):
        self._assert_includes_base_scripts('/tables')

    def test_resource_page_includes_base_scripts(self):
        self._assert_includes_base_scripts('/resource')

    def test_excel_query_page_includes_base_scripts(self):
        self._assert_includes_base_scripts('/excel-query')
|
||||
|
||||
|
||||
class TestToastCSSIntegration(unittest.TestCase):
    """Test that Toast CSS styles are included in pages."""

    def setUp(self):
        # Reset the module-level engine so each test builds a fresh
        # 'testing' app instead of reusing state from a previous test.
        db._ENGINE = None
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()
        _login_as_admin(self.client)

    def _assert_includes_toast_css(self, path):
        """GET *path* and assert the toast CSS selectors appear in the page.

        Factored out: the page tests previously duplicated this body.
        """
        response = self.client.get(path)
        html = response.data.decode('utf-8')

        self.assertIn('.mes-toast-container', html)
        self.assertIn('.mes-toast', html)

    def test_portal_includes_toast_css(self):
        self._assert_includes_toast_css('/')

    def test_wip_overview_includes_toast_css(self):
        self._assert_includes_toast_css('/wip-overview')

    def test_wip_detail_includes_toast_css(self):
        self._assert_includes_toast_css('/wip-detail')
|
||||
|
||||
|
||||
class TestMesApiUsageInTemplates(unittest.TestCase):
    """Test that templates either inline MesApi usage or load Vite modules."""

    def setUp(self):
        """Build a fresh testing app and an authenticated client."""
        db._ENGINE = None  # reset cached engine so the testing config is re-read
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()
        _login_as_admin(self.client)

    def _page(self, endpoint):
        """Return the rendered body of *endpoint* as text."""
        return self.client.get(endpoint).data.decode('utf-8')

    def test_wip_overview_uses_mesapi(self):
        """Overview page uses MesApi inline or loads its built bundle."""
        html = self._page('/wip-overview')
        self.assertTrue('MesApi.get' in html or '/static/dist/wip-overview.js' in html)
        self.assertNotIn('fetchWithTimeout', html)

    def test_wip_detail_uses_mesapi(self):
        """Detail page uses MesApi inline or loads its built bundle."""
        html = self._page('/wip-detail')
        self.assertTrue('MesApi.get' in html or '/static/dist/wip-detail.js' in html)
        self.assertNotIn('fetchWithTimeout', html)

    def test_tables_page_uses_mesapi_or_vite_module(self):
        """Tables page uses MesApi inline or loads its built bundle."""
        html = self._page('/tables')
        self.assertTrue('MesApi.post' in html or '/static/dist/tables.js' in html)

    def test_resource_page_uses_mesapi_or_vite_module(self):
        """Resource page uses MesApi inline or loads its built bundle."""
        html = self._page('/resource')
        self.assertTrue('MesApi.post' in html or '/static/dist/resource-status.js' in html)
|
||||
|
||||
|
||||
class TestViteModuleFallbackIntegration(unittest.TestCase):
    """Ensure page templates support Vite module assets with inline fallback."""

    def setUp(self):
        """Build a fresh testing app and an authenticated client."""
        db._ENGINE = None  # reset cached engine so the testing config is re-read
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()
        _login_as_admin(self.client)

    def _render(self, endpoint, asset_exists):
        """GET *endpoint* while forcing the bundle-exists check to *asset_exists*."""
        with patch('mes_dashboard.app.os.path.exists', return_value=asset_exists):
            resp = self.client.get(endpoint)
            self.assertEqual(200, resp.status_code)
            return resp.data.decode('utf-8')

    def test_pages_render_inline_fallback_when_asset_missing(self):
        """Without a built bundle each page must inline its legacy script."""
        cases = [
            ('/wip-overview', 'function applyFilters'),
            ('/wip-detail', 'function init'),
            ('/hold-detail?reason=test-reason', 'function loadAllData'),
            ('/tables', 'function loadTableData'),
            ('/resource', 'function loadData'),
            ('/resource-history', 'function executeQuery'),
            ('/job-query', 'function queryJobs'),
            ('/excel-query', 'function uploadExcel'),
        ]
        for endpoint, marker in cases:
            html = self._render(endpoint, asset_exists=False)
            self.assertIn(marker, html)

    def test_pages_render_vite_module_when_asset_exists(self):
        """With a built bundle each page must load it as an ES module."""
        cases = [
            ('/wip-overview', 'wip-overview.js'),
            ('/wip-detail', 'wip-detail.js'),
            ('/hold-detail?reason=test-reason', 'hold-detail.js'),
            ('/tables', 'tables.js'),
            ('/resource', 'resource-status.js'),
            ('/resource-history', 'resource-history.js'),
            ('/job-query', 'job-query.js'),
            ('/excel-query', 'excel-query.js'),
        ]
        for endpoint, bundle in cases:
            html = self._render(endpoint, asset_exists=True)
            self.assertIn(f'/static/dist/{bundle}', html)
            self.assertIn('type="module"', html)
|
||||
|
||||
|
||||
class TestStaticFilesServing(unittest.TestCase):
    """Test that static JavaScript files are served correctly."""

    def setUp(self):
        """Build a fresh testing app and an authenticated client."""
        db._ENGINE = None  # reset cached engine so the testing config is re-read
        self.app = create_app('testing')
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()
        _login_as_admin(self.client)

    def _fetch(self, path, expect_status=None):
        """Return the body of *path* as text, optionally checking the status."""
        resp = self.client.get(path)
        if expect_status is not None:
            self.assertEqual(expect_status, resp.status_code)
        return resp.data.decode('utf-8')

    def test_toast_js_is_served(self):
        """toast.js is served and exposes the expected toast levels."""
        body = self._fetch('/static/js/toast.js', expect_status=200)
        for token in ('Toast', 'info', 'success', 'error', 'loading'):
            self.assertIn(token, body)

    def test_mes_api_js_is_served(self):
        """mes-api.js is served and exposes MesApi with timeout support."""
        body = self._fetch('/static/js/mes-api.js', expect_status=200)
        for token in ('MesApi', 'get', 'post', 'AbortController'):
            self.assertIn(token, body)

    def test_toast_js_contains_retry_button(self):
        """toast.js ships the retry-button wiring."""
        body = self._fetch('/static/js/toast.js')
        self.assertIn('retry', body)
        self.assertIn('mes-toast-retry', body)

    def test_mes_api_js_has_exponential_backoff(self):
        """mes-api.js references a base delay and retry logic."""
        body = self._fetch('/static/js/mes-api.js')
        self.assertIn('1000', body)
        self.assertIn('retry', body.lower())
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
337
tests/test_wip_routes.py
Normal file
337
tests/test_wip_routes.py
Normal file
@@ -0,0 +1,337 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for WIP API routes.
|
||||
|
||||
Tests the WIP API endpoints in wip_routes.py.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
import json
|
||||
|
||||
from mes_dashboard.app import create_app
|
||||
import mes_dashboard.core.database as db
|
||||
|
||||
|
||||
class TestWipRoutesBase(unittest.TestCase):
    """Base class for WIP routes tests."""

    def setUp(self):
        """Reset the cached DB engine and build a testing app + client."""
        db._ENGINE = None  # force re-creation under the 'testing' config
        app = create_app('testing')
        app.config['TESTING'] = True
        self.app = app
        self.client = app.test_client()
|
||||
|
||||
|
||||
class TestOverviewSummaryRoute(TestWipRoutesBase):
    """Test GET /api/wip/overview/summary endpoint."""

    def _call(self, url):
        """GET *url* and return (response, decoded JSON body)."""
        resp = self.client.get(url)
        return resp, json.loads(resp.data)

    @patch('mes_dashboard.routes.wip_routes.get_wip_summary')
    def test_returns_success_with_data(self, summary_mock):
        """Should return success=True with summary data."""
        summary_mock.return_value = {
            'totalLots': 9073,
            'totalQtyPcs': 858878718,
            'byWipStatus': {
                'run': {'lots': 8000, 'qtyPcs': 800000000},
                'queue': {'lots': 953, 'qtyPcs': 504645323},
                'hold': {'lots': 120, 'qtyPcs': 8213395},
            },
            'dataUpdateDate': '2026-01-26 19:18:29',
        }

        resp, body = self._call('/api/wip/overview/summary')

        self.assertEqual(200, resp.status_code)
        self.assertTrue(body['success'])
        self.assertEqual(9073, body['data']['totalLots'])
        self.assertEqual(120, body['data']['byWipStatus']['hold']['lots'])

    @patch('mes_dashboard.routes.wip_routes.get_wip_summary')
    def test_returns_error_on_failure(self, summary_mock):
        """Should return success=False and 500 on failure."""
        summary_mock.return_value = None

        resp, body = self._call('/api/wip/overview/summary')

        self.assertEqual(500, resp.status_code)
        self.assertFalse(body['success'])
        self.assertIn('error', body)
|
||||
|
||||
|
||||
class TestOverviewMatrixRoute(TestWipRoutesBase):
    """Test GET /api/wip/overview/matrix endpoint."""

    def _call(self, url):
        """GET *url* and return (response, decoded JSON body)."""
        resp = self.client.get(url)
        return resp, json.loads(resp.data)

    @patch('mes_dashboard.routes.wip_routes.get_wip_matrix')
    def test_returns_success_with_matrix(self, matrix_mock):
        """Should return success=True with matrix data."""
        matrix_mock.return_value = {
            'workcenters': ['切割', '焊接_DB'],
            'packages': ['SOT-23', 'SOD-323'],
            'matrix': {'切割': {'SOT-23': 50000000}},
            'workcenter_totals': {'切割': 50000000},
            'package_totals': {'SOT-23': 50000000},
            'grand_total': 50000000,
        }

        resp, body = self._call('/api/wip/overview/matrix')

        self.assertEqual(200, resp.status_code)
        self.assertTrue(body['success'])
        for key in ('workcenters', 'packages', 'matrix'):
            self.assertIn(key, body['data'])

    @patch('mes_dashboard.routes.wip_routes.get_wip_matrix')
    def test_returns_error_on_failure(self, matrix_mock):
        """Should return success=False and 500 on failure."""
        matrix_mock.return_value = None

        resp, body = self._call('/api/wip/overview/matrix')

        self.assertEqual(500, resp.status_code)
        self.assertFalse(body['success'])
|
||||
|
||||
|
||||
class TestOverviewHoldRoute(TestWipRoutesBase):
    """Test GET /api/wip/overview/hold endpoint."""

    def _call(self, url):
        """GET *url* and return (response, decoded JSON body)."""
        resp = self.client.get(url)
        return resp, json.loads(resp.data)

    @patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary')
    def test_returns_success_with_hold_items(self, hold_mock):
        """Should return success=True with hold items."""
        hold_mock.return_value = {
            'items': [
                {'reason': '特殊需求管控', 'lots': 44, 'qty': 4235060},
                {'reason': 'YieldLimit', 'lots': 21, 'qty': 1084443},
            ],
        }

        resp, body = self._call('/api/wip/overview/hold')

        self.assertEqual(200, resp.status_code)
        self.assertTrue(body['success'])
        self.assertEqual(2, len(body['data']['items']))

    @patch('mes_dashboard.routes.wip_routes.get_wip_hold_summary')
    def test_returns_error_on_failure(self, hold_mock):
        """Should return success=False and 500 on failure."""
        hold_mock.return_value = None

        resp, body = self._call('/api/wip/overview/hold')

        self.assertEqual(500, resp.status_code)
        self.assertFalse(body['success'])
|
||||
|
||||
|
||||
class TestDetailRoute(TestWipRoutesBase):
    """Test GET /api/wip/detail/<workcenter> endpoint."""

    def _call(self, url):
        """GET *url* and return (response, decoded JSON body)."""
        resp = self.client.get(url)
        return resp, json.loads(resp.data)

    @patch('mes_dashboard.routes.wip_routes.get_wip_detail')
    def test_returns_success_with_detail(self, detail_mock):
        """Should return success=True with detail data."""
        detail_mock.return_value = {
            'workcenter': '焊接_DB',
            'summary': {
                'total_lots': 859,
                'on_equipment_lots': 312,
                'waiting_lots': 547,
                'hold_lots': 15,
            },
            'specs': ['Spec1', 'Spec2'],
            'lots': [
                {'lot_id': 'GA25102485', 'equipment': 'GSMP-0054',
                 'status': 'ACTIVE', 'hold_reason': None,
                 'qty': 750, 'package': 'SOT-23', 'spec': 'Spec1'},
            ],
            'pagination': {
                'page': 1, 'page_size': 100,
                'total_count': 859, 'total_pages': 9,
            },
            'sys_date': '2026-01-26 19:18:29',
        }

        resp, body = self._call('/api/wip/detail/焊接_DB')

        self.assertEqual(200, resp.status_code)
        self.assertTrue(body['success'])
        self.assertEqual('焊接_DB', body['data']['workcenter'])
        for key in ('summary', 'lots', 'pagination'):
            self.assertIn(key, body['data'])

    @patch('mes_dashboard.routes.wip_routes.get_wip_detail')
    def test_passes_query_parameters(self, detail_mock):
        """Should pass query parameters to service function."""
        detail_mock.return_value = {
            'workcenter': '焊接_DB',
            'summary': {'total_lots': 100, 'on_equipment_lots': 50,
                        'waiting_lots': 50, 'hold_lots': 0},
            'specs': [],
            'lots': [],
            'pagination': {'page': 2, 'page_size': 50,
                           'total_count': 100, 'total_pages': 2},
            'sys_date': None,
        }

        self.client.get(
            '/api/wip/detail/焊接_DB?package=SOT-23&status=RUN&page=2&page_size=50'
        )

        detail_mock.assert_called_once_with(
            workcenter='焊接_DB',
            package='SOT-23',
            status='RUN',
            hold_type=None,
            workorder=None,
            lotid=None,
            include_dummy=False,
            page=2,
            page_size=50,
        )

    @patch('mes_dashboard.routes.wip_routes.get_wip_detail')
    def test_limits_page_size_to_500(self, detail_mock):
        """Page size should be capped at 500."""
        detail_mock.return_value = {
            'workcenter': '切割',
            'summary': {'total_lots': 0, 'on_equipment_lots': 0,
                        'waiting_lots': 0, 'hold_lots': 0},
            'specs': [],
            'lots': [],
            'pagination': {'page': 1, 'page_size': 500,
                           'total_count': 0, 'total_pages': 1},
            'sys_date': None,
        }

        self.client.get('/api/wip/detail/切割?page_size=1000')

        # The route must clamp the oversized request down to 500.
        self.assertEqual(500, detail_mock.call_args.kwargs['page_size'])

    @patch('mes_dashboard.routes.wip_routes.get_wip_detail')
    def test_handles_page_less_than_one(self, detail_mock):
        """Page number less than 1 should be set to 1."""
        detail_mock.return_value = {
            'workcenter': '切割',
            'summary': {'total_lots': 0, 'on_equipment_lots': 0,
                        'waiting_lots': 0, 'hold_lots': 0},
            'specs': [],
            'lots': [],
            'pagination': {'page': 1, 'page_size': 100,
                           'total_count': 0, 'total_pages': 1},
            'sys_date': None,
        }

        self.client.get('/api/wip/detail/切割?page=0')

        self.assertEqual(1, detail_mock.call_args.kwargs['page'])

    @patch('mes_dashboard.routes.wip_routes.get_wip_detail')
    def test_returns_error_on_failure(self, detail_mock):
        """Should return success=False and 500 on failure."""
        detail_mock.return_value = None

        resp, body = self._call('/api/wip/detail/不存在的工站')

        self.assertEqual(500, resp.status_code)
        self.assertFalse(body['success'])
|
||||
|
||||
|
||||
class TestMetaWorkcentersRoute(TestWipRoutesBase):
    """Test GET /api/wip/meta/workcenters endpoint."""

    def _call(self, url):
        """GET *url* and return (response, decoded JSON body)."""
        resp = self.client.get(url)
        return resp, json.loads(resp.data)

    @patch('mes_dashboard.routes.wip_routes.get_workcenters')
    def test_returns_success_with_workcenters(self, wcs_mock):
        """Should return success=True with workcenters list."""
        wcs_mock.return_value = [
            {'name': '切割', 'lot_count': 1377},
            {'name': '焊接_DB', 'lot_count': 859},
        ]

        resp, body = self._call('/api/wip/meta/workcenters')

        self.assertEqual(200, resp.status_code)
        self.assertTrue(body['success'])
        self.assertEqual(2, len(body['data']))
        self.assertEqual('切割', body['data'][0]['name'])

    @patch('mes_dashboard.routes.wip_routes.get_workcenters')
    def test_returns_error_on_failure(self, wcs_mock):
        """Should return success=False and 500 on failure."""
        wcs_mock.return_value = None

        resp, body = self._call('/api/wip/meta/workcenters')

        self.assertEqual(500, resp.status_code)
        self.assertFalse(body['success'])
|
||||
|
||||
|
||||
class TestMetaPackagesRoute(TestWipRoutesBase):
    """Test GET /api/wip/meta/packages endpoint."""

    def _call(self, url):
        """GET *url* and return (response, decoded JSON body)."""
        resp = self.client.get(url)
        return resp, json.loads(resp.data)

    @patch('mes_dashboard.routes.wip_routes.get_packages')
    def test_returns_success_with_packages(self, pkgs_mock):
        """Should return success=True with packages list."""
        pkgs_mock.return_value = [
            {'name': 'SOT-23', 'lot_count': 2234},
            {'name': 'SOD-323', 'lot_count': 1392},
        ]

        resp, body = self._call('/api/wip/meta/packages')

        self.assertEqual(200, resp.status_code)
        self.assertTrue(body['success'])
        self.assertEqual(2, len(body['data']))
        self.assertEqual('SOT-23', body['data'][0]['name'])

    @patch('mes_dashboard.routes.wip_routes.get_packages')
    def test_returns_error_on_failure(self, pkgs_mock):
        """Should return success=False and 500 on failure."""
        pkgs_mock.return_value = None

        resp, body = self._call('/api/wip/meta/packages')

        self.assertEqual(500, resp.status_code)
        self.assertFalse(body['success'])
|
||||
|
||||
|
||||
class TestPageRoutes(TestWipRoutesBase):
    """Test page routes for WIP dashboards."""

    def _status_of(self, endpoint):
        """Return the HTTP status code for a GET of *endpoint*."""
        return self.client.get(endpoint).status_code

    def test_wip_overview_page_exists(self):
        """GET /wip-overview should return 200."""
        self.assertEqual(200, self._status_of('/wip-overview'))

    def test_wip_detail_page_exists(self):
        """GET /wip-detail should return 200."""
        self.assertEqual(200, self._status_of('/wip-detail'))

    def test_wip_detail_page_with_workcenter(self):
        """GET /wip-detail?workcenter=xxx should return 200."""
        self.assertEqual(200, self._status_of('/wip-detail?workcenter=焊接_DB'))

    def test_old_wip_route_removed(self):
        """GET /wip should return 404 (route removed)."""
        self.assertEqual(404, self._status_of('/wip'))
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
767
tests/test_wip_service.py
Normal file
767
tests/test_wip_service.py
Normal file
@@ -0,0 +1,767 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for WIP service layer.
|
||||
|
||||
Tests the WIP query functions that use DW_MES_LOT_V view.
|
||||
"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
from functools import wraps
|
||||
import pandas as pd
|
||||
|
||||
from mes_dashboard.services.wip_service import (
|
||||
WIP_VIEW,
|
||||
get_wip_summary,
|
||||
get_wip_matrix,
|
||||
get_wip_hold_summary,
|
||||
get_wip_detail,
|
||||
get_workcenters,
|
||||
get_packages,
|
||||
search_workorders,
|
||||
search_lot_ids,
|
||||
)
|
||||
|
||||
|
||||
def disable_cache(func):
    """Decorator to disable Redis cache for Oracle fallback tests.

    Both cache getters are patched to return None for the duration of the
    wrapped test, forcing the service layer down the database path.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        no_wip_cache = patch(
            'mes_dashboard.services.wip_service.get_cached_wip_data',
            return_value=None,
        )
        no_sysdate_cache = patch(
            'mes_dashboard.services.wip_service.get_cached_sys_date',
            return_value=None,
        )
        with no_wip_cache, no_sysdate_cache:
            return func(*args, **kwargs)
    return wrapper
|
||||
|
||||
|
||||
class TestWipServiceConfig(unittest.TestCase):
    """Test WIP service configuration."""

    def test_wip_view_configured(self):
        """WIP_VIEW must point at the DWH lot view."""
        expected = "DWH.DW_MES_LOT_V"
        self.assertEqual(expected, WIP_VIEW)
|
||||
|
||||
|
||||
class TestGetWipSummary(unittest.TestCase):
    """Test get_wip_summary function."""

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_none_on_empty_result(self, fake_read):
        """An empty query result must yield None."""
        fake_read.return_value = pd.DataFrame()
        self.assertIsNone(get_wip_summary())

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_none_on_exception(self, fake_read):
        """A database failure must yield None rather than raise."""
        fake_read.side_effect = Exception("Database error")
        self.assertIsNone(get_wip_summary())
|
||||
|
||||
|
||||
|
||||
class TestGetWipMatrix(unittest.TestCase):
    """Test get_wip_matrix function."""

    @staticmethod
    def _df(rows):
        """Build a mock query result DataFrame from a list of row dicts."""
        return pd.DataFrame(rows)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_matrix_structure(self, fake_read):
        """Should return dict with matrix structure."""
        fake_read.return_value = self._df([
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'PACKAGE_LEF': 'SOT-23', 'QTY': 50000000},
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'PACKAGE_LEF': 'SOD-323', 'QTY': 30000000},
            {'WORKCENTER_GROUP': '焊接_DB', 'WORKCENTERSEQUENCE_GROUP': 2,
             'PACKAGE_LEF': 'SOT-23', 'QTY': 40000000},
        ])

        result = get_wip_matrix()

        self.assertIsNotNone(result)
        for key in ('workcenters', 'packages', 'matrix',
                    'workcenter_totals', 'package_totals', 'grand_total'):
            self.assertIn(key, result)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_workcenters_sorted_by_sequence(self, fake_read):
        """Workcenters should be sorted by WORKCENTERSEQUENCE_GROUP."""
        fake_read.return_value = self._df([
            {'WORKCENTER_GROUP': '焊接_DB', 'WORKCENTERSEQUENCE_GROUP': 2,
             'PACKAGE_LEF': 'SOT-23', 'QTY': 40000000},
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'PACKAGE_LEF': 'SOT-23', 'QTY': 50000000},
        ])

        result = get_wip_matrix()

        self.assertEqual(['切割', '焊接_DB'], result['workcenters'])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_packages_sorted_by_qty_desc(self, fake_read):
        """Packages should be sorted by total QTY descending."""
        fake_read.return_value = self._df([
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'PACKAGE_LEF': 'SOD-323', 'QTY': 30000000},
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'PACKAGE_LEF': 'SOT-23', 'QTY': 50000000},
        ])

        result = get_wip_matrix()

        # The package with the larger aggregate quantity must come first.
        self.assertEqual('SOT-23', result['packages'][0])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_structure_on_empty_result(self, fake_read):
        """Should return empty structure when no data."""
        fake_read.return_value = pd.DataFrame()

        result = get_wip_matrix()

        self.assertIsNotNone(result)
        self.assertEqual([], result['workcenters'])
        self.assertEqual([], result['packages'])
        self.assertEqual(0, result['grand_total'])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_calculates_totals_correctly(self, fake_read):
        """Should calculate workcenter and package totals correctly."""
        fake_read.return_value = self._df([
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'PACKAGE_LEF': 'SOT-23', 'QTY': 50000000},
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'PACKAGE_LEF': 'SOD-323', 'QTY': 30000000},
        ])

        result = get_wip_matrix()

        self.assertEqual(80000000, result['workcenter_totals']['切割'])
        self.assertEqual(50000000, result['package_totals']['SOT-23'])
        self.assertEqual(80000000, result['grand_total'])
|
||||
|
||||
|
||||
class TestGetWipHoldSummary(unittest.TestCase):
    """Test get_wip_hold_summary function."""

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_hold_items(self, fake_read):
        """Should return list of hold items."""
        fake_read.return_value = pd.DataFrame([
            {'REASON': 'YieldLimit', 'LOTS': 21, 'QTY': 1084443},
            {'REASON': '特殊需求管控', 'LOTS': 44, 'QTY': 4235060},
        ])

        result = get_wip_hold_summary()

        self.assertIsNotNone(result)
        self.assertIn('items', result)
        self.assertEqual(2, len(result['items']))
        first = result['items'][0]
        self.assertEqual('YieldLimit', first['reason'])
        self.assertEqual(21, first['lots'])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_items_on_no_holds(self, fake_read):
        """Should return empty items list when no holds."""
        fake_read.return_value = pd.DataFrame()

        result = get_wip_hold_summary()

        self.assertIsNotNone(result)
        self.assertEqual([], result['items'])
|
||||
|
||||
|
||||
class TestGetWipDetail(unittest.TestCase):
    """Test get_wip_detail function."""

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_none_on_empty_summary(self, fake_read):
        """An unknown workcenter (empty summary query) must yield None."""
        fake_read.return_value = pd.DataFrame()
        self.assertIsNone(get_wip_detail('不存在的工站'))
|
||||
|
||||
|
||||
class TestGetWorkcenters(unittest.TestCase):
    """Test get_workcenters function."""

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_workcenter_list(self, fake_read):
        """Should return list of workcenters with lot counts."""
        fake_read.return_value = pd.DataFrame([
            {'WORKCENTER_GROUP': '切割', 'WORKCENTERSEQUENCE_GROUP': 1,
             'LOT_COUNT': 1377},
            {'WORKCENTER_GROUP': '焊接_DB', 'WORKCENTERSEQUENCE_GROUP': 2,
             'LOT_COUNT': 859},
        ])

        result = get_workcenters()

        self.assertIsNotNone(result)
        self.assertEqual(2, len(result))
        head = result[0]
        self.assertEqual('切割', head['name'])
        self.assertEqual(1377, head['lot_count'])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_list_on_no_data(self, fake_read):
        """Should return empty list when no workcenters."""
        fake_read.return_value = pd.DataFrame()
        self.assertEqual([], get_workcenters())

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_none_on_exception(self, fake_read):
        """Should return None on exception."""
        fake_read.side_effect = Exception("Database error")
        self.assertIsNone(get_workcenters())
|
||||
|
||||
|
||||
class TestGetPackages(unittest.TestCase):
    """Test get_packages function."""

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_package_list(self, fake_read):
        """Should return list of packages with lot counts."""
        fake_read.return_value = pd.DataFrame([
            {'PACKAGE_LEF': 'SOT-23', 'LOT_COUNT': 2234},
            {'PACKAGE_LEF': 'SOD-323', 'LOT_COUNT': 1392},
        ])

        result = get_packages()

        self.assertIsNotNone(result)
        self.assertEqual(2, len(result))
        head = result[0]
        self.assertEqual('SOT-23', head['name'])
        self.assertEqual(2234, head['lot_count'])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_list_on_no_data(self, fake_read):
        """Should return empty list when no packages."""
        fake_read.return_value = pd.DataFrame()
        self.assertEqual([], get_packages())
|
||||
|
||||
|
||||
class TestSearchWorkorders(unittest.TestCase):
    """Test search_workorders function."""

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_matching_workorders(self, fake_read):
        """Should return list of matching WORKORDER values."""
        fake_read.return_value = pd.DataFrame(
            {'WORKORDER': ['GA26012001', 'GA26012002', 'GA26012003']}
        )

        matches = search_workorders('GA26')

        self.assertIsNotNone(matches)
        self.assertEqual(3, len(matches))
        self.assertEqual('GA26012001', matches[0])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_list_for_short_query(self, fake_read):
        """Queries shorter than 2 characters must short-circuit to []."""
        self.assertEqual([], search_workorders('G'))
        fake_read.assert_not_called()

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_list_for_empty_query(self, fake_read):
        """An empty query must short-circuit to [] without touching the DB."""
        self.assertEqual([], search_workorders(''))
        fake_read.assert_not_called()

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_list_on_no_matches(self, fake_read):
        """Should return empty list when no matches found."""
        fake_read.return_value = pd.DataFrame()
        self.assertEqual([], search_workorders('NONEXISTENT'))

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_respects_limit_parameter(self, fake_read):
        """Should respect the limit parameter."""
        fake_read.return_value = pd.DataFrame(
            {'WORKORDER': ['GA26012001', 'GA26012002']}
        )

        matches = search_workorders('GA26', limit=2)

        self.assertEqual(2, len(matches))

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_caps_limit_at_50(self, fake_read):
        """Should cap limit at 50."""
        fake_read.return_value = pd.DataFrame({'WORKORDER': ['GA26012001']})

        search_workorders('GA26', limit=100)

        # The bound parameters must carry row_limit=50, capped from 100,
        # whether they were passed positionally or by keyword.
        call = fake_read.call_args
        bound = call[0][1] if len(call[0]) > 1 else call[1].get('params', {})
        self.assertEqual(50, bound.get('row_limit'))

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_none_on_exception(self, fake_read):
        """Should return None on exception."""
        fake_read.side_effect = Exception("Database error")
        self.assertIsNone(search_workorders('GA26'))

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_excludes_dummy_by_default(self, fake_read):
        """Should exclude DUMMY lots by default."""
        fake_read.return_value = pd.DataFrame({'WORKORDER': []})

        search_workorders('GA26')

        sql = fake_read.call_args[0][0]
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_includes_dummy_when_specified(self, fake_read):
        """Should include DUMMY lots when include_dummy=True."""
        fake_read.return_value = pd.DataFrame({'WORKORDER': []})

        search_workorders('GA26', include_dummy=True)

        sql = fake_read.call_args[0][0]
        self.assertNotIn("LOTID NOT LIKE '%DUMMY%'", sql)
|
||||
|
||||
|
||||
class TestSearchLotIds(unittest.TestCase):
    """Tests for the search_lot_ids service function."""

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_matching_lotids(self, mock_read_sql):
        """Matching LOTID values come back as a plain list, in query order."""
        lot_ids = ['GA26012345-A00-001', 'GA26012345-A00-002']
        mock_read_sql.return_value = pd.DataFrame({'LOTID': lot_ids})

        result = search_lot_ids('GA26012345')

        self.assertIsNotNone(result)
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0], 'GA26012345-A00-001')

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_list_for_short_query(self, mock_read_sql):
        """Queries shorter than two characters short-circuit to []."""
        self.assertEqual(search_lot_ids('G'), [])
        # The database must never be touched for too-short queries.
        mock_read_sql.assert_not_called()

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_empty_list_on_no_matches(self, mock_read_sql):
        """An empty result set maps to an empty list, not None."""
        mock_read_sql.return_value = pd.DataFrame()
        self.assertEqual(search_lot_ids('NONEXISTENT'), [])

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_returns_none_on_exception(self, mock_read_sql):
        """A database failure surfaces as None rather than propagating."""
        mock_read_sql.side_effect = Exception("Database error")
        self.assertIsNone(search_lot_ids('GA26'))

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_excludes_dummy_by_default(self, mock_read_sql):
        """DUMMY lots are filtered out unless explicitly requested."""
        mock_read_sql.return_value = pd.DataFrame({'LOTID': []})

        search_lot_ids('GA26')

        executed_sql = mock_read_sql.call_args[0][0]
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", executed_sql)
|
||||
|
||||
|
||||
class TestWipSearchIndexShortcut(unittest.TestCase):
    """Tests for the derived search-index fast path in search_workorders."""

    @patch('mes_dashboard.services.wip_service._search_workorders_from_oracle')
    @patch('mes_dashboard.services.wip_service._get_wip_search_index')
    def test_workorder_search_uses_index_without_cross_filters(self, mock_index, mock_oracle):
        """With no cross-filters, matches come straight from the in-memory index."""
        mock_index.return_value = {
            "workorders": ["GA26012001", "GA26012002", "GB00000001"]
        }

        matches = search_workorders("GA26", limit=10)

        # Only the prefix matches are returned, and Oracle is never queried.
        self.assertEqual(matches, ["GA26012001", "GA26012002"])
        mock_oracle.assert_not_called()

    @patch('mes_dashboard.services.wip_service._search_workorders_from_oracle')
    @patch('mes_dashboard.services.wip_service._get_wip_search_index')
    def test_workorder_search_with_cross_filters_falls_back(self, mock_index, mock_oracle):
        """Cross-filters (e.g. package) force the Oracle fallback path."""
        mock_index.return_value = {
            "workorders": ["GA26012001", "GA26012002"]
        }
        mock_oracle.return_value = ["GA26012001"]

        matches = search_workorders("GA26", package="SOT-23")

        self.assertEqual(matches, ["GA26012001"])
        mock_oracle.assert_called_once()
|
||||
|
||||
|
||||
class TestDummyExclusionInAllFunctions(unittest.TestCase):
    """Verify the DUMMY-lot exclusion clause is applied by every WIP function.

    Each test drives a service function against a mocked read_sql_df and
    inspects the generated SQL text for the exclusion predicate.
    """

    @staticmethod
    def _summary_frame():
        """Build the single-row summary DataFrame shared by the summary tests.

        Extracted because the two get_wip_summary tests previously duplicated
        this 9-column literal byte-for-byte.
        """
        return pd.DataFrame({
            'TOTAL_LOTS': [100],
            'TOTAL_QTY_PCS': [1000],
            'RUN_LOTS': [80],
            'RUN_QTY_PCS': [800],
            'QUEUE_LOTS': [10],
            'QUEUE_QTY_PCS': [100],
            'HOLD_LOTS': [10],
            'HOLD_QTY_PCS': [100],
            'DATA_UPDATE_DATE': ['2026-01-26'],
        })

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_wip_summary_excludes_dummy_by_default(self, mock_read_sql):
        """get_wip_summary should exclude DUMMY by default."""
        mock_read_sql.return_value = self._summary_frame()

        get_wip_summary()

        sql = mock_read_sql.call_args[0][0]
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_wip_summary_includes_dummy_when_specified(self, mock_read_sql):
        """get_wip_summary should include DUMMY when include_dummy=True."""
        mock_read_sql.return_value = self._summary_frame()

        get_wip_summary(include_dummy=True)

        sql = mock_read_sql.call_args[0][0]
        self.assertNotIn("LOTID NOT LIKE '%DUMMY%'", sql)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_wip_matrix_excludes_dummy_by_default(self, mock_read_sql):
        """get_wip_matrix should exclude DUMMY by default."""
        mock_read_sql.return_value = pd.DataFrame({
            'WORKCENTER_GROUP': ['切割'],
            'WORKCENTERSEQUENCE_GROUP': [1],
            'PACKAGE_LEF': ['SOT-23'],
            'QTY': [1000],
        })

        get_wip_matrix()

        sql = mock_read_sql.call_args[0][0]
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_wip_hold_summary_excludes_dummy_by_default(self, mock_read_sql):
        """get_wip_hold_summary should exclude DUMMY by default."""
        mock_read_sql.return_value = pd.DataFrame({
            'REASON': ['YieldLimit'], 'LOTS': [10], 'QTY': [1000]
        })

        get_wip_hold_summary()

        sql = mock_read_sql.call_args[0][0]
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_workcenters_excludes_dummy_by_default(self, mock_read_sql):
        """get_workcenters should exclude DUMMY by default."""
        mock_read_sql.return_value = pd.DataFrame({
            'WORKCENTER_GROUP': ['切割'],
            'WORKCENTERSEQUENCE_GROUP': [1],
            'LOT_COUNT': [100],
        })

        get_workcenters()

        sql = mock_read_sql.call_args[0][0]
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql)

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_packages_excludes_dummy_by_default(self, mock_read_sql):
        """get_packages should exclude DUMMY by default."""
        mock_read_sql.return_value = pd.DataFrame({
            'PACKAGE_LEF': ['SOT-23'], 'LOT_COUNT': [100]
        })

        get_packages()

        sql = mock_read_sql.call_args[0][0]
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql)
|
||||
|
||||
|
||||
class TestMultipleFilterConditions(unittest.TestCase):
    """Verify multiple filter conditions combine into one parameterized query."""

    @staticmethod
    def _sql_and_params(mock_read_sql):
        """Return (sql, params) from the mocked read_sql_df call.

        Fix: the original extraction fell back to a bare ``{}`` and ignored a
        keyword ``params=`` argument, unlike the sibling test in
        test_caps_limit_at_50 — this handles both calling conventions.
        """
        args, kwargs = mock_read_sql.call_args
        sql = args[0]
        params = args[1] if len(args) > 1 else kwargs.get('params', {})
        return sql, params

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_wip_summary_with_all_filters(self, mock_read_sql):
        """get_wip_summary should combine all filter conditions via parameterized queries."""
        mock_read_sql.return_value = pd.DataFrame({
            'TOTAL_LOTS': [50],
            'TOTAL_QTY_PCS': [500],
            'RUN_LOTS': [40],
            'RUN_QTY_PCS': [400],
            'QUEUE_LOTS': [5],
            'QUEUE_QTY_PCS': [50],
            'HOLD_LOTS': [5],
            'HOLD_QTY_PCS': [50],
            'QUALITY_HOLD_LOTS': [3],
            'QUALITY_HOLD_QTY_PCS': [30],
            'NON_QUALITY_HOLD_LOTS': [2],
            'NON_QUALITY_HOLD_QTY_PCS': [20],
            'DATA_UPDATE_DATE': ['2026-01-26'],
        })

        get_wip_summary(workorder='GA26', lotid='A00')

        sql, params = self._sql_and_params(mock_read_sql)

        # SQL must carry parameterized LIKE conditions plus the DUMMY exclusion.
        self.assertIn("WORKORDER LIKE", sql)
        self.assertIn("LOTID LIKE", sql)
        self.assertIn("LOTID NOT LIKE '%DUMMY%'", sql)
        # Bound parameters must contain the wildcard-wrapped search patterns.
        self.assertTrue(any('%GA26%' in str(v) for v in params.values()))
        self.assertTrue(any('%A00%' in str(v) for v in params.values()))

    @disable_cache
    @patch('mes_dashboard.services.wip_service.read_sql_df')
    def test_get_wip_matrix_with_all_filters(self, mock_read_sql):
        """get_wip_matrix should combine all filter conditions via parameterized queries."""
        mock_read_sql.return_value = pd.DataFrame({
            'WORKCENTER_GROUP': ['切割'],
            'WORKCENTERSEQUENCE_GROUP': [1],
            'PACKAGE_LEF': ['SOT-23'],
            'QTY': [500],
        })

        get_wip_matrix(workorder='GA26', lotid='A00', include_dummy=True)

        sql, params = self._sql_and_params(mock_read_sql)

        self.assertIn("WORKORDER LIKE", sql)
        self.assertIn("LOTID LIKE", sql)
        # Should NOT contain the DUMMY exclusion since include_dummy=True.
        self.assertNotIn("LOTID NOT LIKE '%DUMMY%'", sql)
        self.assertTrue(any('%GA26%' in str(v) for v in params.values()))
        self.assertTrue(any('%A00%' in str(v) for v in params.values()))
|
||||
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class TestWipServiceIntegration:
    """Integration tests that hit the actual database.

    These tests are skipped by default. Run with:
        python -m pytest tests/test_wip_service.py -k Integration --run-integration

    NOTE(review): assertions like ``totalLots > 0`` depend on live database
    content; they will fail against an empty schema even when the code is
    correct.
    """

    @pytest.mark.integration
    def test_get_wip_summary_integration(self):
        """Integration test for get_wip_summary."""
        result = get_wip_summary()
        assert result is not None
        assert result['totalLots'] > 0
        assert 'dataUpdateDate' in result

    @pytest.mark.integration
    def test_get_wip_matrix_integration(self):
        """Integration test for get_wip_matrix."""
        result = get_wip_matrix()
        assert result is not None
        assert len(result['workcenters']) > 0
        assert result['grand_total'] > 0

    @pytest.mark.integration
    def test_get_wip_hold_summary_integration(self):
        """Integration test for get_wip_hold_summary."""
        result = get_wip_hold_summary()
        assert result is not None
        assert 'items' in result

    @pytest.mark.integration
    def test_get_wip_detail_integration(self):
        """Integration test for get_wip_detail."""
        # First get a valid workcenter so the detail query targets real data.
        workcenters = get_workcenters()
        assert workcenters is not None and len(workcenters) > 0

        wc_name = workcenters[0]['name']
        result = get_wip_detail(wc_name, page=1, page_size=10)

        assert result is not None
        assert result['workcenter'] == wc_name
        assert 'summary' in result
        assert 'lots' in result
        assert 'pagination' in result

    @pytest.mark.integration
    def test_get_workcenters_integration(self):
        """Integration test for get_workcenters."""
        result = get_workcenters()
        assert result is not None
        assert len(result) > 0
        assert 'name' in result[0]
        assert 'lot_count' in result[0]

    @pytest.mark.integration
    def test_get_packages_integration(self):
        """Integration test for get_packages."""
        result = get_packages()
        assert result is not None
        assert len(result) > 0
        assert 'name' in result[0]
        assert 'lot_count' in result[0]

    @pytest.mark.integration
    def test_search_workorders_integration(self):
        """Integration test for search_workorders."""
        # Use a common prefix that likely exists
        result = search_workorders('GA')
        assert result is not None
        # Should return a list (possibly empty if no GA* workorders)
        assert isinstance(result, list)

    @pytest.mark.integration
    def test_search_lot_ids_integration(self):
        """Integration test for search_lot_ids."""
        # Use a common prefix that likely exists
        result = search_lot_ids('GA')
        assert result is not None
        assert isinstance(result, list)

    @pytest.mark.integration
    def test_dummy_exclusion_integration(self):
        """Integration test to verify DUMMY exclusion works."""
        # Get summary with and without DUMMY
        result_without_dummy = get_wip_summary(include_dummy=False)
        result_with_dummy = get_wip_summary(include_dummy=True)

        assert result_without_dummy is not None
        assert result_with_dummy is not None

        # If there are DUMMY lots, with_dummy should have more
        # (or equal if no DUMMY lots exist)
        assert result_with_dummy['totalLots'] >= result_without_dummy['totalLots']

    @pytest.mark.integration
    def test_workorder_filter_integration(self):
        """Integration test for workorder filter."""
        # Get all data first
        all_result = get_wip_summary()
        assert all_result is not None

        # Search for a workorder that exists
        workorders = search_workorders('GA', limit=1)
        if workorders and len(workorders) > 0:
            # Filter by that workorder
            filtered_result = get_wip_summary(workorder=workorders[0])
            assert filtered_result is not None
            # Filtered count should be less than or equal to total
            assert filtered_result['totalLots'] <= all_result['totalLots']
|
||||
|
||||
|
||||
# Allow running this test module directly: `python tests/test_wip_service.py`.
if __name__ == "__main__":
    unittest.main()
|
||||
349
tests/test_workcenter_mapping.py
Normal file
349
tests/test_workcenter_mapping.py
Normal file
@@ -0,0 +1,349 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for workcenter mapping in filter_cache module.
|
||||
|
||||
Tests workcenter group lookup and mapping functionality.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pandas as pd
|
||||
|
||||
|
||||
class TestGetWorkcenterGroup:
    """Tests for get_workcenter_group."""

    @pytest.fixture(autouse=True)
    def reset_cache(self):
        """Clear all cached filter data before and after each test."""
        import mes_dashboard.services.filter_cache as fc
        cleared = {
            'workcenter_groups': None,
            'workcenter_mapping': None,
            'workcenter_to_short': None,
            'last_refresh': None,
            'is_loading': False,
        }
        with fc._CACHE_LOCK:
            fc._CACHE.update(cleared)
        yield
        with fc._CACHE_LOCK:
            fc._CACHE.update(cleared)

    def test_returns_group_for_valid_workcenter(self):
        """A known workcenter resolves to its group name."""
        import mes_dashboard.services.filter_cache as fc

        fake_mapping = {
            'DB-01': {'group': '焊接', 'sequence': 1},
            'WB-01': {'group': '焊線', 'sequence': 2},
        }

        with patch.object(fc, 'get_workcenter_mapping', return_value=fake_mapping):
            assert fc.get_workcenter_group('DB-01') == '焊接'

    def test_returns_none_for_unknown_workcenter(self):
        """An unknown workcenter resolves to None."""
        import mes_dashboard.services.filter_cache as fc

        fake_mapping = {
            'DB-01': {'group': '焊接', 'sequence': 1},
        }

        with patch.object(fc, 'get_workcenter_mapping', return_value=fake_mapping):
            assert fc.get_workcenter_group('UNKNOWN') is None

    def test_returns_none_when_mapping_unavailable(self):
        """When the mapping cannot be loaded, the lookup returns None."""
        import mes_dashboard.services.filter_cache as fc

        with patch.object(fc, 'get_workcenter_mapping', return_value=None):
            assert fc.get_workcenter_group('DB-01') is None
|
||||
|
||||
|
||||
class TestGetWorkcenterShort:
    """Tests for get_workcenter_short."""

    @pytest.fixture(autouse=True)
    def reset_cache(self):
        """Clear all cached filter data before and after each test."""
        import mes_dashboard.services.filter_cache as fc
        cleared = {
            'workcenter_groups': None,
            'workcenter_mapping': None,
            'workcenter_to_short': None,
            'last_refresh': None,
            'is_loading': False,
        }
        with fc._CACHE_LOCK:
            fc._CACHE.update(cleared)
        yield
        with fc._CACHE_LOCK:
            fc._CACHE.update(cleared)

    def _prime_cache(self, fc, short_map):
        """Populate the cache directly so no database load is triggered."""
        from datetime import datetime
        with fc._CACHE_LOCK:
            fc._CACHE['workcenter_to_short'] = short_map
            fc._CACHE['workcenter_groups'] = [{'name': '焊接', 'sequence': 1}]
            fc._CACHE['workcenter_mapping'] = {}
            fc._CACHE['last_refresh'] = datetime.now()

    def test_returns_short_name_for_valid_workcenter(self):
        """A known workcenter resolves to its short name."""
        import mes_dashboard.services.filter_cache as fc

        self._prime_cache(fc, {'DB-01': 'DB', 'WB-01': 'WB'})

        assert fc.get_workcenter_short('DB-01') == 'DB'

    def test_returns_none_for_unknown_workcenter(self):
        """An unknown workcenter resolves to None."""
        import mes_dashboard.services.filter_cache as fc

        self._prime_cache(fc, {'DB-01': 'DB'})

        assert fc.get_workcenter_short('UNKNOWN') is None
|
||||
|
||||
|
||||
class TestGetWorkcentersByGroup:
    """Tests for get_workcenters_by_group."""

    def test_returns_workcenters_in_group(self):
        """All workcenters belonging to the requested group are returned."""
        import mes_dashboard.services.filter_cache as fc

        fake_mapping = {
            'DB-01': {'group': '焊接', 'sequence': 1},
            'DB-02': {'group': '焊接', 'sequence': 1},
            'WB-01': {'group': '焊線', 'sequence': 2},
        }

        with patch.object(fc, 'get_workcenter_mapping', return_value=fake_mapping):
            members = fc.get_workcenters_by_group('焊接')

        assert len(members) == 2
        assert 'DB-01' in members
        assert 'DB-02' in members
        # Members of other groups must not leak in.
        assert 'WB-01' not in members

    def test_returns_empty_for_unknown_group(self):
        """An unknown group yields an empty list."""
        import mes_dashboard.services.filter_cache as fc

        fake_mapping = {
            'DB-01': {'group': '焊接', 'sequence': 1},
        }

        with patch.object(fc, 'get_workcenter_mapping', return_value=fake_mapping):
            assert fc.get_workcenters_by_group('UNKNOWN') == []

    def test_returns_empty_when_mapping_unavailable(self):
        """When the mapping cannot be loaded, the lookup yields an empty list."""
        import mes_dashboard.services.filter_cache as fc

        with patch.object(fc, 'get_workcenter_mapping', return_value=None):
            assert fc.get_workcenters_by_group('焊接') == []
|
||||
|
||||
|
||||
class TestGetWorkcentersForGroups:
    """Tests for get_workcenters_for_groups."""

    def test_returns_workcenters_for_multiple_groups(self):
        """The union of workcenters across the requested groups is returned."""
        import mes_dashboard.services.filter_cache as fc

        fake_mapping = {
            'DB-01': {'group': '焊接', 'sequence': 1},
            'WB-01': {'group': '焊線', 'sequence': 2},
            'MD-01': {'group': '成型', 'sequence': 3},
        }

        with patch.object(fc, 'get_workcenter_mapping', return_value=fake_mapping):
            members = fc.get_workcenters_for_groups(['焊接', '焊線'])

        assert len(members) == 2
        assert 'DB-01' in members
        assert 'WB-01' in members
        # '成型' was not requested, so its workcenter must be absent.
        assert 'MD-01' not in members

    def test_returns_empty_for_empty_groups_list(self):
        """Requesting no groups yields an empty list."""
        import mes_dashboard.services.filter_cache as fc

        fake_mapping = {
            'DB-01': {'group': '焊接', 'sequence': 1},
        }

        with patch.object(fc, 'get_workcenter_mapping', return_value=fake_mapping):
            assert fc.get_workcenters_for_groups([]) == []
|
||||
|
||||
|
||||
class TestGetWorkcenterGroups:
    """Test get_workcenter_groups function."""

    @pytest.fixture(autouse=True)
    def reset_cache(self):
        """Reset cache state before each test."""
        import mes_dashboard.services.filter_cache as fc
        with fc._CACHE_LOCK:
            fc._CACHE['workcenter_groups'] = None
            fc._CACHE['workcenter_mapping'] = None
            fc._CACHE['workcenter_to_short'] = None
            fc._CACHE['last_refresh'] = None
            fc._CACHE['is_loading'] = False
        yield
        with fc._CACHE_LOCK:
            fc._CACHE['workcenter_groups'] = None
            fc._CACHE['workcenter_mapping'] = None
            fc._CACHE['workcenter_to_short'] = None
            fc._CACHE['last_refresh'] = None
            fc._CACHE['is_loading'] = False

    def test_returns_groups_sorted_by_sequence(self):
        """Test that all cached groups are returned.

        NOTE(review): despite the test name, only membership is asserted —
        neither sequence order nor stored order is checked. Confirm whether
        get_workcenter_groups is expected to sort by 'sequence' and, if so,
        strengthen the assertion to check ordering.
        """
        import mes_dashboard.services.filter_cache as fc
        from datetime import datetime

        # Set up cache directly (deliberately out of sequence order).
        with fc._CACHE_LOCK:
            fc._CACHE['workcenter_groups'] = [
                {'name': '成型', 'sequence': 3},
                {'name': '焊接', 'sequence': 1},
                {'name': '焊線', 'sequence': 2},
            ]
            fc._CACHE['workcenter_mapping'] = {}
            fc._CACHE['workcenter_to_short'] = {}
            fc._CACHE['last_refresh'] = datetime.now()

        result = fc.get_workcenter_groups()

        # Should preserve original order (as stored)
        assert len(result) == 3
        names = [g['name'] for g in result]
        assert '成型' in names
        assert '焊接' in names
        assert '焊線' in names
|
||||
|
||||
|
||||
class TestLoadWorkcenterMappingFromSpec:
    """Tests for _load_workcenter_mapping_from_spec."""

    def test_builds_mapping_from_spec_view(self):
        """SPEC_WORKCENTER_V rows are folded into groups, mapping and short names."""
        import mes_dashboard.services.filter_cache as fc

        spec_rows = pd.DataFrame({
            'WORK_CENTER': ['DB-01', 'DB-02', 'WB-01'],
            'WORK_CENTER_GROUP': ['焊接', '焊接', '焊線'],
            'WORKCENTERSEQUENCE_GROUP': [1, 1, 2],
            'WORK_CENTER_SHORT': ['DB', 'DB', 'WB'],
        })

        with patch.object(fc, 'read_sql_df', return_value=spec_rows):
            groups, mapping, short_mapping = fc._load_workcenter_mapping_from_spec()

        # Two distinct groups should be derived from the three rows.
        assert len(groups) == 2
        derived_names = [g['name'] for g in groups]
        assert '焊接' in derived_names
        assert '焊線' in derived_names

        # Every workcenter maps to its group.
        assert len(mapping) == 3
        assert mapping['DB-01']['group'] == '焊接'
        assert mapping['WB-01']['group'] == '焊線'

        # Every workcenter maps to its short name.
        assert short_mapping['DB-01'] == 'DB'
        assert short_mapping['WB-01'] == 'WB'

    def test_returns_empty_when_no_data(self):
        """A None query result yields empty structures, not an error."""
        import mes_dashboard.services.filter_cache as fc

        with patch.object(fc, 'read_sql_df', return_value=None):
            groups, mapping, short_mapping = fc._load_workcenter_mapping_from_spec()

        assert groups == []
        assert mapping == {}
        assert short_mapping == {}

    def test_handles_empty_dataframe(self):
        """An empty (but well-formed) DataFrame also yields empty structures."""
        import mes_dashboard.services.filter_cache as fc

        empty_rows = pd.DataFrame(
            columns=['WORK_CENTER', 'WORK_CENTER_GROUP',
                     'WORKCENTERSEQUENCE_GROUP', 'WORK_CENTER_SHORT'])

        with patch.object(fc, 'read_sql_df', return_value=empty_rows):
            groups, mapping, short_mapping = fc._load_workcenter_mapping_from_spec()

        assert groups == []
        assert mapping == {}
        assert short_mapping == {}
|
||||
|
||||
|
||||
class TestGetCacheStatus:
    """Tests for get_cache_status."""

    @pytest.fixture(autouse=True)
    def reset_cache(self):
        """Clear all cached filter data before and after each test."""
        import mes_dashboard.services.filter_cache as fc
        cleared = {
            'workcenter_groups': None,
            'workcenter_mapping': None,
            'workcenter_to_short': None,
            'last_refresh': None,
            'is_loading': False,
        }
        with fc._CACHE_LOCK:
            fc._CACHE.update(cleared)
        yield
        with fc._CACHE_LOCK:
            fc._CACHE.update(cleared)

    def test_returns_not_loaded_when_empty(self):
        """An empty cache reports loaded=False and no refresh timestamp."""
        import mes_dashboard.services.filter_cache as fc

        status = fc.get_cache_status()

        assert status['loaded'] is False
        assert status['last_refresh'] is None

    def test_returns_loaded_when_data_exists(self):
        """A populated cache reports loaded=True with counts and a timestamp."""
        import mes_dashboard.services.filter_cache as fc
        from datetime import datetime

        refreshed_at = datetime.now()
        with fc._CACHE_LOCK:
            fc._CACHE['workcenter_groups'] = [{'name': 'G1', 'sequence': 1}]
            fc._CACHE['workcenter_mapping'] = {'WC1': {'group': 'G1', 'sequence': 1}}
            fc._CACHE['last_refresh'] = refreshed_at

        status = fc.get_cache_status()

        assert status['loaded'] is True
        assert status['last_refresh'] is not None
        assert status['workcenter_groups_count'] == 1
        assert status['workcenter_mapping_count'] == 1
|
||||
Reference in New Issue
Block a user