diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..f64acdd --- /dev/null +++ b/.dockerignore @@ -0,0 +1,66 @@ +# Git +.git +.gitignore + +# Virtual environment +venv/ +env/ +.env.local +.env.development +.env.production + +# Python cache +__pycache__/ +*.pyc +*.pyo +*.pyd +.Python +*.egg-info/ +dist/ +build/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# Logs +*.log +logs/ + +# OS +.DS_Store +Thumbs.db + +# Documentation +*.md +docs/ + +# Test files +tests/ +.pytest_cache/ + +# Coverage +.coverage +htmlcov/ + +# Node modules (for frontend) +frontend/node_modules/ +frontend/.npm +frontend/.next/ +frontend/out/ +frontend/dist/ + +# Uploads (will be mounted as volume) +uploads/* +!uploads/.gitkeep + +# Docker +Dockerfile* +docker-compose* +.dockerignore + +# Database +*.db +*.sqlite3 \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..0a2b699 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,50 @@ +FROM node:20-alpine AS frontend-builder + +# Build frontend +WORKDIR /app/frontend +COPY frontend/package*.json ./ +RUN npm ci + +COPY frontend/ ./ +# Build for production with relative API paths +RUN npm run build + +# Main container with Python and built frontend +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + curl \ + ffmpeg \ + && rm -rf /var/lib/apt/lists/* + +# Copy and install Python dependencies +COPY requirements.txt ./ +RUN pip install --no-cache-dir -r requirements.txt + +# Copy backend application +COPY . ./ + +# Copy built frontend from builder stage +COPY --from=frontend-builder /app/frontend/dist ./frontend/dist + +# Create necessary directories +RUN mkdir -p uploads + +# Set environment variables +ENV PYTHONPATH=/app +ENV FLASK_APP=app.py +ENV FLASK_ENV=production + +# Expose single port +EXPOSE 12015 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --retries=3 --start-period=40s \ + CMD curl -f http://localhost:12015/api/health || exit 1 + +# Run with Gunicorn for production +CMD ["gunicorn", "--bind", "0.0.0.0:12015", "--worker-class", "gthread", "--workers", "4", "--threads", "8", "--timeout", "120", "--keep-alive", "10", "--max-requests", "2000", "--max-requests-jitter", "200", "--forwarded-allow-ips", "*", "--access-logfile", "-", "app:app"] \ No newline at end of file diff --git a/Dockerfile.redis b/Dockerfile.redis new file mode 100644 index 0000000..2cfc800 --- /dev/null +++ b/Dockerfile.redis @@ -0,0 +1,17 @@ +# Redis for AI Meeting Assistant +FROM redis:7-alpine + +# Set container labels for identification +LABEL application="ai-meeting-assistant" +LABEL component="redis" +LABEL version="v2.1" +LABEL maintainer="PANJIT IT Team" + +# Copy custom redis configuration if needed +# COPY redis.conf /usr/local/etc/redis/redis.conf + +# Expose the default Redis port +EXPOSE 6379 + +# Use the default Redis entrypoint +# CMD ["redis-server", "/usr/local/etc/redis/redis.conf"] \ No newline at end of file diff --git a/README.md b/README.md index 4583764..9e7f689 100644 --- a/README.md +++ b/README.md @@ -1,106 +1,212 @@ -# AI Meeting Assistant +# AI Meeting Assistant V2.1 -An intelligent meeting assistant designed to streamline your workflow by transcribing, summarizing, and managing action items from your meetings. This full-stack application leverages a Python Flask backend for robust API services and a React frontend for a dynamic user experience. 
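
As a hedged aside on the Dockerfile above: its gunicorn CMD packs all tuning into one line. The same settings can be written as an equivalent `gunicorn.conf.py` (not part of this repository, shown only to document what each flag configures):

```python
# Equivalent of the Dockerfile's gunicorn CMD, expressed as a config file.
# This file does not exist in the repo; it only documents the flags above.
bind = "0.0.0.0:12015"
worker_class = "gthread"       # threaded workers suit I/O-bound Dify API calls
workers = 4
threads = 8
timeout = 120                  # allow long-running AI requests
keepalive = 10
max_requests = 2000            # periodically recycle workers to cap memory growth
max_requests_jitter = 200
forwarded_allow_ips = "*"      # trust X-Forwarded-* headers from the nginx proxy
accesslog = "-"                # access log to stdout for `docker-compose logs`
```
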
+一個智能會議助手系統,旨在通過轉錄、摘要和管理您會議中的行動項目來簡化您的工作流程。此全棧應用程式使用Python Flask後端提供強大的API服務,React前端提供動態用戶體驗。 -## Key Features +## 🔑 主要功能 -- **User Authentication**: Secure login and registration system with role-based access control (Admin, User). -- **Meeting Management**: Create, view, and manage meetings. Upload transcripts or generate them from audio. -- **AI-Powered Summary**: Automatically generate concise summaries from lengthy meeting transcripts. -- **Action Item Tracking**: Automatically preview and batch-create action items from summaries. Manually add, edit, and delete action items with assigned owners and due dates. -- **Permission Control**: Granular permissions for editing and deleting meetings and action items based on user roles (Admin, Meeting Owner, Action Item Owner). -- **File Processing Tools**: Independent tools for audio extraction, transcription, and text translation. +- **LDAP/AD 認證**: 整合企業Active Directory進行安全登入,支援本地備用認證 +- **用戶管理**: 基於角色的訪問控制(管理員、用戶),管理員可刪除用戶帳號 +- **會議管理**: 創建、查看和管理會議,上傳轉錄或從音頻生成轉錄 +- **AI智能摘要**: 從冗長的會議轉錄自動生成簡潔摘要 +- **行動項目追蹤**: 自動預覽並批量創建摘要中的行動項目,手動添加、編輯和刪除行動項目並分配負責人和截止日期 +- **權限控制**: 基於用戶角色(管理員、會議所有者、行動項目所有者)的精細權限管理 +- **檔案處理工具**: 獨立的音頻提取、轉錄和文本翻譯工具 -## Tech Stack +## 🏗️ 技術棧 -**Backend:** -- **Framework**: Flask -- **Database**: SQLAlchemy with Flask-Migrate for schema migrations. -- **Authentication**: Flask-JWT-Extended for token-based security. -- **Async Tasks**: Celery with Redis/RabbitMQ for handling long-running AI tasks. -- **API**: RESTful API design. +**後端:** +- **框架**: Flask + Gunicorn +- **資料庫**: MySQL (生產環境) + SQLAlchemy ORM +- **認證**: Flask-JWT-Extended + LDAP整合 +- **異步任務**: Celery + Redis +- **API**: RESTful API設計 -**Frontend:** -- **Framework**: React.js -- **UI Library**: Material-UI (MUI) -- **Tooling**: Vite -- **API Communication**: Axios +**前端:** +- **框架**: React.js +- **UI庫**: Material-UI (MUI) +- **構建工具**: Vite +- **API通訊**: Axios -## Prerequisites +**部署:** +- **容器化**: Docker + Docker Compose +- **服務編排**: Redis, Backend, Celery Worker, Celery Flower, Frontend +- **生產就緒**: 包含健康檢查和資源限制 -- Python 3.10+ -- Node.js 20.x+ -- A message broker for Celery (e.g., Redis or RabbitMQ) +## 📋 系統需求 -## Installation & Setup +- Docker Desktop (Windows/macOS) 或 Docker Engine (Linux) +- Docker Compose V2 +- 至少4GB可用記憶體 +- 企業Active Directory (LDAP) 服務器訪問權限 -### 1. 
Backend Setup +## 🚀 快速部署 + +### 方法一:一鍵部署(推薦) -Clone the repository: ```bash +# 克隆專案 git clone -cd AI_meeting_assistant_-_V2.1 +cd AI_meeting_assistant-V2.1 + +# 啟動所有服務(強制重建以確保使用最新代碼) +docker-compose up -d --build + +# 檢查服務狀態 +docker-compose ps + +# 停止服務 +docker-compose down + +# 查看日誌 +docker-compose logs -f ``` -Create a virtual environment and install dependencies: +### 方法二:開發環境設置 + +如需自定義配置或開發調試,請參考下方的詳細設置說明。 + +## 🔧 詳細配置 + +### 環境變數配置 + +主要配置已在`docker-compose.yml`中設定,如需修改: + +```yaml +# 資料庫配置 +DATABASE_URL: mysql+pymysql://username:password@host:port/database + +# LDAP配置 +LDAP_SERVER: your-domain.com +LDAP_PORT: 389 +LDAP_BIND_USER_DN: CN=LdapBind,CN=Users,DC=DOMAIN,DC=COM +LDAP_BIND_USER_PASSWORD: your-bind-password +LDAP_SEARCH_BASE: OU=Users,DC=domain,DC=com +LDAP_USER_LOGIN_ATTR: userPrincipalName + +# JWT配置 +JWT_SECRET_KEY: your-super-secret-key + +# AI服務配置(Dify API) +DIFY_API_BASE_URL: https://your-dify-server.com/v1 +DIFY_STT_API_KEY: app-xxxxxxxxxx +DIFY_TRANSLATOR_API_KEY: app-xxxxxxxxxx +DIFY_SUMMARIZER_API_KEY: app-xxxxxxxxxx +DIFY_ACTION_EXTRACTOR_API_KEY: app-xxxxxxxxxx +``` + +### 服務端口 + +- **前端**: http://localhost:12015 +- **後端API**: http://localhost:5000 +- **Celery Flower監控**: http://localhost:5555 +- **Redis**: localhost:6379 + +## 👥 用戶角色與權限 + +### 管理員權限 +- 查看所有用戶列表 +- 刪除用戶帳號(除自己外) +- 管理所有會議和行動項目 +- 修改任何會議狀態 + +### 一般用戶權限 +- 管理自己創建的會議 +- 編輯分配給自己的行動項目 +- 查看有權限的會議內容 + +### 預設管理員 +- 系統預設將 `ymirliu@panjit.com.tw` 設為管理員角色 +- 其他AD帳號預設為一般用戶角色 + +## 🔧 維護與監控 + +### 查看服務日誌 ```bash -# For Windows -python -m venv venv -venv\Scripts\activate +# 查看所有服務日誌 +docker-compose logs -f -# For macOS/Linux -python3 -m venv venv -source venv/bin/activate +# 查看特定服務日誌 +docker-compose logs -f backend +docker-compose logs -f celery-worker +``` +### 健康檢查 +系統包含自動健康檢查: +- Backend: HTTP健康檢查 +- Frontend: HTTP健康檢查 +- Celery: 程序狀態監控 + +### 備份與恢復 +```bash +# 資料庫備份(需要mysql客戶端) +mysqldump -h mysql.theaken.com -P 33306 -u A060 -p db_A060 > backup.sql + +# 檔案備份 +docker-compose exec backend tar -czf /app/uploads/backup.tar.gz /app/uploads +``` + +## 🛠️ 開發指南 + +### 本地開發設置 + +1. **後端開發**: +```bash +# 安裝依賴 pip install -r requirements.txt -``` -Create a `.env` file by copying `.env.example` (if provided) or creating a new one. Configure the following: -``` -FLASK_APP=app.py -SECRET_KEY=your_super_secret_key -SQLALCHEMY_DATABASE_URI=sqlite:///meetings.db # Or your preferred database connection string -CELERY_BROKER_URL=redis://localhost:6379/0 -CELERY_RESULT_BACKEND=redis://localhost:6379/0 -``` +# 啟動Flask開發服務器 +flask run --port 5000 -Initialize and apply database migrations: -```bash -flask db init # Only if you don't have a 'migrations' folder -flask db migrate -m "Initial migration" -flask db upgrade -``` - -### 2. Frontend Setup - -Navigate to the frontend directory and install dependencies: -```bash -cd frontend -npm install -``` - -## Running the Application - -The application requires three separate processes to be running: the Flask server, the Celery worker, and the Vite frontend server. - -**1. Start the Flask Backend Server:** -```bash -# From the project root directory -flask run -``` -The API server will be running on `http://127.0.0.1:5000`. - -**2. Start the Celery Worker:** -Open a new terminal, activate the virtual environment, and run: -```bash -# From the project root directory +# 啟動Celery Worker celery -A celery_worker.celery worker --loglevel=info ``` -**3. Start the React Frontend Server:** -Open a third terminal and run: +2. 
**前端開發**: ```bash -# From the 'frontend' directory +cd frontend +npm install npm run dev ``` -The frontend application will be available at `http://localhost:5173`. Open this URL in your browser. + +### 資料庫遷移 +```bash +# 建立新遷移 +docker-compose exec backend flask db migrate -m "Description" + +# 應用遷移 +docker-compose exec backend flask db upgrade +``` + +## 🐛 疑難排解 + +### 常見問題 + +**1. LDAP認證失敗** +- 檢查LDAP服務器連接性 +- 驗證綁定用戶憑證 +- 確認搜索基準DN正確 + +**2. Celery任務無響應** +- 檢查Redis服務狀態 +- 重啟Celery Worker: `docker-compose restart celery-worker` +- 查看Worker日誌: `docker-compose logs celery-worker` + +**3. 前端無法連接後端** +- 確認後端服務運行在5000端口 +- 檢查防火牆設置 +- 驗證API base URL配置 + +### 獲取支援 +- 查看服務日誌進行初步診斷 +- 檢查系統資源使用情況 +- 聯繫IT管理員協助LDAP配置問題 + +## 📄 授權 + +此專案為企業內部使用,請遵守公司軟體使用政策。 + +--- + +**版本**: V2.1 +**最後更新**: 2025-09-18 +**維護團隊**: PANJIT IT Team \ No newline at end of file diff --git a/USER_MANUAL.md b/USER_MANUAL.md deleted file mode 100644 index 9a9c51d..0000000 --- a/USER_MANUAL.md +++ /dev/null @@ -1,60 +0,0 @@ -# AI Meeting Assistant - User Manual - -Welcome to the AI Meeting Assistant! This guide will walk you through the main features of the application and how to use them effectively. - -## 1. Getting Started: Login and Registration - -- **Registration**: If you are a new user, click on the "Register" link on the login page. You will need to provide a unique username and a password to create your account. -- **Login**: Once you have an account, enter your username and password on the login page to access the application. - -## 2. The Dashboard - -After logging in, you will land on the **Dashboard**. This is your main hub for all meetings. - -- **Meeting List**: The dashboard displays a table of all meetings in the system. You can see the meeting's **Topic**, **Owner**, **Meeting Date**, **Status**, and the number of **Action Items**. -- **Sorting**: Click on the column headers (e.g., "Topic", "Meeting Date") to sort the list. -- **Filtering and Searching**: Use the search boxes at the top to find meetings by topic or owner, or filter the list by status. -- **Create a New Meeting**: Click the "New Meeting" button to open a dialog where you can enter a topic and date for a new meeting. Upon creation, you will be taken directly to the Meeting Detail page. -- **View Details**: Click the "View Details" button on any meeting row to navigate to its detail page. -- **Delete a Meeting**: If you are the meeting's creator or an administrator, you will see a delete icon (trash can) to permanently remove the meeting and all its associated data. - -## 3. Meeting Detail Page - -This page is where you'll do most of your work. It's divided into three main sections: Transcript, AI Tools, and Action Items. - -### 3.1. Transcript - -- **View**: This section shows the full transcript of the meeting. -- **Edit**: If you are the meeting owner or an admin, you can click "Edit Transcript" to add, paste, or modify the text content. Click "Save Transcript" to save your changes. - -### 3.2. AI Tools - -This section allows you to leverage AI to process your transcript. - -- **Generate Summary**: - 1. Ensure a transcript has been added. - 2. Click the **"Generate Summary"** button. - 3. A "Generating..." message will appear. The process may take some time depending on the length of the text. - 4. Once complete, a concise summary will appear in the "Summary" box. -- **Edit Summary**: You can also manually edit the generated summary by clicking the "Edit Summary" button. -- **Preview Action Items**: - 1. 
After a summary or transcript is available, click the **"Preview Action Items"** button. - 2. The AI will analyze the text and display a list of suggested action items in a table. - 3. Review the items. If they are accurate, click **"Save All to List"** to add them to the official "Action Items" list below. - -### 3.3. Action Items - -This is the final list of tasks and to-dos from the meeting. - -- **Add Manually**: Click the "Add Manually" button to open a form where you can create a new action item, assign an owner, and set a due date. -- **Edit an Item**: If you are an Admin, the Meeting Owner, or the assigned owner of an action item, an edit icon (pencil) will appear. Click it to modify the item's details in-line. Click the save icon to confirm. -- **Delete an Item**: If you are an Admin or the Meeting Owner, a delete icon (trash can) will appear, allowing you to remove the action item. -- **Attachments**: You can upload a file attachment when creating or editing an action item. A download icon will appear if an attachment exists. - -## 4. Processing Tools Page - -Accessible from the main navigation, this page provides standalone utilities for file processing. - -1. **Extract Audio**: Upload a video file (e.g., MP4) to extract its audio track into a WAV file, which you can then download. -2. **Transcribe Audio**: Upload an audio file (e.g., WAV, MP3) to generate a text transcript. You can copy the text or download it as a `.txt` file. -3. **Translate Text**: Paste text or upload a `.txt` file, select a target language, and the tool will provide a translation. diff --git a/__pycache__/action_item_routes.cpython-312.pyc b/__pycache__/action_item_routes.cpython-312.pyc deleted file mode 100644 index 7d092ad..0000000 Binary files a/__pycache__/action_item_routes.cpython-312.pyc and /dev/null differ diff --git a/__pycache__/ai_routes.cpython-312.pyc b/__pycache__/ai_routes.cpython-312.pyc deleted file mode 100644 index d7ed3c8..0000000 Binary files a/__pycache__/ai_routes.cpython-312.pyc and /dev/null differ diff --git a/__pycache__/api_routes.cpython-312.pyc b/__pycache__/api_routes.cpython-312.pyc deleted file mode 100644 index 2e8c734..0000000 Binary files a/__pycache__/api_routes.cpython-312.pyc and /dev/null differ diff --git a/__pycache__/app.cpython-312.pyc b/__pycache__/app.cpython-312.pyc deleted file mode 100644 index f92d23f..0000000 Binary files a/__pycache__/app.cpython-312.pyc and /dev/null differ diff --git a/__pycache__/celery_app.cpython-312.pyc b/__pycache__/celery_app.cpython-312.pyc deleted file mode 100644 index a54ff01..0000000 Binary files a/__pycache__/celery_app.cpython-312.pyc and /dev/null differ diff --git a/__pycache__/celery_worker.cpython-312.pyc b/__pycache__/celery_worker.cpython-312.pyc deleted file mode 100644 index 3e78be8..0000000 Binary files a/__pycache__/celery_worker.cpython-312.pyc and /dev/null differ diff --git a/__pycache__/models.cpython-312.pyc b/__pycache__/models.cpython-312.pyc deleted file mode 100644 index 75ffd33..0000000 Binary files a/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/__pycache__/tasks.cpython-312.pyc b/__pycache__/tasks.cpython-312.pyc deleted file mode 100644 index 9c6deca..0000000 Binary files a/__pycache__/tasks.cpython-312.pyc and /dev/null differ diff --git a/action_item_routes.py b/action_item_routes.py index 533a0b9..005c575 100644 --- a/action_item_routes.py +++ b/action_item_routes.py @@ -25,7 +25,7 @@ def _resolve_owner_id(owner_val: str | None): return user.id if user else None 
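
The hunk context above shows only the signature and final return of `_resolve_owner_id`, the helper that maps an owner name in the request payload to a `User` id. A minimal sketch of such a resolver, assuming lookups by `username` and then `display_name` (the actual fields used are not visible in this diff):

```python
from models import User

def _resolve_owner_id(owner_val: str | None):
    """Map a username or display name from the payload to a User id, or None."""
    if not owner_val:
        return None
    owner_val = owner_val.strip()
    # Assumed lookup order: exact username first, then display_name.
    user = (User.query.filter_by(username=owner_val).first()
            or User.query.filter_by(display_name=owner_val).first())
    return user.id if user else None
```
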
@action_bp.post("/action-items") -@jwt_required() +# @jwt_required() # 已禁用認證 def create_action_item(): """ 建立單筆代辦(會議詳情頁用) @@ -67,7 +67,7 @@ def create_action_item(): return jsonify({"error": f"create failed: {e}"}), 400 @action_bp.post("/meetings//action-items/batch") -@jwt_required() +# @jwt_required() # 已禁用認證 def batch_create_action_items(meeting_id: int): """ 批次建立代辦(AI 預覽 → 一鍵儲存) diff --git a/ai_routes.py b/ai_routes.py index 792f060..1ff6146 100644 --- a/ai_routes.py +++ b/ai_routes.py @@ -6,7 +6,7 @@ from services.dify_client import translate_text as _translate_text, summarize_te ai_bp = Blueprint("ai_bp", __name__, url_prefix="/api") @ai_bp.post("/translate/text") -@jwt_required() +# @jwt_required() # 已禁用認證 def translate_text_api(): data = request.get_json(force=True) or {} text = (data.get("text") or "").strip() @@ -18,7 +18,7 @@ def translate_text_api(): return jsonify({"translated": translated}) @ai_bp.post("/summarize/text") -@jwt_required() +# @jwt_required() # 已禁用認證 def summarize_text_api(): data = request.get_json(force=True) or {} text = (data.get("text") or "").strip() diff --git a/api_routes.py b/api_routes.py index 95cd975..dbbfba6 100644 --- a/api_routes.py +++ b/api_routes.py @@ -7,9 +7,9 @@ from datetime import datetime from models import db, User, Meeting, ActionItem from tasks import ( - celery, - extract_audio_task, - transcribe_audio_task, + celery, + extract_audio_task, + transcribe_audio_task, translate_text_task, summarize_text_task, preview_action_items_task @@ -32,40 +32,6 @@ def save_uploaded_file(upload_folder, file_key='file'): return file_path, None return None, (jsonify({'error': 'Unknown file error'}), 500) -# --- User Authentication & Admin Routes --- -@api_bp.route('/login', methods=['POST']) -def login(): - data = request.get_json() - user = User.query.filter_by(username=data.get('username')).first() - if user and user.check_password(data.get('password')): - access_token = create_access_token(identity=str(user.id), additional_claims={'role': user.role, 'username': user.username}) - return jsonify(access_token=access_token) - return jsonify({"msg": "Bad username or password"}), 401 - -@api_bp.route('/register', methods=['POST']) -def register(): - """Public endpoint for new user registration.""" - data = request.get_json() - username = data.get('username') - password = data.get('password') - - if not username or not password: - return jsonify({"error": "Username and password are required"}), 400 - - if User.query.filter_by(username=username).first(): - return jsonify({"error": "Username already exists"}), 409 # HTTP 409 Conflict - - try: - new_user = User(username=username, role='user') # Default role is 'user' - new_user.set_password(password) - db.session.add(new_user) - db.session.commit() - return jsonify({"message": "User created successfully"}), 201 # HTTP 201 Created - except Exception as e: - db.session.rollback() - current_app.logger.error(f"Error creating user: {e}") - return jsonify({"error": "An internal error occurred"}), 500 - @api_bp.route('/admin/users', methods=['GET']) @jwt_required() def get_all_users(): @@ -77,36 +43,15 @@ def get_all_users(): @api_bp.route('/users', methods=['GET']) @jwt_required() def get_all_users_for_dropdown(): - """A public endpoint for all logged-in users to fetch a list of users for UI selectors.""" users = User.query.all() return jsonify([user.to_dict() for user in users]) -@api_bp.route('/admin/users', methods=['POST']) -@jwt_required() -def create_user(): - if get_jwt().get('role') != 'admin': - return 
jsonify({"msg": "Administration rights required"}), 403 - data = request.get_json() - username = data.get('username') - password = data.get('password') - role = data.get('role', 'user') - if not username or not password: - return jsonify({"error": "Username and password are required"}), 400 - if User.query.filter_by(username=username).first(): - return jsonify({"error": "Username already exists"}), 409 - - new_user = User(username=username, role=role) - new_user.set_password(password) - db.session.add(new_user) - db.session.commit() - return jsonify(new_user.to_dict()), 201 - @api_bp.route('/admin/users/', methods=['DELETE']) @jwt_required() def delete_user(user_id): if get_jwt().get('role') != 'admin': return jsonify({"msg": "Administration rights required"}), 403 - + # Prevent admin from deleting themselves if str(user_id) == get_jwt_identity(): return jsonify({"error": "Admin users cannot delete their own account"}), 400 @@ -116,44 +61,19 @@ def delete_user(user_id): return jsonify({"error": "User not found"}), 404 try: - # Disassociate meetings created by this user + # Disassociate meetings created by this user (set to None instead of deleting meetings) Meeting.query.filter_by(created_by_id=user_id).update({"created_by_id": None}) - + # Disassociate action items owned by this user ActionItem.query.filter_by(owner_id=user_id).update({"owner_id": None}) db.session.delete(user_to_delete) db.session.commit() - return jsonify({"msg": f"User {user_to_delete.username} has been deleted."}), 200 + return jsonify({"msg": f"User {user_to_delete.display_name or user_to_delete.username} has been deleted."}), 200 except Exception as e: db.session.rollback() return jsonify({"error": f"An error occurred: {str(e)}"}), 500 -@api_bp.route('/admin/users//password', methods=['PUT']) -@jwt_required() -def update_user_password(user_id): - if get_jwt().get('role') != 'admin': - return jsonify({"msg": "Administration rights required"}), 403 - - user_to_update = User.query.get(user_id) - if not user_to_update: - return jsonify({"error": "User not found"}), 404 - - data = request.get_json() - password = data.get('password') - if not password: - return jsonify({"error": "Password is required"}), 400 - - try: - user_to_update.set_password(password) - db.session.commit() - return jsonify({"msg": f"Password for user {user_to_update.username} has been updated."}), 200 - except Exception as e: - db.session.rollback() - current_app.logger.error(f"Error updating password for user {user_id}: {e}") - return jsonify({"error": "An internal error occurred while updating the password"}), 500 - -# --- Meeting Management Routes --- @api_bp.route('/meetings', methods=['GET', 'POST']) @jwt_required() def handle_meetings(): @@ -166,11 +86,11 @@ def handle_meetings(): try: meeting_date = datetime.fromisoformat(meeting_date_str).date() new_meeting = Meeting( - topic=topic, - meeting_date=meeting_date, + topic=topic, + meeting_date=meeting_date, created_by_id=get_jwt_identity(), - created_at=datetime.utcnow(), # Explicitly set creation time in UTC - status='In Progress' # Set default status to 'In Progress' + created_at=datetime.utcnow(), + status='In Progress' ) db.session.add(new_meeting) db.session.commit() @@ -179,7 +99,7 @@ def handle_meetings(): db.session.rollback() current_app.logger.error(f"Failed to create meeting: {e}") return jsonify({'error': 'Failed to create meeting due to a database error.'}), 500 - + meetings = Meeting.query.order_by(Meeting.meeting_date.desc()).all() return jsonify([meeting.to_dict() for meeting in 
meetings]) @@ -187,16 +107,15 @@ def handle_meetings(): @jwt_required() def handle_meeting_detail(meeting_id): meeting = Meeting.query.get_or_404(meeting_id) - + if request.method == 'PUT': data = request.get_json() - # Only update fields that are present in the request if 'topic' in data: meeting.topic = data.get('topic') if 'status' in data: - # Security check: only admin or meeting creator can change the status current_user_id = get_jwt_identity() - is_admin = get_jwt().get('role') == 'admin' + current_user_role = get_jwt().get('role') + is_admin = current_user_role == 'admin' if not is_admin and str(meeting.created_by_id) != str(current_user_id): return jsonify({"msg": "Only the meeting creator or an admin can change the status."}), 403 meeting.status = data.get('status') @@ -206,23 +125,22 @@ def handle_meeting_detail(meeting_id): meeting.summary = data.get('summary') if data.get('meeting_date'): meeting.meeting_date = datetime.fromisoformat(data['meeting_date']).date() - + db.session.commit() - # Refresh the object to avoid session state issues before serialization db.session.refresh(meeting) return jsonify(meeting.to_dict()) if request.method == 'DELETE': current_user_id = get_jwt_identity() - is_admin = get_jwt().get('role') == 'admin' - + current_user_role = get_jwt().get('role') + is_admin = current_user_role == 'admin' + if not is_admin and str(meeting.created_by_id) != str(current_user_id): return jsonify({"msg": "Only the meeting creator or an admin can delete this meeting."}), 403 db.session.delete(meeting) db.session.commit() return jsonify({"msg": "Meeting and associated action items deleted"}), 200 - - # GET request + return jsonify(meeting.to_dict()) @api_bp.route('/meetings//summarize', methods=['POST']) @@ -238,14 +156,13 @@ def summarize_meeting(meeting_id): @jwt_required() def preview_actions(meeting_id): meeting = Meeting.query.get_or_404(meeting_id) - text_content = meeting.transcript # Always use the full transcript + text_content = meeting.transcript if not text_content: return jsonify({'error': 'Meeting has no transcript to analyze.'}), 400 - task = preview_action_items_task.delay(text_content) return jsonify({'task_id': task.id, 'status_url': f'/status/{task.id}'}), 202 -# --- Independent Tool Routes --- +# --- Tool Routes --- @api_bp.route('/tools/extract_audio', methods=['POST']) @jwt_required() def handle_extract_audio(): @@ -260,7 +177,6 @@ def handle_extract_audio(): def handle_transcribe_audio(): input_path, error = save_uploaded_file(current_app.config['UPLOAD_FOLDER']) if error: return error - # The 'language' parameter is no longer needed for the Dify-based task. 
task = transcribe_audio_task.delay(input_path) return jsonify({'task_id': task.id, 'status_url': f'/status/{task.id}'}), 202 @@ -275,74 +191,7 @@ def handle_translate_text(): task = translate_text_task.delay(text_content, target_language) return jsonify({'task_id': task.id, 'status_url': f'/status/{task.id}'}), 202 -# --- Action Item & Task Status Routes (largely unchanged) --- -@api_bp.route('/meetings//action_items', methods=['GET']) -@jwt_required() -def get_action_items_for_meeting(meeting_id): - action_items = ActionItem.query.filter_by(meeting_id=meeting_id).all() - return jsonify([item.to_dict() for item in action_items]) - -@api_bp.route('/action_items/', methods=['PUT', 'DELETE']) -@jwt_required() -def handle_action_item(item_id): - item = ActionItem.query.get_or_404(item_id) - current_user_id = get_jwt_identity() - current_user_role = get_jwt().get('role') - meeting_owner_id = str(item.meeting.created_by_id) - - is_admin = current_user_role == 'admin' - is_meeting_owner = str(current_user_id) == meeting_owner_id - is_action_owner = str(current_user_id) == str(item.owner_id) - - if request.method == 'PUT': - # Edit Permission: Admin, Meeting Owner, or Action Item Owner - if not (is_admin or is_meeting_owner or is_action_owner): - return jsonify({"msg": "You do not have permission to edit this item."}), 403 - - data = request.get_json() - item.item = data.get('item', item.item) - item.action = data.get('action', item.action) - item.status = data.get('status', item.status) - # Handle owner_id, allowing it to be set to null - if 'owner_id' in data: - item.owner_id = data.get('owner_id') if data.get('owner_id') else None - if data.get('due_date'): - item.due_date = datetime.fromisoformat(data['due_date']).date() if data['due_date'] else None - db.session.commit() - db.session.refresh(item) - return jsonify(item.to_dict()) - - elif request.method == 'DELETE': - # Delete Permission: Admin or Meeting Owner - if not (is_admin or is_meeting_owner): - return jsonify({"msg": "You do not have permission to delete this item."}), 403 - - db.session.delete(item) - db.session.commit() - return jsonify({'msg': 'Action item deleted'}), 200 - -@api_bp.route('/action_items//upload', methods=['POST']) -@jwt_required() -def upload_action_item_attachment(item_id): - item = ActionItem.query.get_or_404(item_id) - - # Basic permission check: only meeting creator or action item owner can upload - meeting_creator_id = item.meeting.created_by_id - current_user_id = get_jwt_identity() - - if str(current_user_id) != str(meeting_creator_id) and str(current_user_id) != str(item.owner_id): - return jsonify({"msg": "Permission denied"}), 403 - - file_path, error = save_uploaded_file(current_app.config['UPLOAD_FOLDER']) - if error: - return error - - # TODO: Consider deleting the old file if it exists - item.attachment_path = os.path.basename(file_path) - db.session.commit() - - return jsonify({'attachment_path': item.attachment_path}), 200 - +# --- Task Status Routes --- @api_bp.route('/status/') @jwt_required() def get_task_status(task_id): @@ -356,9 +205,15 @@ def get_task_status(task_id): @jwt_required() def stop_task(task_id): celery.control.revoke(task_id, terminate=True) - return jsonify({'status': 'revoked'}), 200 + return jsonify({'message': f'Task {task_id} has been stopped.'}), 200 @api_bp.route('/download/') @jwt_required() def download_file(filename): return send_from_directory(current_app.config['UPLOAD_FOLDER'], filename, as_attachment=True) + +@api_bp.route('/meetings//action_items', methods=['GET']) 
+@jwt_required() +def get_action_items_for_meeting(meeting_id): + action_items = ActionItem.query.filter_by(meeting_id=meeting_id).all() + return jsonify([item.to_dict() for item in action_items]) \ No newline at end of file diff --git a/app.py b/app.py index 3d2d2f0..c75ebf9 100644 --- a/app.py +++ b/app.py @@ -18,17 +18,31 @@ def create_app(): # --- Configuration --- app.config.from_mapping( SQLALCHEMY_DATABASE_URI=os.environ.get('DATABASE_URL'), - SQLALCHEMY_ENGINE_OPTIONS={'pool_recycle': 3600}, + SQLALCHEMY_ENGINE_OPTIONS={ + 'pool_recycle': 3600, + 'pool_size': 20, + 'max_overflow': 30, + 'pool_pre_ping': True, + 'pool_timeout': 30 + }, JWT_SECRET_KEY=os.environ.get('JWT_SECRET_KEY'), SQLALCHEMY_TRACK_MODIFICATIONS=False, - JWT_ACCESS_TOKEN_EXPIRES=timedelta(days=3), + JWT_ACCESS_TOKEN_EXPIRES=timedelta(days=2), CELERY_BROKER_URL=os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0'), CELERY_RESULT_BACKEND=os.environ.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379/0'), DIFY_API_BASE_URL=os.environ.get("DIFY_API_BASE_URL"), DIFY_STT_API_KEY=os.environ.get("DIFY_STT_API_KEY"), DIFY_TRANSLATOR_API_KEY=os.environ.get("DIFY_TRANSLATOR_API_KEY"), DIFY_SUMMARIZER_API_KEY=os.environ.get("DIFY_SUMMARIZER_API_KEY"), - DIFY_ACTION_EXTRACTOR_API_KEY=os.environ.get("DIFY_ACTION_EXTRACTOR_API_KEY") + DIFY_ACTION_EXTRACTOR_API_KEY=os.environ.get("DIFY_ACTION_EXTRACTOR_API_KEY"), + # LDAP Configuration + LDAP_SERVER=os.environ.get('LDAP_SERVER', 'panjit.com.tw'), + LDAP_PORT=int(os.environ.get('LDAP_PORT', 389)), + LDAP_USE_SSL=os.environ.get('LDAP_USE_SSL', 'False').lower() == 'true', + LDAP_BIND_USER_DN=os.environ.get('LDAP_BIND_USER_DN', ''), + LDAP_BIND_USER_PASSWORD=os.environ.get('LDAP_BIND_USER_PASSWORD', ''), + LDAP_SEARCH_BASE=os.environ.get('LDAP_SEARCH_BASE', 'DC=panjit,DC=com,DC=tw'), + LDAP_USER_LOGIN_ATTR=os.environ.get('LDAP_USER_LOGIN_ATTR', 'userPrincipalName') ) project_root = os.path.dirname(os.path.abspath(__file__)) @@ -55,17 +69,37 @@ def create_app(): celery.Task = ContextTask # --- Import and Register Blueprints --- + from auth_routes import auth_bp from api_routes import api_bp from ai_routes import ai_bp from action_item_routes import action_bp + app.register_blueprint(auth_bp) app.register_blueprint(api_bp) app.register_blueprint(ai_bp) app.register_blueprint(action_bp) - # --- Root Route --- + # --- Static File Serving (for Single Container) --- + from flask import send_from_directory, send_file + + # Serve React build files @app.route('/') - def index(): - return "AI Meeting Assistant Backend is running." + def serve_frontend(): + try: + return send_file(os.path.join(app.root_path, 'frontend/dist/index.html')) + except: + return "AI Meeting Assistant is running. Frontend build not found." 
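
Both the Dockerfile HEALTHCHECK and the docker-compose health checks probe `/api/health`, but that route is not visible in this diff. A minimal sketch of what such an endpoint could look like, purely illustrative and not the project's actual implementation:

```python
from flask import Flask, jsonify

def register_health_route(app: Flask) -> None:
    """Hypothetical /api/health endpoint assumed by the container health checks."""
    @app.route('/api/health')
    def health_check():
        return jsonify({'status': 'ok'}), 200
```
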
+ + @app.route('/') + def serve_static(path): + # Try to serve static files first + try: + return send_from_directory(os.path.join(app.root_path, 'frontend/dist'), path) + except: + # If not found, serve index.html for SPA routing + try: + return send_file(os.path.join(app.root_path, 'frontend/dist/index.html')) + except: + return "File not found", 404 # --- CLI Commands --- @app.cli.command("create_admin") diff --git a/auth_routes.py b/auth_routes.py new file mode 100644 index 0000000..ce66764 --- /dev/null +++ b/auth_routes.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Authentication Routes for AI Meeting Assistant with LDAP support + +Author: PANJIT IT Team +Created: 2024-09-18 +""" + +from flask import Blueprint, request, jsonify, current_app +from flask_jwt_extended import ( + create_access_token, create_refresh_token, + jwt_required, get_jwt_identity, get_jwt +) +from datetime import datetime, timedelta +from models import db, User + +auth_bp = Blueprint('auth', __name__, url_prefix='/api') + +@auth_bp.route('/login', methods=['POST']) +def login(): + """LDAP/AD Login with fallback to local authentication""" + try: + data = request.get_json() + username = data.get('username', '').strip() + password = data.get('password', '') + + if not username or not password: + return jsonify({'error': 'Username and password required'}), 400 + + # Try LDAP authentication first + user_info = None + try: + from utils.ldap_utils import authenticate_user + user_info = authenticate_user(username, password) + current_app.logger.info(f"LDAP authentication attempted for: {username}") + except Exception as e: + current_app.logger.error(f"LDAP authentication error: {str(e)}") + # Fall back to local authentication if LDAP fails + pass + + # If LDAP authentication succeeded + if user_info: + ad_account = user_info['ad_account'] + + # Get or create user in local database + user = User.query.filter_by(username=ad_account).first() + if not user: + # Create new user from LDAP info + # AD accounts default to 'user' role, only ymirliu@panjit.com.tw gets admin + is_admin = username.lower() == 'ymirliu@panjit.com.tw' + role = 'admin' if is_admin else 'user' + + # Create display name from LDAP data (username + display_name from AD) + display_name = f"{ad_account} {user_info.get('display_name', '')}" if user_info.get('display_name') else ad_account + + user = User( + username=ad_account, + display_name=display_name, + role=role + ) + # Set a placeholder password (not used for LDAP users) + user.set_password('ldap_user') + db.session.add(user) + db.session.commit() + current_app.logger.info(f"Created new LDAP user: {ad_account} ({display_name}) with role: {role}") + else: + # Update display name if available from LDAP + if user_info.get('display_name') and not user.display_name: + user.display_name = f"{ad_account} {user_info['display_name']}" + + # Update user role if it's ymirliu@panjit.com.tw + if username.lower() == 'ymirliu@panjit.com.tw' and user.role != 'admin': + user.role = 'admin' + current_app.logger.info(f"Updated user {ad_account} to admin role") + + # Update last login time + from datetime import datetime + user.last_login = datetime.utcnow() + db.session.commit() + + # Create tokens + access_token = create_access_token( + identity=str(user.id), + additional_claims={ + 'role': user.role, + 'username': user.username, + 'display_name': user_info.get('display_name', user.username), + 'email': user_info.get('email', ''), + 'auth_method': 'ldap' + } + ) + refresh_token = 
create_refresh_token(identity=str(user.id)) + + current_app.logger.info(f"Successful LDAP login for user: {ad_account}") + + return jsonify({ + 'access_token': access_token, + 'refresh_token': refresh_token, + 'user': { + 'id': user.id, + 'username': user.username, + 'role': user.role, + 'display_name': user_info.get('display_name', user.username), + 'email': user_info.get('email', ''), + 'auth_method': 'ldap' + } + }), 200 + + # Fall back to local database authentication + user = User.query.filter_by(username=username).first() + if user and user.check_password(password): + access_token = create_access_token( + identity=str(user.id), + additional_claims={ + 'role': user.role, + 'username': user.username, + 'auth_method': 'local' + } + ) + refresh_token = create_refresh_token(identity=str(user.id)) + + current_app.logger.info(f"Successful local login for user: {username}") + + return jsonify({ + 'access_token': access_token, + 'refresh_token': refresh_token, + 'user': { + 'id': user.id, + 'username': user.username, + 'role': user.role, + 'auth_method': 'local' + } + }), 200 + + # Authentication failed + current_app.logger.warning(f"Failed login attempt for user: {username}") + return jsonify({'error': 'Invalid credentials'}), 401 + + except Exception as e: + current_app.logger.error(f"Login error: {str(e)}") + return jsonify({'error': 'Authentication failed'}), 500 + +@auth_bp.route('/refresh', methods=['POST']) +@jwt_required(refresh=True) +def refresh(): + """Refresh access token""" + try: + identity = get_jwt_identity() + user = User.query.get(int(identity)) + + if not user: + return jsonify({'error': 'User not found'}), 404 + + access_token = create_access_token( + identity=identity, + additional_claims={ + 'role': user.role, + 'username': user.username + } + ) + + return jsonify({'access_token': access_token}), 200 + + except Exception as e: + current_app.logger.error(f"Token refresh error: {str(e)}") + return jsonify({'error': 'Token refresh failed'}), 500 + +@auth_bp.route('/logout', methods=['POST']) +@jwt_required() +def logout(): + """Logout (client should remove tokens)""" + try: + identity = get_jwt_identity() + current_app.logger.info(f"User logged out: {identity}") + + # In production, you might want to blacklist the token here + # For now, we'll rely on client-side token removal + + return jsonify({'message': 'Logged out successfully'}), 200 + + except Exception as e: + current_app.logger.error(f"Logout error: {str(e)}") + return jsonify({'error': 'Logout failed'}), 500 + +@auth_bp.route('/me', methods=['GET']) +@jwt_required() +def get_current_user(): + """Get current user information""" + try: + identity = get_jwt_identity() + claims = get_jwt() + user = User.query.get(int(identity)) + + if not user: + return jsonify({'error': 'User not found'}), 404 + + return jsonify({ + 'id': user.id, + 'username': user.username, + 'role': user.role, + 'display_name': claims.get('display_name', user.username), + 'email': claims.get('email', ''), + 'auth_method': claims.get('auth_method', 'local') + }), 200 + + except Exception as e: + current_app.logger.error(f"Get current user error: {str(e)}") + return jsonify({'error': 'Failed to get user information'}), 500 + +@auth_bp.route('/validate', methods=['GET']) +@jwt_required() +def validate_token(): + """Validate JWT token""" + try: + identity = get_jwt_identity() + claims = get_jwt() + + return jsonify({ + 'valid': True, + 'identity': identity, + 'username': claims.get('username'), + 'role': claims.get('role') + }), 200 + + except 
Exception as e: + current_app.logger.error(f"Token validation error: {str(e)}") + return jsonify({'valid': False}), 401 + +@auth_bp.route('/ldap-test', methods=['GET']) +@jwt_required() +def test_ldap(): + """Test LDAP connection (admin only)""" + try: + claims = get_jwt() + if claims.get('role') != 'admin': + return jsonify({'error': 'Admin access required'}), 403 + + from utils.ldap_utils import test_ldap_connection + result = test_ldap_connection() + + return jsonify({ + 'ldap_connection': 'success' if result else 'failed', + 'timestamp': datetime.utcnow().isoformat() + }), 200 + + except Exception as e: + current_app.logger.error(f"LDAP test error: {str(e)}") + return jsonify({'error': 'LDAP test failed'}), 500 \ No newline at end of file diff --git a/celery_worker.py b/celery_worker.py index 747359b..bc27b94 100644 --- a/celery_worker.py +++ b/celery_worker.py @@ -3,6 +3,13 @@ import eventlet eventlet.monkey_patch() +# Import basic modules only +import os +import sys + +# Add current directory to Python path +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + from dotenv import load_dotenv # Load environment variables BEFORE creating the app load_dotenv() diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..c50d36d --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,131 @@ +services: + # Redis for Celery broker and caching + redis: + image: panjit-ai-meeting-assistant:redis + build: + context: . + dockerfile: Dockerfile.redis + restart: unless-stopped + volumes: + - redis_data:/data + command: redis-server --appendonly yes --maxmemory 512mb --maxmemory-policy allkeys-lru + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 30s + timeout: 10s + retries: 3 + + # Main application (Backend + Frontend) + ai-meeting-app: + image: panjit-ai-meeting-assistant:main + build: + context: . 
+ dockerfile: Dockerfile + restart: unless-stopped + # No external port; only Nginx exposes ports + environment: + - DATABASE_URL=mysql+pymysql://A060:WLeSCi0yhtc7@mysql.theaken.com:33306/db_A060 + - JWT_SECRET_KEY=your-super-secret-key-that-no-one-should-know + - CELERY_BROKER_URL=redis://redis:6379/0 + - CELERY_RESULT_BACKEND=redis://redis:6379/0 + - DIFY_API_BASE_URL=https://dify.theaken.com/v1 + - DIFY_STT_API_KEY=app-xQeSipaQecs0cuKeLvYDaRsu + - DIFY_TRANSLATOR_API_KEY=app-YOPrF2ro5fshzMkCZviIuUJd + - DIFY_SUMMARIZER_API_KEY=app-oFptWFRlSgvwhJ8DzZKN08a0 + - DIFY_ACTION_EXTRACTOR_API_KEY=app-UHU5IrVcwE0nVvgzubpGRqym + - FLASK_RUN_PORT=12015 + # LDAP Configuration + - LDAP_SERVER=panjit.com.tw + - LDAP_PORT=389 + - LDAP_USE_SSL=False + - LDAP_BIND_USER_DN=CN=LdapBind,CN=Users,DC=PANJIT,DC=COM,DC=TW + - LDAP_BIND_USER_PASSWORD=panjit2481 + - LDAP_SEARCH_BASE=OU=PANJIT,DC=panjit,DC=com,DC=tw + - LDAP_USER_LOGIN_ATTR=userPrincipalName + volumes: + - ./uploads:/app/uploads + depends_on: + - redis + deploy: + resources: + limits: + memory: 2G + cpus: '1.5' + reservations: + memory: 1G + cpus: '0.8' + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:12015/api/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + networks: + - ai-meeting-network + + # Celery worker for AI processing + celery-worker: + image: panjit-ai-meeting-assistant:main + pull_policy: never + restart: unless-stopped + command: celery -A celery_worker.celery worker --loglevel=info --concurrency=4 -Q default,ai_tasks,celery --pool=eventlet + environment: + - DATABASE_URL=mysql+pymysql://A060:WLeSCi0yhtc7@mysql.theaken.com:33306/db_A060 + - JWT_SECRET_KEY=your-super-secret-key-that-no-one-should-know + - CELERY_BROKER_URL=redis://redis:6379/0 + - CELERY_RESULT_BACKEND=redis://redis:6379/0 + - DIFY_API_BASE_URL=https://dify.theaken.com/v1 + - DIFY_STT_API_KEY=app-xQeSipaQecs0cuKeLvYDaRsu + - DIFY_TRANSLATOR_API_KEY=app-YOPrF2ro5fshzMkCZviIuUJd + - DIFY_SUMMARIZER_API_KEY=app-oFptWFRlSgvwhJ8DzZKN08a0 + - DIFY_ACTION_EXTRACTOR_API_KEY=app-UHU5IrVcwE0nVvgzubpGRqym + volumes: + - ./uploads:/app/uploads + depends_on: + - redis + deploy: + resources: + limits: + memory: 2G + reservations: + memory: 1G + networks: + - ai-meeting-network + + # Celery Flower for monitoring + celery-flower: + image: panjit-ai-meeting-assistant:main + pull_policy: never + restart: unless-stopped + command: celery -A celery_worker.celery flower --broker=redis://redis:6379/0 --port=5555 + ports: + - "5555:5555" + environment: + - CELERY_BROKER_URL=redis://redis:6379/0 + - CELERY_RESULT_BACKEND=redis://redis:6379/0 + depends_on: + - redis + networks: + - ai-meeting-network + + # Nginx reverse proxy + nginx: + image: panjit-ai-meeting-assistant:nginx + build: + context: ./nginx + dockerfile: Dockerfile + container_name: ai-meeting-nginx + depends_on: + - ai-meeting-app + ports: + - "12015:12015" + restart: unless-stopped + networks: + - ai-meeting-network + +volumes: + redis_data: + +networks: + ai-meeting-network: + driver: bridge \ No newline at end of file diff --git a/frontend/.dockerignore b/frontend/.dockerignore new file mode 100644 index 0000000..64018fa --- /dev/null +++ b/frontend/.dockerignore @@ -0,0 +1,39 @@ +# Dependencies +node_modules +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Production build +dist +build + +# Environment files +.env.local +.env.development.local +.env.test.local +.env.production.local + +# IDE +.vscode +.idea +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +# Git +.git +.gitignore 
+ +# Logs +logs +*.log + +# Coverage +coverage + +# Misc +.npmrc \ No newline at end of file diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index 50998aa..6955448 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -28,6 +28,7 @@ const PrivateRoute = () => { ); } + // 需要認證才能進入應用 return user ? : ; }; diff --git a/frontend/src/contexts/AuthContext.jsx b/frontend/src/contexts/AuthContext.jsx index 3026768..7422de7 100644 --- a/frontend/src/contexts/AuthContext.jsx +++ b/frontend/src/contexts/AuthContext.jsx @@ -14,50 +14,50 @@ const setAuthToken = token => { export const AuthProvider = ({ children }) => { const [user, setUser] = useState(null); - const [token, setToken] = useState(() => localStorage.getItem('token')); + const [token, setToken] = useState(localStorage.getItem('token')); const [loading, setLoading] = useState(true); useEffect(() => { - if (token) { - try { - const decoded = jwtDecode(token); - const currentTime = Date.now() / 1000; - if (decoded.exp < currentTime) { - console.log("Token expired, logging out."); - logout(); - } else { - setUser({ - id: decoded.sub, - role: decoded.role, - username: decoded.username -}); - setAuthToken(token); + // Check if token exists and validate it + const validateToken = async () => { + const savedToken = localStorage.getItem('token'); + if (savedToken) { + try { + setAuthToken(savedToken); + const response = await axios.get('http://localhost:5000/api/me'); + setUser(response.data); + setToken(savedToken); + } catch (error) { + console.error('Token validation failed:', error); + localStorage.removeItem('token'); + setToken(null); + setUser(null); + setAuthToken(null); } - } catch (error) { - console.error("Invalid token on initial load"); - logout(); } - } - setLoading(false); - }, [token]); + setLoading(false); + }; + + validateToken(); + }, []); const login = async (username, password) => { try { - const response = await axios.post('/api/login', { username, password }); - const { access_token } = response.data; + const response = await axios.post('http://localhost:5000/api/login', { username, password }); + const { access_token, user: userData } = response.data; + localStorage.setItem('token', access_token); setToken(access_token); - const decoded = jwtDecode(access_token); - setUser({ - id: decoded.sub, - role: decoded.role, - username: decoded.username -}); + setUser(userData); setAuthToken(access_token); - return { success: true }; + + return { success: true, user: userData }; } catch (error) { - console.error('Login failed:', error.response?.data?.msg || error.message); - return { success: false, message: error.response?.data?.msg || 'Login failed' }; + console.error('Login failed:', error.response?.data?.error || error.message); + return { + success: false, + message: error.response?.data?.error || 'Login failed' + }; } }; @@ -78,7 +78,13 @@ export const AuthProvider = ({ children }) => { return ( - {!loading && children} + {loading ? ( +
+
Loading...
+
+ ) : ( + children + )}
); }; diff --git a/frontend/src/pages/DashboardPage.jsx b/frontend/src/pages/DashboardPage.jsx index 12034ca..3624306 100644 --- a/frontend/src/pages/DashboardPage.jsx +++ b/frontend/src/pages/DashboardPage.jsx @@ -45,9 +45,10 @@ const DashboardPage = () => { const fetchMeetings = useCallback(async () => { try { const data = await getMeetings(); - setMeetings(data); + setMeetings(Array.isArray(data) ? data : []); } catch (err) { setError('Could not fetch meetings.'); + setMeetings([]); // 確保設置為空陣列 } finally { setLoading(false); } @@ -101,11 +102,14 @@ const DashboardPage = () => { }; const uniqueStatuses = useMemo(() => { + if (!Array.isArray(meetings)) return []; const statuses = new Set(meetings.map(m => m.status)); return Array.from(statuses); }, [meetings]); const filteredAndSortedMeetings = useMemo(() => { + if (!Array.isArray(meetings)) return []; + let filtered = meetings.filter(meeting => { const topicMatch = meeting.topic.toLowerCase().includes(topicSearch.toLowerCase()); const ownerMatch = meeting.owner_name ? meeting.owner_name.toLowerCase().includes(ownerSearch.toLowerCase()) : ownerSearch === ''; diff --git a/frontend/src/pages/LoginPage.jsx b/frontend/src/pages/LoginPage.jsx index a00d87d..dc7d077 100644 --- a/frontend/src/pages/LoginPage.jsx +++ b/frontend/src/pages/LoginPage.jsx @@ -10,11 +10,8 @@ import { register } from '../services/api'; const LoginPage = () => { const [username, setUsername] = useState(''); const [password, setPassword] = useState(''); - const [confirmPassword, setConfirmPassword] = useState(''); const [error, setError] = useState(''); - const [success, setSuccess] = useState(''); const [loading, setLoading] = useState(false); - const [isRegister, setIsRegister] = useState(false); const { login } = useAuth(); const navigate = useNavigate(); @@ -25,7 +22,6 @@ const LoginPage = () => { const handleLogin = async (e) => { e.preventDefault(); setError(''); - setSuccess(''); setLoading(true); const { success, message } = await login(username, password); if (success) { @@ -36,27 +32,6 @@ const LoginPage = () => { setLoading(false); }; - const handleRegister = async (e) => { - e.preventDefault(); - if (password !== confirmPassword) { - setError('Passwords do not match.'); - return; - } - setError(''); - setSuccess(''); - setLoading(true); - try { - await register(username, password); - setSuccess('Account created successfully! Please log in.'); - setIsRegister(false); // Switch back to login view - setUsername(''); // Clear fields - setPassword(''); - setConfirmPassword(''); - } catch (err) { - setError(err.response?.data?.error || 'Failed to create account.'); - } - setLoading(false); - }; return ( @@ -81,15 +56,15 @@ const LoginPage = () => { AI Meeting Assistant - {isRegister ? 'Create Account' : 'Sign In'} + 使用 AD 帳號登入 - + { required fullWidth name="password" - label="Password" + label="AD 密碼" type="password" id="password" - autoComplete={isRegister ? "new-password" : "current-password"} + autoComplete="current-password" value={password} onChange={(e) => setPassword(e.target.value)} /> - {isRegister && ( - setConfirmPassword(e.target.value)} - /> - )} {error && {error}} - {success && {success}} - - - { - setIsRegister(!isRegister); - setError(''); - setSuccess(''); - }}> - {isRegister ? "Already have an account? Sign In" : "Don't have an account? 
Sign Up"} - - - diff --git a/frontend/vite.config.js b/frontend/vite.config.js index d8dc48f..d801a16 100644 --- a/frontend/vite.config.js +++ b/frontend/vite.config.js @@ -7,7 +7,7 @@ export default defineConfig({ server: { proxy: { '/api': { - target: 'http://127.0.0.1:12000', + target: 'http://backend:5000', changeOrigin: true, }, }, diff --git a/migrations/__pycache__/env.cpython-312.pyc b/migrations/__pycache__/env.cpython-312.pyc deleted file mode 100644 index 5f89e0a..0000000 Binary files a/migrations/__pycache__/env.cpython-312.pyc and /dev/null differ diff --git a/migrations/versions/__pycache__/3b11caf37983_initial_migration_with_users_meetings_.cpython-312.pyc b/migrations/versions/__pycache__/3b11caf37983_initial_migration_with_users_meetings_.cpython-312.pyc deleted file mode 100644 index 6c7dd22..0000000 Binary files a/migrations/versions/__pycache__/3b11caf37983_initial_migration_with_users_meetings_.cpython-312.pyc and /dev/null differ diff --git a/migrations/versions/__pycache__/919aff0aa44b_implement_user_centric_status_and_.cpython-312.pyc b/migrations/versions/__pycache__/919aff0aa44b_implement_user_centric_status_and_.cpython-312.pyc deleted file mode 100644 index 895ddcc..0000000 Binary files a/migrations/versions/__pycache__/919aff0aa44b_implement_user_centric_status_and_.cpython-312.pyc and /dev/null differ diff --git a/migrations/versions/__pycache__/ac069534da31_add_status_and_result_fields_to_meeting.cpython-312.pyc b/migrations/versions/__pycache__/ac069534da31_add_status_and_result_fields_to_meeting.cpython-312.pyc deleted file mode 100644 index 3154008..0000000 Binary files a/migrations/versions/__pycache__/ac069534da31_add_status_and_result_fields_to_meeting.cpython-312.pyc and /dev/null differ diff --git a/models.py b/models.py index 9f94829..42e4d1b 100644 --- a/models.py +++ b/models.py @@ -9,9 +9,11 @@ class User(db.Model): __tablename__ = 'ms_users' id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True, nullable=False) + display_name = db.Column(db.String(100), nullable=True) # For friendly display name like "ymirliu 劉念萱" password_hash = db.Column(db.String(128), nullable=False) role = db.Column(db.String(20), nullable=False, default='user') # 'user' or 'admin' created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + last_login = db.Column(db.DateTime(timezone=True), nullable=True) # Track last login time def set_password(self, password): self.password_hash = bcrypt.generate_password_hash(password).decode('utf-8') @@ -23,8 +25,10 @@ class User(db.Model): return { 'id': self.id, 'username': self.username, + 'display_name': self.display_name, 'role': self.role, - 'created_at': self.created_at.isoformat() if self.created_at else None + 'created_at': self.created_at.isoformat() if self.created_at else None, + 'last_login': self.last_login.isoformat() if self.last_login else None } class Meeting(db.Model): diff --git a/nginx/Dockerfile b/nginx/Dockerfile new file mode 100644 index 0000000..2762895 --- /dev/null +++ b/nginx/Dockerfile @@ -0,0 +1,10 @@ +FROM nginx:1.25-alpine + +# Copy custom nginx configuration +COPY nginx.conf /etc/nginx/nginx.conf + +# Expose port +EXPOSE 12015 + +# Start nginx +CMD ["nginx", "-g", "daemon off;"] \ No newline at end of file diff --git a/nginx/nginx.conf b/nginx/nginx.conf new file mode 100644 index 0000000..32b42bf --- /dev/null +++ b/nginx/nginx.conf @@ -0,0 +1,76 @@ +user nginx; +worker_processes auto; + +events { + worker_connections 1024; + multi_accept on; +} 
+ +http { + include /etc/nginx/mime.types; + default_type application/octet-stream; + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + types_hash_max_size 4096; + + gzip on; + gzip_comp_level 5; + gzip_min_length 1024; + gzip_proxied any; + gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript image/svg+xml; + + map $http_upgrade $connection_upgrade { + default upgrade; + '' close; + } + + upstream app_backend { + server ai-meeting-app:12015 max_fails=3 fail_timeout=10s; + keepalive 64; + } + + server { + listen 12015; + server_name _; + + # Adjust as needed for uploads (AI audio files can be large) + client_max_body_size 100m; + + # Proxy API requests to Flask/Gunicorn + location /api/ { + proxy_pass http://app_backend; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_read_timeout 300s; # Longer timeout for AI processing + proxy_send_timeout 300s; + proxy_connect_timeout 10s; + proxy_buffering on; + proxy_buffers 32 32k; + proxy_busy_buffers_size 64k; + } + + # All other routes (frontend SPA and static) via backend + location / { + proxy_pass http://app_backend; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_read_timeout 120s; + proxy_send_timeout 120s; + proxy_connect_timeout 5s; + proxy_buffering on; + proxy_buffers 32 32k; + proxy_busy_buffers_size 64k; + } + } +} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 5ff6d25..7cfc0cb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,28 +1,37 @@ +# Core Flask and Web Framework Flask==2.2.5 -celery==5.3.6 -redis==4.5.4 -# For NVIDIA GPU (CUDA 11.8) support, use these lines: -torch --extra-index-url https://download.pytorch.org/whl/cu118 -torchaudio --extra-index-url https://download.pytorch.org/whl/cu118 -# For CPU-only, comment out the two lines above and uncomment the two lines below: -# torch -# torchaudio -openai-whisper -moviepy -opencc-python-reimplemented -ffmpeg-python -python-dotenv gunicorn -demucs -soundfile -gevent # Added for celery on windows +python-dotenv +Flask-CORS -# New dependencies for User Management and Database +# Database and Authentication Flask-SQLAlchemy Flask-Migrate PyMySQL Flask-JWT-Extended Flask-Bcrypt +ldap3 -# Dependency for calling external APIs +# Task Queue and Caching +celery==5.3.6 +redis==4.5.4 +eventlet # Required for Celery worker monkey patching +gevent # Added for celery on windows +flower==2.0.1 # Celery monitoring (separate package since Celery 5.0) + +# Media Processing (Audio/Video) +moviepy +ffmpeg-python +pydub +soundfile + +# Text Processing +opencc-python-reimplemented + +# External API Communication requests + +# Removed AI packages (now using Dify API): +# torch / torchaudio - Not needed for Dify API +# openai-whisper - Replaced by Dify STT service +# demucs - Audio separation not used diff --git a/services/__pycache__/dify_client.cpython-312.pyc b/services/__pycache__/dify_client.cpython-312.pyc deleted file mode 100644 index 2861621..0000000 Binary files a/services/__pycache__/dify_client.cpython-312.pyc and /dev/null 
differ diff --git a/tasks.py b/tasks.py index 7434fc5..9edd048 100644 --- a/tasks.py +++ b/tasks.py @@ -165,21 +165,49 @@ def extract_audio_task(self, input_path, output_path): @celery.task(base=ProgressTask, bind=True) def transcribe_audio_task(self, audio_path): from app import app + import logging + logger = logging.getLogger(__name__) + + logger.error(f"[TRANSCRIBE DEBUG] Starting transcribe task for: {audio_path}") + with app.app_context(): try: + logger.error(f"[TRANSCRIBE DEBUG] Entered app context") self.update_progress(0, 100, "Loading and preparing audio file...") - audio = AudioSegment.from_file(audio_path) + logger.error(f"[TRANSCRIBE DEBUG] Progress updated to 0%") + + logger.error(f"[TRANSCRIBE DEBUG] About to load audio file: {audio_path}") + audio = AudioSegment.from_file(audio_path) + logger.error(f"[TRANSCRIBE DEBUG] Audio loaded successfully, duration: {len(audio)}ms") + + # 1. Split audio by silence (skip for very long audio to avoid timeout) + logger.error(f"[TRANSCRIBE DEBUG] Starting silence detection") + audio_duration_minutes = len(audio) / (1000 * 60) # Convert to minutes + logger.error(f"[TRANSCRIBE DEBUG] Audio duration: {audio_duration_minutes:.2f} minutes") + + if audio_duration_minutes > 10: # Skip silence detection for audio longer than 10 minutes + logger.error(f"[TRANSCRIBE DEBUG] Audio too long ({audio_duration_minutes:.2f} min), skipping silence detection") + self.update_progress(10, 100, f"Audio is {audio_duration_minutes:.1f} minutes long, processing as single chunk...") + chunks = [audio] # Use entire audio as single chunk + else: + self.update_progress(5, 100, "Detecting silence to split audio into chunks...") + try: + chunks = split_on_silence( + audio, + min_silence_len=700, + silence_thresh=-40, + keep_silence=300 + ) + logger.error(f"[TRANSCRIBE DEBUG] Silence detection completed, found {len(chunks)} chunks") + except Exception as e: + logger.error(f"[TRANSCRIBE DEBUG] Error in silence detection: {str(e)}") + chunks = [audio] - # 1. Split audio by silence - self.update_progress(5, 100, "Detecting silence to split audio into chunks...") - chunks = split_on_silence( - audio, - min_silence_len=700, - silence_thresh=-40, - keep_silence=300 - ) if not chunks: # If no silence is detected, treat the whole audio as one chunk + logger.error(f"[TRANSCRIBE DEBUG] No chunks detected, using full audio") chunks = [audio] + else: + logger.error(f"[TRANSCRIBE DEBUG] Using {len(chunks)} chunks") # 2. 
Process chunks and ensure they are within API limits
             final_segments = []
@@ -229,10 +257,13 @@ def transcribe_audio_task(self, audio_path):
             return {'status': 'Success', 'content': full_content, 'result_path': transcript_filename}
 
         except Exception as e:
+            import traceback
+            logger.error(f"[TRANSCRIBE DEBUG] Exception occurred: {type(e).__name__}: {str(e)}")
+            logger.error(f"[TRANSCRIBE DEBUG] Full traceback: {traceback.format_exc()}")
             error_message = f"An error occurred: {str(e)}"
             self.update_state(
                 state='FAILURE',
-                meta={'exc_type': type(e).__name__, 'exc_message': error_message}
+                meta={'exc_type': type(e).__name__, 'exc_message': error_message, 'traceback': traceback.format_exc()}
             )
             return {'status': 'Error', 'error': error_message}
 
diff --git a/utils/__init__.py b/utils/__init__.py
new file mode 100644
index 0000000..ba6aac5
--- /dev/null
+++ b/utils/__init__.py
@@ -0,0 +1 @@
+# Utils package for AI Meeting Assistant
\ No newline at end of file
diff --git a/utils/ldap_utils.py b/utils/ldap_utils.py
new file mode 100644
index 0000000..edbd776
--- /dev/null
+++ b/utils/ldap_utils.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+LDAP Authentication Utilities for AI Meeting Assistant
+
+Author: PANJIT IT Team
+Created: 2024-09-18
+"""
+
+import time
+from ldap3 import Server, Connection, SUBTREE, ALL
+from flask import current_app
+
+def get_logger():
+    """Get application logger"""
+    return current_app.logger
+
+def create_ldap_connection(retries=3):
+    """Create LDAP connection with retry mechanism"""
+    logger = get_logger()
+
+    # LDAP configuration from environment
+    ldap_server = current_app.config.get('LDAP_SERVER', 'panjit.com.tw')
+    ldap_port = current_app.config.get('LDAP_PORT', 389)
+    use_ssl = current_app.config.get('LDAP_USE_SSL', False)
+    bind_dn = current_app.config.get('LDAP_BIND_USER_DN', '')
+    bind_password = current_app.config.get('LDAP_BIND_USER_PASSWORD', '')
+
+    for attempt in range(retries):
+        try:
+            server = Server(
+                ldap_server,
+                port=ldap_port,
+                use_ssl=use_ssl,
+                get_info=ALL
+            )
+
+            conn = Connection(
+                server,
+                user=bind_dn,
+                password=bind_password,
+                auto_bind=True,
+                raise_exceptions=True
+            )
+
+            logger.info("LDAP connection established successfully")
+            return conn
+
+        except Exception as e:
+            logger.error(f"LDAP connection attempt {attempt + 1} failed: {str(e)}")
+            if attempt == retries - 1:
+                raise
+            time.sleep(1)
+
+    return None
+
+def authenticate_user(username, password):
+    """Authenticate user against LDAP/AD"""
+    logger = get_logger()
+
+    conn = None
+    try:
+        conn = create_ldap_connection()
+        if not conn:
+            return None
+
+        # Configuration
+        search_base = current_app.config.get('LDAP_SEARCH_BASE', 'DC=panjit,DC=com,DC=tw')
+        login_attr = current_app.config.get('LDAP_USER_LOGIN_ATTR', 'userPrincipalName')
+
+        # Search for user
+        search_filter = f"(&(objectClass=person)(objectCategory=person)({login_attr}={username}))"
+
+        conn.search(
+            search_base,
+            search_filter,
+            SUBTREE,
+            attributes=['displayName', 'mail', 'sAMAccountName', 'userPrincipalName']
+        )
+
+        if not conn.entries:
+            logger.warning(f"User not found: {username}")
+            return None
+
+        user_entry = conn.entries[0]
+        user_dn = user_entry.entry_dn
+
+        # Try to bind with user credentials
+        try:
+            user_conn = Connection(
+                conn.server,
+                user=user_dn,
+                password=password,
+                auto_bind=True,
+                raise_exceptions=True
+            )
+            user_conn.unbind()
+
+            # Return user info
+            user_info = {
+                'ad_account': str(user_entry.sAMAccountName) if user_entry.sAMAccountName else 
username, + 'display_name': str(user_entry.displayName) if user_entry.displayName else username, + 'email': str(user_entry.mail) if user_entry.mail else '', + 'user_principal_name': str(user_entry.userPrincipalName) if user_entry.userPrincipalName else username, + 'username': username + } + + logger.info(f"User authenticated successfully: {username}") + return user_info + + except Exception as e: + logger.warning(f"Authentication failed for user {username}: {str(e)}") + return None + + except Exception as e: + logger.error(f"LDAP authentication error: {str(e)}") + return None + finally: + if conn: + conn.unbind() + +def test_ldap_connection(): + """Test LDAP connection for health check""" + logger = get_logger() + + try: + conn = create_ldap_connection(retries=1) + if conn: + conn.unbind() + return True + return False + except Exception as e: + logger.error(f"LDAP connection test failed: {str(e)}") + return False \ No newline at end of file
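
The diff above adds utils/ldap_utils.py but not the route that consumes it. As a rough illustration only, here is a minimal sketch of how authenticate_user could feed Flask-JWT-Extended (already listed in requirements.txt); the blueprint name, URL, and claim layout are assumptions, not part of this change set.

```python
# Hypothetical login route -- a sketch, not the repository's actual auth endpoint.
from flask import Blueprint, jsonify, request
from flask_jwt_extended import create_access_token

from utils.ldap_utils import authenticate_user

auth_bp = Blueprint("auth", __name__)  # assumed blueprint name

@auth_bp.route("/api/auth/login", methods=["POST"])
def login():
    data = request.get_json(silent=True) or {}
    username = data.get("username", "")
    password = data.get("password", "")
    if not username or not password:
        return jsonify({"message": "username and password are required"}), 400

    # LDAP/AD bind; returns a profile dict on success, None on failure
    user_info = authenticate_user(username, password)
    if user_info is None:
        return jsonify({"message": "Invalid credentials"}), 401

    # Identity is the AD account; extra claims carry profile fields for the frontend
    token = create_access_token(
        identity=user_info["ad_account"],
        additional_claims={
            "display_name": user_info["display_name"],
            "email": user_info["email"],
        },
    )
    return jsonify({"access_token": token, "user": user_info}), 200
```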
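The tasks.py hunk earlier in this diff notes that chunks must be kept "within API limits", but the enforcement code sits outside the visible context. A hedged sketch of what such a guard can look like for pydub AudioSegment chunks follows; the 60-second cap is an assumed value, not one taken from tasks.py.

```python
# Sketch only: bound chunk length before sending audio to the external STT API.
# MAX_CHUNK_MS is an assumed limit; tasks.py may enforce a different one.
MAX_CHUNK_MS = 60 * 1000

def enforce_chunk_limit(chunks, max_ms=MAX_CHUNK_MS):
    """Split any over-long pydub chunk into fixed-size windows of at most max_ms."""
    bounded = []
    for chunk in chunks:
        if len(chunk) <= max_ms:  # len() of an AudioSegment is its duration in ms
            bounded.append(chunk)
        else:
            # Slicing an AudioSegment by milliseconds yields smaller AudioSegments
            for start in range(0, len(chunk), max_ms):
                bounded.append(chunk[start:start + max_ms])
    return bounded
```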