This commit is contained in:
2026-01-26 18:51:33 +08:00
parent fd15ec296b
commit 0918f7ae5a
7 changed files with 415 additions and 92 deletions

View File

@@ -123,22 +123,19 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
print(f"[ETL Import] Starting import: file_type={file_info['file_type']}, rows={len(df)}")
file_type = file_info['file_type']
imported_count = 0
seen_ids = set() # 追蹤已處理的 ID,避免檔案內重複
records_to_insert = []
imported_count = 0
# 清除該類型的舊資料,避免重複鍵衝突
try:
if file_type == 'dit':
print("[ETL Import] Clearing old DIT records and dependent matches/logs...")
# 先清除與 DIT 相關的審核日誌與比對結果
db.query(ReviewLog).delete()
db.query(MatchResult).delete()
db.query(DitRecord).delete()
elif file_type == 'sample':
print("[ETL Import] Clearing old Sample records and dependent matches/logs...")
# 先清除與 Sample 相關的比對結果 (及其日誌)
# 這裡比較複雜,因為 ReviewLog 是透過 MatchResult 關聯的
# 但既然我們是清空整個類別,直接清空所有 ReviewLog 和對應的 MatchResult 是最安全的
db.query(ReviewLog).delete()
db.query(MatchResult).filter(MatchResult.target_type == TargetType.SAMPLE).delete()
db.query(SampleRecord).delete()
@@ -147,13 +144,16 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
db.query(ReviewLog).delete()
db.query(MatchResult).filter(MatchResult.target_type == TargetType.ORDER).delete()
db.query(OrderRecord).delete()
db.flush() # 使用 flush 而非 commit,保持在同一個事務中
db.flush()
print("[ETL Import] Old data cleared successfully.")
except Exception as e:
db.rollback()
print(f"[ETL Import] Error clearing old data: {traceback.format_exc()}")
raise HTTPException(status_code=500, detail=f"Failed to clear old data: {str(e)}")
print("[ETL Import] Preparing records...")
# 使用 List 收集所有物件,再批次寫入
for idx, row in df.iterrows():
try:
if file_type == 'dit':
@@ -162,17 +162,14 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
customer = clean_value(row.get('customer'))
pn = clean_value(row.get('pn'))
# Skip empty PN as per user request
if not pn:
continue
if not pn: continue
# Deduplicate by OP ID + PN
unique_key = f"{op_id}|{pn}"
if not op_id or unique_key in seen_ids:
continue
seen_ids.add(unique_key)
record = DitRecord(
records_to_insert.append(DitRecord(
op_id=op_id,
op_name=clean_value(row.get('op_name')),
erp_account=erp_account,
@@ -182,61 +179,48 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
eau=int(row.get('eau', 0)) if row.get('eau') and not pd.isna(row.get('eau')) else 0,
stage=clean_value(row.get('stage')),
date=normalize_date(row.get('date'))
)
))
elif file_type == 'sample':
sample_id = clean_value(row.get('sample_id'), f'S{idx}')
oppy_no = clean_value(row.get('oppy_no'), '')
cust_id = clean_value(row.get('cust_id'), '')
# ... other fields
customer = clean_value(row.get('customer'))
pn = clean_value(row.get('pn'))
order_no = clean_value(row.get('order_no'))
# Skip empty PN
if not pn:
continue
# Deduplicate by Sample ID only
# We rely on auto-generated unique IDs if sample_id is missing from Excel mapping
unique_key = sample_id
if not pn: continue
if sample_id in seen_ids:
continue
seen_ids.add(sample_id)
record = SampleRecord(
records_to_insert.append(SampleRecord(
sample_id=sample_id,
order_no=order_no,
oppy_no=oppy_no,
cust_id=cust_id,
order_no=clean_value(row.get('order_no')),
oppy_no=clean_value(row.get('oppy_no'), ''),
cust_id=clean_value(row.get('cust_id'), ''),
customer=customer,
customer_normalized=normalize_customer_name(customer),
pn=sanitize_pn(pn),
qty=int(row.get('qty', 0)) if row.get('qty') and not pd.isna(row.get('qty')) else 0,
date=normalize_date(row.get('date'))
)
))
elif file_type == 'order':
order_id = clean_value(row.get('order_id'), f'O{idx}')
cust_id = clean_value(row.get('cust_id'), '')
customer = clean_value(row.get('customer'))
pn = clean_value(row.get('pn'))
order_no = clean_value(row.get('order_no'))
# Skip empty PN
if not pn:
continue
customer = clean_value(row.get('customer'))
pn = clean_value(row.get('pn'))
if not pn: continue
# Deduplicate by Order No + Order ID (Item No)
# Item No (order_id) is not unique globally, only unique per order usually.
unique_key = f"{order_no}_{order_id}"
if unique_key in seen_ids:
continue
seen_ids.add(unique_key)
record = OrderRecord(
records_to_insert.append(OrderRecord(
order_id=order_id,
order_no=clean_value(row.get('order_no')),
cust_id=cust_id,
order_no=order_no,
cust_id=clean_value(row.get('cust_id'), ''),
customer=customer,
customer_normalized=normalize_customer_name(customer),
pn=sanitize_pn(pn),
@@ -244,20 +228,29 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
status=clean_value(row.get('status'), 'Backlog'),
amount=float(row.get('amount', 0)) if row.get('amount') and not pd.isna(row.get('amount')) else 0,
date=normalize_date(row.get('date'))
)
else:
))
# 小批次處理,避免 list 過大 (雖 7萬筆還好,但習慣上分批)
if len(records_to_insert) >= 5000:
db.bulk_save_objects(records_to_insert)
imported_count += len(records_to_insert)
records_to_insert = []
print(f"[ETL Import] Bulk inserted {imported_count} rows...")
except Exception as e:
# 這裡若單行錯誤其實會導致該 batch 失敗,但 bulk_save 較難單行容錯。
# 為了效能,我們假設資料大致正確,若有錯則會在 parser 階段或這裡跳過
print(f"[ETL Import] Error creating record row {idx}: {e}")
continue
db.add(record)
imported_count += 1
if imported_count % 500 == 0:
print(f"[ETL Import] Processed {imported_count} rows...")
except Exception as e:
print(f"[ETL Import] Error importing row {idx}: {e}")
continue
# Insert remaining
if records_to_insert:
db.bulk_save_objects(records_to_insert)
imported_count += len(records_to_insert)
print(f"[ETL Import] Bulk inserted remaining {len(records_to_insert)} rows.")
try:
print(f"[ETL Import] Committing {imported_count} records...")
print(f"[ETL Import] Committing total {imported_count} records...")
db.commit()
print(f"[ETL Import] Import successful: {imported_count} records.")
except Exception as e:

View File

@@ -18,6 +18,7 @@ class LabKPI(BaseModel):
conversion_rate: float # 轉換比例 (%)
orphan_count: int # 孤兒樣品總數
no_dit_count: int # 未歸因大額樣品數
high_qty_no_order_count: int # 大額無單樣品數
class ConversionRecord(BaseModel):
customer: str
@@ -51,6 +52,15 @@ class NoDitSample(BaseModel):
date: Optional[str]
qty: int
class HighQtyNoOrderSample(BaseModel):
    """Response row for a high-quantity sample (qty >= 1000) with no attributed order.

    Serialized by /lab/high_qty_no_order_samples and consumed by the frontend
    `HighQtyNoOrderSample` TypeScript interface.
    """
    sample_id: str            # sample identifier; producer serializes the DB id via str(s.id)
    customer: str             # raw customer name as stored on the sample record
    pn: str                   # part number
    order_no: Optional[str]   # sample's own order number; None when the record has none
    date: Optional[str]       # "YYYY-MM-DD" when parseable, else raw stored value or "" — see producer
    qty: int                  # requested sample quantity (presumably pcs — matches UI label)
    days_since_sent: int      # whole days from sample date to now; 0 when the date is unknown
def parse_date(date_val) -> Optional[datetime]:
if not date_val:
@@ -59,6 +69,8 @@ def parse_date(date_val) -> Optional[datetime]:
return date_val
if isinstance(date_val, str):
date_str = date_val.strip()
if date_str.endswith(".0"):
date_str = date_str[:-2]
try:
if "T" in date_str:
return datetime.fromisoformat(date_str.replace("Z", "+00:00"))
@@ -138,9 +150,22 @@ def find_matched_orders(s, order_lookup_by_id, order_lookup_by_name, orders_by_c
return unique_candidates
@router.get("/conversions", response_model=List[ConversionRecord])
def get_conversions(db: Session = Depends(get_db)):
samples = db.query(SampleRecord).all()
orders = db.query(OrderRecord).all()
def get_conversions(
start_date: Optional[str] = Query(None),
end_date: Optional[str] = Query(None),
db: Session = Depends(get_db)
):
samples_query = db.query(SampleRecord)
orders_query = db.query(OrderRecord)
if start_date:
samples_query = samples_query.filter(SampleRecord.date >= start_date)
orders_query = orders_query.filter(OrderRecord.date >= start_date)
if end_date:
samples_query = samples_query.filter(SampleRecord.date <= end_date)
samples = samples_query.all()
orders = orders_query.all()
# Build Lookups
order_lookup_by_id = {}
@@ -225,10 +250,11 @@ def get_lab_kpi(
if start_date:
samples_query = samples_query.filter(SampleRecord.date >= start_date)
# Optimization: Only fetch orders that could possibly match these samples (Date >= Start Date)
orders_query = orders_query.filter(OrderRecord.date >= start_date)
if end_date:
samples_query = samples_query.filter(SampleRecord.date <= end_date)
orders_query = orders_query.filter(OrderRecord.date <= end_date)
# Do NOT filter orders by end_date, to capture conversions that happen after the sample window
samples = samples_query.all()
orders = orders_query.all()
@@ -244,18 +270,25 @@ def get_lab_kpi(
norm_cust_name = normalize_customer_name(o.customer)
o_date = parse_date(o.date) or (o.created_at.replace(tzinfo=None) if o.created_at else datetime.max)
# We only need dates for KPI
# Standardized Data Object for compatibility with find_matched_orders
data_obj = {
"clean_pn": clean_pn,
"date": o_date,
"qty": o.qty or 0,
"order_no": o.order_no
}
if clean_cust_id:
key_id = (clean_cust_id, clean_pn)
if key_id not in order_lookup_by_id: order_lookup_by_id[key_id] = []
order_lookup_by_id[key_id].append(o_date)
order_lookup_by_id[key_id].append(data_obj)
key_name = (norm_cust_name, clean_pn)
if key_name not in order_lookup_by_name: order_lookup_by_name[key_name] = []
order_lookup_by_name[key_name].append(o_date)
order_lookup_by_name[key_name].append(data_obj)
if norm_cust_name not in orders_by_cust_name: orders_by_cust_name[norm_cust_name] = []
orders_by_cust_name[norm_cust_name].append({ "clean_pn": clean_pn, "date": o_date })
orders_by_cust_name[norm_cust_name].append(data_obj)
# Group Samples by (CustName, PN) for Project Count
unique_sample_groups = {}
@@ -286,32 +319,34 @@ def get_lab_kpi(
for key, data in unique_sample_groups.items():
norm_cust_name, group_clean_pn = key
matched_dates = []
matched_items = []
# 1. Try ID Match
for cid in data["cust_ids"]:
if (cid, group_clean_pn) in order_lookup_by_id:
matched_dates.extend(order_lookup_by_id[(cid, group_clean_pn)])
matched_items.extend(order_lookup_by_id[(cid, group_clean_pn)])
# 2. Try Name Match
if not matched_dates:
if not matched_items:
if key in order_lookup_by_name:
matched_dates.extend(order_lookup_by_name[key])
matched_items.extend(order_lookup_by_name[key])
# 3. Try Prefix Match (Using first available PN in group vs Orders of same customer)
if not matched_dates and norm_cust_name in orders_by_cust_name:
if not matched_items and norm_cust_name in orders_by_cust_name:
candidates = orders_by_cust_name[norm_cust_name]
for o_dat in candidates:
o_pn = o_dat['clean_pn']
# Check against ANY PN in this sample group
for s_pn in data["raw_pns"]:
if o_pn and (s_pn.startswith(o_pn) or o_pn.startswith(s_pn)):
matched_dates.append(o_dat["date"])
matched_items.append(o_dat)
if matched_dates:
if matched_items:
earliest_sample = min(data["dates"]) if data["dates"] else None
# STRICT FILTER: Post-Sample Orders Only
# Extract dates from matched items
matched_dates = [item["date"] for item in matched_items]
valid_dates = []
if earliest_sample:
valid_dates = [d for d in matched_dates if d >= earliest_sample]
@@ -356,12 +391,51 @@ def get_lab_kpi(
matched_ids_set = set(m[0] for m in matched_ids)
no_dit_count = len([sid for sid in high_qty_ids if sid not in matched_ids_set])
# Calculate High Qty No Order Samples (Count)
# Using existing data structures if possible, or new query
# Criteria: Qty >= 1000 AND No Valid Post-Sample Order
# We can reuse the loop calculation or do it separately.
# Since we already iterated samples to find conversions, let's optimize.
# Actually, the conversion logic above iterates ALL samples.
# Let's add a flag in the main loop?
# Main loop iterates `unique_sample_groups`.
# But High Qty No Order is per SAMPLE, not per group necessarily?
# Actually, business logic wise, if one sample in a group led to order, does it count?
# "Single request quantity > 1000pcs". So it's per sample record.
# If that specific sample has no "attributed" order?
# The current conversion logic is Group-Based (Customer + PN).
# If a group has converted, then likely the samples in it are considered converted.
# BUT, strict definition: "Single sample > 1000".
# Let's iterate high_qty_samples again and check if they belong to a converted group?
# OR check if that sample specifically has a match?
# Our matching logic in `find_matched_orders` is per sample.
high_qty_no_order_count = 0
# efficient check:
for s in high_qty_samples: # calculated above
# Check if this sample has valid orders.
# We need to run find_matched_orders for these samples.
# Ensure we have lookups built. They are built in `get_lab_kpi`.
matched_orders = find_matched_orders(s, order_lookup_by_id, order_lookup_by_name, orders_by_cust_name)
s_date = parse_date(s.date)
is_converted = False
if matched_orders and s_date:
valid_orders = [o for o in matched_orders if o["date"] >= s_date]
if valid_orders:
is_converted = True
if not is_converted:
high_qty_no_order_count += 1
return LabKPI(
converted_count=converted_count,
avg_velocity=round(avg_velocity, 1),
conversion_rate=round(conversion_rate, 1),
orphan_count=orphan_count,
no_dit_count=no_dit_count
no_dit_count=no_dit_count,
high_qty_no_order_count=high_qty_no_order_count
)
@router.get("/scatter", response_model=List[ScatterPoint])
@@ -375,6 +449,7 @@ def get_scatter_data(
if start_date:
samples_query = samples_query.filter(SampleRecord.date >= start_date)
orders_query = orders_query.filter(OrderRecord.date >= start_date)
if end_date:
samples_query = samples_query.filter(SampleRecord.date <= end_date)
@@ -458,14 +533,27 @@ def get_scatter_data(
]
@router.get("/orphans", response_model=List[OrphanSample])
def get_orphans(db: Session = Depends(get_db)):
def get_orphans(
start_date: Optional[str] = Query(None),
end_date: Optional[str] = Query(None),
db: Session = Depends(get_db)
):
now = datetime.now()
threshold_date = now - timedelta(days=90)
samples = db.query(SampleRecord).all()
samples_query = db.query(SampleRecord)
if start_date:
samples_query = samples_query.filter(SampleRecord.date >= start_date)
if end_date:
samples_query = samples_query.filter(SampleRecord.date <= end_date)
samples = samples_query.all()
# Need to match logic check
# To save time, we can fetch all orders and build lookup
orders = db.query(OrderRecord).all()
orders_query = db.query(OrderRecord)
if start_date:
orders_query = orders_query.filter(OrderRecord.date >= start_date)
orders = orders_query.all()
# Build Lookup for Fast Checking
orders_by_cust_name = {}
@@ -544,10 +632,15 @@ def get_orphans(db: Session = Depends(get_db)):
return sorted(orphans, key=lambda x: x.days_since_sent, reverse=True)
@router.get("/no_dit_samples", response_model=List[NoDitSample])
def get_no_dit_samples(db: Session = Depends(get_db)):
def fetch_no_dit_samples(db: Session, start_date: Optional[str] = None, end_date: Optional[str] = None) -> List[NoDitSample]:
# Filter High Qty Samples
high_qty_samples = db.query(SampleRecord).filter(SampleRecord.qty >= 1000).all()
query = db.query(SampleRecord).filter(SampleRecord.qty >= 1000)
if start_date:
query = query.filter(SampleRecord.date >= start_date)
if end_date:
query = query.filter(SampleRecord.date <= end_date)
high_qty_samples = query.all()
results = []
# Batch query matches for efficiency
@@ -571,8 +664,99 @@ def get_no_dit_samples(db: Session = Depends(get_db)):
customer=s.customer,
pn=s.pn,
order_no=s.order_no,
date=s_date.strftime("%Y-%m-%d") if s_date else None,
date=s_date.strftime("%Y-%m-%d") if s_date else (s.date or ""),
qty=s.qty
))
return sorted(results, key=lambda x: x.qty, reverse=True)
@router.get("/no_dit_samples", response_model=List[NoDitSample])
def get_no_dit_samples(
start_date: Optional[str] = Query(None),
end_date: Optional[str] = Query(None),
db: Session = Depends(get_db)
):
return fetch_no_dit_samples(db, start_date, end_date)
def fetch_high_qty_no_order_samples(db: Session, start_date: Optional[str] = None, end_date: Optional[str] = None) -> List[HighQtyNoOrderSample]:
    """List samples with qty >= 1000 that have no valid post-sample order match.

    Shared by the /high_qty_no_order_samples endpoint and the Excel report
    generator. Date filters apply to samples on both ends, but orders are only
    bounded by start_date so conversions landing after the sample window are
    still captured (same asymmetry as get_lab_kpi).

    NOTE(review): start_date/end_date are compared as strings against the
    stored date column — assumes the stored format sorts chronologically
    (e.g. ISO "YYYY-MM-DD"); confirm against the import normalizer.
    """
    # 1. Get High Qty Samples
    query = db.query(SampleRecord).filter(SampleRecord.qty >= 1000)
    if start_date:
        query = query.filter(SampleRecord.date >= start_date)
    if end_date:
        query = query.filter(SampleRecord.date <= end_date)
    high_qty_samples = query.all()
    if not high_qty_samples:
        return []
    # 2. Get All Orders for Matching (deliberately NOT capped by end_date)
    orders_query = db.query(OrderRecord)
    if start_date:
        orders_query = orders_query.filter(OrderRecord.date >= start_date)
    orders = orders_query.all()
    # 3. Build Lookup (Same logic as kpi/conversions) — duplicated from
    # get_lab_kpi; keep the two in sync if the matching keys ever change.
    order_lookup_by_id = {}      # (CUST_ID, clean_pn) -> [order data_obj]
    order_lookup_by_name = {}    # (normalized customer name, clean_pn) -> [order data_obj]
    orders_by_cust_name = {}     # normalized customer name -> [order data_obj] (for prefix matching)
    for o in orders:
        clean_pn = normalize_pn_for_matching(o.pn)
        clean_cust_id = o.cust_id.strip().upper() if o.cust_id else ""
        norm_cust_name = normalize_customer_name(o.customer)
        # Orders with no parseable date fall back to created_at, then to
        # datetime.max — NOTE(review): a datetime.max sentinel always passes
        # the ">= sample date" check below; confirm that is intended.
        o_date = parse_date(o.date) or (o.created_at.replace(tzinfo=None) if o.created_at else datetime.max)
        data_obj = {
            "clean_pn": clean_pn,
            "date": o_date,
            "qty": o.qty or 0,
            "order_no": o.order_no
        }
        if clean_cust_id:
            key_id = (clean_cust_id, clean_pn)
            if key_id not in order_lookup_by_id: order_lookup_by_id[key_id] = []
            order_lookup_by_id[key_id].append(data_obj)
        key_name = (norm_cust_name, clean_pn)
        if key_name not in order_lookup_by_name: order_lookup_by_name[key_name] = []
        order_lookup_by_name[key_name].append(data_obj)
        if norm_cust_name not in orders_by_cust_name: orders_by_cust_name[norm_cust_name] = []
        orders_by_cust_name[norm_cust_name].append(data_obj)
    results = []
    now = datetime.now()
    for s in high_qty_samples:
        # Per-sample attribution: a sample counts as converted only when at
        # least one matched order dates on/after the sample's own date.
        matched_orders = find_matched_orders(s, order_lookup_by_id, order_lookup_by_name, orders_by_cust_name)
        s_date = parse_date(s.date)
        is_converted = False
        if matched_orders and s_date:
            valid_orders = [o for o in matched_orders if o["date"] >= s_date]
            if valid_orders:
                is_converted = True
        if not is_converted:
            # Samples with an unparseable date report 0 days since sent.
            days_since = (now - s_date).days if s_date else 0
            results.append(HighQtyNoOrderSample(
                sample_id=str(s.id),
                customer=s.customer,
                pn=s.pn,
                order_no=s.order_no,
                date=s_date.strftime("%Y-%m-%d") if s_date else (s.date or ""),
                qty=s.qty,
                days_since_sent=days_since
            ))
    # Largest outstanding sample investments first.
    return sorted(results, key=lambda x: x.qty, reverse=True)
@router.get("/high_qty_no_order_samples", response_model=List[HighQtyNoOrderSample])
def get_high_qty_no_order_samples(
start_date: Optional[str] = Query(None),
end_date: Optional[str] = Query(None),
db: Session = Depends(get_db)
):
return fetch_high_qty_no_order_samples(db, start_date, end_date)

View File

@@ -14,6 +14,7 @@ from app.models.dit import DitRecord
from app.models.sample import SampleRecord
from app.models.order import OrderRecord
from app.models.match import MatchResult, MatchStatus, TargetType
from app.routers.lab import fetch_no_dit_samples, fetch_high_qty_no_order_samples
class ReportGenerator:
def __init__(self, db: Session):
@@ -127,6 +128,55 @@ class ReportGenerator:
orders_received = [row for row in all_data if row['order_no']]
create_sheet("取得訂單", orders_received)
# 4. 未歸因大額樣品 (New)
no_dit_samples = fetch_no_dit_samples(self.db)
if no_dit_samples:
ws = wb.create_sheet(title="未歸因大額樣品")
# Header
sub_headers = ['樣品單號', '客戶名稱', '料號', '送樣日期', '數量', '建議']
for col, header in enumerate(sub_headers, 1):
cell = ws.cell(row=1, column=col, value=header)
cell.font = header_font
cell.fill = header_fill
# Data
for row_idx, s in enumerate(no_dit_samples, 2):
ws.cell(row=row_idx, column=1, value=s.order_no or s.sample_id)
ws.cell(row=row_idx, column=2, value=s.customer)
ws.cell(row=row_idx, column=3, value=s.pn)
ws.cell(row=row_idx, column=4, value=s.date)
ws.cell(row=row_idx, column=5, value=s.qty)
ws.cell(row=row_idx, column=6, value="請檢查 DIT 歸因")
# Widths
ws.column_dimensions['B'].width = 30
ws.column_dimensions['C'].width = 20
# 5. 大額無單樣品 (New)
high_qty_no_order = fetch_high_qty_no_order_samples(self.db)
if high_qty_no_order:
ws = wb.create_sheet(title="大額無單樣品")
# Header
sub_headers = ['樣品單號', '客戶名稱', '料號', '送樣日期', '數量', '送樣天數', '狀態']
for col, header in enumerate(sub_headers, 1):
cell = ws.cell(row=1, column=col, value=header)
cell.font = header_font
cell.fill = header_fill
# Data
for row_idx, s in enumerate(high_qty_no_order, 2):
ws.cell(row=row_idx, column=1, value=s.order_no or s.sample_id)
ws.cell(row=row_idx, column=2, value=s.customer)
ws.cell(row=row_idx, column=3, value=s.pn)
ws.cell(row=row_idx, column=4, value=s.date)
ws.cell(row=row_idx, column=5, value=s.qty)
ws.cell(row=row_idx, column=6, value=s.days_since_sent)
ws.cell(row=row_idx, column=7, value="高投入無回報")
# Widths
ws.column_dimensions['B'].width = 30
ws.column_dimensions['C'].width = 20
# 儲存到 BytesIO
output = io.BytesIO()
wb.save(output)

View File

@@ -40,6 +40,7 @@ export const ImportView: React.FC<ImportViewProps> = ({ onEtlComplete }) => {
...prev,
[type]: { ...prev[type], file, loading: true }
}));
setError(null);
try {
const parsed = await etlApi.upload(file, type);
@@ -47,12 +48,17 @@ export const ImportView: React.FC<ImportViewProps> = ({ onEtlComplete }) => {
...prev,
[type]: { file, parsed, loading: false }
}));
} catch (error) {
} catch (error: any) {
console.error(`Error uploading ${type} file:`, error);
setFiles(prev => ({
...prev,
[type]: { file: null, parsed: null, loading: false }
[type]: { ...prev[type], parsed: null, loading: false }
}));
const msg = error.code === 'ECONNABORTED'
? '上傳逾時,檔案可能過大,請稍後再試'
: (error.response?.data?.detail || error.message || '上傳失敗');
setError(msg);
}
};

View File

@@ -9,7 +9,7 @@ import {
} from 'lucide-react';
import { Card } from './common/Card';
import { labApi } from '../services/api';
import type { LabKPI, ScatterPoint, OrphanSample, NoDitSample } from '../types';
import type { LabKPI, ScatterPoint, OrphanSample, NoDitSample, HighQtyNoOrderSample } from '../types';
export const LabView: React.FC = () => {
const [kpi, setKpi] = useState<LabKPI>({
@@ -17,17 +17,19 @@ export const LabView: React.FC = () => {
avg_velocity: 0,
conversion_rate: 0,
orphan_count: 0,
no_dit_count: 0
no_dit_count: 0,
high_qty_no_order_count: 0
});
const [scatterData, setScatterData] = useState<ScatterPoint[]>([]);
const [orphans, setOrphans] = useState<OrphanSample[]>([]);
const [noDitSamples, setNoDitSamples] = useState<NoDitSample[]>([]);
const [highQtyNoOrderSamples, setHighQtyNoOrderSamples] = useState<HighQtyNoOrderSample[]>([]);
const [conversions, setConversions] = useState<any[]>([]);
const [loading, setLoading] = useState(true);
const [dateRange, setDateRange] = useState<'all' | '12m' | '6m' | '3m'>('all');
const [useLogScale, setUseLogScale] = useState(false);
const [copiedId, setCopiedId] = useState<number | null>(null);
const [viewMode, setViewMode] = useState<'orphans' | 'conversions' | 'no_dit'>('orphans');
const [viewMode, setViewMode] = useState<'orphans' | 'conversions' | 'no_dit' | 'high_qty_no_order'>('orphans');
useEffect(() => {
loadLabData();
@@ -48,18 +50,20 @@ export const LabView: React.FC = () => {
const params = start_date ? { start_date } : {};
const [kpiData, scatterRes, orphanRes, noDitRes, conversionRes] = await Promise.all([
const [kpiData, scatterRes, orphanRes, noDitRes, highQtyNoOrderRes, conversionRes] = await Promise.all([
labApi.getKPI(params),
labApi.getScatter(params),
labApi.getOrphans(),
labApi.getNoDitSamples(),
labApi.getConversions()
labApi.getOrphans(params),
labApi.getNoDitSamples(params),
labApi.getHighQtyNoOrderSamples(params),
labApi.getConversions(params)
]);
setKpi(kpiData);
setScatterData(scatterRes);
setOrphans(orphanRes);
setNoDitSamples(noDitRes);
setHighQtyNoOrderSamples(highQtyNoOrderRes);
setConversions(conversionRes);
} catch (error) {
console.error('Error loading lab data:', error);
@@ -125,7 +129,7 @@ export const LabView: React.FC = () => {
</div>
{/* KPI Cards */}
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-5 gap-4">
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-6 gap-4">
<Card
onClick={() => setViewMode('conversions')}
className={`p-4 border-b-4 border-b-blue-500 bg-gradient-to-br from-white to-blue-50/30 cursor-pointer transition-all hover:shadow-md ${viewMode === 'conversions' ? 'ring-2 ring-blue-500 ring-offset-2' : ''}`}
@@ -214,6 +218,25 @@ export const LabView: React.FC = () => {
</div>
</div>
</Card>
<Card
onClick={() => setViewMode('high_qty_no_order')}
className={`p-4 border-b-4 border-b-violet-500 bg-gradient-to-br from-white to-violet-50/30 cursor-pointer transition-all hover:shadow-md ${viewMode === 'high_qty_no_order' ? 'ring-2 ring-violet-500 ring-offset-2' : ''}`}
>
<div className="flex justify-between items-start">
<div>
<div className="text-xs text-slate-500 font-medium mb-1"></div>
<div className="text-2xl font-bold text-violet-600">{kpi.high_qty_no_order_count} </div>
<div className="text-[10px] text-violet-600 mt-1 flex items-center gap-1 font-bold">
<AlertTriangle size={10} />
&gt; 1000 pcs (No Order)
</div>
</div>
<div className="p-2 bg-violet-100 text-violet-600 rounded-lg">
<AlertTriangle size={20} />
</div>
</div>
</Card>
</div>
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
@@ -347,8 +370,16 @@ export const LabView: React.FC = () => {
{/* Dynamic Table Section */}
<Card className="overflow-hidden">
<div className={`px-6 py-4 border-b flex justify-between items-center ${viewMode === 'conversions' ? 'bg-blue-50 border-blue-200' : viewMode === 'no_dit' ? 'bg-amber-50 border-amber-200' : 'bg-rose-50 border-rose-200'}`}>
<h3 className={`font-bold flex items-center gap-2 ${viewMode === 'conversions' ? 'text-blue-700' : viewMode === 'no_dit' ? 'text-amber-700' : 'text-rose-700'}`}>
<div className={`px-6 py-4 border-b flex justify-between items-center ${viewMode === 'conversions' ? 'bg-blue-50 border-blue-200' :
viewMode === 'no_dit' ? 'bg-amber-50 border-amber-200' :
viewMode === 'high_qty_no_order' ? 'bg-violet-50 border-violet-200' :
'bg-rose-50 border-rose-200'
}`}>
<h3 className={`font-bold flex items-center gap-2 ${viewMode === 'conversions' ? 'text-blue-700' :
viewMode === 'no_dit' ? 'text-amber-700' :
viewMode === 'high_qty_no_order' ? 'text-violet-700' :
'text-rose-700'
}`}>
{viewMode === 'conversions' ? (
<>
<Check size={18} />
@@ -359,6 +390,11 @@ export const LabView: React.FC = () => {
<HelpCircle size={18} />
Unattributed High-Qty Samples
</>
) : viewMode === 'high_qty_no_order' ? (
<>
<AlertTriangle size={18} />
High Quantity No-Order Samples
</>
) : (
<>
<AlertTriangle size={18} />
@@ -375,6 +411,7 @@ export const LabView: React.FC = () => {
<div className="text-[10px] text-slate-400 font-medium">
{viewMode === 'conversions' ? `${conversions.length} 筆成功轉換`
: viewMode === 'no_dit' ? `${noDitSamples.length} 筆未歸因大單`
: viewMode === 'high_qty_no_order' ? `${highQtyNoOrderSamples.length} 筆大額無單`
: `${orphans.length} 筆待追蹤案件`}
</div>
</div>
@@ -399,6 +436,13 @@ export const LabView: React.FC = () => {
<th className="px-6 py-3"> (Date/Qty)</th>
<th className="px-6 py-3 text-center"></th>
</>
) : viewMode === 'high_qty_no_order' ? (
<>
<th className="px-6 py-3"></th>
<th className="px-6 py-3"> (Date/Qty)</th>
<th className="px-6 py-3 text-center"></th>
<th className="px-6 py-3 text-center"></th>
</>
) : (
<>
<th className="px-6 py-3"></th>
@@ -457,6 +501,28 @@ export const LabView: React.FC = () => {
</td>
</tr>
))
) : viewMode === 'high_qty_no_order' ? (
highQtyNoOrderSamples.map((row, i) => (
<tr key={i} className="hover:bg-violet-50/50">
<td className="px-6 py-4 font-medium text-slate-800">{row.customer}</td>
<td className="px-6 py-4 font-mono text-xs text-slate-600">{row.pn}</td>
<td className="px-6 py-4 font-mono text-xs text-slate-500">{row.order_no || '-'}</td>
<td className="px-6 py-4">
<div className="flex flex-col">
<span className="text-slate-500 text-xs">{row.date?.replace(/(\d{4})(\d{2})(\d{2})/, '$1/$2/$3')}</span>
<span className="font-bold text-violet-600">{row.qty?.toLocaleString()} pcs</span>
</div>
</td>
<td className="px-6 py-4 text-center">
<span className="font-bold text-slate-600">{row.days_since_sent} </span>
</td>
<td className="px-6 py-4 text-center">
<span className="inline-flex items-center px-2 py-1 rounded-full text-xs font-bold bg-violet-100 text-violet-700">
</span>
</td>
</tr>
))
) : (
orphans.map((row, i) => {
const groupKey = `${row.customer?.trim()?.toUpperCase()}|${row.pn?.trim()?.toUpperCase()}`;
@@ -529,6 +595,13 @@ export const LabView: React.FC = () => {
</td>
</tr>
)}
{viewMode === 'high_qty_no_order' && highQtyNoOrderSamples.length === 0 && (
<tr>
<td colSpan={6} className="px-6 py-10 text-center text-slate-400">
1000pcs
</td>
</tr>
)}
{viewMode === 'no_dit' && noDitSamples.length === 0 && (
<tr>
<td colSpan={5} className="px-6 py-10 text-center text-slate-400">

View File

@@ -14,12 +14,13 @@ import type {
ScatterPoint,
OrphanSample,
ConversionRecord,
NoDitSample
NoDitSample,
HighQtyNoOrderSample
} from '../types';
const api = axios.create({
baseURL: '/api',
timeout: 15000,
timeout: 900000,
headers: {
'Content-Type': 'application/json',
},
@@ -171,18 +172,23 @@ export const labApi = {
return response.data;
},
getOrphans: async (): Promise<OrphanSample[]> => {
const response = await api.get<OrphanSample[]>('/lab/orphans');
getOrphans: async (params?: { start_date?: string; end_date?: string }): Promise<OrphanSample[]> => {
const response = await api.get<OrphanSample[]>('/lab/orphans', { params });
return response.data;
},
getConversions: async (): Promise<ConversionRecord[]> => {
const response = await api.get<ConversionRecord[]>('/lab/conversions');
getConversions: async (params?: { start_date?: string; end_date?: string }): Promise<ConversionRecord[]> => {
const response = await api.get<ConversionRecord[]>('/lab/conversions', { params });
return response.data;
},
getNoDitSamples: async (): Promise<NoDitSample[]> => {
const response = await api.get<NoDitSample[]>('/lab/no_dit_samples');
getNoDitSamples: async (params?: { start_date?: string; end_date?: string }): Promise<NoDitSample[]> => {
const response = await api.get<NoDitSample[]>('/lab/no_dit_samples', { params });
return response.data;
},
getHighQtyNoOrderSamples: async (params?: { start_date?: string; end_date?: string }): Promise<HighQtyNoOrderSample[]> => {
const response = await api.get<HighQtyNoOrderSample[]>('/lab/high_qty_no_order_samples', { params });
return response.data;
},
};

View File

@@ -123,6 +123,7 @@ export interface LabKPI {
conversion_rate: number;
orphan_count: number;
no_dit_count: number;
high_qty_no_order_count: number;
}
export interface ScatterPoint {
@@ -150,6 +151,16 @@ export interface NoDitSample {
qty: number;
}
/**
 * A sample with qty >= 1000 pcs that has no valid post-sample order match.
 * Shape mirrors the backend `HighQtyNoOrderSample` Pydantic model, whose
 * `order_no` and `date` fields are Optional and serialize to `null` when
 * absent — hence `string | null` here (the UI already guards with
 * `row.order_no || '-'` and `row.date?.replace(...)`).
 */
export interface HighQtyNoOrderSample {
  sample_id: string;
  customer: string;
  pn: string;
  /** Sample's own order number; null when the record has none. */
  order_no: string | null;
  /** Sent date, "YYYY-MM-DD" when parseable on the backend; may be null/empty. */
  date: string | null;
  qty: number;
  days_since_sent: number;
}
export interface ConversionRecord {
customer: string;
pn: string;