This commit is contained in:
2026-01-23 18:34:34 +08:00
parent e53c3c838c
commit fd15ec296b
23 changed files with 916 additions and 356 deletions

View File

@@ -1,4 +1,4 @@
from sqlalchemy import Column, Integer, String, DateTime, Float, UniqueConstraint
from sqlalchemy import Column, Integer, String, DateTime, Float, UniqueConstraint, BigInteger
from sqlalchemy.sql import func
from app.models import Base
from app.config import TABLE_PREFIX
@@ -16,7 +16,7 @@ class DitRecord(Base):
customer = Column(String(255), nullable=False, index=True)
customer_normalized = Column(String(255), index=True)
pn = Column(String(100), nullable=False, index=True)
eau = Column(Integer, default=0)
eau = Column(BigInteger, default=0)
stage = Column(String(50))
date = Column(String(20))
created_at = Column(DateTime(timezone=True), server_default=func.now())

View File

@@ -1,4 +1,4 @@
from sqlalchemy import Column, Integer, String, DateTime, Float, Enum, ForeignKey
from sqlalchemy import Column, Integer, String, DateTime, Float, Enum, ForeignKey, UniqueConstraint
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.models import Base
@@ -21,6 +21,9 @@ class ReviewAction(str, enum.Enum):
class MatchResult(Base):
__tablename__ = f"{TABLE_PREFIX}Match_Results"
__table_args__ = (
UniqueConstraint('dit_id', 'target_type', 'target_id', name='uix_match_dit_target'),
)
id = Column(Integer, primary_key=True, index=True)
dit_id = Column(Integer, ForeignKey(f"{TABLE_PREFIX}DIT_Records.id"), nullable=False)

View File

@@ -161,11 +161,17 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
erp_account = clean_value(row.get('erp_account'), '')
customer = clean_value(row.get('customer'))
pn = clean_value(row.get('pn'))
# 跳過無效資料列或重複的 op_id + pn 組合
# Skip empty PN as per user request
if not pn:
continue
# Deduplicate by OP ID + PN
unique_key = f"{op_id}|{pn}"
if not op_id or unique_key in seen_ids:
continue
seen_ids.add(unique_key)
record = DitRecord(
op_id=op_id,
op_name=clean_value(row.get('op_name')),
@@ -183,13 +189,24 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
cust_id = clean_value(row.get('cust_id'), '')
customer = clean_value(row.get('customer'))
pn = clean_value(row.get('pn'))
# 跳過重複的 sample_id
order_no = clean_value(row.get('order_no'))
# Skip empty PN
if not pn:
continue
# Deduplicate by Sample ID only
# We rely on auto-generated unique IDs if sample_id is missing from Excel mapping
unique_key = sample_id
if sample_id in seen_ids:
continue
seen_ids.add(sample_id)
record = SampleRecord(
sample_id=sample_id,
order_no=clean_value(row.get('order_no')),
order_no=order_no,
oppy_no=oppy_no,
cust_id=cust_id,
customer=customer,
@@ -203,10 +220,19 @@ def import_data(request: ImportRequest, db: Session = Depends(get_db)):
cust_id = clean_value(row.get('cust_id'), '')
customer = clean_value(row.get('customer'))
pn = clean_value(row.get('pn'))
# 跳過重複的 order_id
if order_id in seen_ids:
order_no = clean_value(row.get('order_no'))
# Skip empty PN
if not pn:
continue
seen_ids.add(order_id)
# Deduplicate by Order No + Order ID (Item No)
# Item No (order_id) is not unique globally, only unique per order usually.
unique_key = f"{order_no}_{order_id}"
if unique_key in seen_ids:
continue
seen_ids.add(unique_key)
record = OrderRecord(
order_id=order_id,
order_no=clean_value(row.get('order_no')),

View File

@@ -7,6 +7,8 @@ from pydantic import BaseModel
from app.models import get_db
from app.models.sample import SampleRecord
from app.models.order import OrderRecord
from app.models.match import MatchResult, MatchStatus, TargetType
from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name
router = APIRouter(prefix="/lab", tags=["Lab"])
@@ -15,6 +17,7 @@ class LabKPI(BaseModel):
avg_velocity: float # 平均轉換時間 (天)
conversion_rate: float # 轉換比例 (%)
orphan_count: int # 孤兒樣品總數
no_dit_count: int # 未歸因大額樣品數
class ConversionRecord(BaseModel):
customer: str
@@ -22,10 +25,10 @@ class ConversionRecord(BaseModel):
sample_date: str
sample_qty: int
order_date: str
order_qty: int
order_qty: int # First Order Qty
total_order_qty: int # Total Order Qty (Post-Sample)
days_to_convert: int
# ... (ScatterPoint and OrphanSample classes remain same)
class ScatterPoint(BaseModel):
customer: str
pn: str
@@ -36,19 +39,113 @@ class OrphanSample(BaseModel):
customer: str
pn: str
days_since_sent: int
order_no: str
date: str
order_no: Optional[str] = None
date: Optional[str] = None
sample_qty: int = 0
# ... (parse_date function remains same)
class NoDitSample(BaseModel):
    """High-quantity sample record that has not been attributed to any DIT."""
    sample_id: str           # stringified DB id of the sample row (see get_no_dit_samples)
    customer: str
    pn: str
    order_no: Optional[str]  # nullable but required field
    date: Optional[str]      # "YYYY-MM-DD" string, or null when the raw date is unparseable
    qty: int
def parse_date(date_val) -> Optional[datetime]:
    """Best-effort conversion of a heterogeneous date value to a datetime.

    Accepts datetime objects (returned unchanged), ISO-8601 strings with a
    time component, and several common date-only string formats.  Any value
    that is falsy, of an unsupported type, or unparseable yields None.
    """
    if not date_val:
        return None
    if isinstance(date_val, datetime):
        return date_val
    if not isinstance(date_val, str):
        # Unsupported type (e.g. int) -- treated as missing.
        return None

    text = date_val.strip()
    try:
        if "T" in text:
            # ISO-8601 with a time part; normalise a trailing 'Z' to a UTC offset.
            return datetime.fromisoformat(text.replace("Z", "+00:00"))
        # Date-only forms, tried in order; first successful parse wins.
        for pattern in ("%Y-%m-%d", "%Y/%m/%d", "%Y.%m.%d", "%d-%m-%Y", "%Y%m%d"):
            try:
                return datetime.strptime(text, pattern)
            except ValueError:
                continue
        # Deliberately no heavyweight fallback parser here: this runs inside
        # tight loops, so only the common formats above are attempted.
        return None
    except ValueError:
        return None
# Helper to build order lookups
from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name
def normalize_id(val: object) -> str:
    """Normalize an external ID value for comparison.

    Strips surrounding whitespace and any leading Excel text-format
    apostrophes, drops a trailing ".0" left over from float round-trips
    (e.g. pandas reading 123 as 123.0), and uppercases the result.

    Returns "" for None so callers can treat missing IDs as falsy.
    """
    # Fix: the original annotated `val` with the builtin function `any`,
    # which is not a type; `object` expresses "anything" correctly.
    if val is None:
        return ""
    s = str(val).strip()
    # Excel text-formatted cells may carry a leading apostrophe.
    s = s.lstrip("'")
    # float -> str round-trips can turn 123 into "123.0".
    if s.endswith(".0"):
        s = s[:-2]
    return s.upper()
def build_order_lookups(orders):
def find_matched_orders(s, order_lookup_by_id, order_lookup_by_name, orders_by_cust_name):
    """Collect candidate order rows for sample *s* via a three-step cascade.

    1. Exact (normalized cust_id, normalized pn) lookup.
    2. Exact (normalized customer name, normalized pn) lookup -- always tried
       in addition to step 1, since some order rows may lack a cust_id.
    3. PN prefix match within the same customer name -- fallback only, used
       when steps 1-2 produced nothing (prefix matching after an exact hit
       would just add noise).

    Candidates are deduplicated by the (date, qty, order_no) content
    signature: the lookup dicts hold freshly-built data dicts, so object
    identity cannot be relied on.  Returns a list of order data dicts.
    """
    candidates = []
    clean_pn = normalize_pn_for_matching(s.pn)
    norm_cust_name = normalize_customer_name(s.customer)
    clean_cust_id = normalize_id(s.cust_id)
    # 1. Try ID Match
    if clean_cust_id:
        key_id = (clean_cust_id, clean_pn)
        if key_id in order_lookup_by_id:
            candidates.extend(order_lookup_by_id[key_id])
    # 2. Try Name Match (always, in case the ID is missing on some order rows)
    key_name = (norm_cust_name, clean_pn)
    if key_name in order_lookup_by_name:
        candidates.extend(order_lookup_by_name[key_name])
    # 3. Prefix match, fallback only (see docstring).
    if not candidates and norm_cust_name in orders_by_cust_name:
        candidates_prefix = orders_by_cust_name[norm_cust_name]
        for o_dat in candidates_prefix:
            o_pn = o_dat['clean_pn']
            if o_pn and clean_pn and (clean_pn.startswith(o_pn) or o_pn.startswith(clean_pn)):
                candidates.append(o_dat)
    # Deduplicate by content signature: (date, qty, order_no).
    unique_candidates = []
    seen = set()
    for c in candidates:
        sig = (c["date"], c["qty"], c["order_no"])
        if sig not in seen:
            seen.add(sig)
            unique_candidates.append(c)
    return unique_candidates
@router.get("/conversions", response_model=List[ConversionRecord])
def get_conversions(db: Session = Depends(get_db)):
samples = db.query(SampleRecord).all()
orders = db.query(OrderRecord).all()
# Build Lookups
order_lookup_by_id = {}
order_lookup_by_name = {}
orders_by_cust_name = {} # For prefix matching: name -> list of {clean_pn, date, qty, ...}
for o in orders:
clean_pn = normalize_pn_for_matching(o.pn)
@@ -60,7 +157,8 @@ def build_order_lookups(orders):
data = {
"date": o_date,
"qty": o.qty or 0,
"order_no": o.order_no
"order_no": o.order_no,
"clean_pn": clean_pn # Store for prefix check
}
if clean_cust_id:
@@ -71,86 +169,49 @@ def build_order_lookups(orders):
key_name = (norm_cust_name, clean_pn)
if key_name not in order_lookup_by_name: order_lookup_by_name[key_name] = []
order_lookup_by_name[key_name].append(data)
return order_lookup_by_id, order_lookup_by_name
@router.get("/conversions", response_model=List[ConversionRecord])
def get_conversions(db: Session = Depends(get_db)):
# 找出所有樣品
samples = db.query(SampleRecord).all()
# 找出所有訂單
orders = db.query(OrderRecord).all()
order_lookup_by_id, order_lookup_by_name = build_order_lookups(orders)
if norm_cust_name not in orders_by_cust_name: orders_by_cust_name[norm_cust_name] = []
orders_by_cust_name[norm_cust_name].append(data)
conversions = []
# We want to list "Sample Records" that successfully converted.
# Or "Groups"? The user said "list of sample sent and their order qty".
# Listing each sample record seems appropriate.
for s in samples:
clean_pn = normalize_pn_for_matching(s.pn)
norm_cust_name = normalize_customer_name(s.customer)
clean_cust_id = s.cust_id.strip().upper() if s.cust_id else ""
matched_orders = find_matched_orders(s, order_lookup_by_id, order_lookup_by_name, orders_by_cust_name)
s_date = parse_date(s.date)
matched_orders = []
# 1. Try via ID
if clean_cust_id:
if (clean_cust_id, clean_pn) in order_lookup_by_id:
matched_orders.extend(order_lookup_by_id[(clean_cust_id, clean_pn)])
# 2. Try via Name (Fallback)
if not matched_orders:
if (norm_cust_name, clean_pn) in order_lookup_by_name:
matched_orders.extend(order_lookup_by_name[(norm_cust_name, clean_pn)])
if matched_orders and s_date:
# Sort orders by date
matched_orders.sort(key=lambda x: x["date"])
first_order = matched_orders[0]
# STRICT FILTER: Only consider orders AFTER or ON sample date
valid_orders = [o for o in matched_orders if o["date"] >= s_date]
# Simple aggregations if multiple orders? User asked for "their order qty".
# showing total order qty for this PN/Cust might be better
total_order_qty = sum(o["qty"] for o in matched_orders)
days_diff = (first_order["date"] - s_date).days
# Filter unrealistic past orders?
# if days_diff < 0: continue # Optional
conversions.append(ConversionRecord(
customer=s.customer,
pn=s.pn,
sample_date=s.date,
sample_qty=s.qty or 0,
order_date=first_order["date"].strftime("%Y-%m-%d"), # First order date
order_qty=total_order_qty,
days_to_convert=days_diff
))
# Sort by recent sample date
return sorted(conversions, key=lambda x: x.sample_date, reverse=True)
if valid_orders:
# Sort orders by date
valid_orders.sort(key=lambda x: x["date"])
# Identify First Order Date & Aggregate Qty for that date
first_order = valid_orders[0]
first_date = first_order["date"]
# Sum qty of ALL orders that match the first order date
first_date_qty = sum(o["qty"] for o in valid_orders if o["date"] == first_date)
def parse_date(date_str: str) -> Optional[datetime]:
    """Parse a date string into a datetime; returns None when unparseable.

    NOTE(review): the loop below splits both the value and the format on the
    first space, so any time component is silently discarded and the two
    "%H:%M:%S" formats collapse into duplicates of their date-only
    counterparts -- confirm whether time-of-day was meant to be kept.
    """
    if not date_str:
        return None
    val = str(date_str).strip()
    # Compact YYYYMMDD form first (exactly 8 digits, no separators).
    if len(val) == 8 and val.isdigit():
        try:
            return datetime.strptime(val, "%Y%m%d")
        except ValueError:
            pass
    for fmt in ("%Y-%m-%d", "%Y/%m/%d", "%Y-%m-%d %H:%M:%S", "%Y/%m/%d %H:%M:%S", "%d-%b-%y"):
        try:
            # Only the date part (text before the first space) is parsed.
            return datetime.strptime(str(date_str).split(' ')[0], fmt.split(' ')[0])
        except ValueError:
            continue
    return None
# Total Order Qty (Cumulative for all valid post-sample orders)
total_order_qty = sum(o["qty"] for o in valid_orders)
days_diff = (first_date - s_date).days
s_date_str = s_date.strftime("%Y-%m-%d")
conversions.append(ConversionRecord(
customer=s.customer,
pn=s.pn,
sample_date=s_date_str,
sample_qty=s.qty or 0,
order_date=first_date.strftime("%Y-%m-%d"),
order_qty=first_date_qty, # Show First Order Qty ONLY
total_order_qty=total_order_qty, # Show Total Qty
days_to_convert=days_diff
))
return sorted(conversions, key=lambda x: x.sample_date if x.sample_date else "0000-00-00", reverse=True)
@router.get("/kpi", response_model=LabKPI)
def get_lab_kpi(
@@ -158,14 +219,13 @@ def get_lab_kpi(
end_date: Optional[str] = Query(None),
db: Session = Depends(get_db)
):
# 1. 取得所有樣品與訂單
# Fetch Data
samples_query = db.query(SampleRecord)
orders_query = db.query(OrderRecord)
if start_date:
samples_query = samples_query.filter(SampleRecord.date >= start_date)
orders_query = orders_query.filter(OrderRecord.date >= start_date)
if end_date:
samples_query = samples_query.filter(SampleRecord.date <= end_date)
orders_query = orders_query.filter(OrderRecord.date <= end_date)
@@ -173,40 +233,8 @@ def get_lab_kpi(
samples = samples_query.all()
orders = orders_query.all()
# 建立群組 (ERP Code + PN)
# ERP Code correspond to cust_id
from app.services.fuzzy_matcher import normalize_pn_for_matching
sample_groups = {}
for s in samples:
# Use simple normalization like stripping spaces
clean_pn = normalize_pn_for_matching(s.pn)
clean_cust = s.cust_id.strip().upper() if s.cust_id else ""
key = (clean_cust, clean_pn)
if key not in sample_groups:
sample_groups[key] = []
sample_groups[key].append(s)
order_groups = {}
for o in orders:
clean_pn = normalize_pn_for_matching(o.pn)
clean_cust = o.cust_id.strip().upper() if o.cust_id else ""
key = (clean_cust, clean_pn)
if key not in order_groups:
order_groups[key] = []
order_groups[key].append(o)
# 計算 Velocity 與 轉換率
velocities = []
converted_samples_count = 0
total_samples_count = len(samples)
# Re-use the lookup maps built above if possible, but we need to build them first.
# Let's rebuild lookups here for clarity or refactor.
# To be safe and clean, let's just implement the loop here.
from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name
# Build Lookups (Same as conversions)
orders_by_cust_name = {}
order_lookup_by_id = {}
order_lookup_by_name = {}
@@ -214,9 +242,9 @@ def get_lab_kpi(
clean_pn = normalize_pn_for_matching(o.pn)
clean_cust_id = o.cust_id.strip().upper() if o.cust_id else ""
norm_cust_name = normalize_customer_name(o.customer)
o_date = parse_date(o.date) or (o.created_at.replace(tzinfo=None) if o.created_at else datetime.max)
# We only need dates for KPI
if clean_cust_id:
key_id = (clean_cust_id, clean_pn)
if key_id not in order_lookup_by_id: order_lookup_by_id[key_id] = []
@@ -225,80 +253,115 @@ def get_lab_kpi(
key_name = (norm_cust_name, clean_pn)
if key_name not in order_lookup_by_name: order_lookup_by_name[key_name] = []
order_lookup_by_name[key_name].append(o_date)
if norm_cust_name not in orders_by_cust_name: orders_by_cust_name[norm_cust_name] = []
orders_by_cust_name[norm_cust_name].append({ "clean_pn": clean_pn, "date": o_date })
# Group Samples by (CustName, PN) for calculation to avoid double counting if multiple samples -> same order
# Actually, "Conversion Rate" is usually "Percentage of Sample Records that resulted in Order".
# Or "Percentage of Projects". Let's stick to "Sample Groups" (Unique trials).
unique_sample_groups = {} # (norm_cust_name, clean_pn) -> list of sample dates
# Group Samples by (CustName, PN) for Project Count
unique_sample_groups = {}
for s in samples:
clean_pn = normalize_pn_for_matching(s.pn)
norm_cust_name = normalize_customer_name(s.customer)
clean_cust_id = s.cust_id.strip().upper() if s.cust_id else ""
key = (norm_cust_name, clean_pn) # Group by Name+PN
key = (norm_cust_name, clean_pn)
if key not in unique_sample_groups:
unique_sample_groups[key] = {
"dates": [],
"cust_ids": set()
"cust_ids": set(),
"raw_pns": set()
}
s_date = parse_date(s.date)
if s_date: unique_sample_groups[key]["dates"].append(s_date)
if clean_cust_id: unique_sample_groups[key]["cust_ids"].add(clean_cust_id)
if s.cust_id: unique_sample_groups[key]["cust_ids"].add(s.cust_id.strip().upper())
unique_sample_groups[key]["raw_pns"].add(clean_pn)
# Calculate
total_samples_count = len(unique_sample_groups) # Total "Projects"
total_samples_count = len(unique_sample_groups)
converted_count = 0
orphan_count = 0
velocities = []
now = datetime.now()
for key, data in unique_sample_groups.items():
norm_cust_name, clean_pn = key
norm_cust_name, group_clean_pn = key
# Try finding orders
matched_dates = []
# 1. Try via ID
# 1. Try ID Match
for cid in data["cust_ids"]:
if (cid, clean_pn) in order_lookup_by_id:
matched_dates.extend(order_lookup_by_id[(cid, clean_pn)])
if (cid, group_clean_pn) in order_lookup_by_id:
matched_dates.extend(order_lookup_by_id[(cid, group_clean_pn)])
# 2. Try via Name
# 2. Try Name Match
if not matched_dates:
if key in order_lookup_by_name:
matched_dates.extend(order_lookup_by_name[key])
# 3. Try Prefix Match (Using first available PN in group vs Orders of same customer)
if not matched_dates and norm_cust_name in orders_by_cust_name:
candidates = orders_by_cust_name[norm_cust_name]
for o_dat in candidates:
o_pn = o_dat['clean_pn']
# Check against ANY PN in this sample group
for s_pn in data["raw_pns"]:
if o_pn and (s_pn.startswith(o_pn) or o_pn.startswith(s_pn)):
matched_dates.append(o_dat["date"])
if matched_dates:
converted_count += 1
# Velocity
earliest_sample = min(data["dates"]) if data["dates"] else None
# Filter orders that came AFTER sample? Or just first order?
# Typically first order date.
first_order = min(matched_dates) if matched_dates else None
if earliest_sample and first_order:
diff = (first_order - earliest_sample).days
if diff >= 0:
velocities.append(diff)
# STRICT FILTER: Post-Sample Orders Only
valid_dates = []
if earliest_sample:
valid_dates = [d for d in matched_dates if d >= earliest_sample]
if valid_dates:
converted_count += 1
first_order = min(valid_dates)
diff = (first_order - earliest_sample).days
if diff >= 0:
velocities.append(diff)
else:
# No valid post-sample order -> Potential Orphan
if earliest_sample and (now - earliest_sample).days > 90:
orphan_count += 1
else:
# Check Orphan (No Order)
# Use earliest sample date
# Orphan Check
earliest_sample = min(data["dates"]) if data["dates"] else None
# If no date, can't determine orphans strictly, but also definitely not converted.
# Only count as orphan if we know it's old enough.
if earliest_sample and (now - earliest_sample).days > 90:
orphan_count += 1
avg_velocity = sum(velocities) / len(velocities) if velocities else 0
conversion_rate = (converted_count / total_samples_count * 100) if total_samples_count > 0 else 0
# Calculate No DIT High Qty Samples (Count)
kpi_samples_query = db.query(SampleRecord).filter(SampleRecord.qty >= 1000)
if start_date: kpi_samples_query = kpi_samples_query.filter(SampleRecord.date >= start_date)
if end_date: kpi_samples_query = kpi_samples_query.filter(SampleRecord.date <= end_date)
high_qty_samples = kpi_samples_query.all()
high_qty_ids = [s.id for s in high_qty_samples]
no_dit_count = 0
if high_qty_ids:
matched_ids = db.query(MatchResult.target_id).filter(
MatchResult.target_id.in_(high_qty_ids),
MatchResult.target_type == TargetType.SAMPLE,
MatchResult.status.in_([MatchStatus.accepted, MatchStatus.auto_matched])
).all()
matched_ids_set = set(m[0] for m in matched_ids)
no_dit_count = len([sid for sid in high_qty_ids if sid not in matched_ids_set])
return LabKPI(
converted_count=converted_count,
avg_velocity=round(avg_velocity, 1),
conversion_rate=round(conversion_rate, 1),
orphan_count=orphan_count
orphan_count=orphan_count,
no_dit_count=no_dit_count
)
@router.get("/scatter", response_model=List[ScatterPoint])
@@ -318,80 +381,31 @@ def get_scatter_data(
samples = samples_query.all()
orders = orders_query.all()
# 聚合資料
from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name
# Build Lookups (simplified for aggregation)
orders_by_cust_name = {} # name -> list of {clean_pn, qty, date}
# 建立多重索引的 Order Lookup
# order_lookup_by_id: (cust_id, pn) -> Order Data
# order_lookup_by_name: (cust_name, pn) -> Order Data
order_lookup_by_id = {}
order_lookup_by_name = {}
for o in orders:
clean_pn = normalize_pn_for_matching(o.pn)
clean_cust_id = o.cust_id.strip().upper() if o.cust_id else ""
norm_cust_name = normalize_customer_name(o.customer)
clean_pn = normalize_pn_for_matching(o.pn)
o_date = parse_date(o.date) or (o.created_at.replace(tzinfo=None) if o.created_at else datetime.max)
# Aggregate by Cust ID
if clean_cust_id:
key_id = (clean_cust_id, clean_pn)
if key_id not in order_lookup_by_id:
order_lookup_by_id[key_id] = {"qty": 0, "dates": []}
order_lookup_by_id[key_id]["qty"] += (o.qty or 0)
if o.date: order_lookup_by_id[key_id]["dates"].append(parse_date(o.date) or datetime.max)
elif o.created_at: order_lookup_by_id[key_id]["dates"].append(o.created_at.replace(tzinfo=None))
if norm_cust_name not in orders_by_cust_name:
orders_by_cust_name[norm_cust_name] = []
orders_by_cust_name[norm_cust_name].append({
"clean_pn": clean_pn,
"qty": o.qty or 0,
"date": o_date
})
# Aggregate by Cust Name (Fallback)
key_name = (norm_cust_name, clean_pn)
if key_name not in order_lookup_by_name:
order_lookup_by_name[key_name] = {"qty": 0, "dates": []}
order_lookup_by_name[key_name]["qty"] += (o.qty or 0)
if o.date: order_lookup_by_name[key_name]["dates"].append(parse_date(o.date) or datetime.max)
elif o.created_at: order_lookup_by_name[key_name]["dates"].append(o.created_at.replace(tzinfo=None))
# Group by (Display Cust, Display PN) - but we need to match broadly
# Strategy: Group by Display Keys first, then try to find match for that group
unique_groups = {} # (norm_cust, clean_pn) -> {display_cust, display_pn, sample_qty, order_qty, min_sample_date}
final_data_map = {} # Key (Display Customer, Original PN) -> Data
for s in samples:
clean_pn = normalize_pn_for_matching(s.pn)
clean_cust_id = s.cust_id.strip().upper() if s.cust_id else ""
norm_cust_name = normalize_customer_name(s.customer)
# 嘗試比對 Order
matched_order = None
# 1. Try Cust ID match
if clean_cust_id:
matched_order = order_lookup_by_id.get((clean_cust_id, clean_pn))
# 2. If no match, Try Cust Name match
if not matched_order:
matched_order = order_lookup_by_name.get((norm_cust_name, clean_pn))
# Render Key using Sample's info
display_key = (s.customer, s.pn)
if display_key not in final_data_map:
final_data_map[display_key] = {"sample_qty": 0, "order_qty": 0, "customer": s.customer, "orignal_pn": s.pn}
final_data_map[display_key]["sample_qty"] += (s.qty or 0)
if matched_order:
# 注意:這裡簡單累加可能會導致重複計算如果多個樣品對應同一個訂單聚合
# 但目前邏輯是以「樣品」為基底看轉換,所以我們顯示該樣品對應到的訂單總量是合理的
# 不過為了 scatter plot 的準確性,我們應該只在第一次遇到這個 key 時加上 order qty?
# 或者Scatter Plot 的點是 (Customer, PN),所以我們應該是把這個 Group 的 Sample Qty 和 Order Qty 放在一起。
# Order Qty 已經在 lookup 裡聚合過了。
pass
# Re-construct the final map properly merging Order Data
# 上面的迴圈有點問題,因為我們是依據 Sample 來建立點,但 Order 總量是固定的。
# 正確做法:以 (Customer, PN) 為 Unique Key。
unique_groups = {} # (norm_cust_name, clean_pn) -> {display_cust, display_pn, sample_qty, order_qty}
for s in samples:
clean_pn = normalize_pn_for_matching(s.pn)
norm_cust_name = normalize_customer_name(s.customer)
s_date = parse_date(s.date)
key = (norm_cust_name, clean_pn)
if key not in unique_groups:
@@ -400,31 +414,38 @@ def get_scatter_data(
"display_pn": s.pn,
"sample_qty": 0,
"order_qty": 0,
"matched": False
"min_sample_date": s_date
}
unique_groups[key]["sample_qty"] += (s.qty or 0)
# Update min date
current_min = unique_groups[key]["min_sample_date"]
if s_date:
if not current_min or s_date < current_min:
unique_groups[key]["min_sample_date"] = s_date
# Fill in Order Qty
# Fill Order Qty
for key, data in unique_groups.items():
norm_cust_name, clean_pn = key
norm_cust_name, sample_clean_pn = key
min_s_date = data["min_sample_date"]
# Try finding orders
# Note: We rely on Name match here primarily since we grouped by Name.
# Ideally we should also check CustID if available on the samples in this group, but grouping by Name is safer for visual scatter plot.
matched_qty = 0
matched_order = order_lookup_by_name.get((norm_cust_name, clean_pn))
# If no name match, maybe check if any sample in this group had a CustId that matches?
# For simplicity, let's stick to Name+PN for the Scatter Plot aggregation
if matched_order:
data["order_qty"] = matched_order["qty"]
data["matched"] = True
data_map = unique_groups # Replace old data_map logic
if norm_cust_name in orders_by_cust_name:
candidates = orders_by_cust_name[norm_cust_name]
for o_dat in candidates:
o_pn = o_dat['clean_pn']
o_date = o_dat['date']
# Check Date Causality first
if min_s_date and o_date < min_s_date:
continue
# 如果有訂單但沒樣品,我們在 ROI 分析中可能不顯示,或者顯示在 Y 軸上 X=0。
# 根據需求:分析「樣品寄送」與「訂單接收」的關聯,通常以有送樣的為基底。
# Exact or Prefix Match
if o_pn and (sample_clean_pn == o_pn or sample_clean_pn.startswith(o_pn) or o_pn.startswith(sample_clean_pn)):
matched_qty += o_dat['qty']
data["order_qty"] = matched_qty
return [
ScatterPoint(
@@ -433,7 +454,7 @@ def get_scatter_data(
sample_qty=v["sample_qty"],
order_qty=v["order_qty"]
)
for key, v in data_map.items()
for key, v in unique_groups.items()
]
@router.get("/orphans", response_model=List[OrphanSample])
@@ -441,54 +462,117 @@ def get_orphans(db: Session = Depends(get_db)):
now = datetime.now()
threshold_date = now - timedelta(days=90)
# 找出所有樣品
samples = db.query(SampleRecord).all()
# 找出所有訂單
# Need to match logic check
# To save time, we can fetch all orders and build lookup
orders = db.query(OrderRecord).all()
# Build Order Lookups (ID and Name)
from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name
order_keys_id = set()
order_keys_name = set()
# Build Lookup for Fast Checking
orders_by_cust_name = {}
for o in orders:
clean_pn = normalize_pn_for_matching(o.pn)
clean_cust_id = o.cust_id.strip().upper() if o.cust_id else ""
norm_cust_name = normalize_customer_name(o.customer)
clean_pn = normalize_pn_for_matching(o.pn)
o_date = parse_date(o.date) or (o.created_at.replace(tzinfo=None) if o.created_at else datetime.max)
if clean_cust_id:
order_keys_id.add((clean_cust_id, clean_pn))
order_keys_name.add((norm_cust_name, clean_pn))
if norm_cust_name not in orders_by_cust_name: orders_by_cust_name[norm_cust_name] = []
orders_by_cust_name[norm_cust_name].append({
"clean_pn": clean_pn,
"date": o_date
})
orphans = []
# Aggregation Dictionary
# Key: (normalized_customer, normalized_pn, order_no, date_str)
# Value: { "raw_customer": str, "raw_pn": str, "qty": int, "date_obj": datetime }
orphan_groups = {}
for s in samples:
clean_pn = normalize_pn_for_matching(s.pn)
norm_cust_name = normalize_customer_name(s.customer)
clean_cust_id = s.cust_id.strip().upper() if s.cust_id else ""
clean_pn = normalize_pn_for_matching(s.pn)
s_date = parse_date(s.date)
# Check match
s_date_str = s_date.strftime("%Y-%m-%d") if s_date else "Unknown"
s_order_no = s.order_no.strip() if s.order_no else ""
# Check if matched (Logic same as before, check against all orders)
matched = False
if clean_cust_id:
if (clean_cust_id, clean_pn) in order_keys_id:
matched = True
if not matched:
if (norm_cust_name, clean_pn) in order_keys_name:
matched = True
if s_date and norm_cust_name in orders_by_cust_name:
candidates = orders_by_cust_name[norm_cust_name]
for o_dat in candidates:
o_pn = o_dat['clean_pn']
o_date = o_dat['date']
# Check Date Causality first
if o_date < s_date:
continue
# Check PN Match (Exact or Prefix)
if o_pn and (clean_pn == o_pn or clean_pn.startswith(o_pn) or o_pn.startswith(clean_pn)):
matched = True
break
if not matched:
# Only consider old enough samples
if s_date and s_date < threshold_date:
orphans.append(OrphanSample(
customer=s.customer,
pn=s.pn,
days_since_sent=(now - s_date).days,
order_no=s.order_no,
date=s.date
))
# Add to group
# We use the FIRST raw customer/pn encountered for display, or could be smarter.
# Group Key: (norm_cust, clean_pn, order_no, date)
key = (norm_cust_name, clean_pn, s_order_no, s_date_str)
if key not in orphan_groups:
orphan_groups[key] = {
"customer": s.customer,
"pn": s.pn,
"order_no": s.order_no,
"date": s_date_str,
"qty": 0,
"days": (now - s_date).days
}
orphan_groups[key]["qty"] += (s.qty or 0)
# Convert groups to list
orphans = []
for data in orphan_groups.values():
orphans.append(OrphanSample(
customer=data["customer"],
pn=data["pn"],
days_since_sent=data["days"],
order_no=data["order_no"],
date=data["date"],
sample_qty=data["qty"]
))
return sorted(orphans, key=lambda x: x.days_since_sent, reverse=True)
@router.get("/no_dit_samples", response_model=List[NoDitSample])
def get_no_dit_samples(db: Session = Depends(get_db)):
    """List high-quantity samples (qty >= 1000) with no accepted/auto DIT match."""
    # 1000 is the fixed "large sample" threshold used by this endpoint.
    high_qty_samples = db.query(SampleRecord).filter(SampleRecord.qty >= 1000).all()
    results = []
    # Batch-fetch matched target ids in one query instead of per-sample lookups.
    sample_ids = [s.id for s in high_qty_samples]
    if not sample_ids:
        return []
    # Only accepted/auto-matched SAMPLE-type matches count as "attributed".
    matched_ids = db.query(MatchResult.target_id).filter(
        MatchResult.target_id.in_(sample_ids),
        MatchResult.target_type == TargetType.SAMPLE,
        MatchResult.status.in_([MatchStatus.accepted, MatchStatus.auto_matched])
    ).all()
    matched_ids_set = set(m[0] for m in matched_ids)
    for s in high_qty_samples:
        if s.id not in matched_ids_set:
            s_date = parse_date(s.date)
            results.append(NoDitSample(
                sample_id=str(s.id),  # DB primary key, not the business sample_id field
                customer=s.customer,
                pn=s.pn,
                order_no=s.order_no,
                date=s_date.strftime("%Y-%m-%d") if s_date else None,
                qty=s.qty  # NOTE(review): assumes qty is non-null given the >= 1000 filter -- confirm
            ))
    # Largest samples first.
    return sorted(results, key=lambda x: x.qty, reverse=True)

View File

@@ -29,6 +29,7 @@ class DitInfo(BaseModel):
class TargetInfo(BaseModel):
id: int
sample_id: Optional[str] = None
customer: str
pn: str
order_no: Optional[str]
@@ -83,6 +84,7 @@ def get_results(db: Session = Depends(get_db)):
if sample:
target_info = TargetInfo(
id=sample.id,
sample_id=sample.sample_id,
customer=sample.customer,
pn=sample.pn,
order_no=sample.order_no,
@@ -93,6 +95,7 @@ def get_results(db: Session = Depends(get_db)):
if order:
target_info = TargetInfo(
id=order.id,
sample_id=order.order_id,
customer=order.customer,
pn=order.pn,
order_no=order.order_no,
@@ -142,6 +145,7 @@ def review_match(match_id: int, request: ReviewRequest, db: Session = Depends(ge
if sample:
target_info = TargetInfo(
id=sample.id,
sample_id=sample.sample_id,
customer=sample.customer,
pn=sample.pn,
order_no=sample.order_no,
@@ -152,6 +156,7 @@ def review_match(match_id: int, request: ReviewRequest, db: Session = Depends(ge
if order:
target_info = TargetInfo(
id=order.id,
sample_id=order.order_id,
customer=order.customer,
pn=order.pn,
order_no=order.order_no,

View File

@@ -43,13 +43,13 @@ COLUMN_MAPPING = {
'date': ['created date', '日期', 'date', '建立日期', 'create date']
},
'sample': {
'sample_id': ['樣品訂單號碼', 'item', '樣品編號', 'sample_id', 'sample id', '編號'],
'order_no': ['樣品訂單號碼', '單號', 'order_no', 'order no', '樣品單號', '申請單號'],
'sample_id': ['sample_id', 'sample id', '樣品ID'],
'order_no': ['樣品訂單號碼', '單號', 'order_no', 'order no', '樣品單號', '申請單號', '樣品訂單號'],
'oppy_no': ['oppy no', 'oppy_no', '案號', '案件編號', 'opportunity no'],
'cust_id': ['cust id', 'cust_id', '客戶編號', '客戶代碼', '客戶代號'],
'customer': ['客戶名稱', '客戶簡稱', '客戶', 'customer', 'customer name'],
'pn': ['item', 'type', '料號', 'part number', 'pn', 'part no', '產品料號', '索樣數量'],
'qty': ['索樣數量pcs', '索樣數量 k', '數量', 'qty', 'quantity', '申請數量'],
'pn': ['item', '料號', 'part number', 'pn', 'part no', '產品料號', '索樣數量', 'type'],
'qty': ['索樣數量pcs', '索樣數量 k', '數量', 'qty', 'quantity', '申請數量', '索樣數量'],
'date': ['出貨日', '需求日', '日期', 'date', '申請日期']
},
'order': {

View File

@@ -66,22 +66,40 @@ def normalize_customer_name(name: str) -> str:
# 全形轉半形
normalized = normalized.replace(' ', ' ')
# 移除特殊結尾字符 that might remain (like "Co.,") if suffix list didn't catch it
# Remove trailing "Co." or "Co.,"
normalized = re.sub(r'[,.\s]+Co[.,]*$', '', normalized, flags=re.IGNORECASE)
# NEW: 移除連字號及其後面的內容 (僅針對包含中文字符的名稱,假設是分公司或地點)
# 例如: "廣達-桃園" -> "廣達"
has_chinese = bool(re.search(r'[\u4e00-\u9fff]', normalized))
if has_chinese and '-' in normalized:
parts = normalized.split('-')
# 如果分割後的第一部分長度大於1 (避免 "A-Team" 變成 "A" 造成誤判)
if len(parts[0].strip()) > 1:
normalized = parts[0].strip()
# 移除多餘空白
normalized = re.sub(r'\s+', ' ', normalized).strip()
# Remove all punctuation for final key? No, fuzzy match might rely on it.
# But for "Key" based matching in Lab, we want strict alphabetic?
# No, keep it similar to before but cleaner.
# Final aggressive strip of trailing punctuation
normalized = normalized.strip("., ")
return normalized.upper()
def normalize_id(val: object) -> str:
    """Normalize an ID value for comparison (strip whitespace/quotes, uppercase).

    Handles values read from Excel/CSV cells, where IDs may arrive as
    floats (e.g. 1234 -> "1234.0") or as text-formatted cells prefixed
    with a single quote (e.g. "'A001").

    Args:
        val: Raw cell value of any type; ``None`` yields ``""``.

    Returns:
        Canonical upper-cased string form of the ID.
    """
    if val is None:
        return ""
    s = str(val).strip()
    # Excel marks text-formatted cells with a leading apostrophe.
    s = s.lstrip("'")
    # float -> str conversion can append a spurious ".0" (e.g. "1234.0").
    if s.endswith(".0"):
        s = s[:-2]
    return s.upper()
def calculate_similarity(name1: str, name2: str) -> Tuple[float, str]:
"""計算兩個名稱的相似度"""
# 正規化
@@ -212,7 +230,10 @@ class FuzzyMatcher:
# Priority 2 & 3 則限制在相同 PN (Ignored symbols)
elif dit_norm_pn == normalize_pn_for_matching(sample.pn):
# Priority 2: 客戶代碼比對 (Silver Key)
if dit.erp_account and sample.cust_id and dit.erp_account == sample.cust_id:
dit_erp = normalize_id(dit.erp_account)
sample_cust = normalize_id(sample.cust_id)
if dit_erp and sample_cust and dit_erp == sample_cust:
match_priority = 2
match_source = f"Matched via ERP Account: {dit.erp_account}"
score = 99.0
@@ -254,7 +275,10 @@ class FuzzyMatcher:
reason = ""
# Priority 2: 客戶代碼比對 (Silver Key)
if dit.erp_account and order.cust_id and dit.erp_account == order.cust_id:
dit_erp = normalize_id(dit.erp_account)
order_cust = normalize_id(order.cust_id)
if dit_erp and order_cust and dit_erp == order_cust:
match_priority = 2
match_source = f"Matched via ERP Account: {dit.erp_account}"
score = 99.0
@@ -267,6 +291,7 @@ class FuzzyMatcher:
match_priority = 3
match_source = f"Matched via Name Similarity ({reason})"
if match_priority > 0:
status = MatchStatus.auto_matched if score >= MATCH_THRESHOLD_AUTO else MatchStatus.pending
match = MatchResult(

View File

@@ -13,7 +13,7 @@ from sqlalchemy.orm import Session
from app.models.dit import DitRecord
from app.models.sample import SampleRecord
from app.models.order import OrderRecord
from app.models.match import MatchResult, MatchStatus
from app.models.match import MatchResult, MatchStatus, TargetType
class ReportGenerator:
def __init__(self, db: Session):
@@ -40,7 +40,7 @@ class ReportGenerator:
# 找到已接受的樣品匹配
sample_match = self.db.query(MatchResult).filter(
MatchResult.dit_id == dit.id,
MatchResult.target_type == 'SAMPLE',
MatchResult.target_type == TargetType.SAMPLE,
MatchResult.status.in_([MatchStatus.accepted, MatchStatus.auto_matched])
).first()
@@ -54,7 +54,7 @@ class ReportGenerator:
# 找到已接受的訂單匹配
order_match = self.db.query(MatchResult).filter(
MatchResult.dit_id == dit.id,
MatchResult.target_type == 'ORDER',
MatchResult.target_type == TargetType.ORDER,
MatchResult.status.in_([MatchStatus.accepted, MatchStatus.auto_matched])
).first()
@@ -64,7 +64,7 @@ class ReportGenerator:
).first()
if order:
row['order_no'] = order.order_no
row['order_status'] = order.status.value if order.status else None
row['order_status'] = order.status if order.status else None
row['order_amount'] = order.amount
result.append(row)

View File

@@ -22,7 +22,10 @@ def verify_password(plain_password: str, hashed_password: str) -> bool:
password_bytes = plain_password.encode('utf-8')
if len(password_bytes) > 72:
plain_password = password_bytes[:72].decode('utf-8', errors='ignore')
return pwd_context.verify(plain_password, hashed_password)
try:
return pwd_context.verify(plain_password, hashed_password)
except Exception:
return False
def get_password_hash(password: str) -> str:
password_bytes = password.encode('utf-8')

53
backend/check_log.txt Normal file
View File

@@ -0,0 +1,53 @@
--- Checking DIT Records ---
Total DIT Records: 7498
Duplicate DITs (same op_id + pn): 0
DITs with empty PN: 1482
Example Empty PN DIT: ID 61584, OP OP0000021791
--- Checking Sample Records ---
Total Sample Records: 14145
Duplicate Sample IDs: 0
--- Checking Match Results ---
Total Match Results: 3844
Duplicate Matches (same dit_id + target_type + target_id): 0
--- Investigating Screenshot Case ---
DIT Records with op_id 'OP0000021498':
ID: 63802, PN: '2N7002K-AU_R1_000A2', Cust: Magna Electronics, LLC.
ID: 63804, PN: 'BAS16-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63805, PN: 'BAT54TS-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63807, PN: 'BAV20WS-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63808, PN: 'BC817-40-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63810, PN: 'BC846BPN-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63811, PN: 'BC856BW-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63813, PN: 'BCP56-16-AU_R2_007A1', Cust: Magna Electronics, LLC.
ID: 63815, PN: 'BZT52-C3S-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63803, PN: 'BZT52-C4V3S-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63809, PN: 'BZX584C24-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63817, PN: 'BZX84C12-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63818, PN: 'BZX84C15-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63819, PN: 'BZX84C16-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63816, PN: 'BZX84C18-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63821, PN: 'BZX84C4V7-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63822, PN: 'MER1DMB-AU_R2_006A1', Cust: Magna Electronics, LLC.
ID: 63806, PN: 'MER2DMB-AU_R2_006A1', Cust: Magna Electronics, LLC.
ID: 63820, PN: 'MMBD4148TS-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63823, PN: 'MMBT3906-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63812, PN: 'MMSZ5245B-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63828, PN: 'PDZ18B-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63814, PN: 'PDZ51B-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63824, PN: 'PDZ56B-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63825, PN: 'PJA138K-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63801, PN: 'PJD60N06SA-AU_L2_006A1', Cust: Magna Electronics, LLC.
ID: 63826, PN: 'PJMBZ27C-AU_R1_005A1', Cust: Magna Electronics, LLC.
ID: 63827, PN: 'PJMBZ33A-AU_R1_007A1', Cust: Magna Electronics, LLC.
ID: 63829, PN: 'PJQ5465A-AU_R2_000A1', Cust: Magna Electronics, LLC.
ID: 63830, PN: 'PJQ5466A1-AU_R2_000A1', Cust: Magna Electronics, LLC.
ID: 63831, PN: 'PJQ5540S6C-AU_R2_002A1', Cust: Magna Electronics, LLC.
ID: 63832, PN: 'PJQ5948S6-AU_R2_002A1', Cust: Magna Electronics, LLC.
ID: 63834, PN: 'PZS5115BAS-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63833, PN: 'PZS516V2BAS-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63835, PN: 'SBA0840AS-AU_R1_000A1', Cust: Magna Electronics, LLC.
ID: 63836, PN: 'SK26-AU_R1_000A1', Cust: Magna Electronics, LLC.
Sample Records with sample_id 'S202509514':

View File

@@ -0,0 +1,21 @@
from app.models import get_db, init_db
from app.models.order import OrderRecord
from app.models.sample import SampleRecord
from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name

# Ad-hoc debug script: dump every Order/Sample record whose PN contains the
# target part number so matching issues can be inspected by eye.
# (Removed unused locals `normalized_pn` and `customer_keyword`; the
# fuzzy_matcher imports are kept for interactive experimentation.)

init_db()
db = next(get_db())

# Part number under investigation (from the reported mismatch case).
pn = "PSMQC098N10LS2-AU_R2_002A1"

orders = db.query(OrderRecord).filter(OrderRecord.pn.like(f"%{pn}%")).all()
print(f"--- Querying Orders for PN: {pn} ---")
for o in orders:
    print(f"ID: {o.id}, OrderNo: {o.order_no}, CustID: {o.cust_id}, Customer: {o.customer}, PN: {o.pn}, Qty: {o.qty}, Date: {o.date}")

print(f"\n--- Checking Parsed Samples ---")
samples = db.query(SampleRecord).filter(SampleRecord.pn.like(f"%{pn}%")).all()
for s in samples:
    print(f"ID: {s.id}, OrderNo: {s.order_no}, CustID: {s.cust_id}, Customer: {s.customer}, PN: {s.pn}, Qty: {s.qty}, Date: {s.date}")

47
backend/debug_lab_v2.py Normal file
View File

@@ -0,0 +1,47 @@
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from app.models import get_db, SampleRecord, OrderRecord
from app.routers.lab import get_conversions, build_order_lookups, parse_date
from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name

# Sanity-check script for the Lab conversion pipeline: walks through date
# parsing, raw record counts, a peek at top rows, the order lookup tables,
# and finally the end-to-end conversion list, printing each stage.

session = next(get_db())

print("--- DEBUG START ---")

# Stage 1: Excel serial date handling.
print(f"Date '45292' parses to: {parse_date('45292')}")

# Stage 2: overall record counts.
total_samples = session.query(SampleRecord).count()
total_orders = session.query(OrderRecord).count()
print(f"Total Samples: {total_samples}, Total Orders: {total_orders}")

# Stage 3: peek at the first few rows of each table.
print("\n--- Top 3 Samples ---")
for s in session.query(SampleRecord).limit(3).all():
    print(f"S: {s.customer} (ID:{s.cust_id}) | PN: {s.pn} | Date: {s.date}")

print("\n--- Top 3 Orders ---")
for o in session.query(OrderRecord).limit(3).all():
    print(f"O: {o.customer} (ID:{o.cust_id}) | PN: {o.pn} | Date: {o.date}")

# Stage 4: lookup dictionaries built from all orders.
lookup_id, lookup_name = build_order_lookups(session.query(OrderRecord).all())
print(f"\nLookup ID Size: {len(lookup_id)}")
print(f"Lookup Name Size: {len(lookup_name)}")
if lookup_name:
    first_key = next(iter(lookup_name))
    print(f"Example Name Key: {first_key} -> {lookup_name[first_key]}")

# Stage 5: run the end-to-end conversion logic.
print("\n--- Run Logic ---")
conversions = get_conversions(session)
print(f"Total Conversions Found: {len(conversions)}")
for c in conversions[:3]:
    print(c)

print("--- DEBUG END ---")

5
backend/debug_log.txt Normal file
View File

@@ -0,0 +1,5 @@
--- Searching Orders for SEMISALES ---
Found 2 orders for SEMISALES
ID: 4820, OrderNo: 1125030196, Date: 2025-09-26, Qty: 36000, PN: PSMQC098N10LS2-AU_R2_002A1 [MATCH PN]
ID: 4821, OrderNo: 1125016840, Date: 2025-06-05, Qty: 3000, PN: PSMQC098N10LS2-AU_R2_002A1 [MATCH PN]
--- Done ---

7
backend/debug_log_v2.txt Normal file
View File

@@ -0,0 +1,7 @@
--- Searching Orders for SEMISALES (or PN match) ---
Found 4 orders:
ID: 4832, OrderID: 1.1, OrderNo: 1125077715, Date: 2025-09-30, Qty: 36000, PN: PSMQC098N10LS2-AU_R2_002A1, Cust: 台湾强茂 [MATCH PN]
ID: 4833, OrderID: 2.1, OrderNo: 1125030196, Date: 2025-09-26, Qty: 36000, PN: PSMQC098N10LS2-AU_R2_002A1, Cust: SEMISALES [MATCH PN]
ID: 4834, OrderID: 2.2, OrderNo: 1125016840, Date: 2025-06-05, Qty: 3000, PN: PSMQC098N10LS2-AU_R2_002A1, Cust: SEMISALES [MATCH PN]
ID: 4835, OrderID: 3.1, OrderNo: 1125016840, Date: 2025-06-05, Qty: 12000, PN: PSMQC098N10LS2-AU_R2_002A1, Cust: SEMISALES [MATCH PN]
--- Done ---

View File

@@ -0,0 +1,42 @@
import sys
import os

# Ad-hoc debug script: find orders matching either the SEMISALES customer
# name (fuzzy) or a specific part number, logging results to both stdout
# and debug_log_v2.txt.

# Log file is opened before the project imports so import failures are
# captured in the log as well.
f = open("debug_log_v2.txt", "w", encoding="utf-8")

def log(msg):
    """Echo *msg* to stdout and append it to the debug log file."""
    print(msg)
    f.write(str(msg) + "\n")

sys.path.append(os.getcwd())

try:
    from app.models import SessionLocal, OrderRecord
    from app.services.fuzzy_matcher import normalize_pn_for_matching, normalize_customer_name
except ImportError as e:
    log(f"Import Error: {e}")
    f.close()  # close explicitly; sys.exit would otherwise leave it to interpreter teardown
    sys.exit(1)

pn1 = "PSMQC098N10LS2-AU_R2_002A1"
target_norm = normalize_pn_for_matching(pn1)

db = SessionLocal()
try:
    log("--- Searching Orders for SEMISALES (or PN match) ---")

    found = []
    for o in db.query(OrderRecord).all():
        # Check customer name (fuzzy) or PN
        norm_cust = normalize_customer_name(o.customer or "")
        norm_pn = normalize_pn_for_matching(o.pn or "")
        if "SEMISALES" in norm_cust or norm_pn == target_norm:
            found.append(o)

    log(f"Found {len(found)} orders:")
    for o in found:
        norm_o_pn = normalize_pn_for_matching(o.pn)
        match_mark = "[MATCH PN]" if norm_o_pn == target_norm else "[NO MATCH]"
        log(f"ID: {o.id}, OrderID: {o.order_id}, OrderNo: {o.order_no}, Date: {o.date}, Qty: {o.qty}, PN: {o.pn}, Cust: {o.customer} {match_mark}")

    log("--- Done ---")
finally:
    # Always release the file handle and DB session, even if a query raises.
    f.close()
    db.close()

BIN
backend/debug_out.txt Normal file

Binary file not shown.

24
backend/verify_lab_v3.py Normal file
View File

@@ -0,0 +1,24 @@
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from app.models import get_db
from app.routers.lab import get_conversions, get_lab_kpi

# Manual verification script: exercise the Lab conversion and KPI logic
# directly against the database and print their raw results.

session = next(get_db())

print("--- VERIFY LAB LOGIC v3 ---")

# Conversion list produced by the Lab matching logic.
conversions = get_conversions(session)
print(f"Total Conversions: {len(conversions)}")
if conversions:
    print("Example Conversion:")
    print(conversions[0])

# Aggregated KPI figures.
kpi = get_lab_kpi(db=session)
print("\nKPI:")
print(kpi)

print("--- END ---")

View File

@@ -5,6 +5,7 @@ import {
} from 'recharts';
import { Filter, Activity, Download, Info, CheckCircle, HelpCircle, XCircle } from 'lucide-react';
import { Card } from './common/Card';
import { Tooltip } from './common/Tooltip';
import { dashboardApi, reportApi } from '../services/api';
import type { DashboardKPI, FunnelData, AttributionRow } from '../types';
@@ -127,22 +128,42 @@ export const DashboardView: React.FC = () => {
<div className="text-[10px] text-slate-400 mt-1">Total Pipeline</div>
</Card>
<Card className="p-4 border-l-4 border-l-purple-500">
<div className="text-xs text-slate-500 mb-1"></div>
<div className="text-xs text-slate-500 mb-1 flex items-center gap-1">
<Tooltip content={`送樣轉換率 = (成功送樣的專案數 / DIT 總專案數) * 100%\n反映有多少比例的 DIT 案件成功進入送樣階段。`}>
<HelpCircle size={12} className="cursor-help text-slate-400 hover:text-purple-600" />
</Tooltip>
</div>
<div className="text-2xl font-bold text-slate-800">{kpi.sample_rate}%</div>
<div className="text-[10px] text-purple-600 mt-1">Sample Rate</div>
</Card>
<Card className="p-4 border-l-4 border-l-emerald-500">
<div className="text-xs text-slate-500 mb-1"></div>
<div className="text-xs text-slate-500 mb-1 flex items-center gap-1">
<Tooltip content={`訂單命中率 = (取得訂單的專案數 / DIT 總專案數) * 100%\n反映多少比例的 DIT 案件最終成功取得訂單。`}>
<HelpCircle size={12} className="cursor-help text-slate-400 hover:text-emerald-600" />
</Tooltip>
</div>
<div className="text-2xl font-bold text-slate-800">{kpi.hit_rate}%</div>
<div className="text-[10px] text-emerald-600 mt-1">Hit Rate (Binary)</div>
</Card>
<Card className="p-4 border-l-4 border-l-amber-500">
<div className="text-xs text-slate-500 mb-1">EAU </div>
<div className="text-xs text-slate-500 mb-1 flex items-center gap-1">
EAU
<Tooltip content={`EAU 達成率 = (歸因訂單總量 / DIT 預估總用量 EAU) * 100%\n反映實際取得的訂單量佔全體潛在商機 (EAU) 的成交比例。`}>
<HelpCircle size={12} className="cursor-help text-slate-400 hover:text-amber-600" />
</Tooltip>
</div>
<div className="text-2xl font-bold text-slate-800">{kpi.fulfillment_rate}%</div>
<div className="text-[10px] text-amber-600 mt-1">Fulfillment (LIFO)</div>
</Card>
<Card className="p-4 border-l-4 border-l-rose-500">
<div className="text-xs text-slate-500 mb-1"></div>
<div className="text-xs text-slate-500 mb-1 flex items-center gap-1">
<Tooltip content={`無訂單樣品率 = (有送樣但無訂單的專案數 / 成功送樣專案數) * 100%\n反映已送樣但尚未轉單的 DIT 專案比例。`}>
<HelpCircle size={12} className="cursor-help text-slate-400 hover:text-rose-600" />
</Tooltip>
</div>
<div className="text-2xl font-bold text-rose-600">{kpi.no_order_sample_rate}%</div>
<div className="text-[10px] text-rose-400 mt-1">No-Order Sample</div>
</Card>

View File

@@ -9,23 +9,25 @@ import {
} from 'lucide-react';
import { Card } from './common/Card';
import { labApi } from '../services/api';
import type { LabKPI, ScatterPoint, OrphanSample } from '../types';
import type { LabKPI, ScatterPoint, OrphanSample, NoDitSample } from '../types';
export const LabView: React.FC = () => {
const [kpi, setKpi] = useState<LabKPI>({
converted_count: 0,
avg_velocity: 0,
conversion_rate: 0,
orphan_count: 0
orphan_count: 0,
no_dit_count: 0
});
const [scatterData, setScatterData] = useState<ScatterPoint[]>([]);
const [orphans, setOrphans] = useState<OrphanSample[]>([]);
const [noDitSamples, setNoDitSamples] = useState<NoDitSample[]>([]);
const [conversions, setConversions] = useState<any[]>([]);
const [loading, setLoading] = useState(true);
const [dateRange, setDateRange] = useState<'all' | '12m' | '6m' | '3m'>('all');
const [useLogScale, setUseLogScale] = useState(false);
const [copiedId, setCopiedId] = useState<number | null>(null);
const [viewMode, setViewMode] = useState<'orphans' | 'conversions'>('orphans');
const [viewMode, setViewMode] = useState<'orphans' | 'conversions' | 'no_dit'>('orphans');
useEffect(() => {
loadLabData();
@@ -46,16 +48,18 @@ export const LabView: React.FC = () => {
const params = start_date ? { start_date } : {};
const [kpiData, scatterRes, orphanRes, conversionRes] = await Promise.all([
const [kpiData, scatterRes, orphanRes, noDitRes, conversionRes] = await Promise.all([
labApi.getKPI(params),
labApi.getScatter(params),
labApi.getOrphans(),
labApi.getNoDitSamples(),
labApi.getConversions()
]);
setKpi(kpiData);
setScatterData(scatterRes);
setOrphans(orphanRes);
setNoDitSamples(noDitRes);
setConversions(conversionRes);
} catch (error) {
console.error('Error loading lab data:', error);
@@ -77,7 +81,7 @@ export const LabView: React.FC = () => {
const groupInfo = React.useMemo(() => {
const counts: Record<string, number> = {};
orphans.forEach(o => {
const key = `${o.customer}|${o.pn}`;
const key = `${o.customer?.trim()?.toUpperCase()}|${o.pn?.trim()?.toUpperCase()}`;
counts[key] = (counts[key] || 0) + 1;
});
return counts;
@@ -100,7 +104,7 @@ export const LabView: React.FC = () => {
(Sample Conversion Lab)
</h1>
<p className="text-slate-500 mt-1">
(ROI) | ERP Code + PN
(ROI) | ERP Code + ()
</p>
</div>
@@ -121,73 +125,92 @@ export const LabView: React.FC = () => {
</div>
{/* KPI Cards */}
<div className="grid grid-cols-1 md:grid-cols-4 gap-6">
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-5 gap-4">
<Card
onClick={() => setViewMode('conversions')}
className={`p-6 border-b-4 border-b-blue-500 bg-gradient-to-br from-white to-blue-50/30 cursor-pointer transition-all hover:shadow-md ${viewMode === 'conversions' ? 'ring-2 ring-blue-500 ring-offset-2' : ''}`}
className={`p-4 border-b-4 border-b-blue-500 bg-gradient-to-br from-white to-blue-50/30 cursor-pointer transition-all hover:shadow-md ${viewMode === 'conversions' ? 'ring-2 ring-blue-500 ring-offset-2' : ''}`}
>
<div className="flex justify-between items-start">
<div>
<div className="text-sm text-slate-500 font-medium mb-1"></div>
<div className="text-3xl font-bold text-slate-800">{kpi.converted_count} </div>
<div className="text-xs text-blue-600 mt-2 flex items-center gap-1 font-bold">
<Check size={12} />
Converted Samples
<div className="text-xs text-slate-500 font-medium mb-1"></div>
<div className="text-2xl font-bold text-slate-800">{kpi.converted_count} </div>
<div className="text-[10px] text-blue-600 mt-1 flex items-center gap-1 font-bold">
<Check size={10} />
Converted
</div>
</div>
<div className="p-3 bg-blue-100 text-blue-600 rounded-xl">
<TrendingUp size={24} />
<div className="p-2 bg-blue-100 text-blue-600 rounded-lg">
<TrendingUp size={20} />
</div>
</div>
</Card>
<Card className="p-6 border-b-4 border-b-indigo-500 bg-gradient-to-br from-white to-indigo-50/30">
<Card className="p-4 border-b-4 border-b-indigo-500 bg-gradient-to-br from-white to-indigo-50/30">
<div className="flex justify-between items-start">
<div>
<div className="text-sm text-slate-500 font-medium mb-1"></div>
<div className="text-3xl font-bold text-slate-800">{kpi.avg_velocity} </div>
<div className="text-xs text-indigo-600 mt-2 flex items-center gap-1 font-bold">
<Clock size={12} />
Conversion Velocity
<div className="text-xs text-slate-500 font-medium mb-1"></div>
<div className="text-2xl font-bold text-slate-800">{kpi.avg_velocity} </div>
<div className="text-[10px] text-indigo-600 mt-1 flex items-center gap-1 font-bold">
<Clock size={10} />
Avg Velocity
</div>
</div>
<div className="p-3 bg-indigo-100 text-indigo-600 rounded-xl">
<Clock size={24} />
<div className="p-2 bg-indigo-100 text-indigo-600 rounded-lg">
<Clock size={20} />
</div>
</div>
</Card>
<Card className="p-6 border-b-4 border-b-emerald-500 bg-gradient-to-br from-white to-emerald-50/30">
<Card className="p-4 border-b-4 border-b-emerald-500 bg-gradient-to-br from-white to-emerald-50/30">
<div className="flex justify-between items-start">
<div>
<div className="text-sm text-slate-500 font-medium mb-1"> (ROI)</div>
<div className="text-3xl font-bold text-slate-800">{kpi.conversion_rate}%</div>
<div className="text-xs text-emerald-600 mt-2 flex items-center gap-1 font-bold">
<Target size={12} />
Sample to Order Ratio
<div className="text-xs text-slate-500 font-medium mb-1"></div>
<div className="text-2xl font-bold text-slate-800">{kpi.conversion_rate}%</div>
<div className="text-[10px] text-emerald-600 mt-1 flex items-center gap-1 font-bold">
<Target size={10} />
ROI Rate
</div>
</div>
<div className="p-3 bg-emerald-100 text-emerald-600 rounded-xl">
<FlaskConical size={24} />
<div className="p-2 bg-emerald-100 text-emerald-600 rounded-lg">
<FlaskConical size={20} />
</div>
</div>
</Card>
<Card
onClick={() => setViewMode('orphans')}
className={`p-6 border-b-4 border-b-rose-500 bg-gradient-to-br from-white to-rose-50/30 cursor-pointer transition-all hover:shadow-md ${viewMode === 'orphans' ? 'ring-2 ring-rose-500 ring-offset-2' : ''}`}
className={`p-4 border-b-4 border-b-rose-500 bg-gradient-to-br from-white to-rose-50/30 cursor-pointer transition-all hover:shadow-md ${viewMode === 'orphans' ? 'ring-2 ring-rose-500 ring-offset-2' : ''}`}
>
<div className="flex justify-between items-start">
<div>
<div className="text-sm text-slate-500 font-medium mb-1"></div>
<div className="text-3xl font-bold text-rose-600">{kpi.orphan_count} </div>
<div className="text-xs text-rose-400 mt-2 flex items-center gap-1 font-bold">
<AlertTriangle size={12} />
Wait-time &gt; 90 Days
<div className="text-xs text-slate-500 font-medium mb-1"></div>
<div className="text-2xl font-bold text-rose-600">{kpi.orphan_count} </div>
<div className="text-[10px] text-rose-400 mt-1 flex items-center gap-1 font-bold">
<AlertTriangle size={10} />
&gt; 90 Days
</div>
</div>
<div className="p-3 bg-rose-100 text-rose-600 rounded-xl">
<AlertTriangle size={24} />
<div className="p-2 bg-rose-100 text-rose-600 rounded-lg">
<AlertTriangle size={20} />
</div>
</div>
</Card>
<Card
onClick={() => setViewMode('no_dit')}
className={`p-4 border-b-4 border-b-amber-500 bg-gradient-to-br from-white to-amber-50/30 cursor-pointer transition-all hover:shadow-md ${viewMode === 'no_dit' ? 'ring-2 ring-amber-500 ring-offset-2' : ''}`}
>
<div className="flex justify-between items-start">
<div>
<div className="text-xs text-slate-500 font-medium mb-1"></div>
<div className="text-2xl font-bold text-amber-600">{kpi.no_dit_count} </div>
<div className="text-[10px] text-amber-600 mt-1 flex items-center gap-1 font-bold">
<HelpCircle size={10} />
&gt; 1000 pcs (No DIT)
</div>
</div>
<div className="p-2 bg-amber-100 text-amber-600 rounded-lg">
<HelpCircle size={20} />
</div>
</div>
</Card>
@@ -297,7 +320,7 @@ export const LabView: React.FC = () => {
</Card>
{/* Insight Card */}
<Card className="p-6 bg-slate-900 text-white flex flex-col justify-between">
<Card className="p-6 !bg-slate-900 !border-slate-700 text-white flex flex-col justify-between">
<div>
<h3 className="font-bold text-slate-100 mb-4 flex items-center gap-2">
<Info size={18} className="text-indigo-400" />
@@ -305,17 +328,17 @@ export const LabView: React.FC = () => {
</h3>
<div className="space-y-4">
<div className="p-3 bg-slate-800/50 rounded-lg border border-slate-700">
<p className="text-xs text-slate-400 mb-1"></p>
<p className="text-sm font-medium"></p>
<p className="text-xs text-indigo-300 mb-1 font-bold"></p>
<p className="text-sm font-medium text-slate-100"></p>
</div>
<div className="p-3 bg-slate-800/50 rounded-lg border border-slate-700">
<p className="text-xs text-slate-400 mb-1"></p>
<p className="text-sm font-medium"></p>
<p className="text-xs text-rose-300 mb-1 font-bold"></p>
<p className="text-sm font-medium text-slate-100"></p>
</div>
</div>
</div>
<div className="mt-8 p-4 bg-indigo-600/20 rounded-xl border border-indigo-500/30">
<p className="text-[11px] text-indigo-300 leading-relaxed italic">
<p className="text-[11px] text-indigo-100 leading-relaxed italic">
"本模組直接比對 ERP 編號,確保不因專案名稱模糊而漏失任何實際營收數據。"
</p>
</div>
@@ -324,13 +347,18 @@ export const LabView: React.FC = () => {
{/* Dynamic Table Section */}
<Card className="overflow-hidden">
<div className={`px-6 py-4 border-b flex justify-between items-center ${viewMode === 'conversions' ? 'bg-blue-50 border-blue-200' : 'bg-rose-50 border-rose-200'}`}>
<h3 className={`font-bold flex items-center gap-2 ${viewMode === 'conversions' ? 'text-blue-700' : 'text-rose-700'}`}>
<div className={`px-6 py-4 border-b flex justify-between items-center ${viewMode === 'conversions' ? 'bg-blue-50 border-blue-200' : viewMode === 'no_dit' ? 'bg-amber-50 border-amber-200' : 'bg-rose-50 border-rose-200'}`}>
<h3 className={`font-bold flex items-center gap-2 ${viewMode === 'conversions' ? 'text-blue-700' : viewMode === 'no_dit' ? 'text-amber-700' : 'text-rose-700'}`}>
{viewMode === 'conversions' ? (
<>
<Check size={18} />
Successful Conversions List
</>
) : viewMode === 'no_dit' ? (
<>
<HelpCircle size={18} />
Unattributed High-Qty Samples
</>
) : (
<>
<AlertTriangle size={18} />
@@ -345,7 +373,9 @@ export const LabView: React.FC = () => {
</div>
)}
<div className="text-[10px] text-slate-400 font-medium">
{viewMode === 'conversions' ? `${conversions.length} 筆成功轉換` : `${orphans.length} 筆待追蹤案件`}
{viewMode === 'conversions' ? `${conversions.length} 筆成功轉換`
: viewMode === 'no_dit' ? `${noDitSamples.length} 筆未歸因大單`
: `${orphans.length} 筆待追蹤案件`}
</div>
</div>
</div>
@@ -360,11 +390,19 @@ export const LabView: React.FC = () => {
<>
<th className="px-6 py-3"> (Date/Qty)</th>
<th className="px-6 py-3"> (Date/Qty)</th>
<th className="px-6 py-3"> (Total Order Qty)</th>
<th className="px-6 py-3 text-center"></th>
</>
) : viewMode === 'no_dit' ? (
<>
<th className="px-6 py-3"></th>
<th className="px-6 py-3"> (Date/Qty)</th>
<th className="px-6 py-3 text-center"></th>
</>
) : (
<>
<th className="px-6 py-3"></th>
<th className="px-6 py-3"></th>
<th className="px-6 py-3"> (Date/Qty)</th>
<th className="px-6 py-3 text-center"></th>
<th className="px-6 py-3 text-center"></th>
<th className="px-6 py-3 text-right"></th>
@@ -390,6 +428,9 @@ export const LabView: React.FC = () => {
<span className="font-bold text-emerald-600">{row.order_qty.toLocaleString()} pcs</span>
</div>
</td>
<td className="px-6 py-4">
<span className="font-bold text-emerald-700">{row.total_order_qty ? row.total_order_qty.toLocaleString() : '-'} pcs</span>
</td>
<td className="px-6 py-4 text-center">
<span className="inline-flex items-center px-2 py-1 rounded-full text-xs font-bold bg-blue-100 text-blue-700">
{row.days_to_convert}
@@ -397,9 +438,28 @@ export const LabView: React.FC = () => {
</td>
</tr>
))
) : viewMode === 'no_dit' ? (
noDitSamples.map((row, i) => (
<tr key={i} className="hover:bg-amber-50/50">
<td className="px-6 py-4 font-medium text-slate-800">{row.customer}</td>
<td className="px-6 py-4 font-mono text-xs text-slate-600">{row.pn}</td>
<td className="px-6 py-4 font-mono text-xs text-slate-500">{row.order_no || '-'}</td>
<td className="px-6 py-4">
<div className="flex flex-col">
<span className="text-slate-500 text-xs">{row.date?.replace(/(\d{4})(\d{2})(\d{2})/, '$1/$2/$3')}</span>
<span className="font-bold text-amber-600">{row.qty?.toLocaleString()} pcs</span>
</div>
</td>
<td className="px-6 py-4 text-center">
<span className="inline-flex items-center px-2 py-1 rounded-full text-xs font-bold bg-amber-100 text-amber-700">
DIT
</span>
</td>
</tr>
))
) : (
orphans.map((row, i) => {
const groupKey = `${row.customer}|${row.pn}`;
const groupKey = `${row.customer?.trim()?.toUpperCase()}|${row.pn?.trim()?.toUpperCase()}`;
const isRepeated = (groupInfo[groupKey] || 0) > 1;
const isSelected = selectedGroup === groupKey;
@@ -425,7 +485,15 @@ export const LabView: React.FC = () => {
<td className="px-6 py-4 font-mono text-xs text-slate-600">
{row.pn}
</td>
<td className="px-6 py-4 text-slate-500">{row.date?.replace(/(\d{4})(\d{2})(\d{2})/, '$1/$2/$3')}</td>
<td className="px-6 py-4 font-mono text-xs text-slate-500">
{row.order_no || '-'}
</td>
<td className="px-6 py-4">
<div className="flex flex-col">
<span className="text-slate-500 text-xs">{row.date?.replace(/(\d{4})(\d{2})(\d{2})/, '$1/$2/$3')}</span>
<span className="font-bold text-slate-700">{row.sample_qty?.toLocaleString() || 0} pcs</span>
</div>
</td>
<td className="px-6 py-4 text-center">
<span className={`font-bold ${row.days_since_sent > 180 ? 'text-rose-600' : 'text-amber-600'}`}>
{row.days_since_sent}
@@ -461,6 +529,13 @@ export const LabView: React.FC = () => {
</td>
</tr>
)}
{viewMode === 'no_dit' && noDitSamples.length === 0 && (
<tr>
<td colSpan={5} className="px-6 py-10 text-center text-slate-400">
1000pcs
</td>
</tr>
)}
{viewMode === 'conversions' && conversions.length === 0 && (
<tr>
<td colSpan={5} className="px-6 py-10 text-center text-slate-400">

View File

@@ -0,0 +1,70 @@
import React, { useState, useRef, ReactNode, useEffect } from 'react';
import { createPortal } from 'react-dom';

interface TooltipProps {
  /** Tooltip body; rendered inside a dark bubble above the trigger. */
  content: ReactNode;
  /** Trigger element(s); hovering them shows the tooltip. */
  children: ReactNode;
}

/**
 * Hover tooltip rendered through a portal into document.body so it cannot be
 * clipped by `overflow: hidden` ancestors (e.g. Card containers).
 * Position is computed from the trigger's bounding rect using fixed
 * coordinates: horizontally centered, 8px above the trigger.
 */
export const Tooltip: React.FC<TooltipProps> = ({ content, children }) => {
  const [isVisible, setIsVisible] = useState(false);
  const [style, setStyle] = useState<React.CSSProperties>({});
  const triggerRef = useRef<HTMLDivElement>(null);

  // Recompute the fixed-position style from the trigger's current rect.
  const updatePosition = () => {
    if (triggerRef.current) {
      const rect = triggerRef.current.getBoundingClientRect();
      setStyle({
        left: `${rect.left + rect.width / 2}px`,
        top: `${rect.top - 8}px`,
        position: 'fixed',
        zIndex: 99999,
        transform: 'translate(-50%, -100%)',
        minWidth: '200px',
        maxWidth: '280px',
        pointerEvents: 'none'
      });
    }
  };

  const handleMouseEnter = () => {
    updatePosition();
    setIsVisible(true);
  };

  // Keep the tooltip glued to its trigger while visible. The scroll listener
  // uses capture (third argument `true`) because scroll events do not bubble:
  // without capture, scrolls inside nested scrollable containers would never
  // reach window and the tooltip would drift away from its trigger.
  useEffect(() => {
    if (isVisible) {
      window.addEventListener('scroll', updatePosition, true);
      window.addEventListener('resize', updatePosition);
      return () => {
        window.removeEventListener('scroll', updatePosition, true);
        window.removeEventListener('resize', updatePosition);
      };
    }
  }, [isVisible]);

  return (
    <div
      className="relative inline-flex items-center cursor-help"
      ref={triggerRef}
      onMouseEnter={handleMouseEnter}
      onMouseLeave={() => setIsVisible(false)}
    >
      {children}
      {isVisible && createPortal(
        <div
          className="px-3 py-2 bg-slate-800 text-white text-xs rounded-md shadow-xl whitespace-pre-line text-center"
          style={style}
        >
          {content}
          {/* Bottom Arrow pointing at the trigger */}
          <div
            className="absolute left-1/2 top-full transform -translate-x-1/2 border-4 border-transparent border-t-slate-800"
          />
        </div>,
        document.body
      )}
    </div>
  );
};

View File

@@ -3,23 +3,49 @@
@tailwind utilities;
@keyframes fade-in {
from { opacity: 0; }
to { opacity: 1; }
from {
opacity: 0;
}
to {
opacity: 1;
}
}
@keyframes slide-in-from-bottom-4 {
from { transform: translateY(1rem); opacity: 0; }
to { transform: translateY(0); opacity: 1; }
from {
transform: translateY(1rem);
opacity: 0;
}
to {
transform: translateY(0);
opacity: 1;
}
}
@keyframes slide-in-from-right-4 {
from { transform: translateX(1rem); opacity: 0; }
to { transform: translateX(0); opacity: 1; }
from {
transform: translateX(1rem);
opacity: 0;
}
to {
transform: translateX(0);
opacity: 1;
}
}
@keyframes zoom-in-95 {
from { transform: scale(0.95); opacity: 0; }
to { transform: scale(1); opacity: 1; }
from {
transform: scale(0.95);
opacity: 0;
}
to {
transform: scale(1);
opacity: 1;
}
}
.animate-in {
@@ -55,3 +81,8 @@
.duration-300 {
animation-duration: 0.3s;
}
/* Ensure tooltips are not clipped by cards */
.card-tooltip-container {
overflow: visible !important;
}

View File

@@ -13,11 +13,13 @@ import type {
LabKPI,
ScatterPoint,
OrphanSample,
ConversionRecord
ConversionRecord,
NoDitSample
} from '../types';
const api = axios.create({
baseURL: '/api',
timeout: 15000,
headers: {
'Content-Type': 'application/json',
},
@@ -178,6 +180,11 @@ export const labApi = {
const response = await api.get<ConversionRecord[]>('/lab/conversions');
return response.data;
},
getNoDitSamples: async (): Promise<NoDitSample[]> => {
const response = await api.get<NoDitSample[]>('/lab/no_dit_samples');
return response.data;
},
};
export default api;

View File

@@ -122,6 +122,7 @@ export interface LabKPI {
avg_velocity: number;
conversion_rate: number;
orphan_count: number;
no_dit_count: number;
}
export interface ScatterPoint {
@@ -137,6 +138,16 @@ export interface OrphanSample {
days_since_sent: number;
order_no: string;
date: string;
sample_qty: number;
}
export interface NoDitSample {
sample_id: string;
customer: string;
pn: string;
order_no: string;
date: string;
qty: number;
}
export interface ConversionRecord {