4096 lines
158 KiB
Python
4096 lines
158 KiB
Python
import logging
|
|
import os
|
|
import shutil
|
|
import json
|
|
import re
|
|
import hashlib
|
|
import base64
|
|
import html
|
|
from pathlib import Path
|
|
from datetime import datetime, timedelta
|
|
from typing import List, Optional, Dict
|
|
from uuid import uuid4
|
|
|
|
from fastapi import APIRouter, HTTPException, Query, UploadFile, File, Request, Form, Response
|
|
from fastapi.responses import FileResponse, HTMLResponse
|
|
from pydantic import BaseModel, Field
|
|
from app.core.database import execute_query, execute_query_single, table_has_column
|
|
from app.models.schemas import TodoStep, TodoStepCreate, TodoStepUpdate, QuickCreateAnalysis
|
|
from app.core.config import settings
|
|
from app.services.email_service import EmailService
|
|
from app.services.case_analysis_service import CaseAnalysisService
|
|
from app.services.ollama_service import ollama_service
|
|
from app.services.brother_label_print_service import BrotherLabelPrintService, LabelJob
|
|
|
|
try:
|
|
import extract_msg
|
|
except ImportError:
|
|
extract_msg = None
|
|
import email
|
|
from email.header import decode_header
|
|
|
|
logger = logging.getLogger(__name__)
|
|
router = APIRouter()
|
|
|
|
|
|
def _table_exists(table_name: str) -> bool:
    """Return True when *table_name* exists in the public schema."""
    # to_regclass() yields NULL for unknown relations instead of raising.
    result = execute_query_single("SELECT to_regclass(%s) AS table_name", (f"public.{table_name}",))
    if not result:
        return False
    return bool(result.get("table_name"))
|
|
|
|
|
|
def _get_user_id_from_request(request: Request) -> int:
    """Resolve the acting user's id from auth middleware state or query params.

    Checks ``request.state.user_id`` first (set by auth middleware), then the
    ``user_id`` query parameter.

    Raises:
        HTTPException 400: when a supplied value cannot be parsed as an int.
        HTTPException 401: when no user id is supplied at all.
    """
    state_user_id = getattr(request.state, "user_id", None)
    if state_user_id is not None:
        try:
            return int(state_user_id)
        # Bug fix: int() raises TypeError (not ValueError) for non-str/int
        # state values; previously that surfaced as an unhandled 500.
        except (TypeError, ValueError):
            raise HTTPException(status_code=400, detail="Invalid user_id format")

    query_user_id = request.query_params.get("user_id")
    if query_user_id:
        try:
            return int(query_user_id)
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid user_id format")

    raise HTTPException(status_code=401, detail="User not authenticated - provide user_id query parameter")
|
|
|
|
|
|
def _normalize_case_status(status_value: Optional[str]) -> str:
    """Normalize a case status against the configured/known status set.

    The allowed set is built from two sources (first occurrence wins,
    case-insensitively): the JSON `case_statuses` settings entry, then the
    distinct statuses already stored on non-deleted cases (so legacy values
    remain valid). Unknown non-empty inputs are returned as-is rather than
    coerced, to preserve custom values already in the DB.
    """
    allowed_statuses = []
    seen = set()

    def _add_status(value: Optional[str]) -> None:
        # Register a candidate status, skipping blanks and case-insensitive duplicates.
        candidate = str(value or "").strip()
        if not candidate:
            return
        key = candidate.lower()
        if key in seen:
            return
        seen.add(key)
        allowed_statuses.append(candidate)

    # Source 1: configured statuses (JSON list of strings or {"value": ...} dicts).
    # Best-effort: a missing table/bad JSON must not break status handling.
    try:
        setting_row = execute_query_single("SELECT value FROM settings WHERE key = %s", ("case_statuses",))
        if setting_row and setting_row.get("value"):
            parsed = json.loads(setting_row.get("value") or "[]")
            for item in parsed if isinstance(parsed, list) else []:
                if isinstance(item, str):
                    value = item.strip()
                elif isinstance(item, dict):
                    value = str(item.get("value") or "").strip()
                else:
                    value = ""
                _add_status(value)
    except Exception:
        pass

    # Include historical/current DB statuses so legacy values remain valid
    try:
        rows = execute_query("SELECT DISTINCT status FROM sag_sager WHERE deleted_at IS NULL ORDER BY status", ()) or []
        for row in rows:
            _add_status(row.get("status"))
    except Exception:
        pass

    # Hard-coded Danish defaults when neither source produced anything.
    if not allowed_statuses:
        allowed_statuses = ["åben", "under behandling", "afventer", "løst", "lukket"]

    # lowercase -> canonical casing, for case-insensitive matching below.
    allowed_map = {s.lower(): s for s in allowed_statuses}

    # No input: default to "åben" (open) when known, else the first allowed status.
    if not status_value:
        return allowed_map.get("åben", allowed_statuses[0])

    normalized = str(status_value).strip().lower()
    if normalized in allowed_map:
        return allowed_map[normalized]

    # Backward compatibility for legacy mapping
    if normalized == "afventer" and "åben" in allowed_map:
        return allowed_map["åben"]

    # Do not force unknown values back to default; preserve user-entered/custom DB values
    raw_value = str(status_value).strip()
    if raw_value:
        return raw_value

    return allowed_map.get("åben", allowed_statuses[0])
|
|
|
|
|
|
def _normalize_optional_timestamp(value: Optional[str], field_name: str) -> Optional[str]:
|
|
if value is None:
|
|
return None
|
|
|
|
if isinstance(value, datetime):
|
|
return value.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
text = str(value).strip()
|
|
if not text:
|
|
return None
|
|
|
|
try:
|
|
parsed = datetime.fromisoformat(text.replace("Z", "+00:00"))
|
|
if parsed.tzinfo is not None:
|
|
parsed = parsed.replace(tzinfo=None)
|
|
return parsed.strftime("%Y-%m-%d %H:%M:%S")
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid datetime format for {field_name}")
|
|
|
|
|
|
def _coerce_optional_int(value: Optional[object], field_name: str) -> Optional[int]:
|
|
if value is None or value == "":
|
|
return None
|
|
try:
|
|
return int(value)
|
|
except (TypeError, ValueError):
|
|
raise HTTPException(status_code=400, detail=f"Invalid {field_name}")
|
|
|
|
|
|
def _validate_user_id(user_id: Optional[int], field_name: str = "ansvarlig_bruger_id") -> None:
    """Raise 400 when *user_id* is set but matches no row in `users`; None is allowed."""
    if user_id is None:
        return
    if not execute_query("SELECT 1 FROM users WHERE user_id = %s", (user_id,)):
        raise HTTPException(status_code=400, detail=f"Invalid {field_name}")
|
|
|
|
|
|
def _validate_group_id(group_id: Optional[int], field_name: str = "assigned_group_id") -> None:
    """Raise 400 when *group_id* is set but matches no row in `groups`; None is allowed."""
    if group_id is None:
        return
    if not execute_query("SELECT 1 FROM groups WHERE id = %s", (group_id,)):
        raise HTTPException(status_code=400, detail=f"Invalid {field_name}")
|
|
|
|
|
|
def _validate_customer_id(customer_id: Optional[int], field_name: str = "customer_id") -> None:
    """Raise 400 when *customer_id* is set but matches no row in `customers`; None is allowed."""
    if customer_id is None:
        return
    if not execute_query("SELECT 1 FROM customers WHERE id = %s", (customer_id,)):
        raise HTTPException(status_code=400, detail=f"Invalid {field_name}")
|
|
|
|
# ============================================================================
|
|
# QUICKCREATE AI ANALYSIS
|
|
# ============================================================================
|
|
|
|
class QuickCreateRequest(BaseModel):
    """Request payload for AI-assisted QuickCreate case analysis."""

    # Free-text case description to analyze (1-5000 chars).
    text: str = Field(..., min_length=1, max_length=5000)
    # Id of the user requesting the analysis (used for analysis context/logging).
    user_id: int
|
|
|
|
|
|
class RewriteTextRequest(BaseModel):
    """Request payload for AI text rewriting via Ollama."""

    # Text to rewrite (1-10000 chars).
    text: str = Field(..., min_length=1, max_length=10000)
    # Rewrite context hint forwarded to the model; defaults to "case".
    context: Optional[str] = Field(default="case")
|
|
|
|
|
|
class RewriteTextResponse(BaseModel):
    """Response payload for AI text rewriting."""

    # Rewritten text produced by the model.
    rewritten_text: str
    # Model name, when reported by the rewrite service.
    model: Optional[str] = None
    # Serving endpoint, when reported by the rewrite service.
    endpoint: Optional[str] = None
    # Context hint that was applied, when reported.
    context: Optional[str] = None
|
|
|
|
|
|
class SagSendEmailRequest(BaseModel):
    """Request payload for sending an email from a case."""

    # Primary recipient addresses.
    to: List[str]
    # Subject line (998 matches the RFC 5322 line-length limit).
    subject: str = Field(..., min_length=1, max_length=998)
    # Required plain-text body.
    body_text: str = Field(..., min_length=1)
    # Optional carbon-copy recipients.
    cc: List[str] = Field(default_factory=list)
    # Optional blind-carbon-copy recipients.
    bcc: List[str] = Field(default_factory=list)
    # Optional HTML body; plain text is used when absent.
    body_html: Optional[str] = None
    # Ids of stored case files to attach.
    attachment_file_ids: List[int] = Field(default_factory=list)
    # Existing case email to thread this message onto, if any.
    thread_email_id: Optional[int] = None
    # Explicit thread key override (normalized Message-ID style token).
    thread_key: Optional[str] = None
|
|
|
|
|
|
class SignatureCanvasRequest(BaseModel):
    """Request payload carrying a drawn signature image."""

    # Canvas image encoded as a data URL (e.g. "data:image/png;base64,...") — assumed; verify against caller.
    data_url: str = Field(..., min_length=32)
|
|
|
|
|
|
class DirectPrintOverrideRequest(BaseModel):
    """Optional overrides for direct label printing; unset fields fall back to configured defaults."""

    # Printer hostname/IP override.
    printer_host: Optional[str] = None
    # Printer TCP port override.
    printer_port: Optional[int] = None
    # Printer model identifier override.
    printer_model: Optional[str] = None
    # Label media size override.
    label_size: Optional[str] = None
    # Hardware item the label refers to, when printing a hardware label.
    hardware_id: Optional[int] = None
|
|
|
|
|
|
def _normalize_email_list(values: List[str], field_name: str) -> List[str]:
|
|
cleaned: List[str] = []
|
|
for value in values or []:
|
|
candidate = str(value or "").strip()
|
|
if not candidate:
|
|
continue
|
|
if "@" not in candidate or "." not in candidate.split("@")[-1]:
|
|
raise HTTPException(status_code=400, detail=f"Invalid email in {field_name}: {candidate}")
|
|
cleaned.append(candidate)
|
|
return list(dict.fromkeys(cleaned))
|
|
|
|
|
|
def _normalize_message_id_token(value: Optional[str]) -> Optional[str]:
|
|
if not value:
|
|
return None
|
|
normalized = str(value).strip().strip("<>").lower()
|
|
normalized = "".join(normalized.split())
|
|
return normalized or None
|
|
|
|
|
|
def _derive_thread_key_for_outbound(
    payload_thread_key: Optional[str],
    in_reply_to_header: Optional[str],
    references_header: Optional[str],
    generated_message_id: Optional[str],
) -> Optional[str]:
    """Pick the thread key for an outbound email.

    Precedence: the explicit payload key, then the first usable id in the
    References header, then In-Reply-To, finally the freshly generated
    Message-ID. Returns None when nothing usable is found.
    """
    explicit = _normalize_message_id_token(payload_thread_key)
    if explicit:
        return explicit

    if references_header:
        # References may be space- and/or comma-separated; take the first valid token.
        for raw_token in re.split(r"[\s,]+", references_header.strip()):
            token = _normalize_message_id_token(raw_token)
            if token:
                return token

    return (
        _normalize_message_id_token(in_reply_to_header)
        or _normalize_message_id_token(generated_message_id)
    )
|
|
|
|
|
|
def _generate_local_thread_key_for_new_outbound(sag_id: int) -> str:
|
|
"""Generate a stable local thread key for brand-new case emails.
|
|
|
|
This prevents fallback BMCid tags like sXXt001 that cannot be mapped back
|
|
to a concrete thread later.
|
|
"""
|
|
nonce = uuid4().hex[:10]
|
|
return f"sag{sag_id}-{int(datetime.now().timestamp())}-{nonce}@bmchub.local"
|
|
|
|
|
|
def _build_scan_token(sag_id: int, token_type: str) -> str:
|
|
if token_type == "work_order":
|
|
return f"BMCSCAN-WO-S{sag_id}-{uuid4().hex[:10].upper()}"
|
|
|
|
# Keep hardware label tokens shorter so Code39 labels stay physically compact.
|
|
return f"BMCSCAN-HW-{sag_id}-{uuid4().hex[:6].upper()}"
|
|
|
|
|
|
def _create_document_token(
    sag_id: int,
    token_type: str,
    user_id: Optional[int] = None,
    hardware_id: Optional[int] = None,
) -> str:
    """Mint a scan token for a case document and persist it (valid 30 days)."""
    token = _build_scan_token(sag_id, token_type)
    expires_at = datetime.now() + timedelta(days=30)

    insert_sql = """
        INSERT INTO sag_document_tokens (
            sag_id,
            token,
            token_type,
            hardware_id,
            created_by_user_id,
            expires_at
        )
        VALUES (%s, %s, %s, %s, %s, %s)
    """
    execute_query(insert_sql, (sag_id, token, token_type, hardware_id, user_id, expires_at))
    return token
|
|
|
|
|
|
def _get_setting_value(key: str, fallback: Optional[str] = None) -> Optional[str]:
    """Read a settings value by key, returning *fallback* when missing or NULL."""
    row = execute_query_single("SELECT value FROM settings WHERE key = %s", (key,))
    value = (row or {}).get("value")
    if value is None:
        return fallback
    return str(value)
|
|
|
|
|
|
# Code39 symbol patterns: 9 elements per character, alternating bar/space
# starting with a bar; "n" = narrow element, "w" = wide element.
# "*" is the mandatory start/stop symbol and is never encoded as data.
_CODE39_PATTERNS: Dict[str, str] = {
    "0": "nnnwwnwnn", "1": "wnnwnnnnw", "2": "nnwwnnnnw", "3": "wnwwnnnnn",
    "4": "nnnwwnnnw", "5": "wnnwwnnnn", "6": "nnwwwnnnn", "7": "nnnwnnwnw",
    "8": "wnnwnnwnn", "9": "nnwwnnwnn", "A": "wnnnnwnnw", "B": "nnwnnwnnw",
    "C": "wnwnnwnnn", "D": "nnnnwwnnw", "E": "wnnnwwnnn", "F": "nnwnwwnnn",
    "G": "nnnnnwwnw", "H": "wnnnnwwnn", "I": "nnwnnwwnn", "J": "nnnnwwwnn",
    "K": "wnnnnnnww", "L": "nnwnnnnww", "M": "wnwnnnnwn", "N": "nnnnwnnww",
    "O": "wnnnwnnwn", "P": "nnwnwnnwn", "Q": "nnnnnnwww", "R": "wnnnnnwwn",
    "S": "nnwnnnwwn", "T": "nnnnwnwwn", "U": "wwnnnnnnw", "V": "nwwnnnnnw",
    "W": "wwwnnnnnn", "X": "nwnnwnnnw", "Y": "wwnnwnnnn", "Z": "nwwnwnnnn",
    "-": "nwnnnnwnw", ".": "wwnnnnwnn", " ": "nwwnnnwnn", "$": "nwnwnwnnn",
    "/": "nwnwnnnwn", "+": "nwnnnwnwn", "%": "nnnwnwnwn", "*": "nwnnwnwnn",
}
|
|
|
|
|
|
def _render_code39_svg(
    value: str,
    height: int = 48,
    narrow: int = 2,
    wide: int = 5,
    gap: int = 2,
    font_size: int = 11,
    include_text: bool = True,
) -> str:
    """Render *value* as a Code39 barcode in an inline SVG string.

    Characters Code39 cannot encode are dropped; an empty result falls back to
    the literal "EMPTY". Even pattern positions are bars, odd ones are spaces;
    *gap* separates pattern elements and symbols. When *include_text* is set,
    the encoded value is printed below the bars.
    """
    # Keep only encodable characters; "*" is reserved as start/stop.
    safe_value = "".join(ch for ch in (value or "").upper() if ch in _CODE39_PATTERNS and ch != "*")
    if not safe_value:
        safe_value = "EMPTY"

    sequence = f"*{safe_value}*"

    # Single pass: emit bar rects while advancing x. The previous version used
    # two hand-synchronized loops (one to measure total width, one to draw),
    # which had to be kept in lockstep; this produces identical geometry.
    x = 6  # left quiet-zone margin
    bars = []
    for ch in sequence:
        pattern = _CODE39_PATTERNS[ch]
        for idx, code in enumerate(pattern):
            stroke = wide if code == "w" else narrow
            if idx % 2 == 0:  # even indices are bars, odd are spaces
                bars.append(f'<rect x="{x}" y="0" width="{stroke}" height="{height}" fill="#111827" />')
            x += stroke
            if idx < len(pattern) - 1:
                x += gap
        x += gap  # inter-symbol gap
    total_width = x + 6  # right quiet-zone margin, matching the left one

    text_block = ""
    svg_height = height
    if include_text:
        svg_height = height + 20
        text_block = (
            f'<text x="{total_width / 2}" y="{height + 14}" text-anchor="middle" '
            f'style="font-family: monospace; font-size: {font_size}px; fill: #111827;">{html.escape(safe_value)}</text>'
        )

    return (
        f'<svg xmlns="http://www.w3.org/2000/svg" width="{total_width}" height="{svg_height}" '
        f'viewBox="0 0 {total_width} {svg_height}" role="img" aria-label="Barcode">'
        f"{''.join(bars)}"
        f"{text_block}"
        f"</svg>"
    )
|
|
|
|
|
|
def _get_signature_template() -> str:
    """Return the configured email signature template, or the built-in default."""
    default_template = (
        "{full_name}\n"
        "{title}\n"
        "{company_name}\n"
        "Telefon: {company_phone}\n"
        "Email: {email}\n"
        "Web: {company_website}\n"
        "Adresse: {company_address}\n"
        "BMCid: {bmc_id_tag}"
    )
    row = execute_query_single(
        "SELECT value FROM settings WHERE key = %s",
        ("email_default_signature_template",),
    )
    configured = (row or {}).get("value") if row else None
    if configured:
        return configured.strip()
    return default_template.strip()
|
|
|
|
|
|
def _build_case_bmc_id_tag(sag_id: int, thread_key: Optional[str] = None) -> str:
|
|
"""Build a case+thread marker used in signatures for helpdesk routing hints.
|
|
|
|
Example: s53t472193
|
|
- s53 => case id 53
|
|
- t472193 => deterministic per-thread suffix (or 001 when unknown)
|
|
"""
|
|
if not thread_key:
|
|
return f"s{sag_id}t001"
|
|
|
|
normalized = re.sub(r"[^a-z0-9]+", "", str(thread_key).lower())
|
|
if not normalized:
|
|
return f"s{sag_id}t001"
|
|
|
|
digest = hashlib.sha1(normalized.encode("utf-8")).hexdigest()
|
|
thread_suffix = (int(digest[:8], 16) % 900000) + 100000
|
|
return f"s{sag_id}t{thread_suffix}"
|
|
|
|
|
|
def _render_signature_for_case_email(user_id: int, sag_id: int, thread_key: Optional[str] = None) -> str:
    """Render the plain-text signature for an outbound case email.

    Fills the configured template with user and company settings, and always
    guarantees a "BMCid:" routing tag for helpdesk inbound matching.
    """
    user = execute_query_single(
        "SELECT full_name, email FROM users WHERE user_id = %s",
        (user_id,),
    ) or {}

    company_rows = execute_query(
        "SELECT key, value FROM settings WHERE key IN ('company_name', 'company_phone', 'company_address', 'company_email', 'company_website')",
        (),
    ) or []
    company = {row.get("key"): row.get("value") for row in company_rows}

    context = {
        "full_name": (user.get("full_name") or "BMC Hub").strip(),
        "title": "IT-Supporter",
        "email": (user.get("email") or company.get("company_email") or "").strip(),
        "company_name": (company.get("company_name") or "BMC Networks ApS").strip(),
        "company_phone": (company.get("company_phone") or "").strip(),
        "company_website": (company.get("company_website") or "https://bmcnetworks.dk").strip(),
        "company_address": (company.get("company_address") or "").strip(),
        "bmc_id_tag": _build_case_bmc_id_tag(sag_id, thread_key),
    }

    signature = _get_signature_template()
    for placeholder, replacement in context.items():
        signature = signature.replace(f"{{{placeholder}}}", replacement)

    # Guarantee the routing tag even when the template omits it.
    if "BMCid:" not in signature:
        signature = f"{signature}\nBMCid: {context['bmc_id_tag']}"

    return signature.strip()
|
|
|
|
|
|
def _append_signature_to_body(body_text: str, signature: str) -> str:
|
|
clean_body = (body_text or "").rstrip()
|
|
clean_signature = (signature or "").strip()
|
|
if not clean_signature:
|
|
return clean_body
|
|
return f"{clean_body}\n\n--\n{clean_signature}" if clean_body else clean_signature
|
|
|
|
|
|
def _append_signature_to_html(body_html: Optional[str], signature: str) -> Optional[str]:
|
|
clean_signature = (signature or "").strip()
|
|
if not clean_signature:
|
|
return body_html
|
|
|
|
# If original message is plain text, do not synthesize an HTML-only body.
|
|
# That can cause clients/providers to display only the signature.
|
|
if not body_html or not body_html.strip():
|
|
return None
|
|
|
|
signature_html = clean_signature.replace("&", "&").replace("<", "<").replace(">", ">").replace("\n", "<br>")
|
|
signature_block = (
|
|
"<div style=\"margin-top:16px;padding:12px 14px;border:1px solid #d7e3ed;"
|
|
"border-left:4px solid #0f4c75;border-radius:8px;background:#f8fbfe;"
|
|
"color:#243746;font-size:13px;line-height:1.45;\">"
|
|
f"{signature_html}"
|
|
"</div>"
|
|
)
|
|
return f"{body_html}{signature_block}"
|
|
|
|
|
|
@router.post("/sag/analyze-quick-create", response_model=QuickCreateAnalysis)
async def analyze_quick_create(request: QuickCreateRequest):
    """
    Analyze case description text using AI for QuickCreate feature.
    Returns structured suggestions for customer, technician, priority, tags, etc.
    """
    try:
        logger.info(f"🔍 QuickCreate analysis requested by user {request.user_id}, text length: {len(request.text)}")

        # Delegate the actual analysis to the dedicated service.
        analysis = await CaseAnalysisService().analyze_case_text(request.text, request.user_id)

        logger.info(f"✅ QuickCreate analysis complete: confidence={analysis.confidence}, priority={analysis.suggested_priority}")
        return analysis
    except Exception as e:
        logger.error(f"❌ QuickCreate analysis failed: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Analysis failed: {str(e)}")
|
|
|
|
|
|
@router.post("/sag/rewrite-text", response_model=RewriteTextResponse)
async def rewrite_sag_text(request: RewriteTextRequest):
    """Rewrite case/email text using Ollama with configurable prompt."""
    try:
        result = await ollama_service.rewrite_text(request.text, request.context or "case")
        if not result or result.get("error"):
            # Upstream model failure: surface as a bad-gateway with its message.
            raise HTTPException(
                status_code=502,
                detail=(result or {}).get("error") or "Could not rewrite text",
            )

        return RewriteTextResponse(
            rewritten_text=result.get("rewritten_text", ""),
            model=result.get("model"),
            endpoint=result.get("endpoint"),
            context=result.get("context"),
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Rewrite text failed: %s", e)
        raise HTTPException(status_code=500, detail="Failed to rewrite text")
|
|
|
|
|
|
@router.post("/rewrite-text", response_model=RewriteTextResponse)
async def rewrite_text_global(request: RewriteTextRequest):
    """Global rewrite alias used by UI flows that should avoid feature-specific route conflicts."""
    try:
        result = await ollama_service.rewrite_text(request.text, request.context or "case")
        if not result or result.get("error"):
            # Upstream model failure: surface as a bad-gateway with its message.
            raise HTTPException(
                status_code=502,
                detail=(result or {}).get("error") or "Could not rewrite text",
            )

        return RewriteTextResponse(
            rewritten_text=result.get("rewritten_text", ""),
            model=result.get("model"),
            endpoint=result.get("endpoint"),
            context=result.get("context"),
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Global rewrite text failed: %s", e)
        raise HTTPException(status_code=500, detail="Failed to rewrite text")
|
|
|
|
# ============================================================================
|
|
# SAGER - CRUD Operations
|
|
# ============================================================================
|
|
|
|
@router.get("/sag")
async def list_sager(
    status: Optional[str] = Query(None),
    tag: Optional[str] = Query(None),
    customer_id: Optional[int] = Query(None),
    ansvarlig_bruger_id: Optional[int] = Query(None),
    assigned_group_id: Optional[int] = Query(None),
    include_deferred: bool = Query(False),
    q: Optional[str] = Query(None),
    limit: Optional[int] = Query(None, ge=1, le=200),
    offset: int = Query(0, ge=0),
):
    """List all cases with optional filtering.

    Filters combine with AND. `q` matches the title (case-insensitive) or the
    case id as text. NOTE: the `tag` filter is applied in Python after the
    fetch, so combined with `limit` it only filters within the fetched page.
    """
    try:
        query = """
            SELECT s.*,
                   COALESCE(u.full_name, u.username) AS ansvarlig_navn,
                   g.name AS assigned_group_name
            FROM sag_sager s
            LEFT JOIN users u ON u.user_id = s.ansvarlig_bruger_id
            LEFT JOIN groups g ON g.id = s.assigned_group_id
            WHERE s.deleted_at IS NULL
        """
        params = []

        if not include_deferred:
            # Bug fix: qualify with the "s." alias like every other condition.
            # The bare column becomes ambiguous if a joined table ever gains
            # a deferred_until column.
            query += " AND (s.deferred_until IS NULL OR s.deferred_until <= NOW())"

        if status:
            query += " AND s.status = %s"
            params.append(status)
        if customer_id:
            query += " AND s.customer_id = %s"
            params.append(customer_id)
        if ansvarlig_bruger_id:
            query += " AND s.ansvarlig_bruger_id = %s"
            params.append(ansvarlig_bruger_id)
        if assigned_group_id:
            query += " AND s.assigned_group_id = %s"
            params.append(assigned_group_id)

        if q:
            query += " AND (LOWER(s.titel) LIKE %s OR CAST(s.id AS TEXT) LIKE %s)"
            q_like = f"%{q.lower()}%"
            params.extend([q_like, q_like])

        query += " ORDER BY s.created_at DESC"

        if limit is not None:
            query += " LIMIT %s OFFSET %s"
            params.extend([limit, offset])

        cases = execute_query(query, tuple(params))

        # If tag filter, filter in Python after fetch
        if tag:
            case_ids = [case["id"] for case in cases]
            if case_ids:
                tag_query = "SELECT sag_id FROM sag_tags WHERE tag_navn = %s AND deleted_at IS NULL"
                tagged_cases = execute_query(tag_query, (tag,))
                tagged_ids = set(t["sag_id"] for t in tagged_cases)
                cases = [c for c in cases if c["id"] in tagged_ids]

        return cases
    except Exception:
        logger.exception("❌ Error listing cases")
        raise HTTPException(status_code=500, detail="Failed to list cases")
|
|
|
|
@router.get("/sag/sale-items/all")
async def list_all_sale_items(
    type: Optional[str] = Query(None),
    status: Optional[str] = Query(None),
    q: Optional[str] = Query(None),
    customer_id: Optional[int] = Query(None),
    sag_id: Optional[int] = Query(None),
    date_from: Optional[str] = Query(None),
    date_to: Optional[str] = Query(None),
):
    """List all sale items across cases (orders overview)."""
    try:
        # Tolerate environments where the table has not been migrated yet.
        if not _table_exists("sag_salgsvarer"):
            logger.warning("⚠️ sag_salgsvarer table missing - returning empty sale items list")
            return []

        sql = """
            SELECT si.*, s.titel AS sag_titel, s.customer_id, c.name AS customer_name
            FROM sag_salgsvarer si
            JOIN sag_sager s ON s.id = si.sag_id
            LEFT JOIN customers c ON c.id = s.customer_id
            WHERE s.deleted_at IS NULL
        """
        args = []

        if type:
            sql += " AND LOWER(si.type) = %s"
            args.append(type.lower())
        if status:
            sql += " AND LOWER(si.status) = %s"
            args.append(status.lower())
        if q:
            sql += " AND (LOWER(si.description) LIKE %s OR LOWER(si.external_ref) LIKE %s OR LOWER(s.titel) LIKE %s OR LOWER(c.name) LIKE %s)"
            pattern = f"%{q.lower()}%"
            args.extend([pattern] * 4)
        if customer_id:
            sql += " AND s.customer_id = %s"
            args.append(customer_id)
        if sag_id:
            sql += " AND s.id = %s"
            args.append(sag_id)
        if date_from:
            sql += " AND si.line_date >= %s"
            args.append(date_from)
        if date_to:
            sql += " AND si.line_date <= %s"
            args.append(date_to)

        sql += " ORDER BY si.line_date DESC NULLS LAST, si.id DESC"

        return execute_query(sql, tuple(args)) or []
    except Exception as e:
        logger.error("❌ Error listing sale items: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list sale items")
|
|
|
|
@router.post("/sag")
async def create_sag(data: dict):
    """Create a new case.

    Requires `titel` and `customer_id`. Normalizes status and timestamps and
    validates the optional responsible user / assigned group before insert.
    """
    try:
        if not data.get("titel"):
            raise HTTPException(status_code=400, detail="titel is required")
        if not data.get("customer_id"):
            raise HTTPException(status_code=400, detail="customer_id is required")

        status = _normalize_case_status(data.get("status"))
        deadline = _normalize_optional_timestamp(data.get("deadline"), "deadline")
        deferred_until = _normalize_optional_timestamp(data.get("deferred_until"), "deferred_until")
        ansvarlig_bruger_id = _coerce_optional_int(data.get("ansvarlig_bruger_id"), "ansvarlig_bruger_id")
        assigned_group_id = _coerce_optional_int(data.get("assigned_group_id"), "assigned_group_id")

        _validate_user_id(ansvarlig_bruger_id)
        _validate_group_id(assigned_group_id)

        query = """
            INSERT INTO sag_sager
            (titel, beskrivelse, template_key, status, customer_id, ansvarlig_bruger_id, assigned_group_id, created_by_user_id, deadline, deferred_until, deferred_until_case_id, deferred_until_status)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            RETURNING *
        """
        params = (
            data.get("titel"),
            data.get("beskrivelse", ""),
            # Backwards compatibility: older frontends send "type" instead of "template_key".
            data.get("template_key") or data.get("type", "ticket"),
            status,
            data.get("customer_id"),
            ansvarlig_bruger_id,
            assigned_group_id,
            data.get("created_by_user_id", 1),
            deadline,
            deferred_until,
            data.get("deferred_until_case_id"),
            data.get("deferred_until_status"),
        )

        result = execute_query(query, params)
        if result:
            logger.info("✅ Case created: %s", result[0]["id"])
            return result[0]
        raise HTTPException(status_code=500, detail="Failed to create case")
    except HTTPException:
        # Bug fix: the generic handler below previously caught validation
        # HTTPExceptions (e.g. missing titel -> 400) and re-raised them as 500.
        raise
    except Exception as e:
        logger.error("❌ Error creating case: %s", e)
        raise HTTPException(status_code=500, detail="Failed to create case")
|
|
|
|
@router.get("/sag/{sag_id:int}")
async def get_sag(sag_id: int):
    """Get a specific case."""
    try:
        sql = """
            SELECT s.*,
                   COALESCE(u.full_name, u.username) AS ansvarlig_navn,
                   g.name AS assigned_group_name
            FROM sag_sager s
            LEFT JOIN users u ON u.user_id = s.ansvarlig_bruger_id
            LEFT JOIN groups g ON g.id = s.assigned_group_id
            WHERE s.id = %s AND s.deleted_at IS NULL
        """
        rows = execute_query(sql, (sag_id,))
        if rows:
            return rows[0]
        raise HTTPException(status_code=404, detail="Case not found")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error getting case: %s", e)
        raise HTTPException(status_code=500, detail="Failed to get case")
|
|
|
|
|
|
@router.get("/sag/{sag_id}/modules")
async def get_case_module_prefs(sag_id: int):
    """Get module visibility preferences for a case."""
    try:
        rows = execute_query(
            "SELECT module_key, is_enabled FROM sag_module_prefs WHERE sag_id = %s",
            (sag_id,),
        )
        return rows if rows else []
    except Exception as e:
        logger.error("❌ Error getting module prefs: %s", e)
        raise HTTPException(status_code=500, detail="Failed to get module prefs")
|
|
|
|
|
|
@router.post("/sag/{sag_id}/modules")
async def set_case_module_pref(sag_id: int, data: dict):
    """Set module visibility preference for a case."""
    try:
        module_key = data.get("module_key")
        is_enabled = data.get("is_enabled")
        if not module_key or is_enabled is None:
            raise HTTPException(status_code=400, detail="module_key and is_enabled are required")

        # Upsert so repeated toggles simply overwrite the stored flag.
        upsert_sql = """
            INSERT INTO sag_module_prefs (sag_id, module_key, is_enabled)
            VALUES (%s, %s, %s)
            ON CONFLICT (sag_id, module_key)
            DO UPDATE SET is_enabled = EXCLUDED.is_enabled
            RETURNING module_key, is_enabled
        """
        rows = execute_query(upsert_sql, (sag_id, module_key, bool(is_enabled)))
        if rows:
            return rows[0]
        return {"module_key": module_key, "is_enabled": bool(is_enabled)}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error setting module pref: %s", e)
        raise HTTPException(status_code=500, detail="Failed to set module pref")
|
|
|
|
|
|
# =========================================================================
|
|
# SAG TODO STEPS
|
|
# =========================================================================
|
|
|
|
@router.get("/sag/{sag_id}/todo-steps", response_model=List[TodoStep])
async def list_todo_steps(sag_id: int):
    """List the non-deleted todo steps of a case, open and "next" steps first."""
    try:
        sql = """
            SELECT
                t.*,
                COALESCE(u_created.full_name, u_created.username) AS created_by_name,
                COALESCE(u_completed.full_name, u_completed.username) AS completed_by_name
            FROM sag_todo_steps t
            LEFT JOIN users u_created ON u_created.user_id = t.created_by_user_id
            LEFT JOIN users u_completed ON u_completed.user_id = t.completed_by_user_id
            WHERE t.sag_id = %s AND t.deleted_at IS NULL
            ORDER BY t.is_done ASC, t.is_next DESC, t.due_date NULLS LAST, t.created_at DESC
        """
        rows = execute_query(sql, (sag_id,))
        return rows or []
    except Exception as e:
        logger.error("❌ Error listing todo steps: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list todo steps")
|
|
|
|
|
|
@router.post("/sag/{sag_id}/todo-steps", response_model=TodoStep)
async def create_todo_step(sag_id: int, request: Request, data: TodoStepCreate):
    """Create a todo step on a case and return it with resolved user names."""
    try:
        user_id = _get_user_id_from_request(request)
        if not data.title:
            raise HTTPException(status_code=400, detail="title is required")

        insert_sql = """
            INSERT INTO sag_todo_steps
            (sag_id, title, description, due_date, created_by_user_id)
            VALUES (%s, %s, %s, %s, %s)
            RETURNING id
        """
        inserted = execute_query(
            insert_sql,
            (sag_id, data.title, data.description, data.due_date, user_id),
        )
        if not inserted:
            raise HTTPException(status_code=500, detail="Failed to create todo step")

        # Re-select to include the joined creator/completer display names.
        fetch_sql = """
            SELECT
                t.*,
                COALESCE(u_created.full_name, u_created.username) AS created_by_name,
                COALESCE(u_completed.full_name, u_completed.username) AS completed_by_name
            FROM sag_todo_steps t
            LEFT JOIN users u_created ON u_created.user_id = t.created_by_user_id
            LEFT JOIN users u_completed ON u_completed.user_id = t.completed_by_user_id
            WHERE t.id = %s
        """
        return execute_query(fetch_sql, (inserted[0]["id"],))[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error creating todo step: %s", e)
        raise HTTPException(status_code=500, detail="Failed to create todo step")
|
|
|
|
|
|
@router.patch("/sag/todo-steps/{step_id}", response_model=TodoStep)
async def update_todo_step(step_id: int, request: Request, data: TodoStepUpdate):
    """Update a todo step's is_done and/or is_next flags.

    Marking a step done also clears its is_next flag; marking it "next" first
    clears is_next on every other step of the same case. Returns the step with
    resolved creator/completer names.
    """
    try:
        if data.is_done is None and data.is_next is None:
            raise HTTPException(status_code=400, detail="Provide is_done or is_next")

        step_row = execute_query_single(
            "SELECT id, sag_id, is_done FROM sag_todo_steps WHERE id = %s AND deleted_at IS NULL",
            (step_id,)
        )
        if not step_row:
            raise HTTPException(status_code=404, detail="Todo step not found")

        if data.is_done is not None:
            # Only resolve the acting user when completion state changes;
            # completing records who did it, un-completing clears it.
            user_id = _get_user_id_from_request(request)
            if data.is_done:
                update_query = """
                    UPDATE sag_todo_steps
                    SET is_done = TRUE,
                        is_next = FALSE,
                        completed_by_user_id = %s,
                        completed_at = CURRENT_TIMESTAMP
                    WHERE id = %s AND deleted_at IS NULL
                    RETURNING id
                """
                execute_query(update_query, (user_id, step_id))
            else:
                update_query = """
                    UPDATE sag_todo_steps
                    SET is_done = FALSE,
                        completed_by_user_id = NULL,
                        completed_at = NULL
                    WHERE id = %s AND deleted_at IS NULL
                    RETURNING id
                """
                execute_query(update_query, (step_id,))

        if data.is_next is not None:
            # NOTE(review): this checks the is_done value read BEFORE the
            # update above; a request with is_done=True and is_next=True in
            # one call passes this guard — presumably acceptable; confirm.
            if step_row.get("is_done") and data.is_next:
                raise HTTPException(status_code=400, detail="Completed todo cannot be marked as next")

            if data.is_next:
                # Enforce at most one "next" step per case.
                execute_query(
                    """
                    UPDATE sag_todo_steps
                    SET is_next = FALSE
                    WHERE sag_id = %s AND deleted_at IS NULL
                    """,
                    (step_row["sag_id"],)
                )

            execute_query(
                """
                UPDATE sag_todo_steps
                SET is_next = %s
                WHERE id = %s AND deleted_at IS NULL
                """,
                (bool(data.is_next), step_id)
            )

        # Re-select to include the joined creator/completer display names.
        return execute_query(
            """
            SELECT
                t.*,
                COALESCE(u_created.full_name, u_created.username) AS created_by_name,
                COALESCE(u_completed.full_name, u_completed.username) AS completed_by_name
            FROM sag_todo_steps t
            LEFT JOIN users u_created ON u_created.user_id = t.created_by_user_id
            LEFT JOIN users u_completed ON u_completed.user_id = t.completed_by_user_id
            WHERE t.id = %s
            """,
            (step_id,)
        )[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error updating todo step: %s", e)
        raise HTTPException(status_code=500, detail="Failed to update todo step")
|
|
|
|
|
|
@router.delete("/sag/todo-steps/{step_id}")
async def delete_todo_step(step_id: int):
    """Soft-delete a todo step by stamping its deleted_at column."""
    try:
        # Single UPDATE doubles as the existence check: no row returned
        # means the step is missing or already deleted.
        rows = execute_query(
            """
            UPDATE sag_todo_steps
            SET deleted_at = CURRENT_TIMESTAMP
            WHERE id = %s AND deleted_at IS NULL
            RETURNING id
            """,
            (step_id,)
        )
        if rows:
            return {"status": "deleted"}
        raise HTTPException(status_code=404, detail="Todo step not found")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error deleting todo step: %s", e)
        raise HTTPException(status_code=500, detail="Failed to delete todo step")
|
|
|
|
@router.patch("/sag/{sag_id:int}")
async def update_sag(sag_id: int, updates: dict):
    """Update a case (partial update of whitelisted columns only)."""
    try:
        # 404 unless the case exists and is not soft-deleted
        if not execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,)):
            raise HTTPException(status_code=404, detail="Case not found")

        # Backwards compatibility: frontend sends "type", DB stores "template_key"
        if "type" in updates and "template_key" not in updates:
            updates["template_key"] = updates.get("type")

        if "status" in updates:
            updates["status"] = _normalize_case_status(updates.get("status"))
        # Timestamp-like columns share one normalization helper
        for ts_field in ("deadline", "start_date", "deferred_until"):
            if ts_field in updates:
                updates[ts_field] = _normalize_optional_timestamp(updates.get(ts_field), ts_field)
        if "priority" in updates:
            updates["priority"] = (str(updates.get("priority") or "").strip().lower() or "normal")
        # Foreign keys: coerce to int (or None), then verify the target row exists
        if "ansvarlig_bruger_id" in updates:
            updates["ansvarlig_bruger_id"] = _coerce_optional_int(updates.get("ansvarlig_bruger_id"), "ansvarlig_bruger_id")
            _validate_user_id(updates["ansvarlig_bruger_id"])
        if "assigned_group_id" in updates:
            updates["assigned_group_id"] = _coerce_optional_int(updates.get("assigned_group_id"), "assigned_group_id")
            _validate_group_id(updates["assigned_group_id"])
        if "customer_id" in updates:
            updates["customer_id"] = _coerce_optional_int(updates.get("customer_id"), "customer_id")
            _validate_customer_id(updates["customer_id"])

        # Column names come only from this whitelist, never from client keys,
        # so the f-string below cannot be used for SQL injection.
        allowed_fields = (
            "titel",
            "beskrivelse",
            "template_key",
            "status",
            "customer_id",
            "ansvarlig_bruger_id",
            "assigned_group_id",
            "priority",
            "start_date",
            "deadline",
            "deferred_until",
            "deferred_until_case_id",
            "deferred_until_status",
        )
        present = [field for field in allowed_fields if field in updates]
        if not present:
            raise HTTPException(status_code=400, detail="No valid fields to update")

        set_sql = ", ".join(f"{field} = %s" for field in present)
        params = [updates[field] for field in present]
        params.append(sag_id)

        rows = execute_query(
            f"UPDATE sag_sager SET {set_sql} WHERE id = %s RETURNING *",
            tuple(params)
        )
        if not rows:
            raise HTTPException(status_code=500, detail="Failed to update case")
        logger.info("✅ Case updated: %s", sag_id)
        return rows[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error updating case: %s", e)
        raise HTTPException(status_code=500, detail="Failed to update case")
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Beskrivelse inline editing with history
|
|
# ---------------------------------------------------------------------------
|
|
|
|
class BeskrivelsePatch(BaseModel):
    """Request body for PATCH /sag/{sag_id}/beskrivelse: the new description text."""

    # Full replacement text for the case description (not a diff)
    beskrivelse: str
|
|
|
|
|
|
@router.patch("/sag/{sag_id}/beskrivelse")
async def update_sag_beskrivelse(sag_id: int, body: BeskrivelsePatch, request: Request):
    """Update case description and store a change history entry."""
    try:
        current = execute_query_single(
            "SELECT id, beskrivelse FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
            (sag_id,)
        )
        if not current:
            raise HTTPException(status_code=404, detail="Case not found")

        previous_text = current.get("beskrivelse")
        updated_text = body.beskrivelse

        # Resolve acting user (raises 401 when no user can be determined)
        user_id = _get_user_id_from_request(request)
        changed_by_name = None
        if user_id:
            user_row = execute_query_single(
                "SELECT COALESCE(full_name, username, CONCAT('Bruger #', user_id::text)) AS name FROM users WHERE user_id = %s",
                (user_id,)
            )
            if user_row:
                changed_by_name = user_row["name"]

        # Record the before/after pair so description edits can be audited
        execute_query(
            """INSERT INTO sag_beskrivelse_history
               (sag_id, beskrivelse_before, beskrivelse_after, changed_by_user_id, changed_by_name)
               VALUES (%s, %s, %s, %s, %s)""",
            (sag_id, previous_text, updated_text, user_id, changed_by_name)
        )

        # Apply the new text to the case itself
        execute_query(
            "UPDATE sag_sager SET beskrivelse = %s, updated_at = NOW() WHERE id = %s",
            (updated_text, sag_id)
        )

        logger.info("✅ Beskrivelse updated for sag %s by user %s", sag_id, user_id)
        return {"ok": True, "beskrivelse": updated_text}

    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error updating beskrivelse for sag %s: %s", sag_id, e)
        raise HTTPException(status_code=500, detail="Failed to update description")
|
|
|
|
|
|
@router.get("/sag/{sag_id}/beskrivelse/history")
async def get_sag_beskrivelse_history(sag_id: int):
    """Return the change history for a case's description, newest first."""
    if not execute_query_single(
        "SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
        (sag_id,)
    ):
        raise HTTPException(status_code=404, detail="Case not found")

    # Cap at the 50 most recent edits to keep the payload bounded
    history = execute_query(
        """SELECT id, beskrivelse_before, beskrivelse_after,
                  changed_by_name, changed_at
           FROM sag_beskrivelse_history
           WHERE sag_id = %s
           ORDER BY changed_at DESC
           LIMIT 50""",
        (sag_id,)
    )
    return history or []
|
|
|
|
|
|
class PipelineUpdate(BaseModel):
    """Request body for PATCH /sag/{sag_id}/pipeline; all fields optional (partial update)."""

    # Deal value in the pipeline (currency unit not enforced here)
    amount: Optional[float] = None

    # Win probability as a whole percentage, 0-100
    probability: Optional[int] = Field(default=None, ge=0, le=100)

    # FK into pipeline_stages; validated against that table by the endpoint
    stage_id: Optional[int] = None

    description: Optional[str] = None
|
|
|
|
|
|
@router.patch("/sag/{sag_id}/pipeline")
async def update_sag_pipeline(sag_id: int, pipeline_data: PipelineUpdate):
    """Update pipeline fields for a case."""
    try:
        if not execute_query(
            "SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
            (sag_id,)
        ):
            raise HTTPException(status_code=404, detail="Case not found")

        # Only fields explicitly sent by the client take part in the update
        provided = pipeline_data.model_dump(exclude_unset=True)
        if not provided:
            raise HTTPException(status_code=400, detail="No pipeline fields provided")

        # A non-null stage must reference an existing pipeline stage row
        if provided.get("stage_id") is not None:
            if not execute_query(
                "SELECT id FROM pipeline_stages WHERE id = %s",
                (provided["stage_id"],)
            ):
                raise HTTPException(status_code=400, detail="Invalid pipeline stage")

        # Request fields map 1:1 onto pipeline_* columns; iteration order
        # fixes the SET-clause order.
        column_map = (
            ("amount", "pipeline_amount"),
            ("probability", "pipeline_probability"),
            ("stage_id", "pipeline_stage_id"),
            ("description", "pipeline_description"),
        )
        set_clauses = [f"{column} = %s" for key, column in column_map if key in provided]
        params = [provided[key] for key, _ in column_map if key in provided]
        params.append(sag_id)

        rows = execute_query(
            f"UPDATE sag_sager SET {', '.join(set_clauses)} WHERE id = %s RETURNING *",
            tuple(params)
        )
        if not rows:
            raise HTTPException(status_code=500, detail="Failed to update pipeline")

        logger.info("✅ Pipeline updated for case: %s", sag_id)
        return rows[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error updating pipeline for case %s: %s", sag_id, e)
        raise HTTPException(status_code=500, detail="Failed to update pipeline")
|
|
|
|
@router.delete("/sag/{sag_id:int}")
async def delete_sag(sag_id: int):
    """Soft-delete a case."""
    try:
        existing = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
        if not existing:
            raise HTTPException(status_code=404, detail="Case not found")

        deleted = execute_query(
            "UPDATE sag_sager SET deleted_at = NOW() WHERE id = %s RETURNING id",
            (sag_id,)
        )
        if not deleted:
            raise HTTPException(status_code=500, detail="Failed to delete case")

        logger.info("✅ Case soft-deleted: %s", sag_id)
        return {"status": "deleted", "id": sag_id}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error deleting case: %s", e)
        raise HTTPException(status_code=500, detail="Failed to delete case")
|
|
|
|
# ============================================================================
|
|
# RELATIONER - Case Relations
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/relationer")
async def get_relationer(sag_id: int):
    """Get all relations for a case."""
    try:
        if not execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,)):
            raise HTTPException(status_code=404, detail="Case not found")

        # The case may be either end of a relation; fetch both directions
        # and join in both case titles for display.
        relations = execute_query(
            """
            SELECT sr.*,
                   ss_kilde.titel as kilde_titel,
                   ss_mål.titel as mål_titel
            FROM sag_relationer sr
            JOIN sag_sager ss_kilde ON sr.kilde_sag_id = ss_kilde.id
            JOIN sag_sager ss_mål ON sr.målsag_id = ss_mål.id
            WHERE (sr.kilde_sag_id = %s OR sr.målsag_id = %s)
              AND sr.deleted_at IS NULL
            ORDER BY sr.created_at DESC
            """,
            (sag_id, sag_id)
        )
        return relations
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error getting relations: %s", e)
        raise HTTPException(status_code=500, detail="Failed to get relations")
|
|
|
|
@router.post("/sag/{sag_id}/relationer")
async def create_relation(sag_id: int, data: dict):
    """Add a relation to another case.

    Body must contain ``målsag_id`` (target case id) and ``relationstype``.
    Raises 400 on missing fields or a self-relation, 404 when either case
    does not exist, 500 on unexpected errors.
    """
    try:
        målsag_id = data.get("målsag_id")
        relationstype = data.get("relationstype")
        if not målsag_id or not relationstype:
            raise HTTPException(status_code=400, detail="målsag_id and relationstype required")

        # Fix: reject self-relations. Nothing prevented kilde == mål before,
        # which creates degenerate parent/child edges for the recursive
        # case-tree aggregation elsewhere in this module.
        if målsag_id == sag_id:
            raise HTTPException(status_code=400, detail="A case cannot be related to itself")

        # Validate both cases exist (and are not soft-deleted)
        source_ok = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
        target_ok = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (målsag_id,))
        if not source_ok or not target_ok:
            raise HTTPException(status_code=404, detail="One or both cases not found")

        result = execute_query(
            """
            INSERT INTO sag_relationer (kilde_sag_id, målsag_id, relationstype)
            VALUES (%s, %s, %s)
            RETURNING *
            """,
            (sag_id, målsag_id, relationstype)
        )
        if result:
            logger.info("✅ Relation created: %s -> %s (%s)", sag_id, målsag_id, relationstype)
            return result[0]
        raise HTTPException(status_code=500, detail="Failed to create relation")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error creating relation: %s", e)
        raise HTTPException(status_code=500, detail="Failed to create relation")
|
|
|
|
@router.delete("/sag/{sag_id}/relationer/{relation_id}")
async def delete_relation(sag_id: int, relation_id: int):
    """Soft-delete a relation."""
    try:
        # The relation must be active and actually involve this case
        existing = execute_query(
            "SELECT id FROM sag_relationer WHERE id = %s AND deleted_at IS NULL AND (kilde_sag_id = %s OR målsag_id = %s)",
            (relation_id, sag_id, sag_id)
        )
        if not existing:
            raise HTTPException(status_code=404, detail="Relation not found")

        deleted = execute_query(
            "UPDATE sag_relationer SET deleted_at = NOW() WHERE id = %s RETURNING id",
            (relation_id,)
        )
        if deleted:
            logger.info("✅ Relation soft-deleted: %s", relation_id)
            return {"status": "deleted", "id": relation_id}
        raise HTTPException(status_code=500, detail="Failed to delete relation")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error deleting relation: %s", e)
        raise HTTPException(status_code=500, detail="Failed to delete relation")
|
|
|
|
# ============================================================================
|
|
# TAGS - Case Tags
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/tags/all")
async def get_all_tags():
    """Return all distinct tag names across all cases (for autocomplete)."""
    # LIMIT keeps the autocomplete payload bounded even with many tags
    tags = execute_query(
        "SELECT DISTINCT tag_navn FROM sag_tags WHERE deleted_at IS NULL ORDER BY tag_navn ASC LIMIT 200"
    )
    return tags or []
|
|
|
|
|
|
@router.get("/sag/{sag_id}/tags")
async def get_tags(sag_id: int):
    """Get all tags for a case."""
    try:
        if not execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,)):
            raise HTTPException(status_code=404, detail="Case not found")

        return execute_query(
            "SELECT * FROM sag_tags WHERE sag_id = %s AND deleted_at IS NULL ORDER BY created_at DESC",
            (sag_id,)
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error getting tags: %s", e)
        raise HTTPException(status_code=500, detail="Failed to get tags")
|
|
|
|
@router.post("/sag/{sag_id}/tags")
async def add_tag(sag_id: int, data: dict):
    """Add a tag to a case.

    Body must contain ``tag_navn``. Re-adding an existing (non-deleted)
    tag returns the existing row instead of inserting a duplicate —
    consistent with the other add-link endpoints in this module, which
    all pre-check for an active link.
    """
    try:
        tag_navn = data.get("tag_navn")
        if not tag_navn:
            raise HTTPException(status_code=400, detail="tag_navn is required")

        check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
        if not check:
            raise HTTPException(status_code=404, detail="Case not found")

        # Fix: previously repeated POSTs created duplicate active tag rows
        existing = execute_query(
            "SELECT * FROM sag_tags WHERE sag_id = %s AND tag_navn = %s AND deleted_at IS NULL",
            (sag_id, tag_navn)
        )
        if existing:
            return existing[0]

        result = execute_query(
            """
            INSERT INTO sag_tags (sag_id, tag_navn)
            VALUES (%s, %s)
            RETURNING *
            """,
            (sag_id, tag_navn)
        )
        if result:
            logger.info("✅ Tag added: %s -> %s", sag_id, tag_navn)
            return result[0]
        raise HTTPException(status_code=500, detail="Failed to add tag")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error adding tag: %s", e)
        raise HTTPException(status_code=500, detail="Failed to add tag")
|
|
|
|
@router.delete("/sag/{sag_id}/tags/{tag_id}")
async def delete_tag(sag_id: int, tag_id: int):
    """Soft-delete a tag."""
    try:
        # The tag must be active and belong to this specific case
        existing = execute_query(
            "SELECT id FROM sag_tags WHERE id = %s AND sag_id = %s AND deleted_at IS NULL",
            (tag_id, sag_id)
        )
        if not existing:
            raise HTTPException(status_code=404, detail="Tag not found")

        deleted = execute_query(
            "UPDATE sag_tags SET deleted_at = NOW() WHERE id = %s RETURNING id",
            (tag_id,)
        )
        if deleted:
            logger.info("✅ Tag soft-deleted: %s", tag_id)
            return {"status": "deleted", "id": tag_id}
        raise HTTPException(status_code=500, detail="Failed to delete tag")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error deleting tag: %s", e)
        raise HTTPException(status_code=500, detail="Failed to delete tag")
|
|
|
|
|
|
# ============================================================================
|
|
# CUSTOMERS - Case Customers (Many-to-Many)
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/customers")
async def list_case_customers(sag_id: int):
    """List customers associated with a case."""
    try:
        return execute_query(
            """
            SELECT sk.*, c.name as customer_name, c.email as customer_email
            FROM sag_kunder sk
            JOIN customers c ON sk.customer_id = c.id
            WHERE sk.sag_id = %s AND sk.deleted_at IS NULL
            """,
            (sag_id,)
        )
    except Exception as e:
        logger.error("❌ Error listing case customers: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list case customers")
|
|
|
|
@router.post("/sag/{sag_id}/customers")
async def add_case_customer(sag_id: int, data: dict):
    """Add a customer to a case."""
    try:
        customer_id = data.get("customer_id")
        if not customer_id:
            raise HTTPException(status_code=400, detail="customer_id is required")

        role = data.get("role", "Kunde")

        # Idempotent POST: return the existing active link instead of duplicating
        existing = execute_query(
            "SELECT id FROM sag_kunder WHERE sag_id = %s AND customer_id = %s AND deleted_at IS NULL",
            (sag_id, customer_id)
        )
        if existing:
            return existing[0]

        inserted = execute_query(
            """
            INSERT INTO sag_kunder (sag_id, customer_id, role)
            VALUES (%s, %s, %s)
            RETURNING *
            """,
            (sag_id, customer_id, role)
        )
        if inserted:
            logger.info("✅ Customer %s added to case %s", customer_id, sag_id)
            return inserted[0]
        raise HTTPException(status_code=500, detail="Failed to add case customer")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error adding case customer: %s", e)
        raise HTTPException(status_code=500, detail="Failed to add case customer")
|
|
|
|
@router.delete("/sag/{sag_id}/customers/{customer_id}")
async def remove_case_customer(sag_id: int, customer_id: int):
    """Remove customer from case (soft delete).

    Fix: the UPDATE now requires ``deleted_at IS NULL``, so an already
    removed link yields 404 instead of being re-stamped and reported as
    deleted again — consistent with remove_case_location.
    """
    try:
        result = execute_query(
            "UPDATE sag_kunder SET deleted_at = NOW() "
            "WHERE sag_id = %s AND customer_id = %s AND deleted_at IS NULL "
            "RETURNING id",
            (sag_id, customer_id)
        )
        if result:
            logger.info("✅ Customer %s removed from case %s", customer_id, sag_id)
            return {"status": "deleted"}
        raise HTTPException(status_code=404, detail="Customer link not found")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error removing case customer: %s", e)
        raise HTTPException(status_code=500, detail="Failed to remove case customer")
|
|
|
|
|
|
# ============================================================================
|
|
# CONTACTS - Case Contacts
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/contacts")
async def list_case_contacts(sag_id: int):
    """List contacts associated with a case."""
    try:
        # The LATERAL subquery picks one company per contact (primary
        # first, then alphabetical) so each contact yields a single row.
        contacts = execute_query(
            """
            SELECT
                sk.*,
                c.first_name,
                c.last_name,
                c.email,
                c.phone,
                c.mobile,
                c.title,
                company.customer_name
            FROM sag_kontakter sk
            JOIN contacts c ON sk.contact_id = c.id
            LEFT JOIN LATERAL (
                SELECT cu.name AS customer_name
                FROM contact_companies cc
                JOIN customers cu ON cu.id = cc.customer_id
                WHERE cc.contact_id = c.id
                ORDER BY cc.is_primary DESC, cu.name
                LIMIT 1
            ) company ON TRUE
            WHERE sk.sag_id = %s AND sk.deleted_at IS NULL
            """,
            (sag_id,)
        )
        return contacts
    except Exception as e:
        logger.error("❌ Error listing case contacts: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list case contacts")
|
|
|
|
@router.post("/sag/{sag_id}/contacts")
async def add_case_contact(sag_id: int, data: dict):
    """Add a contact to a case.

    Body: ``contact_id`` (required), optional ``role`` (default "Kontakt")
    and ``is_primary``. Marking a contact primary demotes any existing
    primary contact on the case first.

    Fix: the primary-demotion UPDATE called ``execute_update``, which is
    not imported in this module (only execute_query/execute_query_single
    are), so any request with ``is_primary=true`` raised NameError → 500.
    It now uses ``execute_query`` like the rest of the file.
    """
    try:
        if not data.get("contact_id"):
            raise HTTPException(status_code=400, detail="contact_id is required")

        role = data.get("role", "Kontakt")
        is_primary = bool(data.get("is_primary", False))

        # Idempotent POST: return the existing active link instead of duplicating
        check = execute_query(
            "SELECT id FROM sag_kontakter WHERE sag_id = %s AND contact_id = %s AND deleted_at IS NULL",
            (sag_id, data["contact_id"])
        )
        if check:
            return check[0]  # Already linked

        if is_primary:
            # Only one primary contact per case: demote the current one first
            execute_query(
                "UPDATE sag_kontakter SET is_primary = FALSE WHERE sag_id = %s",
                (sag_id,)
            )

        result = execute_query(
            """
            INSERT INTO sag_kontakter (sag_id, contact_id, role, is_primary)
            VALUES (%s, %s, %s, %s)
            RETURNING *
            """,
            (sag_id, data["contact_id"], role, is_primary)
        )
        if result:
            logger.info("✅ Contact added to case %s: %s", sag_id, data["contact_id"])
            return result[0]
        raise HTTPException(status_code=500, detail="Failed to add contact")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error adding case contact: %s", e)
        raise HTTPException(status_code=500, detail="Failed to add contact")
|
|
|
|
@router.delete("/sag/{sag_id}/contacts/{contact_id}")
async def remove_case_contact(sag_id: int, contact_id: int):
    """Remove contact from case (soft delete).

    Fix: the UPDATE now requires ``deleted_at IS NULL``, so an already
    removed link yields 404 instead of being re-stamped and reported as
    deleted again — consistent with remove_case_location.
    """
    try:
        result = execute_query(
            "UPDATE sag_kontakter SET deleted_at = NOW() "
            "WHERE sag_id = %s AND contact_id = %s AND deleted_at IS NULL "
            "RETURNING id",
            (sag_id, contact_id)
        )
        if result:
            logger.info("✅ Contact %s removed from case %s", contact_id, sag_id)
            return {"status": "deleted"}
        raise HTTPException(status_code=404, detail="Contact link not found")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error removing case contact: %s", e)
        raise HTTPException(status_code=500, detail="Failed to remove case contact")
|
|
|
|
|
|
@router.patch("/sag/{sag_id}/contacts/{contact_id}")
async def update_case_contact(sag_id: int, contact_id: int, data: dict):
    """Update role or primary status for a case contact.

    Accepts ``role`` and/or ``is_primary`` in the body; at least one is
    required (400 otherwise). Promoting to primary demotes any other
    primary contact on the case first.

    Fix: the primary-demotion UPDATE called ``execute_update``, which is
    not imported in this module, so setting ``is_primary=true`` raised
    NameError → 500. It now uses ``execute_query`` like the rest of the file.
    """
    try:
        existing = execute_query(
            "SELECT id FROM sag_kontakter WHERE sag_id = %s AND contact_id = %s AND deleted_at IS NULL",
            (sag_id, contact_id)
        )
        if not existing:
            raise HTTPException(status_code=404, detail="Contact link not found")

        role = data.get("role")
        is_primary = data.get("is_primary")

        updates = []
        params = []

        if role is not None:
            updates.append("role = %s")
            params.append(role)

        if is_primary is not None:
            if bool(is_primary):
                # Only one primary contact per case: demote the current one
                execute_query(
                    "UPDATE sag_kontakter SET is_primary = FALSE WHERE sag_id = %s",
                    (sag_id,)
                )
            updates.append("is_primary = %s")
            params.append(bool(is_primary))

        if not updates:
            raise HTTPException(status_code=400, detail="No fields to update")

        params.extend([sag_id, contact_id])
        # SET clauses are built only from the hard-coded strings above,
        # so the f-string is safe against injection.
        query = f"""
            UPDATE sag_kontakter
            SET {', '.join(updates)}
            WHERE sag_id = %s AND contact_id = %s
            RETURNING *
        """

        result = execute_query(query, tuple(params))
        if result:
            return result[0]
        raise HTTPException(status_code=500, detail="Failed to update contact")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error updating case contact: %s", e)
        raise HTTPException(status_code=500, detail="Failed to update contact")
|
|
|
|
|
|
# ============================================================================
|
|
# HARDWARE - Placeholder endpoints for frontend compatibility
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/hardware")
async def list_case_hardware(sag_id: int):
    """List hardware associated with a case."""
    try:
        return execute_query(
            """
            SELECT sh.id as relation_id, h.*
            FROM sag_hardware sh
            JOIN hardware_assets h ON sh.hardware_id = h.id
            WHERE sh.sag_id = %s AND sh.deleted_at IS NULL
            ORDER BY sh.created_at DESC
            """,
            (sag_id,)
        )
    except Exception as e:
        logger.error("❌ Error listing case hardware: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list case hardware")
|
|
|
|
@router.post("/sag/{sag_id}/hardware")
async def add_case_hardware(sag_id: int, data: dict):
    """Add hardware to case."""
    try:
        hardware_id = data.get("hardware_id")
        if not hardware_id:
            raise HTTPException(status_code=400, detail="hardware_id is required")

        # Active link already present: return it unchanged (the note is
        # deliberately not overwritten in this path).
        existing = execute_query(
            "SELECT id FROM sag_hardware WHERE sag_id = %s AND hardware_id = %s AND deleted_at IS NULL",
            (sag_id, hardware_id)
        )
        if existing:
            return existing[0]

        # ON CONFLICT resurrects a previously soft-deleted link row and
        # refreshes its note instead of inserting a duplicate.
        linked = execute_query(
            """
            INSERT INTO sag_hardware (sag_id, hardware_id, note)
            VALUES (%s, %s, %s)
            ON CONFLICT (sag_id, hardware_id)
            DO UPDATE SET deleted_at = NULL, note = EXCLUDED.note
            RETURNING *
            """,
            (sag_id, hardware_id, data.get("note"))
        )
        if linked:
            logger.info("✅ Hardware %s added to case %s", hardware_id, sag_id)
            return linked[0]
        raise HTTPException(status_code=500, detail="Failed to add hardware")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error adding case hardware: %s", e)
        raise HTTPException(status_code=500, detail="Failed to add hardware")
|
|
|
|
@router.delete("/sag/{sag_id}/hardware/{hardware_id}")
async def remove_case_hardware(sag_id: int, hardware_id: int):
    """Remove hardware from case (soft delete).

    Fix: the UPDATE now requires ``deleted_at IS NULL``, so an already
    removed link yields 404 instead of being re-stamped and reported as
    deleted again — consistent with remove_case_location.
    """
    try:
        result = execute_query(
            "UPDATE sag_hardware SET deleted_at = NOW() "
            "WHERE sag_id = %s AND hardware_id = %s AND deleted_at IS NULL "
            "RETURNING id",
            (sag_id, hardware_id)
        )
        if result:
            logger.info("✅ Hardware %s removed from case %s", hardware_id, sag_id)
            return {"status": "deleted"}
        raise HTTPException(status_code=404, detail="Hardware link not found")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error removing case hardware: %s", e)
        raise HTTPException(status_code=500, detail="Failed to remove hardware")
|
|
|
|
|
|
# ============================================================================
|
|
# LOCATIONS - Placeholder endpoints for frontend compatibility
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/locations")
async def list_case_locations(sag_id: int):
    """List locations associated with a case."""
    try:
        return execute_query(
            """
            SELECT sl.id as relation_id, l.*
            FROM sag_lokationer sl
            JOIN locations_locations l ON sl.location_id = l.id
            WHERE sl.sag_id = %s AND sl.deleted_at IS NULL
            ORDER BY sl.created_at DESC
            """,
            (sag_id,)
        )
    except Exception as e:
        logger.error("❌ Error listing case locations: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list case locations")
|
|
|
|
@router.post("/sag/{sag_id}/locations")
async def add_case_location(sag_id: int, data: dict):
    """Add location to case."""
    try:
        location_id = data.get("location_id")
        if not location_id:
            raise HTTPException(status_code=400, detail="location_id is required")

        # Active link already present: return it unchanged (the note is
        # deliberately not overwritten in this path).
        existing = execute_query(
            "SELECT id FROM sag_lokationer WHERE sag_id = %s AND location_id = %s AND deleted_at IS NULL",
            (sag_id, location_id)
        )
        if existing:
            return existing[0]

        # ON CONFLICT resurrects a previously soft-deleted link row and
        # refreshes its note instead of inserting a duplicate.
        linked = execute_query(
            """
            INSERT INTO sag_lokationer (sag_id, location_id, note)
            VALUES (%s, %s, %s)
            ON CONFLICT (sag_id, location_id)
            DO UPDATE SET deleted_at = NULL, note = EXCLUDED.note
            RETURNING *
            """,
            (sag_id, location_id, data.get("note"))
        )
        if linked:
            logger.info("✅ Location %s added to case %s", location_id, sag_id)
            return linked[0]
        raise HTTPException(status_code=500, detail="Failed to add location")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error adding case location: %s", e)
        raise HTTPException(status_code=500, detail="Failed to add location")
|
|
|
|
@router.delete("/sag/{sag_id}/locations/{location_id}")
async def remove_case_location(sag_id: int, location_id: int):
    """Remove location from case."""
    try:
        # The path parameter may be either the location id or the link
        # row id, hence the (location_id = %s OR id = %s) match.
        result = execute_query(
            """
            UPDATE sag_lokationer
            SET deleted_at = NOW()
            WHERE sag_id = %s
              AND deleted_at IS NULL
              AND (location_id = %s OR id = %s)
            RETURNING id
            """,
            (sag_id, location_id, location_id)
        )
        if result:
            logger.info("✅ Location %s removed from case %s", location_id, sag_id)
            return {"status": "deleted"}
        raise HTTPException(status_code=404, detail="Location link not found")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error removing case location: %s", e)
        raise HTTPException(status_code=500, detail="Failed to remove location")
|
|
|
|
# ============================================================================
|
|
# VAREKØB & SALG - Aggregation (Case Tree)
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/varekob-salg")
async def get_varekob_salg(sag_id: int, include_subcases: bool = True):
    """Aggregate time entries and sale items for a case or its whole subtree.

    Args:
        sag_id: Root case id.
        include_subcases: When True, aggregate over the case plus all
            transitively related child cases; otherwise only the case itself.

    Returns:
        Dict with the resolved case tree, raw time entries, raw sale items,
        and summary totals (hours, billable hours, sale/purchase/net amounts).

    Raises:
        HTTPException: 404 if the case does not exist, 500 on unexpected errors.
    """
    # Recursive CTE shared by all subtree-scoped queries below (previously
    # duplicated verbatim three times in this function).  Relation direction
    # depends on relationstype, so parent/child pairs are normalised first.
    # The CTE binds exactly one %s parameter: the root sag_id.
    case_tree_cte = """
        WITH RECURSIVE normalized_relations AS (
            SELECT
                CASE
                    WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN målsag_id
                    WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN kilde_sag_id
                    ELSE kilde_sag_id
                END AS parent_id,
                CASE
                    WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN kilde_sag_id
                    WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN målsag_id
                    ELSE målsag_id
                END AS child_id
            FROM sag_relationer
            WHERE deleted_at IS NULL
        ),
        case_tree AS (
            SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL
            UNION
            SELECT nr.child_id
            FROM normalized_relations nr
            JOIN case_tree ct ON nr.parent_id = ct.id
        )
    """
    try:
        check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
        if not check:
            raise HTTPException(status_code=404, detail="Case not found")

        has_sale_items_table = _table_exists("sag_salgsvarer")
        if not has_sale_items_table:
            logger.warning("⚠️ sag_salgsvarer table missing - sale item aggregation skipped for sag_id=%s", sag_id)

        sale_items = []
        if include_subcases:
            case_tree = execute_query(
                case_tree_cte
                + """
                SELECT s.id, s.titel
                FROM sag_sager s
                JOIN case_tree ct ON s.id = ct.id
                WHERE s.deleted_at IS NULL
                ORDER BY s.id
                """,
                (sag_id,),
            )
            time_entries = execute_query(
                case_tree_cte
                + """
                SELECT t.*, s.titel AS source_sag_titel
                FROM tmodule_times t
                JOIN case_tree ct ON t.sag_id = ct.id
                LEFT JOIN sag_sager s ON s.id = t.sag_id
                ORDER BY t.worked_date DESC NULLS LAST, t.id DESC
                """,
                (sag_id,),
            )
            if has_sale_items_table:
                sale_items = execute_query(
                    case_tree_cte
                    + """
                    SELECT si.*, s.titel AS source_sag_titel
                    FROM sag_salgsvarer si
                    JOIN case_tree ct ON si.sag_id = ct.id
                    LEFT JOIN sag_sager s ON s.id = si.sag_id
                    ORDER BY si.line_date DESC NULLS LAST, si.id DESC
                    """,
                    (sag_id,),
                )
        else:
            # Single-case mode: no recursion, plain filtered queries.
            case_tree = execute_query(
                "SELECT id, titel FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
                (sag_id,),
            )
            time_entries = execute_query(
                """
                SELECT t.*, s.titel AS source_sag_titel
                FROM tmodule_times t
                LEFT JOIN sag_sager s ON s.id = t.sag_id
                WHERE t.sag_id = %s
                ORDER BY t.worked_date DESC NULLS LAST, t.id DESC
                """,
                (sag_id,),
            )
            if has_sale_items_table:
                sale_items = execute_query(
                    """
                    SELECT si.*, s.titel AS source_sag_titel
                    FROM sag_salgsvarer si
                    LEFT JOIN sag_sager s ON s.id = si.sag_id
                    WHERE si.sag_id = %s
                    ORDER BY si.line_date DESC NULLS LAST, si.id DESC
                    """,
                    (sag_id,),
                )

        total_entries = len(time_entries or [])
        total_hours = 0.0
        billable_hours = 0.0
        sale_total = 0.0
        purchase_total = 0.0

        for entry in time_entries or []:
            # Approved hours win over the originally reported hours.
            hours = float(entry.get("approved_hours") or entry.get("original_hours") or 0)
            total_hours += hours
            if entry.get("billable"):
                billable_hours += hours

        for item in sale_items or []:
            amount = float(item.get("amount") or 0)
            # Anything not explicitly marked as a purchase counts as a sale.
            if (item.get("type") or "").lower() == "purchase":
                purchase_total += amount
            else:
                sale_total += amount

        return {
            "sag_id": sag_id,
            "include_subcases": include_subcases,
            "case_tree": case_tree or [],
            "time_entries": time_entries or [],
            "sale_items": sale_items or [],
            "totals": {
                "total_entries": total_entries,
                "total_hours": round(total_hours, 2),
                "billable_hours": round(billable_hours, 2),
                "sale_total": round(sale_total, 2),
                "purchase_total": round(purchase_total, 2),
                "net_total": round(sale_total - purchase_total, 2)
            }
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error aggregating case data: %s", e)
        raise HTTPException(status_code=500, detail="Failed to aggregate case data")
|
|
|
|
|
|
@router.get("/sag/{sag_id}/calendar-events")
async def get_case_calendar_events(sag_id: int, include_children: bool = True):
    """Return calendar events for a case and optionally its child cases.

    Events come from two sources:
      * active rows in sag_reminders (next_check_at / scheduled_at),
      * the case's own deadline and deferred_until dates.

    Response shape:
        {"current": [events for sag_id],
         "children": [{"case_id", "case_title", "events"}, ...]}

    Raises:
        HTTPException: 404 if the case does not exist, 500 on unexpected errors.
    """
    try:
        check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
        if not check:
            raise HTTPException(status_code=404, detail="Case not found")

        if include_children:
            # Recursive walk of sag_relationer.  Relation direction depends on
            # relationstype, so parent/child are normalised first, then the
            # tree is expanded from the root sag_id.
            case_tree_query = """
                WITH RECURSIVE normalized_relations AS (
                    SELECT
                        CASE
                            WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN målsag_id
                            WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN kilde_sag_id
                            ELSE kilde_sag_id
                        END AS parent_id,
                        CASE
                            WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN kilde_sag_id
                            WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN målsag_id
                            ELSE målsag_id
                        END AS child_id
                    FROM sag_relationer
                    WHERE deleted_at IS NULL
                ),
                case_tree AS (
                    SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL
                    UNION
                    SELECT nr.child_id
                    FROM normalized_relations nr
                    JOIN case_tree ct ON nr.parent_id = ct.id
                )
                SELECT s.id, s.titel
                FROM sag_sager s
                JOIN case_tree ct ON s.id = ct.id
                WHERE s.deleted_at IS NULL
                ORDER BY s.id
            """
            case_rows = execute_query(case_tree_query, (sag_id,)) or []
        else:
            case_rows = execute_query(
                "SELECT id, titel FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
                (sag_id,)
            ) or []

        case_ids = [row["id"] for row in case_rows]
        case_titles = {row["id"]: row.get("titel") for row in case_rows}

        if not case_ids:
            return {"current": [], "children": []}

        # Dynamic IN-list placeholders; values are still bound as parameters.
        placeholders = ",".join(["%s"] * len(case_ids))

        reminder_query = f"""
            SELECT r.id, r.sag_id, r.title, r.message, r.event_type, r.priority,
                   r.next_check_at, r.scheduled_at
            FROM sag_reminders r
            WHERE r.deleted_at IS NULL
            AND r.is_active = true
            AND r.sag_id IN ({placeholders})
        """
        reminders = execute_query(reminder_query, tuple(case_ids)) or []

        case_query = f"""
            SELECT id, titel, deadline, deferred_until
            FROM sag_sager
            WHERE deleted_at IS NULL
            AND id IN ({placeholders})
        """
        case_dates = execute_query(case_query, tuple(case_ids)) or []

        # Bucket events per case id; every id in case_ids gets a bucket so
        # the loops below can append without existence checks.
        events_by_case: dict[int, list] = {cid: [] for cid in case_ids}

        for row in reminders:
            # Prefer the rescheduled check time over the original schedule.
            start_value = row.get("next_check_at") or row.get("scheduled_at")
            if not start_value:
                continue
            events_by_case[row["sag_id"]].append({
                "id": f"reminder:{row['id']}",
                "title": row.get("title"),
                "message": row.get("message"),
                "event_kind": row.get("event_type") or "reminder",
                "start": start_value.isoformat(),
                "url": f"/sag/{row['sag_id']}"
            })

        for row in case_dates:
            if row.get("deadline"):
                events_by_case[row["id"]].append({
                    "id": f"deadline:{row['id']}",
                    "title": f"Deadline: {row.get('titel')}",
                    "event_kind": "deadline",
                    "start": row["deadline"].isoformat(),
                    "url": f"/sag/{row['id']}"
                })
            if row.get("deferred_until"):
                events_by_case[row["id"]].append({
                    "id": f"deferred:{row['id']}",
                    "title": f"Deferred: {row.get('titel')}",
                    "event_kind": "deferred",
                    "start": row["deferred_until"].isoformat(),
                    "url": f"/sag/{row['id']}"
                })

        # Split the root case's events from those of its descendants.
        current_events = events_by_case.get(sag_id, [])
        children = []
        for cid in case_ids:
            if cid == sag_id:
                continue
            children.append({
                "case_id": cid,
                "case_title": case_titles.get(cid) or f"Sag #{cid}",
                "events": events_by_case.get(cid, [])
            })

        return {"current": current_events, "children": children}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error loading case calendar events: %s", e)
        raise HTTPException(status_code=500, detail="Failed to load calendar events")
|
|
|
|
# ============================================================================
|
|
# VAREKØB & SALG - CRUD (Case-linked sale items)
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/sale-items")
async def list_sale_items(sag_id: int):
    """List all sale items for a case, newest line date first.

    Returns an empty list when the sag_salgsvarer table has not been
    provisioned yet (logged as a warning).
    """
    try:
        case_rows = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
        if not case_rows:
            raise HTTPException(status_code=404, detail="Case not found")

        if not _table_exists("sag_salgsvarer"):
            logger.warning("⚠️ sag_salgsvarer table missing - returning empty sale items list for sag_id=%s", sag_id)
            return []

        sale_items_sql = """
            SELECT si.*, s.titel AS source_sag_titel
            FROM sag_salgsvarer si
            LEFT JOIN sag_sager s ON s.id = si.sag_id
            WHERE si.sag_id = %s
            ORDER BY si.line_date DESC NULLS LAST, si.id DESC
        """
        rows = execute_query(sale_items_sql, (sag_id,))
        return rows if rows else []
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error listing sale items: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list sale items")
|
|
|
|
|
|
@router.post("/sag/{sag_id}/sale-items")
async def create_sale_item(sag_id: int, data: dict):
    """Create a sale item for a case.

    Required fields: description, amount.  Optional fields: type (sale |
    purchase, default sale), status (draft | confirmed | cancelled, default
    draft), quantity, unit, unit_price, currency (default DKK), line_date,
    external_ref, product_id.
    """
    try:
        if not execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,)):
            raise HTTPException(status_code=404, detail="Case not found")

        description = data.get("description")
        amount = data.get("amount")
        if not description:
            raise HTTPException(status_code=400, detail="description is required")
        if amount is None:
            raise HTTPException(status_code=400, detail="amount is required")

        item_type = (data.get("type") or "sale").lower()
        if item_type not in ("sale", "purchase"):
            raise HTTPException(status_code=400, detail="type must be sale or purchase")

        status = (data.get("status") or "draft").lower()
        if status not in ("draft", "confirmed", "cancelled"):
            raise HTTPException(status_code=400, detail="status must be draft, confirmed, or cancelled")

        insert_sql = """
            INSERT INTO sag_salgsvarer
                (sag_id, type, description, quantity, unit, unit_price, amount, currency, status, line_date, external_ref, product_id)
            VALUES
                (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            RETURNING *
        """
        rows = execute_query(
            insert_sql,
            (
                sag_id,
                item_type,
                description,
                data.get("quantity"),
                data.get("unit"),
                data.get("unit_price"),
                amount,
                data.get("currency", "DKK"),
                status,
                data.get("line_date"),
                data.get("external_ref"),
                data.get("product_id"),
            ),
        )
        if not rows:
            raise HTTPException(status_code=500, detail="Failed to create sale item")
        logger.info("✅ Sale item created for case %s", sag_id)
        return rows[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error creating sale item: %s", e)
        raise HTTPException(status_code=500, detail="Failed to create sale item")
|
|
|
|
|
|
@router.get("/sag/{sag_id}/sale-items/{item_id}")
async def get_sale_item(sag_id: int, item_id: int):
    """Fetch one sale item belonging to the given case (404 otherwise)."""
    try:
        rows = execute_query(
            """
            SELECT si.*, s.titel AS source_sag_titel
            FROM sag_salgsvarer si
            LEFT JOIN sag_sager s ON s.id = si.sag_id
            WHERE si.sag_id = %s AND si.id = %s
            """,
            (sag_id, item_id),
        )
        if not rows:
            raise HTTPException(status_code=404, detail="Sale item not found")
        return rows[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error getting sale item: %s", e)
        raise HTTPException(status_code=500, detail="Failed to get sale item")
|
|
|
|
|
|
@router.patch("/sag/{sag_id}/sale-items/{item_id}")
async def update_sale_item(sag_id: int, item_id: int, updates: dict):
    """Partially update a sale item; only whitelisted columns are touched.

    type and status values are normalised to lowercase and validated
    against their closed value sets before being applied.
    """
    try:
        existing = execute_query(
            "SELECT id FROM sag_salgsvarer WHERE id = %s AND sag_id = %s",
            (item_id, sag_id)
        )
        if not existing:
            raise HTTPException(status_code=404, detail="Sale item not found")

        allowed_fields = [
            "type",
            "description",
            "quantity",
            "unit",
            "unit_price",
            "amount",
            "currency",
            "status",
            "line_date",
            "external_ref",
            "product_id",
        ]

        set_clauses = []
        params = []
        for field in allowed_fields:
            if field not in updates:
                continue
            value = updates[field]
            if field == "type":
                value = (value or "").lower()
                if value not in ("sale", "purchase"):
                    raise HTTPException(status_code=400, detail="type must be sale or purchase")
            elif field == "status":
                value = (value or "").lower()
                if value not in ("draft", "confirmed", "cancelled"):
                    raise HTTPException(status_code=400, detail="status must be draft, confirmed, or cancelled")
            set_clauses.append(f"{field} = %s")
            params.append(value)

        if not set_clauses:
            raise HTTPException(status_code=400, detail="No valid fields to update")

        params.extend([item_id, sag_id])
        # Column names come from the whitelist above, never from the caller.
        update_sql = f"UPDATE sag_salgsvarer SET {', '.join(set_clauses)} WHERE id = %s AND sag_id = %s RETURNING *"
        rows = execute_query(update_sql, tuple(params))
        if not rows:
            raise HTTPException(status_code=500, detail="Failed to update sale item")
        logger.info("✅ Sale item updated: %s", item_id)
        return rows[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error updating sale item: %s", e)
        raise HTTPException(status_code=500, detail="Failed to update sale item")
|
|
|
|
|
|
@router.delete("/sag/{sag_id}/sale-items/{item_id}")
async def delete_sale_item(sag_id: int, item_id: int):
    """Hard-delete a sale item; 404 when it does not belong to the case."""
    try:
        deleted = execute_query(
            "DELETE FROM sag_salgsvarer WHERE id = %s AND sag_id = %s RETURNING id",
            (item_id, sag_id),
        )
        if not deleted:
            raise HTTPException(status_code=404, detail="Sale item not found")
        logger.info("✅ Sale item deleted: %s", item_id)
        return {"status": "deleted", "id": item_id}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error deleting sale item: %s", e)
        raise HTTPException(status_code=500, detail="Failed to delete sale item")
|
|
|
|
# ============================================================================
|
|
# KOMMENTARER - Case Comments
|
|
# ============================================================================
|
|
|
|
@router.get("/sag/{sag_id}/kommentarer")
async def get_kommentarer(sag_id: int):
    """Return all non-deleted comments for a case, newest first."""
    try:
        if not execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,)):
            raise HTTPException(status_code=404, detail="Case not found")

        return execute_query(
            "SELECT * FROM sag_kommentarer WHERE sag_id = %s AND deleted_at IS NULL ORDER BY created_at DESC",
            (sag_id,),
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error getting comments: %s", e)
        raise HTTPException(status_code=500, detail="Failed to get comments")
|
|
|
|
@router.post("/sag/{sag_id}/kommentarer")
async def add_kommentar(sag_id: int, data: dict, request: Request):
    """Add a comment to a case.

    Author resolution order: system messages use the supplied author
    (default "System"); user messages prefer the authenticated user's
    display name, then the caller-supplied author, then plain "Bruger".
    """
    try:
        indhold = data.get("indhold")
        if not indhold:
            raise HTTPException(status_code=400, detail="indhold is required")

        if not execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,)):
            raise HTTPException(status_code=404, detail="Case not found")

        er_system_besked = bool(data.get("er_system_besked", False))

        forfatter = None
        if er_system_besked:
            forfatter = str(data.get("forfatter") or "System").strip() or "System"
        else:
            try:
                user_id = _get_user_id_from_request(request)
                user_row = execute_query_single(
                    """
                    SELECT COALESCE(NULLIF(TRIM(full_name), ''), NULLIF(TRIM(username), '')) AS display_name
                    FROM users
                    WHERE user_id = %s
                    """,
                    (user_id,),
                )
                display_name = (user_row or {}).get("display_name")
                if display_name:
                    forfatter = str(display_name).strip()
            except HTTPException:
                # Fallback to provided author for legacy callers without auth context.
                pass

        if not forfatter:
            provided_author = str(data.get("forfatter") or "").strip()
            if provided_author and provided_author.lower() != "bruger":
                forfatter = provided_author
            else:
                forfatter = "Bruger"

        rows = execute_query(
            """
            INSERT INTO sag_kommentarer (sag_id, forfatter, indhold, er_system_besked)
            VALUES (%s, %s, %s, %s)
            RETURNING *
            """,
            (sag_id, forfatter, indhold, er_system_besked),
        )
        if not rows:
            raise HTTPException(status_code=500, detail="Failed to add comment")
        logger.info("✅ Comment added to case %s by %s", sag_id, forfatter)
        return rows[0]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ Error adding comment: %s", e)
        raise HTTPException(status_code=500, detail="Failed to add comment")
|
|
|
|
|
|
# ============================================================================
|
|
# WORK ORDERS / LABELS
|
|
# ============================================================================
|
|
|
|
def _resolve_case_row_for_documents(sag_id: int):
    """Load the case row (with customer name) used by document endpoints.

    Raises HTTPException 404 when the case is missing or soft-deleted.
    """
    case_row = execute_query_single(
        """
        SELECT s.id, s.titel, s.status, s.created_at, s.beskrivelse, s.customer_id, c.name AS customer_name
        FROM sag_sager s
        LEFT JOIN customers c ON c.id = s.customer_id
        WHERE s.id = %s AND s.deleted_at IS NULL
        """,
        (sag_id,),
    )
    if not case_row:
        raise HTTPException(status_code=404, detail="Case not found")
    return case_row
|
|
|
|
|
|
def _store_generated_case_file(
    sag_id: int,
    filename: str,
    content_bytes: bytes,
    content_type: str,
    source_type: str,
    source_token: Optional[str] = None,
) -> Dict:
    """Persist generated bytes to attachment storage and register them in sag_files.

    Returns the inserted row (id, filename, created_at).
    """
    stored_name = _generate_stored_name(filename, SAG_FILE_SUBDIR)
    target_path = _resolve_attachment_path(stored_name)
    target_path.parent.mkdir(parents=True, exist_ok=True)
    target_path.write_bytes(content_bytes)

    insert_sql = """
        INSERT INTO sag_files (
            sag_id,
            filename,
            content_type,
            size_bytes,
            stored_name,
            source_type,
            source_token
        )
        VALUES (%s, %s, %s, %s, %s, %s, %s)
        RETURNING id, filename, created_at
    """
    inserted = execute_query(
        insert_sql,
        (sag_id, filename, content_type, len(content_bytes), stored_name, source_type, source_token),
    )
    return inserted[0]
|
|
|
|
|
|
@router.get("/sag/{sag_id}/work-orders/print", response_class=HTMLResponse)
async def print_case_work_order(sag_id: int, request: Request):
    """Render a printable work order with scan token + barcode.

    Builds a standalone HTML page containing case metadata, todo steps,
    linked hardware, linked emails, internal messages and a signature box.
    A fresh work-order token is created per print and rendered as a
    Code39 barcode plus a plain-text scan token at the bottom of the page.
    """
    case = _resolve_case_row_for_documents(sag_id)

    # Token creation works without auth; user_id is only recorded when the
    # request carries an authenticated user.
    user_id = None
    try:
        user_id = _get_user_id_from_request(request)
    except HTTPException:
        user_id = None

    token = _create_document_token(sag_id, "work_order", user_id=user_id)
    barcode_svg = _render_code39_svg(token)

    todo_steps = execute_query(
        """
        SELECT title, description, due_date, is_done
        FROM sag_todo_steps
        WHERE sag_id = %s
        AND deleted_at IS NULL
        ORDER BY is_done ASC, due_date ASC NULLS LAST, id ASC
        """,
        (sag_id,),
    ) or []

    # One checkbox table row per todo step; placeholder row when none exist.
    todo_items_html = "".join(
        (
            "<tr>"
            "<td style='width:30px;border:1px solid #d4dbe3;padding:8px;text-align:center;'>□</td>"
            f"<td style='border:1px solid #d4dbe3;padding:8px;'><strong>{html.escape(step.get('title') or '')}</strong>"
            f"<div style='font-size:12px;color:#5b6670;margin-top:4px;'>{html.escape(step.get('description') or '')}</div></td>"
            "</tr>"
        )
        for step in todo_steps
    )
    if not todo_items_html:
        todo_items_html = (
            "<tr><td style='width:30px;border:1px solid #d4dbe3;padding:8px;text-align:center;'>□</td>"
            "<td style='border:1px solid #d4dbe3;padding:8px;'>Ingen todo-opgaver registreret</td></tr>"
        )

    def _nl2br(value: Optional[str]) -> str:
        """HTML-escape *value* and convert newlines to <br>; '-' when empty."""
        text = str(value or "").strip()
        if not text:
            return "-"
        return "<br>".join(html.escape(part) for part in text.splitlines())

    def _clip(value: Optional[str], limit: int = 600) -> str:
        """Truncate *value* to *limit* characters with a '...' suffix."""
        text = str(value or "").strip()
        if not text:
            return ""
        if len(text) <= limit:
            return text
        return f"{text[:limit].rstrip()}..."

    def _strip_quoted_email_text(value: Optional[str]) -> str:
        """Keep only the newest part of an email body.

        Stops at the first quoted line ('>'), forwarded/original-message
        marker, 'wrote:'/'skrev:' attribution line, a ---/___ separator
        followed by header-like lines, or a blank-line-preceded header line
        (Fra:/From:/Til:/To:/...).
        """
        text = str(value or "").replace("\r\n", "\n").replace("\r", "\n").strip()
        if not text:
            return ""

        lines = text.split("\n")
        kept = []
        header_re = re.compile(r"^(fra|from|til|to|sendt|sent|dato|date|emne|subject)\s*:\s*", re.IGNORECASE)
        original_msg_re = re.compile(r"^(-----\s*original message\s*-----|begin forwarded message)", re.IGNORECASE)
        wrote_re = re.compile(r"\b(wrote|skrev)\s*:\s*$", re.IGNORECASE)

        for idx, line in enumerate(lines):
            trimmed = line.strip()

            if trimmed.startswith(">"):
                break
            if original_msg_re.match(trimmed):
                break
            if wrote_re.search(trimmed):
                break

            # A separator line only terminates the body when header-like
            # lines follow within the next few lines.
            if re.match(r"^[-_]{3,}$", trimmed):
                lookahead = lines[idx + 1: idx + 5]
                if any(header_re.match(str(candidate or "").strip()) for candidate in lookahead):
                    break

            if idx > 0 and header_re.match(trimmed) and not str(lines[idx - 1] or "").strip():
                break

            kept.append(line)

        # Drop trailing blank lines left after the cut.
        while kept and not str(kept[-1]).strip():
            kept.pop()

        return "\n".join(kept).strip()

    # email_messages column names differ between deployments; probe for the
    # known variants and fall back to NULL when neither exists.
    email_from_expr = "NULL"
    if table_has_column("email_messages", "sender_email"):
        email_from_expr = "e.sender_email"
    elif table_has_column("email_messages", "from_email"):
        email_from_expr = "e.from_email"

    email_to_expr = "NULL"
    if table_has_column("email_messages", "recipient_email"):
        email_to_expr = "e.recipient_email"
    elif table_has_column("email_messages", "to_email"):
        email_to_expr = "e.to_email"

    linked_emails = []
    email_comment_rows = []
    if _table_exists("sag_emails") and _table_exists("email_messages"):
        try:
            linked_emails = execute_query(
                f"""
                SELECT
                    e.received_date,
                    e.subject,
                    {email_from_expr} AS from_email,
                    {email_to_expr} AS to_email,
                    e.body_text
                FROM sag_emails se
                JOIN email_messages e ON e.id = se.email_id
                WHERE se.sag_id = %s
                ORDER BY e.received_date DESC NULLS LAST, e.id DESC
                LIMIT 30
                """,
                (sag_id,),
            ) or []
        except Exception as exc:
            # Best-effort: the printout still renders without linked emails.
            logger.warning("⚠️ Work-order linked email query failed for SAG-%s: %s", sag_id, exc)
            linked_emails = []

    # Fallback email source: comments that look like logged email notifications.
    if _table_exists("sag_kommentarer"):
        try:
            email_comment_rows = execute_query(
                """
                SELECT created_at, forfatter, indhold
                FROM sag_kommentarer
                WHERE sag_id = %s
                AND deleted_at IS NULL
                AND (
                    COALESCE(indhold, '') ILIKE '%%Email-ID:%%'
                    OR COALESCE(indhold, '') ILIKE '%%📧%%'
                    OR COALESCE(indhold, '') ILIKE '%%Indgående email%%'
                    OR COALESCE(indhold, '') ILIKE '%%Udgående email%%'
                )
                ORDER BY created_at DESC
                LIMIT 30
                """,
                (sag_id,),
            ) or []
        except Exception as exc:
            logger.warning("⚠️ Work-order email comment query failed for SAG-%s: %s", sag_id, exc)
            email_comment_rows = []

    # hardware_assets schema also varies; build SELECT expressions from
    # whichever columns exist, each ending in a constant fallback so the
    # COALESCE chain can never be empty.
    has_name = table_has_column("hardware_assets", "name")
    has_brand = table_has_column("hardware_assets", "brand")
    has_model = table_has_column("hardware_assets", "model")
    has_serial = table_has_column("hardware_assets", "serial_number")
    has_asset_tag = table_has_column("hardware_assets", "asset_tag")
    has_customer_asset_id = table_has_column("hardware_assets", "customer_asset_id")
    has_internal_asset_id = table_has_column("hardware_assets", "internal_asset_id")
    has_type = table_has_column("hardware_assets", "type")
    has_asset_type = table_has_column("hardware_assets", "asset_type")

    name_expr_parts = []
    if has_name:
        name_expr_parts.append("NULLIF(TRIM(h.name), '')")
    if has_brand and has_model:
        name_expr_parts.append("NULLIF(TRIM(CONCAT_WS(' ', h.brand, h.model)), '')")
    if has_brand:
        name_expr_parts.append("NULLIF(TRIM(h.brand), '')")
    if has_model:
        name_expr_parts.append("NULLIF(TRIM(h.model), '')")
    if has_serial:
        name_expr_parts.append("NULLIF(TRIM(h.serial_number), '')")
    name_expr_parts.append("CONCAT('Hardware #', h.id::text)")
    name_expr = "COALESCE(" + ", ".join(name_expr_parts) + ")"

    serial_expr = "h.serial_number" if has_serial else "NULL"

    tag_expr_parts = []
    if has_asset_tag:
        tag_expr_parts.append("NULLIF(TRIM(h.asset_tag), '')")
    if has_customer_asset_id:
        tag_expr_parts.append("NULLIF(TRIM(h.customer_asset_id), '')")
    if has_internal_asset_id:
        tag_expr_parts.append("NULLIF(TRIM(h.internal_asset_id), '')")
    tag_expr_parts.append("'-'")
    tag_expr = "COALESCE(" + ", ".join(tag_expr_parts) + ")"

    type_expr_parts = []
    if has_type:
        type_expr_parts.append("NULLIF(TRIM(h.type), '')")
    if has_asset_type:
        type_expr_parts.append("NULLIF(TRIM(h.asset_type), '')")
    type_expr_parts.append("'ukendt'")
    type_expr = "COALESCE(" + ", ".join(type_expr_parts) + ")"

    hardware_rows = []
    if _table_exists("sag_hardware") and _table_exists("hardware_assets"):
        try:
            hardware_rows = execute_query(
                f"""
                SELECT
                    h.id,
                    {name_expr} AS label_name,
                    {serial_expr} AS serial_number,
                    {tag_expr} AS label_tag,
                    {type_expr} AS label_type
                FROM sag_hardware sh
                JOIN hardware_assets h ON h.id = sh.hardware_id
                WHERE sh.sag_id = %s
                AND sh.deleted_at IS NULL
                ORDER BY label_name ASC
                """,
                (sag_id,),
            ) or []
        except Exception as exc:
            logger.warning("⚠️ Work-order hardware query failed for SAG-%s: %s", sag_id, exc)
            hardware_rows = []

    hardware_html = "".join(
        (
            "<tr>"
            f"<td style='border:1px solid #d4dbe3;padding:8px;'>{html.escape(str(hw.get('label_name') or '-'))}</td>"
            f"<td style='border:1px solid #d4dbe3;padding:8px;'>{html.escape(str(hw.get('serial_number') or '-'))}</td>"
            f"<td style='border:1px solid #d4dbe3;padding:8px;'>{html.escape(str(hw.get('label_tag') or '-'))}</td>"
            f"<td style='border:1px solid #d4dbe3;padding:8px;'>{html.escape(str(hw.get('label_type') or '-'))}</td>"
            "</tr>"
        )
        for hw in hardware_rows
    )
    if not hardware_html:
        hardware_html = "<tr><td colspan='4' style='border:1px solid #d4dbe3;padding:8px;'>Ingen hardware er knyttet til sagen.</td></tr>"

    # Internal messages: system messages always; er_intern-flagged comments
    # too when that column exists in this deployment.
    internal_filter = "COALESCE(er_system_besked, FALSE) = TRUE"
    if table_has_column("sag_kommentarer", "er_intern"):
        internal_filter = "(COALESCE(er_intern, FALSE) = TRUE OR COALESCE(er_system_besked, FALSE) = TRUE)"

    internal_messages = []
    if _table_exists("sag_kommentarer"):
        internal_messages = execute_query(
            f"""
            SELECT created_at, forfatter, indhold
            FROM sag_kommentarer
            WHERE sag_id = %s
            AND deleted_at IS NULL
            AND {internal_filter}
            ORDER BY created_at DESC
            LIMIT 50
            """,
            (sag_id,),
        ) or []

    emails_html = "".join(
        (
            "<div style='border:1px solid #d4dbe3;border-radius:6px;padding:10px;margin-bottom:8px;'>"
            f"<div style='font-size:12px;color:#475569;margin-bottom:6px;'><strong>{html.escape((row.get('subject') or '(Ingen emne)'))}</strong></div>"
            f"<div style='font-size:12px;color:#475569;'>Fra: {html.escape(row.get('from_email') or '-')} · Til: {html.escape(row.get('to_email') or '-')}</div>"
            f"<div style='font-size:12px;color:#64748b;margin:4px 0 8px 0;'>Dato: {html.escape(str(row.get('received_date') or '-'))}</div>"
            f"<div style='font-size:12px;color:#0f172a;white-space:pre-wrap;'>{_nl2br(_clip(_strip_quoted_email_text(row.get('body_text')), 800))}</div>"
            "</div>"
        )
        for row in linked_emails
    )
    # Prefer real linked emails; fall back to email-looking comments, then
    # to a static "no emails" placeholder.
    if not linked_emails and email_comment_rows:
        emails_html = "".join(
            (
                "<div style='border:1px solid #d4dbe3;border-radius:6px;padding:10px;margin-bottom:8px;'>"
                f"<div style='font-size:12px;color:#475569;margin-bottom:6px;'><strong>{html.escape(row.get('forfatter') or 'Email')}</strong> · {html.escape(str(row.get('created_at') or '-'))}</div>"
                f"<div style='font-size:12px;color:#0f172a;white-space:pre-wrap;'>{_nl2br(_clip(_strip_quoted_email_text(row.get('indhold')), 1200))}</div>"
                "</div>"
            )
            for row in email_comment_rows
        )
    elif not emails_html:
        emails_html = "<div style='font-size:13px;color:#64748b;'>Ingen linkede emails.</div>"

    internal_messages_html = "".join(
        (
            "<div style='border:1px solid #d4dbe3;border-radius:6px;padding:10px;margin-bottom:8px;'>"
            f"<div style='font-size:12px;color:#475569;margin-bottom:6px;'><strong>{html.escape(row.get('forfatter') or 'Ukendt')}</strong> · {html.escape(str(row.get('created_at') or '-'))}</div>"
            f"<div style='font-size:12px;color:#0f172a;white-space:pre-wrap;'>{_nl2br(_clip(row.get('indhold'), 900))}</div>"
            "</div>"
        )
        for row in internal_messages
    )
    if not internal_messages_html:
        internal_messages_html = "<div style='font-size:13px;color:#64748b;'>Ingen interne beskeder.</div>"

    # Final document; doubled braces ({{ }}) are literal CSS braces inside
    # the f-string.
    html_doc = f"""
    <!doctype html>
    <html lang='da'>
    <head>
        <meta charset='utf-8'>
        <title>Arbejdsseddel SAG-{case['id']}</title>
        <style>
            body {{ font-family: 'Segoe UI', sans-serif; color: #0f172a; margin: 24px; }}
            .header {{ display:flex; justify-content:space-between; align-items:flex-start; gap: 20px; margin-bottom: 16px; }}
            .brand {{ color:#0f4c75; font-weight:700; font-size:24px; letter-spacing: 0.4px; }}
            .meta {{ font-size:13px; line-height:1.5; }}
            .box {{ border:1px solid #d4dbe3; border-radius:8px; padding:14px; margin-bottom:14px; }}
            .label {{ font-size:11px; text-transform:uppercase; color:#6b7280; letter-spacing:0.6px; }}
            .value {{ font-size:16px; font-weight:600; }}
            table {{ width:100%; border-collapse:collapse; }}
            .signature {{ height:120px; border:1px dashed #94a3b8; border-radius:8px; margin-top:10px; }}
        </style>
    </head>
    <body>
        <div class='header'>
            <div>
                <div class='brand'>BMC Work Order</div>
                <div class='meta'>SAG-{case['id']} · {html.escape(case.get('status') or '-')}<br>Kunde: {html.escape(case.get('customer_name') or '-')}</div>
            </div>
            <div>{barcode_svg}</div>
        </div>

        <div class='box'>
            <div class='label'>Sags titel</div>
            <div class='value'>{html.escape(case.get('titel') or '-')}</div>
        </div>

        <div class='box'>
            <div class='label'>Sagsbeskrivelse</div>
            <div style='font-size:14px;line-height:1.5;color:#0f172a;white-space:pre-wrap;'>{_nl2br(case.get('beskrivelse'))}</div>
        </div>

        <div class='box'>
            <div class='label'>Opgaver (afkryds ved udførelse)</div>
            <table>{todo_items_html}</table>
        </div>

        <div class='box'>
            <div class='label'>Hardware</div>
            <table>
                <thead>
                    <tr>
                        <th style='text-align:left;border:1px solid #d4dbe3;padding:8px;background:#f8fafc;'>Enhed</th>
                        <th style='text-align:left;border:1px solid #d4dbe3;padding:8px;background:#f8fafc;'>Serienr.</th>
                        <th style='text-align:left;border:1px solid #d4dbe3;padding:8px;background:#f8fafc;'>Tag</th>
                        <th style='text-align:left;border:1px solid #d4dbe3;padding:8px;background:#f8fafc;'>Type</th>
                    </tr>
                </thead>
                <tbody>{hardware_html}</tbody>
            </table>
        </div>

        <div class='box'>
            <div class='label'>Linkede emails</div>
            <div>{emails_html}</div>
        </div>

        <div class='box'>
            <div class='label'>Interne beskeder</div>
            <div>{internal_messages_html}</div>
        </div>

        <div class='box'>
            <div class='label'>Underskrift</div>
            <div class='signature'></div>
            <div style='margin-top:8px;font-size:12px;color:#475569;'>Dato: _____________ Navn: __________________________</div>
        </div>

        <div style='font-family:monospace;font-size:13px;margin-top:10px;'>Scan-token: {html.escape(token)}</div>
    </body>
    </html>
    """
    return HTMLResponse(content=html_doc)
|
|
|
|
|
|
@router.post("/sag/{sag_id}/work-orders/{token}/signature-canvas")
async def upload_work_order_signature_canvas(sag_id: int, token: str, payload: SignatureCanvasRequest):
    """Save canvas signature as case file and consume token.

    Raises:
        HTTPException: 404 when the token does not belong to this case,
            400 when the data URL payload is malformed.
    """
    # Validate the case exists (raises if not).
    _resolve_case_row_for_documents(sag_id)

    matching_token = execute_query_single(
        """
        SELECT token, sag_id
        FROM sag_document_tokens
        WHERE token = %s AND token_type = 'work_order' AND sag_id = %s
        """,
        (token, sag_id),
    )
    if not matching_token:
        raise HTTPException(status_code=404, detail="Work-order token not found")

    # A data URL has the form "data:image/png;base64,<payload>".
    if "," not in payload.data_url:
        raise HTTPException(status_code=400, detail="Invalid signature payload")

    encoded_part = payload.data_url.split(",", 1)[1]
    try:
        decoded_signature = base64.b64decode(encoded_part)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=f"Invalid base64 signature: {exc}")

    stored_file = _store_generated_case_file(
        sag_id=sag_id,
        filename=f"SAG-{sag_id}-signature-{datetime.now().strftime('%Y%m%d-%H%M%S')}.png",
        content_bytes=decoded_signature,
        content_type="image/png",
        source_type="signature_canvas",
        source_token=token,
    )

    # Mark the token consumed; COALESCE keeps the first consumption timestamp.
    execute_query(
        """
        UPDATE sag_document_tokens
        SET consumed_at = COALESCE(consumed_at, CURRENT_TIMESTAMP)
        WHERE token = %s
        """,
        (token,),
    )

    return {"status": "saved", "file": stored_file}
|
|
|
|
|
|
@router.post("/sag/{sag_id}/work-orders/{token}/signature-file")
async def upload_work_order_signature_file(
    sag_id: int,
    token: str,
    file: UploadFile = File(...),
    source: str = Form("signature_upload"),
):
    """Save uploaded signature file and consume token.

    Raises:
        HTTPException: 404 when the token does not belong to this case,
            400 for an empty upload.
    """
    # Validate the case exists (raises if not).
    _resolve_case_row_for_documents(sag_id)

    matching_token = execute_query_single(
        """
        SELECT token, sag_id
        FROM sag_document_tokens
        WHERE token = %s AND token_type = 'work_order' AND sag_id = %s
        """,
        (token, sag_id),
    )
    if not matching_token:
        raise HTTPException(status_code=404, detail="Work-order token not found")

    file_bytes = await file.read()
    if not file_bytes:
        raise HTTPException(status_code=400, detail="Empty file")

    # Strip any client-provided directory components from the filename.
    safe_name = Path(file.filename or "signature-upload.bin").name
    stored_file = _store_generated_case_file(
        sag_id=sag_id,
        filename=f"SAG-{sag_id}-{safe_name}",
        content_bytes=file_bytes,
        content_type=file.content_type or "application/octet-stream",
        source_type=source or "signature_upload",
        source_token=token,
    )

    # Mark the token consumed; COALESCE keeps the first consumption timestamp.
    execute_query(
        """
        UPDATE sag_document_tokens
        SET consumed_at = COALESCE(consumed_at, CURRENT_TIMESTAMP)
        WHERE token = %s
        """,
        (token,),
    )

    return {"status": "saved", "file": stored_file}
|
|
|
|
|
|
@router.get("/sag/{sag_id}/labels/hardware/print", response_class=HTMLResponse)
async def print_case_hardware_labels(
    sag_id: int,
    request: Request,
    auto_print: bool = Query(False),
    hardware_id: Optional[int] = Query(None),
):
    """Render printable hardware labels with IDs and barcodes.

    Args:
        auto_print: When true, a small script triggers the browser's print
            dialog shortly after the page loads.
        hardware_id: Optional filter to render a label for one specific
            hardware asset only.

    Bug fix: the original code read an undefined ``payload`` variable to build
    the hardware filter, raising NameError on every request; the filter is now
    a proper optional query parameter.
    """
    _resolve_case_row_for_documents(sag_id)

    # hardware_assets schemas differ between installations; probe columns
    # before composing the SELECT expressions.
    has_name = table_has_column("hardware_assets", "name")
    has_brand = table_has_column("hardware_assets", "brand")
    has_model = table_has_column("hardware_assets", "model")
    has_serial = table_has_column("hardware_assets", "serial_number")
    has_asset_tag = table_has_column("hardware_assets", "asset_tag")
    has_customer_asset_id = table_has_column("hardware_assets", "customer_asset_id")
    has_internal_asset_id = table_has_column("hardware_assets", "internal_asset_id")
    has_type = table_has_column("hardware_assets", "type")
    has_asset_type = table_has_column("hardware_assets", "asset_type")

    # Display name: first non-empty of name, "brand model", brand, model,
    # serial number — falling back to a synthetic "Hardware #<id>".
    name_expr_parts = []
    if has_name:
        name_expr_parts.append("NULLIF(TRIM(h.name), '')")
    if has_brand and has_model:
        name_expr_parts.append("NULLIF(TRIM(CONCAT_WS(' ', h.brand, h.model)), '')")
    if has_brand:
        name_expr_parts.append("NULLIF(TRIM(h.brand), '')")
    if has_model:
        name_expr_parts.append("NULLIF(TRIM(h.model), '')")
    if has_serial:
        name_expr_parts.append("NULLIF(TRIM(h.serial_number), '')")
    name_expr_parts.append("CONCAT('Hardware #', h.id::text)")
    name_expr = "COALESCE(" + ", ".join(name_expr_parts) + ")"

    # Asset tag: first non-empty of the known tag columns, else '-'.
    tag_expr_parts = []
    if has_asset_tag:
        tag_expr_parts.append("NULLIF(TRIM(h.asset_tag), '')")
    if has_customer_asset_id:
        tag_expr_parts.append("NULLIF(TRIM(h.customer_asset_id), '')")
    if has_internal_asset_id:
        tag_expr_parts.append("NULLIF(TRIM(h.internal_asset_id), '')")
    tag_expr_parts.append("'-'")
    tag_expr = "COALESCE(" + ", ".join(tag_expr_parts) + ")"

    # Device type: type, asset_type, else 'ukendt' (unknown).
    type_expr_parts = []
    if has_type:
        type_expr_parts.append("NULLIF(TRIM(h.type), '')")
    if has_asset_type:
        type_expr_parts.append("NULLIF(TRIM(h.asset_type), '')")
    type_expr_parts.append("'ukendt'")
    type_expr = "COALESCE(" + ", ".join(type_expr_parts) + ")"

    serial_expr = "h.serial_number" if has_serial else "NULL"

    # Optional single-asset filter.
    hardware_id_filter: Optional[int] = None
    if hardware_id is not None:
        try:
            hardware_id_filter = int(hardware_id)
        except (TypeError, ValueError):
            raise HTTPException(status_code=400, detail="Ugyldigt hardware_id")

    hardware_query = f"""
        SELECT
            h.id,
            {name_expr} AS label_name,
            {serial_expr} AS serial_number,
            {tag_expr} AS label_tag,
            {type_expr} AS label_type
        FROM sag_hardware sh
        JOIN hardware_assets h ON h.id = sh.hardware_id
        WHERE sh.sag_id = %s
          AND sh.deleted_at IS NULL
    """
    params = [sag_id]
    if hardware_id_filter is not None:
        hardware_query += " AND sh.hardware_id = %s"
        params.append(hardware_id_filter)
    hardware_query += " ORDER BY label_name ASC"

    hardware_rows = execute_query(hardware_query, tuple(params)) or []

    # Tokens are tagged with the requesting user when known; anonymous is fine.
    user_id = None
    try:
        user_id = _get_user_id_from_request(request)
    except HTTPException:
        user_id = None

    label_cards = []
    for hw in hardware_rows:
        token = _create_document_token(
            sag_id=sag_id,
            token_type="hardware_label",
            user_id=user_id,
            hardware_id=hw.get("id"),
        )
        barcode_svg = _render_code39_svg(
            token,
            height=26,
            narrow=1,
            wide=2,
            gap=0,
            font_size=10,
            include_text=False,
        )
        label_cards.append(
            f"""
            <div class='label-card'>
                <div class='title'>{html.escape(hw.get('label_name') or 'Ukendt enhed')}</div>
                <div class='meta'>ID: HW-{hw.get('id')} · SAG-{sag_id}</div>
                <div class='meta'>SN: {html.escape(hw.get('serial_number') or '-')} · Tag: {html.escape(hw.get('label_tag') or '-')} · Type: {html.escape(hw.get('label_type') or '-')}</div>
                <div class='barcode'>{barcode_svg}</div>
                <div class='token'>{html.escape(token)}</div>
            </div>
            """
        )

    if not label_cards:
        label_cards.append("<div class='empty'>Ingen hardware er knyttet til sagen endnu.</div>")

    auto_print_script = ""
    if auto_print:
        auto_print_script = (
            "<script>"
            "window.addEventListener('load', function(){ setTimeout(function(){ window.print(); }, 120); });"
            "</script>"
        )

    html_doc = f"""
    <!doctype html>
    <html lang='da'>
    <head>
    <meta charset='utf-8'>
    <title>Hardware labels SAG-{sag_id}</title>
    <style>
    body {{ margin: 12px; font-family: 'Segoe UI', sans-serif; }}
    .sheet {{ display:grid; grid-template-columns: repeat(auto-fill, minmax(320px, 1fr)); gap: 10px; }}
    .label-card {{ border:1px solid #d6deea; border-radius:8px; padding:10px; background:#fff; break-inside:avoid; overflow:hidden; }}
    .title {{ font-size:15px; font-weight:700; color:#0f4c75; margin-bottom:3px; }}
    .meta {{ font-size:11px; color:#334155; }}
    .barcode svg {{ display:block; width:100%; height:auto; max-width:100%; }}
    .token {{ font-family:monospace; font-size:10px; margin-top:2px; color:#475569; }}
    .empty {{ padding:20px; color:#475569; border:1px dashed #cbd5e1; border-radius:8px; }}
    @media print {{ body {{ margin: 0; }} }}
    </style>
    </head>
    <body>
    <div class='sheet'>
    {''.join(label_cards)}
    </div>
    {auto_print_script}
    </body>
    </html>
    """
    return HTMLResponse(content=html_doc)
|
|
|
|
|
|
@router.post("/sag/{sag_id}/labels/hardware/print-direct")
async def print_case_hardware_labels_direct(
    sag_id: int,
    request: Request,
    payload: Optional[DirectPrintOverrideRequest] = None,
):
    """Print hardware labels directly to configured Brother network printer.

    Bug fix: the original code referenced ``hardware_id_filter`` without ever
    defining it, raising NameError whenever no hardware rows matched. The
    filter is now derived from ``payload.hardware_id`` (when supplied) and
    applied to the hardware query.
    """
    _resolve_case_row_for_documents(sag_id)

    enabled = (_get_setting_value("label_printer_enabled", "false") or "false").strip().lower() == "true"
    if not enabled:
        raise HTTPException(status_code=400, detail="Direkte label-print er ikke aktiveret i indstillinger")

    # Printer connection details: payload overrides win over stored settings.
    host = (payload.printer_host if payload and payload.printer_host else _get_setting_value("label_printer_host", "")).strip()
    port_raw = payload.printer_port if payload and payload.printer_port is not None else _get_setting_value("label_printer_port", "9100")
    model = (payload.printer_model if payload and payload.printer_model else _get_setting_value("label_printer_model", "QL-710W")).strip()
    label_size = (payload.label_size if payload and payload.label_size else _get_setting_value("label_printer_label_size", "62")).strip()

    try:
        port = int(port_raw or 9100)
    except ValueError:
        raise HTTPException(status_code=400, detail="Ugyldig printer-port")

    if not host:
        raise HTTPException(status_code=400, detail="Printer host/IP mangler i indstillinger")

    # hardware_assets schemas differ between installations; probe columns
    # before composing the SELECT expressions.
    has_name = table_has_column("hardware_assets", "name")
    has_brand = table_has_column("hardware_assets", "brand")
    has_model = table_has_column("hardware_assets", "model")
    has_serial = table_has_column("hardware_assets", "serial_number")
    has_asset_tag = table_has_column("hardware_assets", "asset_tag")
    has_customer_asset_id = table_has_column("hardware_assets", "customer_asset_id")
    has_internal_asset_id = table_has_column("hardware_assets", "internal_asset_id")
    has_type = table_has_column("hardware_assets", "type")
    has_asset_type = table_has_column("hardware_assets", "asset_type")

    # Display name: first non-empty candidate, falling back to "Hardware #<id>".
    name_expr_parts = []
    if has_name:
        name_expr_parts.append("NULLIF(TRIM(h.name), '')")
    if has_brand and has_model:
        name_expr_parts.append("NULLIF(TRIM(CONCAT_WS(' ', h.brand, h.model)), '')")
    if has_brand:
        name_expr_parts.append("NULLIF(TRIM(h.brand), '')")
    if has_model:
        name_expr_parts.append("NULLIF(TRIM(h.model), '')")
    if has_serial:
        name_expr_parts.append("NULLIF(TRIM(h.serial_number), '')")
    name_expr_parts.append("CONCAT('Hardware #', h.id::text)")
    name_expr = "COALESCE(" + ", ".join(name_expr_parts) + ")"

    # Asset tag: first non-empty of the known tag columns, else '-'.
    tag_expr_parts = []
    if has_asset_tag:
        tag_expr_parts.append("NULLIF(TRIM(h.asset_tag), '')")
    if has_customer_asset_id:
        tag_expr_parts.append("NULLIF(TRIM(h.customer_asset_id), '')")
    if has_internal_asset_id:
        tag_expr_parts.append("NULLIF(TRIM(h.internal_asset_id), '')")
    tag_expr_parts.append("'-'")
    tag_expr = "COALESCE(" + ", ".join(tag_expr_parts) + ")"

    # Device type: type, asset_type, else 'ukendt' (unknown).
    type_expr_parts = []
    if has_type:
        type_expr_parts.append("NULLIF(TRIM(h.type), '')")
    if has_asset_type:
        type_expr_parts.append("NULLIF(TRIM(h.asset_type), '')")
    type_expr_parts.append("'ukendt'")
    type_expr = "COALESCE(" + ", ".join(type_expr_parts) + ")"

    serial_expr = "h.serial_number" if has_serial else "NULL"

    # Optional single-asset filter from the request payload.
    hardware_id_filter = None
    payload_hardware_id = getattr(payload, "hardware_id", None) if payload else None
    if payload_hardware_id is not None:
        try:
            hardware_id_filter = int(payload_hardware_id)
        except (TypeError, ValueError):
            raise HTTPException(status_code=400, detail="Ugyldigt hardware_id")

    hardware_query = f"""
        SELECT
            h.id,
            {name_expr} AS label_name,
            {serial_expr} AS serial_number,
            {tag_expr} AS label_tag,
            {type_expr} AS label_type
        FROM sag_hardware sh
        JOIN hardware_assets h ON h.id = sh.hardware_id
        WHERE sh.sag_id = %s
          AND sh.deleted_at IS NULL
    """
    params = [sag_id]
    if hardware_id_filter is not None:
        hardware_query += " AND sh.hardware_id = %s"
        params.append(hardware_id_filter)
    hardware_query += " ORDER BY label_name ASC"

    hardware_rows = execute_query(hardware_query, tuple(params)) or []

    if not hardware_rows:
        if hardware_id_filter is not None:
            raise HTTPException(status_code=404, detail="Valgt hardware er ikke knyttet til sagen")
        raise HTTPException(status_code=400, detail="Ingen hardware er knyttet til sagen")

    # Tokens are tagged with the requesting user when known; anonymous is fine.
    user_id = None
    try:
        user_id = _get_user_id_from_request(request)
    except HTTPException:
        user_id = None

    jobs: List[LabelJob] = []
    for hw in hardware_rows:
        token = _create_document_token(
            sag_id=sag_id,
            token_type="hardware_label",
            user_id=user_id,
            hardware_id=hw.get("id"),
        )
        meta = (
            f"ID: HW-{hw.get('id')} SAG-{sag_id} "
            f"SN: {hw.get('serial_number') or '-'} Tag: {hw.get('label_tag') or '-'} Type: {hw.get('label_type') or '-'}"
        )
        jobs.append(
            LabelJob(
                name=str(hw.get("label_name") or "Ukendt enhed"),
                meta_line=meta,
                token=token,
            )
        )

    service = BrotherLabelPrintService(
        model=model,
        host=host,
        port=port,
        label_size=label_size,
    )

    try:
        printed = service.print_jobs(jobs)
    except Exception as exc:
        logger.error("❌ Direct label print failed for SAG-%s: %s", sag_id, exc)
        raise HTTPException(status_code=500, detail=f"Direkte print fejlede: {exc}")

    return {
        "status": "ok",
        "printed": printed,
        "hardware_ids": [int(hw.get("id")) for hw in hardware_rows if hw.get("id") is not None],
        "printer": {
            "model": model,
            "host": host,
            "port": port,
            "label_size": label_size,
        },
    }
|
|
|
|
|
|
# ============================================================================
# FILES - Case Files
# ============================================================================

# Absolute root of the on-disk upload area, resolved once at import time.
UPLOAD_BASE_PATH = Path(settings.UPLOAD_DIR).resolve()
# Sub-directory under UPLOAD_BASE_PATH where case attachments are stored.
SAG_FILE_SUBDIR = "sag_files"
# Create the attachment directory eagerly so request handlers can assume it exists.
(UPLOAD_BASE_PATH / SAG_FILE_SUBDIR).mkdir(parents=True, exist_ok=True)
# Lower-cased extension whitelist (the check in _store_upload_file is currently commented out).
ALLOWED_EXTENSIONS = {ext.lower() for ext in settings.ALLOWED_EXTENSIONS}
# Maximum accepted upload size in bytes; the setting is expressed in megabytes.
MAX_ATTACHMENT_SIZE = settings.EMAIL_MAX_UPLOAD_SIZE_MB * 1024 * 1024
|
|
|
|
def _generate_stored_name(filename: str, subdir: str) -> str:
|
|
cleaned = Path(filename).name
|
|
unique = f"{uuid4().hex}_{cleaned}"
|
|
return f"{subdir}/{unique}"
|
|
|
|
def _resolve_attachment_path(stored_name: str) -> Path:
    """Map a stored relative name to its absolute path under the upload root."""
    return UPLOAD_BASE_PATH.joinpath(stored_name)
|
|
|
|
def _store_upload_file(upload_file: UploadFile, subdir: str):
    """Validate and persist an uploaded file under *subdir*.

    Returns:
        Tuple of (stored_name, size_in_bytes).

    Raises:
        HTTPException: 400 for a missing filename or an oversized file,
            500 when writing to disk fails.
    """
    if not upload_file.filename:
        raise HTTPException(400, detail="Filename missing")

    # NOTE: extension filtering against ALLOWED_EXTENSIONS is intentionally
    # not enforced here, so arbitrary case attachments are accepted.

    # Determine the size by seeking the underlying file object to its end.
    upload_file.file.seek(0, os.SEEK_END)
    size = upload_file.file.tell()
    upload_file.file.seek(0)

    if size > MAX_ATTACHMENT_SIZE:
        raise HTTPException(400, detail=f"File too large (> {settings.EMAIL_MAX_UPLOAD_SIZE_MB}MB)")

    stored_name = _generate_stored_name(upload_file.filename, subdir)
    destination = _resolve_attachment_path(stored_name)
    destination.parent.mkdir(parents=True, exist_ok=True)

    try:
        with destination.open("wb") as buffer:
            shutil.copyfileobj(upload_file.file, buffer)
    except Exception as e:
        logger.error(f"Upload failed: {e}")
        raise HTTPException(500, detail="Server upload failed")

    return stored_name, size
|
|
|
|
@router.get("/sag/{sag_id}/files")
async def list_sag_files(sag_id: int):
    """List files attached to a case."""
    try:
        # Tolerate a missing table (fresh installs) instead of erroring.
        if not _table_exists("sag_files"):
            logger.warning("⚠️ sag_files table missing - returning empty file list for SAG-%s", sag_id)
            return []

        # Older schemas may lack created_at; fall back to id ordering.
        order_expr = "created_at DESC" if table_has_column("sag_files", "created_at") else "id DESC"
        rows = execute_query(
            f"""
            SELECT * FROM sag_files
            WHERE sag_id = %s
            ORDER BY {order_expr}
            """,
            (sag_id,),
        )
        # Attach a ready-made download URL to every record.
        for row in rows or []:
            row["download_url"] = f"/api/v1/sag/{sag_id}/files/{row['id']}"
        return rows or []
    except Exception as e:
        logger.error("❌ Error listing files: %s", e)
        raise HTTPException(status_code=500, detail="Failed to list files")
|
|
|
|
@router.post("/sag/{sag_id}/files")
async def upload_sag_files(sag_id: int, files: List[UploadFile] = File(...)):
    """Upload files to a case.

    Invalid or failing files are skipped; the response lists only the files
    that were stored successfully.
    """
    if not _table_exists("sag_files"):
        raise HTTPException(status_code=503, detail="sag_files table is missing. Run database migrations first")

    case_row = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
    if not case_row:
        raise HTTPException(status_code=404, detail="Case not found")

    insert_sql = """
        INSERT INTO sag_files (sag_id, filename, content_type, size_bytes, stored_name)
        VALUES (%s, %s, %s, %s, %s)
        RETURNING id, filename, created_at
    """

    stored_records = []
    for upload in files:
        try:
            stored_name, byte_count = _store_upload_file(upload, SAG_FILE_SUBDIR)
            rows = execute_query(insert_sql, (sag_id, upload.filename, upload.content_type, byte_count, stored_name))
            if rows:
                record = rows[0]
                record["download_url"] = f"/api/v1/sag/{sag_id}/files/{record['id']}"
                stored_records.append(record)
        except HTTPException:
            # Validation rejected this file; move on to the next one.
            continue
        except Exception as e:
            logger.error(f"Error saving file {upload.filename}: {e}")
            continue

    return stored_records
|
|
|
|
@router.get("/sag/{sag_id}/files/{file_id}")
async def download_sag_file(sag_id: int, file_id: int, download: bool = False):
    """Download or preview a specific file.

    Args:
        download: If True, force download. If False (default), display inline in browser.
    """
    if not _table_exists("sag_files"):
        raise HTTPException(status_code=503, detail="sag_files table is missing. Run database migrations first")

    rows = execute_query("SELECT * FROM sag_files WHERE id = %s AND sag_id = %s", (file_id, sag_id))
    if not rows:
        raise HTTPException(status_code=404, detail="File not found")

    record = rows[0]
    file_path = _resolve_attachment_path(record["stored_name"])
    if not file_path.exists():
        raise HTTPException(status_code=404, detail="File lost on server")

    # inline → rendered in the browser; attachment → forced download.
    disposition = "attachment" if download else "inline"
    headers = {"Content-Disposition": f'{disposition}; filename="{record["filename"]}"'}

    return FileResponse(
        path=file_path,
        filename=record["filename"],
        media_type=record.get("content_type", "application/octet-stream"),
        headers=headers,
    )
|
|
|
|
|
|
@router.get("/sag/{sag_id}/files/{file_id}/preview-image")
async def preview_sag_pdf_as_image(sag_id: int, file_id: int, page: int = Query(1, ge=1), scale: float = Query(2.8, ge=1.0, le=5.0)):
    """Render a PDF page as PNG for consistent in-app preview sizing.

    Args:
        page: 1-based page number; clamped to the document's page range.
        scale: Render scale factor.

    Fixes a resource leak: the original never closed the PdfDocument handle
    (only the page), which keeps the file open on some platforms.
    """
    if not _table_exists("sag_files"):
        raise HTTPException(status_code=503, detail="sag_files table is missing. Run database migrations first")

    query = "SELECT * FROM sag_files WHERE id = %s AND sag_id = %s"
    result = execute_query(query, (file_id, sag_id))

    if not result:
        raise HTTPException(status_code=404, detail="File not found")

    file_data = result[0]
    path = _resolve_attachment_path(file_data["stored_name"])

    if not path.exists():
        raise HTTPException(status_code=404, detail="File lost on server")

    content_type = (file_data.get("content_type") or "").lower()
    filename = str(file_data.get("filename") or "").lower()
    if "pdf" not in content_type and not filename.endswith(".pdf"):
        raise HTTPException(status_code=400, detail="Preview image is only supported for PDF files")

    try:
        import io

        import pypdfium2 as pdfium

        document = pdfium.PdfDocument(str(path))
        try:
            # Clamp the requested page into the valid 0-based index range.
            page_index = min(max(page - 1, 0), max(len(document) - 1, 0))
            pdf_page = document.get_page(page_index)
            try:
                bitmap = pdf_page.render(scale=scale)
                image = bitmap.to_pil().convert("RGB")

                buffer = io.BytesIO()
                image.save(buffer, format="PNG", optimize=True)
                data = buffer.getvalue()
            finally:
                try:
                    pdf_page.close()
                except Exception:
                    pass
        finally:
            # Close the document too (the original leaked this handle).
            try:
                document.close()
            except Exception:
                pass

        return Response(content=data, media_type="image/png")
    except HTTPException:
        raise
    except Exception as e:
        logger.error("❌ PDF preview render failed for SAG-%s file %s: %s", sag_id, file_id, e)
        raise HTTPException(status_code=500, detail="Could not render PDF preview")
|
|
|
|
@router.delete("/sag/{sag_id}/files/{file_id}")
async def delete_sag_file(sag_id: int, file_id: int):
    """Delete a file record and, best-effort, its on-disk content.

    Raises:
        HTTPException: 404 when no matching record exists, 503 when the
            sag_files table is missing.
    """
    if not _table_exists("sag_files"):
        raise HTTPException(status_code=503, detail="sag_files table is missing. Run database migrations first")

    query = "DELETE FROM sag_files WHERE id = %s AND sag_id = %s RETURNING stored_name"
    result = execute_query(query, (file_id, sag_id))

    if not result:
        raise HTTPException(status_code=404, detail="File not found")

    # Best-effort disk cleanup: the DB row is already gone, so only
    # filesystem errors (not KeyboardInterrupt etc.) are swallowed.
    path = _resolve_attachment_path(result[0]["stored_name"])
    if path.exists():
        try:
            os.remove(path)
        except OSError:
            pass
    return {"status": "deleted"}
|
|
|
|
# ============================================================================
|
|
# EMAILS - Case Emails (Linked)
|
|
# ============================================================================
|
|
|
|
@router.post("/sag/{sag_id}/email-links")
async def add_sag_email_link(sag_id: int, payload: dict):
    """Link an existing email to a case.

    The insert is idempotent: an already-existing link is a no-op.
    """
    email_id = payload.get("email_id")
    if not email_id:
        raise HTTPException(status_code=400, detail="email_id required")

    link_sql = """
        INSERT INTO sag_emails (sag_id, email_id)
        VALUES (%s, %s)
        ON CONFLICT DO NOTHING
        RETURNING *
    """
    execute_query(link_sql, (sag_id, email_id))
    return {"status": "linked"}
|
|
|
|
@router.get("/sag/{sag_id}/email-links")
async def get_sag_emails(sag_id: int):
    """Get emails linked to a case, annotated with thread grouping columns.

    Each row gains ``resolved_thread_key``, ``is_outgoing``,
    ``thread_message_count`` and ``thread_last_received_date``. Returns []
    when required tables are missing or the query fails.

    Refactor: the thread-key CTE was previously duplicated verbatim in both
    the with- and without-received_date branches; it is now built once.
    """
    if not _table_exists("sag_emails") or not _table_exists("email_messages"):
        logger.warning("⚠️ Email links requested for SAG-%s but required tables are missing", sag_id)
        return []

    # email_messages schemas differ between installations; probe columns first.
    has_thread_key = table_has_column("email_messages", "thread_key")
    has_email_references = table_has_column("email_messages", "email_references")
    has_in_reply_to = table_has_column("email_messages", "in_reply_to")
    has_subject = table_has_column("email_messages", "subject")
    has_message_id = table_has_column("email_messages", "message_id")
    has_folder = table_has_column("email_messages", "folder")
    has_status = table_has_column("email_messages", "status")
    has_received_date = table_has_column("email_messages", "received_date")

    thread_key_expr = "e.thread_key" if has_thread_key else "NULL"
    email_references_expr = "e.email_references" if has_email_references else "NULL"
    in_reply_to_expr = "e.in_reply_to" if has_in_reply_to else "NULL"
    subject_expr = "e.subject" if has_subject else "NULL"
    message_id_expr = "e.message_id" if has_message_id else "NULL"

    # An email counts as outgoing when its folder looks like a sent folder or
    # its status is 'sent'. ('%%' escapes a literal % for the DB driver.)
    outgoing_checks = []
    if has_folder:
        outgoing_checks.append("LOWER(COALESCE(linked_emails.folder, '')) LIKE 'sent%%'")
    if has_status:
        outgoing_checks.append("LOWER(COALESCE(linked_emails.status, '')) = 'sent'")
    is_outgoing_expr = " OR ".join(outgoing_checks) if outgoing_checks else "FALSE"

    # Shared CTE: resolve one stable thread key per linked email, first match
    # wins: explicit thread_key -> first References id -> In-Reply-To ->
    # normalised subject (Re/Fw/Fwd/Sv/Aw prefixes stripped) -> own
    # Message-ID -> synthetic per-email key.
    linked_emails_cte = f"""
        WITH linked_emails AS (
            SELECT
                e.*,
                COALESCE(
                    NULLIF(REGEXP_REPLACE(TRIM(COALESCE({thread_key_expr}, '')), '[<>\\s]', '', 'g'), ''),
                    NULLIF(REGEXP_REPLACE((REGEXP_SPLIT_TO_ARRAY(COALESCE({email_references_expr}, ''), E'[\\s,]+'))[1], '[<>\\s]', '', 'g'), ''),
                    NULLIF(
                        REGEXP_REPLACE(
                            (REGEXP_SPLIT_TO_ARRAY(COALESCE({in_reply_to_expr}, ''), E'[\\s,]+'))[1],
                            '[<>\\s]',
                            '',
                            'g'
                        ),
                        ''
                    ),
                    NULLIF(
                        REGEXP_REPLACE(
                            LOWER(TRIM(COALESCE({subject_expr}, ''))),
                            '^(?:(?:re|fw|fwd|sv|aw)\\s*:\\s*)+',
                            '',
                            'i'
                        ),
                        ''
                    ),
                    NULLIF(REGEXP_REPLACE(TRIM(COALESCE({message_id_expr}, '')), '[<>\\s]', '', 'g'), ''),
                    CONCAT('email-', e.id::text)
                ) AS resolved_thread_key
            FROM email_messages e
            JOIN sag_emails se ON e.id = se.email_id
            WHERE se.sag_id = %s
        )
    """

    if has_received_date:
        select_tail = f"""
        SELECT
            linked_emails.*,
            ({is_outgoing_expr}) AS is_outgoing,
            COUNT(*) OVER (PARTITION BY linked_emails.resolved_thread_key) AS thread_message_count,
            MAX(linked_emails.received_date) OVER (PARTITION BY linked_emails.resolved_thread_key) AS thread_last_received_date
        FROM linked_emails
        ORDER BY thread_last_received_date DESC NULLS LAST, received_date DESC
        """
    else:
        # No received_date column: emit a NULL placeholder and order by id.
        select_tail = f"""
        SELECT
            linked_emails.*,
            ({is_outgoing_expr}) AS is_outgoing,
            COUNT(*) OVER (PARTITION BY linked_emails.resolved_thread_key) AS thread_message_count,
            NULL::timestamp AS thread_last_received_date
        FROM linked_emails
        ORDER BY linked_emails.id DESC
        """

    query = linked_emails_cte + select_tail

    try:
        return execute_query(query, (sag_id,)) or []
    except Exception as exc:
        logger.error("❌ Failed loading linked emails for SAG-%s: %s", sag_id, exc)
        return []
|
|
|
|
@router.delete("/sag/{sag_id}/email-links/{email_id}")
async def remove_sag_email_link(sag_id: int, email_id: int):
    """Unlink an email."""
    execute_query(
        "DELETE FROM sag_emails WHERE sag_id = %s AND email_id = %s",
        (sag_id, email_id),
    )
    return {"status": "unlinked"}
|
|
|
|
def _decode_header_str(header_val):
|
|
if not header_val: return ""
|
|
try:
|
|
decoded_list = decode_header(header_val)
|
|
result = ""
|
|
for content, encoding in decoded_list:
|
|
if isinstance(content, bytes):
|
|
if encoding:
|
|
try:
|
|
result += content.decode(encoding)
|
|
except:
|
|
result += content.decode('utf-8', errors='ignore')
|
|
else:
|
|
result += content.decode('utf-8', errors='ignore')
|
|
else:
|
|
result += str(content)
|
|
return result
|
|
except:
|
|
return str(header_val)
|
|
|
|
@router.post("/sag/{sag_id}/upload-email")
async def upload_sag_email(sag_id: int, file: UploadFile = File(...)):
    """Upload .eml/.msg, parse it, save to email_messages, and link to case.

    Raises:
        HTTPException: 400 for unsupported file types, 500 when .msg support
            is unavailable (extract-msg not installed).

    Robustness fixes: ``file.filename`` may be None (previously crashed on
    ``.lower()``), and ``get_payload(decode=True)`` may return None
    (previously crashed on ``.decode``).
    """
    content = await file.read()
    filename = (file.filename or "").lower()
    email_data = {}
    temp_id = uuid4().hex

    # 1. Parse the upload into a normalised email_data dict.
    if filename.endswith('.msg'):
        if not extract_msg:
            raise HTTPException(500, "extract-msg missing")
        import io
        msg = extract_msg.Message(io.BytesIO(content))
        email_data = {
            'message_id': msg.messageId or f"msg-{temp_id}",
            'subject': msg.subject or "No Subject",
            'sender_email': msg.sender or "",
            'sender_name': msg.sender or "",
            'recipient_email': msg.to or "",
            'cc': msg.cc or "",
            'body_text': msg.body,
            'body_html': msg.htmlBody,
            'received_date': msg.date or datetime.now(),
            'folder': 'Imported',
            'attachments': [],
            'has_attachments': False,
            'attachment_count': 0
        }
    elif filename.endswith('.eml'):
        msg = email.message_from_bytes(content)
        body_text = ""
        if msg.is_multipart():
            # Use the first text/plain part as the body.
            for part in msg.walk():
                if part.get_content_type() == "text/plain":
                    payload_bytes = part.get_payload(decode=True) or b""
                    body_text = payload_bytes.decode('utf-8', errors='ignore')
                    break
        else:
            payload_bytes = msg.get_payload(decode=True) or b""
            body_text = payload_bytes.decode('utf-8', errors='ignore')

        email_data = {
            'message_id': msg.get('Message-ID', f"eml-{temp_id}"),
            'in_reply_to': _decode_header_str(msg.get('In-Reply-To', '')),
            'email_references': _decode_header_str(msg.get('References', '')),
            'subject': _decode_header_str(msg.get('Subject', 'No Subject')),
            'sender_email': _decode_header_str(msg.get('From', '')),
            'sender_name': _decode_header_str(msg.get('From', '')),
            'recipient_email': _decode_header_str(msg.get('To', '')),
            'cc': _decode_header_str(msg.get('Cc', '')),
            'received_date': datetime.now(),
            'body_text': body_text,
            'body_html': "",
            'folder': 'Imported',
            'has_attachments': False,
            'attachment_count': 0,
            'attachments': []
        }
    else:
        raise HTTPException(400, "Only .msg or .eml files allowed")

    # 2. Save email via service, deduplicating on Message-ID.
    svc = EmailService()
    existing = execute_query_single("SELECT id FROM email_messages WHERE message_id = %s", (email_data['message_id'],))

    if existing:
        email_id = existing['id']
    else:
        email_id = await svc.save_email(email_data)

    # 3. Link the (new or existing) email to the case.
    await add_sag_email_link(sag_id, {"email_id": email_id})
    return {"status": "imported", "email_id": email_id}
|
|
|
|
|
|
@router.post("/sag/{sag_id}/emails/send")
async def send_sag_email(sag_id: int, payload: SagSendEmailRequest, request: Request):
    """Send outbound email directly from case email tab and link it to case.

    High-level flow:
      1. Validate the case, recipients, subject and body.
      2. Load and verify any case-file attachments selected in the payload.
      3. Resolve threading headers (In-Reply-To / References) and a thread key
         when replying inside an existing thread.
      4. Append the user's signature and a hidden "BMCid" HTML tracker so
         inbound replies can be routed back to this case/thread.
      5. Send via EmailService, then best-effort persist the outbound message,
         its attachments, the case link, and a system comment on the case.

    Persistence is deliberately fault-tolerant: the email has already left the
    server once sending succeeds, so every local write below degrades through
    fallbacks (older DB schemas) and, at worst, returns a warning instead of
    failing the request.

    Args:
        sag_id: Case id the email belongs to (path parameter).
        payload: SagSendEmailRequest with recipients, subject, bodies,
            optional attachment file ids and optional thread identifiers.
        request: Incoming request; used only to resolve the acting user id.

    Returns:
        dict with status, local email_id (may be None if logging failed),
        provider message, link status and the created case comment.

    Raises:
        HTTPException: 404 for unknown case / missing attachment file,
            400 for validation errors, 500 when the provider send fails.
    """
    # Reject sends against deleted or unknown cases up front.
    case_exists = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
    if not case_exists:
        raise HTTPException(status_code=404, detail="Case not found")

    to_addresses = _normalize_email_list(payload.to, "to")
    cc_addresses = _normalize_email_list(payload.cc, "cc")
    bcc_addresses = _normalize_email_list(payload.bcc, "bcc")

    if not to_addresses:
        raise HTTPException(status_code=400, detail="At least one recipient in 'to' is required")

    subject = (payload.subject or "").strip()
    body_text = (payload.body_text or "").strip()
    if not subject:
        raise HTTPException(status_code=400, detail="subject is required")
    if not body_text:
        raise HTTPException(status_code=400, detail="body_text is required")

    # Resolve acting user for the signature; fall back to user 1 when the
    # request carries no authentication context.
    try:
        user_id = _get_user_id_from_request(request)
    except HTTPException:
        user_id = 1

    attachment_rows = []
    # dict.fromkeys() de-duplicates while preserving the selection order.
    attachment_ids = list(dict.fromkeys(payload.attachment_file_ids or []))
    if attachment_ids:
        placeholders = ",".join(["%s"] * len(attachment_ids))
        attachment_query = f"""
            SELECT id, filename, content_type, size_bytes, stored_name
            FROM sag_files
            WHERE sag_id = %s AND id IN ({placeholders})
        """
        attachment_rows = execute_query(attachment_query, (sag_id, *attachment_ids))

        # A count mismatch means at least one id was invalid or belongs to
        # another case; refuse the whole send rather than silently dropping it.
        if len(attachment_rows) != len(attachment_ids):
            raise HTTPException(status_code=400, detail="One or more selected attachments were not found on this case")

    # Load attachment bytes from disk into the shape EmailService expects.
    smtp_attachments = []
    for row in attachment_rows:
        path = _resolve_attachment_path(row["stored_name"])
        if not path.exists():
            raise HTTPException(status_code=404, detail=f"Attachment file is missing on server: {row['filename']}")

        smtp_attachments.append({
            "filename": row["filename"],
            "content_type": row.get("content_type") or "application/octet-stream",
            "content": path.read_bytes(),
            "size": row.get("size_bytes") or 0,
            "file_path": str(path),
        })

    # --- Threading: when replying to an existing email, derive RFC-5322
    # In-Reply-To / References headers and a stable thread key. ---
    in_reply_to_header = None
    references_header = None
    selected_thread_key = None
    if payload.thread_email_id:
        thread_row = None
        try:
            thread_row = execute_query_single(
                """
                SELECT id, message_id, in_reply_to, email_references, thread_key
                FROM email_messages
                WHERE id = %s
                """,
                (payload.thread_email_id,),
            )
        except Exception:
            # Backward compatibility for DBs without in_reply_to/email_references columns.
            try:
                thread_row = execute_query_single(
                    """
                    SELECT id, message_id, thread_key
                    FROM email_messages
                    WHERE id = %s
                    """,
                    (payload.thread_email_id,),
                )
            except Exception:
                # Oldest schema: only message_id is guaranteed.
                thread_row = execute_query_single(
                    """
                    SELECT id, message_id
                    FROM email_messages
                    WHERE id = %s
                    """,
                    (payload.thread_email_id,),
                )
        if thread_row:
            # Message-IDs must be wrapped in angle brackets for the headers.
            base_message_id = str(thread_row.get("message_id") or "").strip()
            if base_message_id and not base_message_id.startswith("<"):
                base_message_id = f"<{base_message_id}>"
            if base_message_id:
                in_reply_to_header = base_message_id

                # References = existing chain + the message being replied to.
                existing_refs = str(thread_row.get("email_references") or "").strip()
                if existing_refs:
                    references_header = f"{existing_refs} {base_message_id}".strip()
                else:
                    references_header = base_message_id

            # NOTE: in_reply_to / email_references keys are absent on the
            # fallback rows above; .get() then yields None, which the helper
            # presumably tolerates — confirm in _derive_thread_key_for_outbound.
            selected_thread_key = _derive_thread_key_for_outbound(
                thread_row.get("thread_key"),
                thread_row.get("in_reply_to"),
                thread_row.get("email_references"),
                thread_row.get("message_id"),
            )

    effective_payload_thread_key = payload.thread_key or selected_thread_key
    if not effective_payload_thread_key:
        # Brand-new thread: assign a local key immediately so signature/BMCid
        # can carry a resolvable thread identity from the first outbound email.
        effective_payload_thread_key = _generate_local_thread_key_for_new_outbound(sag_id)

    provisional_thread_key = _derive_thread_key_for_outbound(
        effective_payload_thread_key,
        in_reply_to_header,
        references_header,
        None,
    )
    # Append signature (carrying the thread identity) to both body variants.
    signature = _render_signature_for_case_email(user_id, sag_id, provisional_thread_key)
    body_text = _append_signature_to_body(body_text, signature)
    body_html = _append_signature_to_html(payload.body_html, signature)

    # Inject hidden BMCid tracker into HTML body so replies can be routed back
    bmc_id_tag = _build_case_bmc_id_tag(sag_id, provisional_thread_key)
    hidden_tracker = f'<div style="display:none!important;font-size:0;line-height:0;height:0;overflow:hidden" class="bmc-thread-id">BMCid: {bmc_id_tag}</div>'
    if body_html:
        body_html = f"{hidden_tracker}{body_html}"
    elif body_text:
        # Synthesize minimal HTML wrapper with tracker when only plain text exists
        import html as _html
        body_html = f"{hidden_tracker}<pre>{_html.escape(body_text)}</pre>"

    # Ensure subject carries [SAG-XX] prefix for reliable subject-line matching
    sag_prefix = f"[SAG-{sag_id}]"
    if sag_prefix not in subject:
        subject = f"{sag_prefix} {subject}"

    email_service = EmailService()
    success, send_message, generated_message_id, provider_thread_key = await email_service.send_email_with_attachments(
        to_addresses=to_addresses,
        subject=subject,
        body_text=body_text,
        body_html=body_html,
        cc=cc_addresses,
        bcc=bcc_addresses,
        in_reply_to=in_reply_to_header,
        references=references_header,
        attachments=smtp_attachments,
        respect_dry_run=False,
    )

    if not success:
        logger.error("❌ Failed to send case email for case %s: %s", sag_id, send_message)
        failure_detail = str(send_message or "Email send failed without provider detail").strip()
        # Keep error user-facing but concise; frontend will display this detail directly.
        failure_detail = failure_detail.replace("❌", "").strip()
        if failure_detail.lower() == "failed to send email":
            failure_detail = "Email send failed (provider returned generic error; check Graph app permissions and API logs)"
        raise HTTPException(status_code=500, detail=f"{failure_detail} [diag:sag-send-v3]")

    sender_name = settings.EMAIL_SMTP_FROM_NAME or "BMC Hub"
    sender_email = settings.EMAIL_SMTP_FROM_ADDRESS or ""
    provider_thread_key_normalized = _normalize_message_id_token(provider_thread_key)

    # Keep replies in the existing case thread when we already know the target thread.
    # Some providers may return a new conversation id even for replies.
    derived_thread_key = _derive_thread_key_for_outbound(
        effective_payload_thread_key,
        in_reply_to_header,
        references_header,
        None,
    )

    thread_key = derived_thread_key or provider_thread_key_normalized
    if not thread_key:
        # Last resort: derive a key from the freshly generated message id.
        thread_key = _derive_thread_key_for_outbound(
            effective_payload_thread_key,
            in_reply_to_header,
            references_header,
            generated_message_id,
        )

    # --- Persist the outbound email locally. Three tiers of INSERT are tried,
    # each assuming progressively fewer columns, to support older schemas. ---
    insert_result = None
    insert_error = None
    try:
        # Tier 1: full modern schema incl. threading + status/link columns.
        insert_email_query = """
            INSERT INTO email_messages (
                message_id, subject, sender_email, sender_name,
                recipient_email, cc, body_text, body_html,
                in_reply_to, email_references, thread_key,
                received_date, folder, has_attachments, attachment_count,
                status, import_method, linked_case_id
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            ON CONFLICT (message_id) DO UPDATE
            SET
                subject = EXCLUDED.subject,
                sender_email = EXCLUDED.sender_email,
                sender_name = EXCLUDED.sender_name,
                recipient_email = EXCLUDED.recipient_email,
                cc = EXCLUDED.cc,
                body_text = EXCLUDED.body_text,
                body_html = EXCLUDED.body_html,
                in_reply_to = COALESCE(EXCLUDED.in_reply_to, email_messages.in_reply_to),
                email_references = COALESCE(EXCLUDED.email_references, email_messages.email_references),
                thread_key = COALESCE(EXCLUDED.thread_key, email_messages.thread_key),
                folder = 'Sent',
                has_attachments = EXCLUDED.has_attachments,
                attachment_count = EXCLUDED.attachment_count,
                status = 'sent',
                import_method = COALESCE(email_messages.import_method, EXCLUDED.import_method),
                linked_case_id = COALESCE(email_messages.linked_case_id, EXCLUDED.linked_case_id),
                updated_at = CURRENT_TIMESTAMP
            RETURNING id
        """
        insert_result = execute_query(
            insert_email_query,
            (
                generated_message_id,
                subject,
                sender_email,
                sender_name,
                ", ".join(to_addresses),
                ", ".join(cc_addresses),
                body_text,
                body_html,
                in_reply_to_header,
                references_header,
                thread_key,
                datetime.now(),
                "Sent",
                bool(smtp_attachments),
                len(smtp_attachments),
                "sent",
                "manual_upload",
                sag_id,
            ),
        )
    except Exception as e:
        insert_error = e
        logger.warning("⚠️ Outbound email full insert fallback for case %s: %s", sag_id, e)

    if not insert_result:
        try:
            # Tier 2: no threading columns (in_reply_to/email_references/thread_key).
            insert_email_query = """
                INSERT INTO email_messages (
                    message_id, subject, sender_email, sender_name,
                    recipient_email, cc, body_text, body_html,
                    received_date, folder, has_attachments, attachment_count,
                    status, import_method, linked_case_id
                )
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON CONFLICT (message_id) DO UPDATE
                SET
                    subject = EXCLUDED.subject,
                    sender_email = EXCLUDED.sender_email,
                    sender_name = EXCLUDED.sender_name,
                    recipient_email = EXCLUDED.recipient_email,
                    cc = EXCLUDED.cc,
                    body_text = EXCLUDED.body_text,
                    body_html = EXCLUDED.body_html,
                    folder = 'Sent',
                    has_attachments = EXCLUDED.has_attachments,
                    attachment_count = EXCLUDED.attachment_count,
                    status = 'sent',
                    import_method = COALESCE(email_messages.import_method, EXCLUDED.import_method),
                    linked_case_id = COALESCE(email_messages.linked_case_id, EXCLUDED.linked_case_id),
                    updated_at = CURRENT_TIMESTAMP
                RETURNING id
            """
            insert_result = execute_query(
                insert_email_query,
                (
                    generated_message_id,
                    subject,
                    sender_email,
                    sender_name,
                    ", ".join(to_addresses),
                    ", ".join(cc_addresses),
                    body_text,
                    body_html,
                    datetime.now(),
                    "Sent",
                    bool(smtp_attachments),
                    len(smtp_attachments),
                    "sent",
                    "manual_upload",
                    sag_id,
                ),
            )
        except Exception as e:
            insert_error = e
            logger.warning("⚠️ Outbound email medium insert fallback for case %s: %s", sag_id, e)

    if not insert_result:
        # Legacy-safe fallback: persist with minimal guaranteed columns.
        try:
            insert_email_query = """
                INSERT INTO email_messages (
                    message_id, subject, sender_email, sender_name,
                    recipient_email, cc, body_text,
                    received_date, folder, has_attachments, attachment_count
                )
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON CONFLICT (message_id) DO UPDATE
                SET
                    subject = EXCLUDED.subject,
                    sender_email = EXCLUDED.sender_email,
                    sender_name = EXCLUDED.sender_name,
                    recipient_email = EXCLUDED.recipient_email,
                    cc = EXCLUDED.cc,
                    body_text = EXCLUDED.body_text,
                    folder = 'Sent',
                    has_attachments = EXCLUDED.has_attachments,
                    attachment_count = EXCLUDED.attachment_count,
                    updated_at = CURRENT_TIMESTAMP
                RETURNING id
            """
            insert_result = execute_query(
                insert_email_query,
                (
                    generated_message_id,
                    subject,
                    sender_email,
                    sender_name,
                    ", ".join(to_addresses),
                    ", ".join(cc_addresses),
                    body_text,
                    datetime.now(),
                    "Sent",
                    bool(smtp_attachments),
                    len(smtp_attachments),
                ),
            )
        except Exception as e:
            insert_error = e
            logger.error("❌ Email sent but outbound log insert failed for case %s: %s", sag_id, e)

    email_id = None
    if insert_result:
        email_id = insert_result[0]["id"]
    else:
        # Last chance recovery: if row exists already, continue with that id.
        existing_email = execute_query_single(
            "SELECT id FROM email_messages WHERE message_id = %s",
            (generated_message_id,),
        ) if generated_message_id else None
        if existing_email:
            email_id = existing_email["id"]
        else:
            # Email left the server but could not be logged; report success
            # with a warning rather than failing the request.
            warning_detail = str(insert_error or "email logging failed")
            logger.error("❌ Email sent but no local email_id could be resolved for case %s", sag_id)
            return {
                "status": "sent",
                "email_id": None,
                "message": send_message,
                "warning": f"Email sent but could not be logged locally: {warning_detail}",
            }

    # Best-effort: store attachment blobs alongside the logged email.
    if smtp_attachments and email_id:
        from psycopg2 import Binary

        for attachment in smtp_attachments:
            try:
                execute_query(
                    """
                    INSERT INTO email_attachments (
                        email_id, filename, content_type, size_bytes, file_path, content_data
                    )
                    VALUES (%s, %s, %s, %s, %s, %s)
                    """,
                    (
                        email_id,
                        attachment["filename"],
                        attachment["content_type"],
                        attachment.get("size") or len(attachment["content"]),
                        attachment.get("file_path"),
                        Binary(attachment["content"]),  # raw bytes as a BYTEA parameter
                    ),
                )
            except Exception as e:
                logger.warning(
                    "⚠️ Could not persist outbound email attachment '%s' for email_id=%s: %s",
                    attachment.get("filename"),
                    email_id,
                    e,
                )

    # Link the email to the case: prefer the sag_emails join table, fall back
    # to a linked_case_id column update when the join insert fails.
    linked_ok = False
    try:
        execute_query(
            """
            INSERT INTO sag_emails (sag_id, email_id)
            VALUES (%s, %s)
            ON CONFLICT DO NOTHING
            """,
            (sag_id, email_id),
        )
        linked_ok = True
    except Exception as e:
        logger.warning("⚠️ Could not insert sag_emails link for case=%s email_id=%s: %s", sag_id, email_id, e)
        if table_has_column("email_messages", "linked_case_id"):
            try:
                execute_query(
                    "UPDATE email_messages SET linked_case_id = %s WHERE id = %s",
                    (sag_id, email_id),
                )
                linked_ok = True
            except Exception as nested_e:
                logger.warning(
                    "⚠️ Fallback linked_case_id update also failed for case=%s email_id=%s: %s",
                    sag_id,
                    email_id,
                    nested_e,
                )

    # Compose a Danish-language system comment summarizing the outbound email.
    sent_ts = datetime.now().isoformat()
    outgoing_comment = (
        "📧 Udgående email\n"
        f"Email-ID: {email_id}\n"
        f"Fra: {sender_email or sender_name or 'BMC Hub'}\n"
        f"Til: {', '.join(to_addresses)}\n"
        f"Emne: {subject}\n"
        f"Modtaget: {sent_ts}\n\n"
        f"{body_text}"
    )

    # Best-effort comment logging: the sag_kommentarer schema varies across
    # deployments (pk column name, er_system_besked flag), so several query
    # variants are attempted in order until one succeeds.
    comment_row = {}
    try:
        has_system_flag = table_has_column("sag_kommentarer", "er_system_besked")
        attempted_errors = []

        has_comment_id_col = table_has_column("sag_kommentarer", "kommentar_id")
        has_id_col = table_has_column("sag_kommentarer", "id")

        # Prefer the variant that matches the live schema to avoid noisy SQL errors in logs.
        if has_comment_id_col:
            returning_variants = ["kommentar_id", "id AS kommentar_id"]
        elif has_id_col:
            returning_variants = ["id AS kommentar_id", "kommentar_id"]
        else:
            returning_variants = ["kommentar_id", "id AS kommentar_id"]

        if has_system_flag:
            comment_variants = [
                (
                    f"""
                    INSERT INTO sag_kommentarer (sag_id, forfatter, indhold, er_system_besked)
                    VALUES (%s, %s, %s, %s)
                    RETURNING {returning_expr}, created_at
                    """,
                    (sag_id, 'Email Bot', outgoing_comment, True),
                )
                for returning_expr in returning_variants
            ]
        else:
            comment_variants = [
                (
                    f"""
                    INSERT INTO sag_kommentarer (sag_id, forfatter, indhold)
                    VALUES (%s, %s, %s)
                    RETURNING {returning_expr}, created_at
                    """,
                    (sag_id, 'Email Bot', outgoing_comment),
                )
                for returning_expr in returning_variants
            ]

        for variant_query, variant_params in comment_variants:
            try:
                comment_row = execute_query_single(variant_query, variant_params) or {}
                if comment_row:
                    break
            except Exception as variant_error:
                attempted_errors.append(str(variant_error))

        if not comment_row and attempted_errors:
            logger.warning(
                "⚠️ Outbound email sent but comment logging variants failed for case %s: %s",
                sag_id,
                " | ".join(attempted_errors),
            )
    except Exception as e:
        logger.warning("⚠️ Outbound email sent but comment logging failed for case %s: %s", sag_id, e)

    # Normalize created_at to an ISO string; fall back to the send timestamp.
    comment_created_at = comment_row.get("created_at")
    if isinstance(comment_created_at, datetime):
        comment_created_at = comment_created_at.isoformat()
    elif comment_created_at is None:
        comment_created_at = sent_ts

    logger.info(
        "✅ Outbound case email sent and linked (case=%s, email_id=%s, thread_email_id=%s, payload_thread_key=%s, stored_thread_key=%s, provider_thread_key=%s, recipients=%s)",
        sag_id,
        email_id,
        payload.thread_email_id,
        effective_payload_thread_key,
        thread_key,
        provider_thread_key_normalized,
        ", ".join(to_addresses),
    )
    return {
        "status": "sent",
        "email_id": email_id,
        "message": send_message,
        "linked_to_case": linked_ok,
        "warning": None if linked_ok else "Email sent, but automatic case-thread link fallback was required",
        "comment": {
            "kommentar_id": comment_row.get("kommentar_id"),
            "forfatter": "Email Bot",
            "indhold": outgoing_comment,
            "created_at": comment_created_at,
            "er_system_besked": True,
        },
    }
|
|
|
|
# ============================================================================
# SOLUTIONS
# ============================================================================
# Mount the sibling solutions sub-router so its endpoints are served by this
# module's APIRouter alongside the case/email endpoints defined above.
from . import solutions

router.include_router(solutions.router)