diff --git a/.env.example b/.env.example
index 75d12ef..8d73e6c 100644
--- a/.env.example
+++ b/.env.example
@@ -16,6 +16,11 @@ API_HOST=0.0.0.0
 API_PORT=8001  # Changed from 8000 to avoid conflicts with other services
 ENABLE_RELOAD=false  # Set to true for live code reload (causes log spam in Docker)
 
+# FirmaAPI (CVR company lookup)
+FIRMAAPI_BASE_URL=https://firmaapi.dk/api/v1
+FIRMAAPI_API_KEY=
+FIRMAAPI_TIMEOUT_SECONDS=12
+
 # =====================================================
 # SECURITY
 # =====================================================
@@ -77,6 +82,7 @@ LINKS_READ_ONLY=true
 LINKS_DRY_RUN=true
 LINKS_DEAD_LINK_CHECK_ENABLED=true
 LINKS_DEAD_LINK_CHECK_INTERVAL_MINUTES=60
+LINKS_CHECK_TIMEOUT_SECONDS=5
 
 # Vaultwarden (Bitwarden-compatible)
 VAULTWARDEN_BASE_URL=
diff --git a/.env.prod.example b/.env.prod.example
index 608a28b..ebbad72 100644
--- a/.env.prod.example
+++ b/.env.prod.example
@@ -44,6 +44,11 @@ API_HOST=0.0.0.0
 API_PORT=8000
 API_RELOAD=false
 
+# FirmaAPI (CVR company lookup)
+FIRMAAPI_BASE_URL=https://firmaapi.dk/api/v1
+FIRMAAPI_API_KEY=
+FIRMAAPI_TIMEOUT_SECONDS=12
+
 # =====================================================
 # SECURITY - Production
 # =====================================================
@@ -86,6 +91,7 @@ LINKS_READ_ONLY=true
 LINKS_DRY_RUN=true
 LINKS_DEAD_LINK_CHECK_ENABLED=true
 LINKS_DEAD_LINK_CHECK_INTERVAL_MINUTES=60
+LINKS_CHECK_TIMEOUT_SECONDS=5
 
 # Vaultwarden (Bitwarden-compatible)
 VAULTWARDEN_BASE_URL=
diff --git a/Dockerfile b/Dockerfile
index 42d78ca..907e764 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,6 +7,7 @@ RUN apt-get update && apt-get install -y \
     curl \
     git \
     libpq-dev \
+    libzbar0 \
     gcc \
     g++ \
     python3-dev \
diff --git a/app/contacts/frontend/contacts.html b/app/contacts/frontend/contacts.html
index e0c4331..726422a 100644
--- a/app/contacts/frontend/contacts.html
+++ b/app/contacts/frontend/contacts.html
@@ -4,6 +4,53 @@
 {% block extra_css %}
 {% endblock %}
 {% block content %}
 <!-- page intro "Administrer kontaktpersoner" and contacts table header ("Navn", ...): markup lost in extraction -->
@@ -123,7 +419,7 @@
 <!-- empty-state row "Kunne ikke indlæse kontakter": markup lost in extraction -->
|
${initials}
-
${escapeHtml(contact.first_name + ' ' + contact.last_name)}
- ${contact.department || '-'}
+ ${safeName}
+ ${safeDepartment}
|
- ${contact.email || '-'}
- ${smsLine}
+ ${safeEmail}
+ ${smsLine}
|
-                ${contact.title || '-'}
+                ${safeTitle}
-
- ${companyCount}
+
+ ${companyCount}
- ${companyDisplay !== '-' ? ' ' + companyDisplay + ' ' : ''}
+ ${companyDisplay !== '-' ? '' + escapeHtml(companyDisplay) + ' ' : ''}
|
${statusBadge} |
-
@@ -580,20 +960,88 @@ async function loadCompaniesForSelect() {
try {
const response = await fetch('/api/v1/customers?limit=1000');
const data = await response.json();
-
- const select = document.getElementById('companySelect');
- select.innerHTML = data.customers.map(c =>
- ``
- ).join('');
+
+ availableCompanies = Array.isArray(data.customers)
+ ? data.customers.map((c) => ({ id: Number(c.id), name: String(c.name || '').trim() }))
+ : [];
+ renderCompanyResults(document.getElementById('companySearchInput')?.value || '');
+ renderSelectedCompanies();
} catch (error) {
console.error('Failed to load companies:', error);
}
}
+function renderCompanyResults(query) {
+ const host = document.getElementById('companyResults');
+ if (!host) return;
+
+ const needle = String(query || '').trim().toLowerCase();
+ let list = availableCompanies;
+ if (needle) {
+ list = availableCompanies.filter((c) => c.name.toLowerCase().includes(needle));
+ }
+
+ list = list.slice(0, 80);
+
+ if (!list.length) {
+ host.innerHTML = 'Ingen firmaer fundet ';
+ return;
+ }
+
+ host.innerHTML = list.map((c) => {
+ const selected = selectedCompanyIds.has(c.id);
+    return `<!-- company result row markup lost in extraction -->`;
+  }).join('');
+}

<!-- (a separate diff for the customer detail template begins here; its file header was lost in extraction) -->
+<!-- new "Links" nav tab (markup lost in extraction) -->
+<!-- new "Cases" tab pane: "Kundens sager" / "Alle sager knyttet til denne kunde";
+     empty state "Ingen sager fundet for denne kunde" (markup lost in extraction) -->
@@ -748,6 +803,42 @@
+<!-- new "Links / Endpoints" tab pane: "Driftslinks knyttet til denne kunde",
+     with an "Åbn fuld visning" action and empty state "Ingen links fundet for denne kunde"
+     (markup lost in extraction) -->
{% include "modules/nextcloud/templates/tab.html" %}
@@ -1210,6 +1301,11 @@ let customerKontaktFilter = 'all';
let eventListenersAdded = false;
+function getAuthHeaders() {
+ const token = localStorage.getItem('access_token') || sessionStorage.getItem('access_token');
+ return token ? { Authorization: `Bearer ${token}` } : {};
+}
+
document.addEventListener('DOMContentLoaded', () => {
if (eventListenersAdded) {
console.log('Event listeners already added, skipping...');
@@ -1226,6 +1322,13 @@ document.addEventListener('DOMContentLoaded', () => {
}, { once: false });
}
+ const casesTab = document.querySelector('a[href="#cases"]');
+ if (casesTab) {
+ casesTab.addEventListener('shown.bs.tab', () => {
+ loadCustomerCases();
+ }, { once: false });
+ }
+
const kontaktTab = document.querySelector('a[href="#kontakt"]');
if (kontaktTab) {
kontaktTab.addEventListener('shown.bs.tab', () => {
@@ -1265,6 +1368,13 @@ document.addEventListener('DOMContentLoaded', () => {
loadCustomerHardware();
}, { once: false });
}
+
+ const linksTab = document.querySelector('a[href="#links"]');
+ if (linksTab) {
+ linksTab.addEventListener('shown.bs.tab', () => {
+ loadCustomerLinks();
+ }, { once: false });
+ }
// Load activity when tab is shown
const activityTab = document.querySelector('a[href="#activity"]');
@@ -2315,6 +2425,107 @@ async function loadContacts() {
}
}
+async function loadCustomerCases() {
+ const container = document.getElementById('customerCasesContainer');
+ const empty = document.getElementById('customerCasesEmpty');
+
+ if (!container || !empty) {
+ return;
+ }
+
+ container.classList.remove('d-none');
+ empty.classList.add('d-none');
+
+ container.innerHTML = `
+
@@ -2232,6 +2338,8 @@
{% set ticon = type_icons.get(tkey, 'bi-card-text') %}
{% set tlabel = type_labels.get(tkey, tkey|capitalize) %}
+
+
#${id} |
+ ${title} |
+ ${statusLabel} |
+ ${priority} |
+ ${created} |
+
+
+
+
+ |
+
${escapeHtml(error.message || 'Fejl ved hentning af sager')} `;
+ }
+}
+
let subscriptionsLoaded = false;
async function loadSubscriptions() {
@@ -2376,6 +2587,7 @@ async function loadCustomerPipeline() {
let customerHardware = [];
let hardwareLocationsById = {};
+let customerLinks = [];
function getHardwareGroupLabel(item, groupBy) {
if (groupBy === 'location') {
@@ -2548,6 +2760,109 @@ document.addEventListener('change', (event) => {
}
});
+function renderCustomerLinksTable() {
+ const container = document.getElementById('customerLinksContainer');
+ const empty = document.getElementById('customerLinksEmpty');
+ if (!container || !empty) return;
+
+ if (!customerLinks.length) {
+ container.classList.add('d-none');
+ empty.classList.remove('d-none');
+ return;
+ }
+
+ container.classList.remove('d-none');
+ empty.classList.add('d-none');
+
+ container.innerHTML = `
+        <!-- links table markup lost in extraction -->
+    `;
+}

<!-- (a separate diff for the customers list template begins here; its file header was lost in extraction) -->
 Kunder / Administrer dine kunder
-<!-- previous header actions (markup lost in extraction) -->
+<!-- updated header actions (markup lost in extraction) -->
@@ -73,55 +150,391 @@
+<!-- rewritten customer list markup (lost in extraction) -->
+{% endblock %}
diff --git a/app/modules/sag/backend/router.py b/app/modules/sag/backend/router.py
index b548ec4..1ac3997 100644
--- a/app/modules/sag/backend/router.py
+++ b/app/modules/sag/backend/router.py
@@ -4,13 +4,15 @@ import shutil
import json
import re
import hashlib
+import base64
+import html
from pathlib import Path
-from datetime import datetime
-from typing import List, Optional
+from datetime import datetime, timedelta
+from typing import List, Optional, Dict
from uuid import uuid4
-from fastapi import APIRouter, HTTPException, Query, UploadFile, File, Request
-from fastapi.responses import FileResponse
+from fastapi import APIRouter, HTTPException, Query, UploadFile, File, Request, Form, Response
+from fastapi.responses import FileResponse, HTMLResponse
from pydantic import BaseModel, Field
from app.core.database import execute_query, execute_query_single, table_has_column
from app.models.schemas import TodoStep, TodoStepCreate, TodoStepUpdate, QuickCreateAnalysis
@@ -18,6 +20,7 @@ from app.core.config import settings
from app.services.email_service import EmailService
from app.services.case_analysis_service import CaseAnalysisService
from app.services.ollama_service import ollama_service
+from app.services.brother_label_print_service import BrotherLabelPrintService, LabelJob
try:
import extract_msg
@@ -191,6 +194,18 @@ class SagSendEmailRequest(BaseModel):
thread_key: Optional[str] = None
+class SignatureCanvasRequest(BaseModel):
+ data_url: str = Field(..., min_length=32)
+
+
+class DirectPrintOverrideRequest(BaseModel):
+ printer_host: Optional[str] = None
+ printer_port: Optional[int] = None
+ printer_model: Optional[str] = None
+ label_size: Optional[str] = None
+ hardware_id: Optional[int] = None
+
+
def _normalize_email_list(values: List[str], field_name: str) -> List[str]:
cleaned: List[str] = []
for value in values or []:
@@ -234,6 +249,130 @@ def _derive_thread_key_for_outbound(
return _normalize_message_id_token(generated_message_id)
+def _generate_local_thread_key_for_new_outbound(sag_id: int) -> str:
+ """Generate a stable local thread key for brand-new case emails.
+
+ This prevents fallback BMCid tags like sXXt001 that cannot be mapped back
+ to a concrete thread later.
+ """
+ nonce = uuid4().hex[:10]
+ return f"sag{sag_id}-{int(datetime.now().timestamp())}-{nonce}@bmchub.local"
+
+
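For orientation, a minimal sketch (not part of this diff) of how these locally generated thread keys could be recognised and parsed later; the helper name and regex are illustrative assumptions derived from the f-string above:

```python
import re

# Illustrative only: matches keys produced by the helper above, e.g.
# "sag123-1717000000-a1b2c3d4e5@bmchub.local"
LOCAL_THREAD_KEY_RE = re.compile(r"^sag(\d+)-(\d+)-([0-9a-f]{10})@bmchub\.local$")

def parse_local_thread_key(key: str):
    """Return (sag_id, created_ts) for a local thread key, or None."""
    match = LOCAL_THREAD_KEY_RE.match((key or "").strip())
    if not match:
        return None
    return int(match.group(1)), int(match.group(2))
```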
+def _build_scan_token(sag_id: int, token_type: str) -> str:
+ if token_type == "work_order":
+ return f"BMCSCAN-WO-S{sag_id}-{uuid4().hex[:10].upper()}"
+
+ # Keep hardware label tokens shorter so Code39 labels stay physically compact.
+ return f"BMCSCAN-HW-{sag_id}-{uuid4().hex[:6].upper()}"
+
+
+def _create_document_token(
+ sag_id: int,
+ token_type: str,
+ user_id: Optional[int] = None,
+ hardware_id: Optional[int] = None,
+) -> str:
+ token = _build_scan_token(sag_id, token_type)
+ expires_at = datetime.now() + timedelta(days=30)
+
+ execute_query(
+ """
+ INSERT INTO sag_document_tokens (
+ sag_id,
+ token,
+ token_type,
+ hardware_id,
+ created_by_user_id,
+ expires_at
+ )
+ VALUES (%s, %s, %s, %s, %s, %s)
+ """,
+ (sag_id, token, token_type, hardware_id, user_id, expires_at),
+ )
+ return token
+
+
+def _get_setting_value(key: str, fallback: Optional[str] = None) -> Optional[str]:
+ row = execute_query_single("SELECT value FROM settings WHERE key = %s", (key,))
+ if not row:
+ return fallback
+ value = row.get("value")
+ if value is None:
+ return fallback
+ return str(value)
+
+
+_CODE39_PATTERNS: Dict[str, str] = {
+ "0": "nnnwwnwnn", "1": "wnnwnnnnw", "2": "nnwwnnnnw", "3": "wnwwnnnnn",
+ "4": "nnnwwnnnw", "5": "wnnwwnnnn", "6": "nnwwwnnnn", "7": "nnnwnnwnw",
+ "8": "wnnwnnwnn", "9": "nnwwnnwnn", "A": "wnnnnwnnw", "B": "nnwnnwnnw",
+ "C": "wnwnnwnnn", "D": "nnnnwwnnw", "E": "wnnnwwnnn", "F": "nnwnwwnnn",
+ "G": "nnnnnwwnw", "H": "wnnnnwwnn", "I": "nnwnnwwnn", "J": "nnnnwwwnn",
+ "K": "wnnnnnnww", "L": "nnwnnnnww", "M": "wnwnnnnwn", "N": "nnnnwnnww",
+ "O": "wnnnwnnwn", "P": "nnwnwnnwn", "Q": "nnnnnnwww", "R": "wnnnnnwwn",
+ "S": "nnwnnnwwn", "T": "nnnnwnwwn", "U": "wwnnnnnnw", "V": "nwwnnnnnw",
+ "W": "wwwnnnnnn", "X": "nwnnwnnnw", "Y": "wwnnwnnnn", "Z": "nwwnwnnnn",
+ "-": "nwnnnnwnw", ".": "wwnnnnwnn", " ": "nwwnnnwnn", "$": "nwnwnwnnn",
+ "/": "nwnwnnnwn", "+": "nwnnnwnwn", "%": "nnnwnwnwn", "*": "nwnnwnwnn",
+}
+
+
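Each Code39 symbol above encodes nine elements (five bars, four spaces) flagged narrow (`n`) or wide (`w`). A small sketch mirroring the width arithmetic used by `_render_code39_svg` below, handy for checking whether a token fits a given label width; the function itself is illustrative and not part of this diff:

```python
# Illustrative helper: mirrors the width math in _render_code39_svg so a
# token's rendered width can be estimated up front.
def code39_render_width(value: str, narrow: int = 2, wide: int = 5, gap: int = 2) -> int:
    sequence = f"*{value.upper()}*"  # Code39 frames data with '*' start/stop symbols
    total = 12  # same fixed margin the renderer starts with
    for ch in sequence:
        pattern = _CODE39_PATTERNS[ch]  # nine narrow/wide elements per symbol
        for idx, code in enumerate(pattern):
            total += wide if code == "w" else narrow
            if idx < len(pattern) - 1:
                total += gap
        total += gap
    return total

# e.g. code39_render_width("BMCSCAN-HW-42-ABC123") -> width of the bar region
```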
+def _render_code39_svg(
+ value: str,
+ height: int = 48,
+ narrow: int = 2,
+ wide: int = 5,
+ gap: int = 2,
+ font_size: int = 11,
+ include_text: bool = True,
+) -> str:
+ safe_value = "".join(ch for ch in (value or "").upper() if ch in _CODE39_PATTERNS and ch != "*")
+ if not safe_value:
+ safe_value = "EMPTY"
+
+ sequence = f"*{safe_value}*"
+ total_width = 12
+ for ch in sequence:
+ pattern = _CODE39_PATTERNS[ch]
+ for idx, code in enumerate(pattern):
+ stroke = wide if code == "w" else narrow
+ total_width += stroke
+ if idx < len(pattern) - 1:
+ total_width += gap
+ total_width += gap
+
+ x = 6
+ bars = []
+ for ch in sequence:
+ pattern = _CODE39_PATTERNS[ch]
+ for idx, code in enumerate(pattern):
+ stroke = wide if code == "w" else narrow
+ if idx % 2 == 0:
+                # rect attributes approximated; the original tag was lost in extraction
+                bars.append(f'<rect x="{x}" y="0" width="{stroke}" height="{height}"/>')
+            x += stroke
+            if idx < len(pattern) - 1:
+                x += gap
+        x += gap
+
+    # (The tail of this helper, which assembles the returned <svg> markup from
+    # `bars`, `total_width` and the optional text caption, was lost in extraction.)


@@ ... @@ def _append_signature_to_html
-    return f"{body_html}<br><br>-- <br>{signature_html}"
+    # separator markup approximated; the original tags were lost in extraction
+    signature_block = (
+        "<br><br>-- <br>"
+        f"{signature_html}"
+        "<br>"
+    )
+    return f"{body_html}{signature_block}"
@router.post("/sag/analyze-quick-create", response_model=QuickCreateAnalysis)
@@ -2152,6 +2298,816 @@ async def add_kommentar(sag_id: int, data: dict, request: Request):
raise HTTPException(status_code=500, detail="Failed to add comment")
+# ============================================================================
+# WORK ORDERS / LABELS
+# ============================================================================
+
+def _resolve_case_row_for_documents(sag_id: int):
+ row = execute_query_single(
+ """
+ SELECT s.id, s.titel, s.status, s.created_at, s.beskrivelse, s.customer_id, c.name AS customer_name
+ FROM sag_sager s
+ LEFT JOIN customers c ON c.id = s.customer_id
+ WHERE s.id = %s AND s.deleted_at IS NULL
+ """,
+ (sag_id,),
+ )
+ if not row:
+ raise HTTPException(status_code=404, detail="Case not found")
+ return row
+
+
+def _store_generated_case_file(
+ sag_id: int,
+ filename: str,
+ content_bytes: bytes,
+ content_type: str,
+ source_type: str,
+ source_token: Optional[str] = None,
+) -> Dict:
+ stored_name = _generate_stored_name(filename, SAG_FILE_SUBDIR)
+ destination = _resolve_attachment_path(stored_name)
+ destination.parent.mkdir(parents=True, exist_ok=True)
+ destination.write_bytes(content_bytes)
+
+ query = """
+ INSERT INTO sag_files (
+ sag_id,
+ filename,
+ content_type,
+ size_bytes,
+ stored_name,
+ source_type,
+ source_token
+ )
+ VALUES (%s, %s, %s, %s, %s, %s, %s)
+ RETURNING id, filename, created_at
+ """
+ rows = execute_query(
+ query,
+ (sag_id, filename, content_type, len(content_bytes), stored_name, source_type, source_token),
+ )
+ return rows[0]
+
+
+@router.get("/sag/{sag_id}/work-orders/print", response_class=HTMLResponse)
+async def print_case_work_order(sag_id: int, request: Request):
+ """Render a printable work order with scan token + barcode."""
+ case = _resolve_case_row_for_documents(sag_id)
+
+ user_id = None
+ try:
+ user_id = _get_user_id_from_request(request)
+ except HTTPException:
+ user_id = None
+
+ token = _create_document_token(sag_id, "work_order", user_id=user_id)
+ barcode_svg = _render_code39_svg(token)
+
+ todo_steps = execute_query(
+ """
+ SELECT title, description, due_date, is_done
+ FROM sag_todo_steps
+ WHERE sag_id = %s
+ AND deleted_at IS NULL
+ ORDER BY is_done ASC, due_date ASC NULLS LAST, id ASC
+ """,
+ (sag_id,),
+ ) or []
+
+ todo_items_html = "".join(
+ (
+ "□ | "
+ f"{html.escape(step.get('title') or '')}"
+ f" | "
+ "{html.escape(step.get('description') or '')} □ | "
+ "Ingen todo-opgaver registreret | ".join(html.escape(part) for part in text.splitlines()) + + def _clip(value: Optional[str], limit: int = 600) -> str: + text = str(value or "").strip() + if not text: + return "" + if len(text) <= limit: + return text + return f"{text[:limit].rstrip()}..." + + def _strip_quoted_email_text(value: Optional[str]) -> str: + text = str(value or "").replace("\r\n", "\n").replace("\r", "\n").strip() + if not text: + return "" + + lines = text.split("\n") + kept = [] + header_re = re.compile(r"^(fra|from|til|to|sendt|sent|dato|date|emne|subject)\s*:\s*", re.IGNORECASE) + original_msg_re = re.compile(r"^(-----\s*original message\s*-----|begin forwarded message)", re.IGNORECASE) + wrote_re = re.compile(r"\b(wrote|skrev)\s*:\s*$", re.IGNORECASE) + + for idx, line in enumerate(lines): + trimmed = line.strip() + + if trimmed.startswith(">"): + break + if original_msg_re.match(trimmed): + break + if wrote_re.search(trimmed): + break + + if re.match(r"^[-_]{3,}$", trimmed): + lookahead = lines[idx + 1: idx + 5] + if any(header_re.match(str(candidate or "").strip()) for candidate in lookahead): + break + + if idx > 0 and header_re.match(trimmed) and not str(lines[idx - 1] or "").strip(): + break + + kept.append(line) + + while kept and not str(kept[-1]).strip(): + kept.pop() + + return "\n".join(kept).strip() + + email_from_expr = "NULL" + if table_has_column("email_messages", "sender_email"): + email_from_expr = "e.sender_email" + elif table_has_column("email_messages", "from_email"): + email_from_expr = "e.from_email" + + email_to_expr = "NULL" + if table_has_column("email_messages", "recipient_email"): + email_to_expr = "e.recipient_email" + elif table_has_column("email_messages", "to_email"): + email_to_expr = "e.to_email" + + linked_emails = [] + email_comment_rows = [] + if _table_exists("sag_emails") and _table_exists("email_messages"): + try: + linked_emails = execute_query( + f""" + SELECT + e.received_date, + e.subject, + {email_from_expr} AS from_email, + {email_to_expr} AS to_email, + e.body_text + FROM sag_emails se + JOIN email_messages e ON e.id = se.email_id + WHERE se.sag_id = %s + ORDER BY e.received_date DESC NULLS LAST, e.id DESC + LIMIT 30 + """, + (sag_id,), + ) or [] + except Exception as exc: + logger.warning("⚠️ Work-order linked email query failed for SAG-%s: %s", sag_id, exc) + linked_emails = [] + + if _table_exists("sag_kommentarer"): + try: + email_comment_rows = execute_query( + """ + SELECT created_at, forfatter, indhold + FROM sag_kommentarer + WHERE sag_id = %s + AND deleted_at IS NULL + AND ( + COALESCE(indhold, '') ILIKE '%%Email-ID:%%' + OR COALESCE(indhold, '') ILIKE '%%📧%%' + OR COALESCE(indhold, '') ILIKE '%%Indgående email%%' + OR COALESCE(indhold, '') ILIKE '%%Udgående email%%' + ) + ORDER BY created_at DESC + LIMIT 30 + """, + (sag_id,), + ) or [] + except Exception as exc: + logger.warning("⚠️ Work-order email comment query failed for SAG-%s: %s", sag_id, exc) + email_comment_rows = [] + + has_name = table_has_column("hardware_assets", "name") + has_brand = table_has_column("hardware_assets", "brand") + has_model = table_has_column("hardware_assets", "model") + has_serial = table_has_column("hardware_assets", "serial_number") + has_asset_tag = table_has_column("hardware_assets", "asset_tag") + has_customer_asset_id = table_has_column("hardware_assets", "customer_asset_id") + has_internal_asset_id = table_has_column("hardware_assets", "internal_asset_id") + has_type = table_has_column("hardware_assets", "type") + has_asset_type 
= table_has_column("hardware_assets", "asset_type") + + name_expr_parts = [] + if has_name: + name_expr_parts.append("NULLIF(TRIM(h.name), '')") + if has_brand and has_model: + name_expr_parts.append("NULLIF(TRIM(CONCAT_WS(' ', h.brand, h.model)), '')") + if has_brand: + name_expr_parts.append("NULLIF(TRIM(h.brand), '')") + if has_model: + name_expr_parts.append("NULLIF(TRIM(h.model), '')") + if has_serial: + name_expr_parts.append("NULLIF(TRIM(h.serial_number), '')") + name_expr_parts.append("CONCAT('Hardware #', h.id::text)") + name_expr = "COALESCE(" + ", ".join(name_expr_parts) + ")" + + serial_expr = "h.serial_number" if has_serial else "NULL" + + tag_expr_parts = [] + if has_asset_tag: + tag_expr_parts.append("NULLIF(TRIM(h.asset_tag), '')") + if has_customer_asset_id: + tag_expr_parts.append("NULLIF(TRIM(h.customer_asset_id), '')") + if has_internal_asset_id: + tag_expr_parts.append("NULLIF(TRIM(h.internal_asset_id), '')") + tag_expr_parts.append("'-'") + tag_expr = "COALESCE(" + ", ".join(tag_expr_parts) + ")" + + type_expr_parts = [] + if has_type: + type_expr_parts.append("NULLIF(TRIM(h.type), '')") + if has_asset_type: + type_expr_parts.append("NULLIF(TRIM(h.asset_type), '')") + type_expr_parts.append("'ukendt'") + type_expr = "COALESCE(" + ", ".join(type_expr_parts) + ")" + + hardware_rows = [] + if _table_exists("sag_hardware") and _table_exists("hardware_assets"): + try: + hardware_rows = execute_query( + f""" + SELECT + h.id, + {name_expr} AS label_name, + {serial_expr} AS serial_number, + {tag_expr} AS label_tag, + {type_expr} AS label_type + FROM sag_hardware sh + JOIN hardware_assets h ON h.id = sh.hardware_id + WHERE sh.sag_id = %s + AND sh.deleted_at IS NULL + ORDER BY label_name ASC + """, + (sag_id,), + ) or [] + except Exception as exc: + logger.warning("⚠️ Work-order hardware query failed for SAG-%s: %s", sag_id, exc) + hardware_rows = [] + + hardware_html = "".join( + ( + " {html.escape(str(hw.get('label_name') or '-'))} | "
+ f"{html.escape(str(hw.get('serial_number') or '-'))} | "
+ f"{html.escape(str(hw.get('label_tag') or '-'))} | "
+ f"{html.escape(str(hw.get('label_type') or '-'))} | "
+ "Ingen hardware er knyttet til sagen. | "
+ f" "
+ )
+ for row in linked_emails
+ )
+ if not linked_emails and email_comment_rows:
+ emails_html = "".join(
+ (
+ "{html.escape((row.get('subject') or '(Ingen emne)'))} "
+ f"Fra: {html.escape(row.get('from_email') or '-')} · Til: {html.escape(row.get('to_email') or '-')} "
+ f"Dato: {html.escape(str(row.get('received_date') or '-'))} "
+ f"{_nl2br(_clip(_strip_quoted_email_text(row.get('body_text')), 800))} "
+ ""
+ f" "
+ )
+ for row in email_comment_rows
+ )
+ elif not emails_html:
+ emails_html = "{html.escape(row.get('forfatter') or 'Email')} · {html.escape(str(row.get('created_at') or '-'))} "
+ f"{_nl2br(_clip(_strip_quoted_email_text(row.get('indhold')), 1200))} "
+ "Ingen linkede emails. "
+
+ internal_messages_html = "".join(
+ (
+ ""
+ f" "
+ )
+ for row in internal_messages
+ )
+ if not internal_messages_html:
+ internal_messages_html = "{html.escape(row.get('forfatter') or 'Ukendt')} · {html.escape(str(row.get('created_at') or '-'))} "
+ f"{_nl2br(_clip(row.get('indhold'), 900))} "
+ "Ingen interne beskeder. "
+
+ html_doc = f"""
+
+
+
+
+
+
+
+
+
+ BMC Work Order
+ SAG-{case['id']} · {html.escape(case.get('status') or '-')}
+ Kunde: {html.escape(case.get('customer_name') or '-')} {barcode_svg}
+
+
+
+ Sags titel
+ {html.escape(case.get('titel') or '-')}
+
+
+
+ Sagsbeskrivelse
+ {_nl2br(case.get('beskrivelse'))}
+
+
+
+ Opgaver (afkryds ved udførelse)
+
+
+
+ Hardware
+
+
+
+ Linkede emails
+ {emails_html}
+
+
+
+ Interne beskeder
+ {internal_messages_html}
+
+
+
+ Underskrift
+
+ Dato: _____________ Navn: __________________________
+ Scan-token: {html.escape(token)}
+
+
+"""
+ return HTMLResponse(content=html_doc)
+
+
+@router.post("/sag/{sag_id}/work-orders/{token}/signature-canvas")
+async def upload_work_order_signature_canvas(sag_id: int, token: str, payload: SignatureCanvasRequest):
+ """Save canvas signature as case file and consume token."""
+ _resolve_case_row_for_documents(sag_id)
+
+ token_row = execute_query_single(
+ """
+ SELECT token, sag_id
+ FROM sag_document_tokens
+ WHERE token = %s AND token_type = 'work_order' AND sag_id = %s
+ """,
+ (token, sag_id),
+ )
+ if not token_row:
+ raise HTTPException(status_code=404, detail="Work-order token not found")
+
+ if "," not in payload.data_url:
+ raise HTTPException(status_code=400, detail="Invalid signature payload")
+
+ _, encoded = payload.data_url.split(",", 1)
+ try:
+ signature_bytes = base64.b64decode(encoded)
+ except Exception as exc:
+ raise HTTPException(status_code=400, detail=f"Invalid base64 signature: {exc}")
+
+ saved = _store_generated_case_file(
+ sag_id=sag_id,
+ filename=f"SAG-{sag_id}-signature-{datetime.now().strftime('%Y%m%d-%H%M%S')}.png",
+ content_bytes=signature_bytes,
+ content_type="image/png",
+ source_type="signature_canvas",
+ source_token=token,
+ )
+
+ execute_query(
+ """
+ UPDATE sag_document_tokens
+ SET consumed_at = COALESCE(consumed_at, CURRENT_TIMESTAMP)
+ WHERE token = %s
+ """,
+ (token,),
+ )
+
+ return {"status": "saved", "file": saved}
+
+
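A hedged client-side sketch of exercising the signature-canvas endpoint above; `httpx`, the base URL and the bearer token are assumptions, while the route and `data_url` payload shape come from this diff:

```python
import base64
import httpx

def upload_signature_png(base_url: str, bearer_token: str, sag_id: int, scan_token: str, png_bytes: bytes) -> dict:
    """Build a data URL from raw PNG bytes and post it to the signature-canvas endpoint."""
    data_url = "data:image/png;base64," + base64.b64encode(png_bytes).decode("ascii")
    response = httpx.post(
        f"{base_url}/api/v1/sag/{sag_id}/work-orders/{scan_token}/signature-canvas",
        json={"data_url": data_url},
        headers={"Authorization": f"Bearer {bearer_token}"},
        timeout=15,
    )
    response.raise_for_status()
    return response.json()  # {"status": "saved", "file": {...}}
```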
+@router.post("/sag/{sag_id}/work-orders/{token}/signature-file")
+async def upload_work_order_signature_file(
+ sag_id: int,
+ token: str,
+ file: UploadFile = File(...),
+ source: str = Form("signature_upload"),
+):
+ """Save uploaded signature file and consume token."""
+ _resolve_case_row_for_documents(sag_id)
+
+ token_row = execute_query_single(
+ """
+ SELECT token, sag_id
+ FROM sag_document_tokens
+ WHERE token = %s AND token_type = 'work_order' AND sag_id = %s
+ """,
+ (token, sag_id),
+ )
+ if not token_row:
+ raise HTTPException(status_code=404, detail="Work-order token not found")
+
+ content = await file.read()
+ if not content:
+ raise HTTPException(status_code=400, detail="Empty file")
+
+ safe_name = Path(file.filename or "signature-upload.bin").name
+ saved = _store_generated_case_file(
+ sag_id=sag_id,
+ filename=f"SAG-{sag_id}-{safe_name}",
+ content_bytes=content,
+ content_type=file.content_type or "application/octet-stream",
+ source_type=source or "signature_upload",
+ source_token=token,
+ )
+
+ execute_query(
+ """
+ UPDATE sag_document_tokens
+ SET consumed_at = COALESCE(consumed_at, CURRENT_TIMESTAMP)
+ WHERE token = %s
+ """,
+ (token,),
+ )
+
+ return {"status": "saved", "file": saved}
+
+
+@router.get("/sag/{sag_id}/labels/hardware/print", response_class=HTMLResponse)
+async def print_case_hardware_labels(
+ sag_id: int,
+ request: Request,
+    auto_print: bool = Query(False),
+    hardware_id: Optional[int] = Query(None),
+):
+ """Render printable hardware labels with IDs and barcodes."""
+ _resolve_case_row_for_documents(sag_id)
+
+ has_name = table_has_column("hardware_assets", "name")
+ has_brand = table_has_column("hardware_assets", "brand")
+ has_model = table_has_column("hardware_assets", "model")
+ has_serial = table_has_column("hardware_assets", "serial_number")
+ has_asset_tag = table_has_column("hardware_assets", "asset_tag")
+ has_customer_asset_id = table_has_column("hardware_assets", "customer_asset_id")
+ has_internal_asset_id = table_has_column("hardware_assets", "internal_asset_id")
+ has_type = table_has_column("hardware_assets", "type")
+ has_asset_type = table_has_column("hardware_assets", "asset_type")
+
+ name_expr_parts = []
+ if has_name:
+ name_expr_parts.append("NULLIF(TRIM(h.name), '')")
+ if has_brand and has_model:
+ name_expr_parts.append("NULLIF(TRIM(CONCAT_WS(' ', h.brand, h.model)), '')")
+ if has_brand:
+ name_expr_parts.append("NULLIF(TRIM(h.brand), '')")
+ if has_model:
+ name_expr_parts.append("NULLIF(TRIM(h.model), '')")
+ if has_serial:
+ name_expr_parts.append("NULLIF(TRIM(h.serial_number), '')")
+ name_expr_parts.append("CONCAT('Hardware #', h.id::text)")
+ name_expr = "COALESCE(" + ", ".join(name_expr_parts) + ")"
+
+ tag_expr_parts = []
+ if has_asset_tag:
+ tag_expr_parts.append("NULLIF(TRIM(h.asset_tag), '')")
+ if has_customer_asset_id:
+ tag_expr_parts.append("NULLIF(TRIM(h.customer_asset_id), '')")
+ if has_internal_asset_id:
+ tag_expr_parts.append("NULLIF(TRIM(h.internal_asset_id), '')")
+ tag_expr_parts.append("'-'")
+ tag_expr = "COALESCE(" + ", ".join(tag_expr_parts) + ")"
+
+ type_expr_parts = []
+ if has_type:
+ type_expr_parts.append("NULLIF(TRIM(h.type), '')")
+ if has_asset_type:
+ type_expr_parts.append("NULLIF(TRIM(h.asset_type), '')")
+ type_expr_parts.append("'ukendt'")
+ type_expr = "COALESCE(" + ", ".join(type_expr_parts) + ")"
+
+ serial_expr = "h.serial_number" if has_serial else "NULL"
+
+    # This GET endpoint has no JSON payload (the `payload` reference belonged to the
+    # direct-print POST below); the optional filter comes from the query parameter,
+    # which FastAPI already validates as an integer.
+    hardware_id_filter = hardware_id
+
+ hardware_query = f"""
+ SELECT
+ h.id,
+ {name_expr} AS label_name,
+ {serial_expr} AS serial_number,
+ {tag_expr} AS label_tag,
+ {type_expr} AS label_type
+ FROM sag_hardware sh
+ JOIN hardware_assets h ON h.id = sh.hardware_id
+ WHERE sh.sag_id = %s
+ AND sh.deleted_at IS NULL
+ """
+ params = [sag_id]
+ if hardware_id_filter is not None:
+ hardware_query += " AND sh.hardware_id = %s"
+ params.append(hardware_id_filter)
+ hardware_query += " ORDER BY label_name ASC"
+
+ hardware_rows = execute_query(hardware_query, tuple(params)) or []
+
+ user_id = None
+ try:
+ user_id = _get_user_id_from_request(request)
+ except HTTPException:
+ user_id = None
+
+ label_cards = []
+ for hw in hardware_rows:
+ token = _create_document_token(
+ sag_id=sag_id,
+ token_type="hardware_label",
+ user_id=user_id,
+ hardware_id=hw.get("id"),
+ )
+ barcode_svg = _render_code39_svg(
+ token,
+ height=26,
+ narrow=1,
+ wide=2,
+ gap=0,
+ font_size=10,
+ include_text=False,
+ )
+ label_cards.append(
+ f"""
+
+
+ """
+ )
+
+ if not label_cards:
+ label_cards.append("{html.escape(hw.get('label_name') or 'Ukendt enhed')}
+ ID: HW-{hw.get('id')} · SAG-{sag_id}
+ SN: {html.escape(hw.get('serial_number') or '-')} · Tag: {html.escape(hw.get('label_tag') or '-')} · Type: {html.escape(hw.get('label_type') or '-')}
+ {barcode_svg}
+ {html.escape(token)}
+ Ingen hardware er knyttet til sagen endnu. ")
+
+ auto_print_script = ""
+ if auto_print:
+ auto_print_script = (
+ ""
+ )
+
+ html_doc = f"""
+
+
+
+
+
+ {''.join(label_cards)}
+
+ {auto_print_script}
+
+
+"""
+ return HTMLResponse(content=html_doc)
+
+
+@router.post("/sag/{sag_id}/labels/hardware/print-direct")
+async def print_case_hardware_labels_direct(
+ sag_id: int,
+ request: Request,
+ payload: Optional[DirectPrintOverrideRequest] = None,
+):
+ """Print hardware labels directly to configured Brother network printer."""
+ _resolve_case_row_for_documents(sag_id)
+
+ enabled = (_get_setting_value("label_printer_enabled", "false") or "false").strip().lower() == "true"
+ if not enabled:
+ raise HTTPException(status_code=400, detail="Direkte label-print er ikke aktiveret i indstillinger")
+
+ host = (payload.printer_host if payload and payload.printer_host else _get_setting_value("label_printer_host", "")).strip()
+ port_raw = payload.printer_port if payload and payload.printer_port is not None else _get_setting_value("label_printer_port", "9100")
+ model = (payload.printer_model if payload and payload.printer_model else _get_setting_value("label_printer_model", "QL-710W")).strip()
+ label_size = (payload.label_size if payload and payload.label_size else _get_setting_value("label_printer_label_size", "62")).strip()
+
+ try:
+ port = int(port_raw or 9100)
+ except ValueError:
+ raise HTTPException(status_code=400, detail="Ugyldig printer-port")
+
+ if not host:
+ raise HTTPException(status_code=400, detail="Printer host/IP mangler i indstillinger")
+
+ has_name = table_has_column("hardware_assets", "name")
+ has_brand = table_has_column("hardware_assets", "brand")
+ has_model = table_has_column("hardware_assets", "model")
+ has_serial = table_has_column("hardware_assets", "serial_number")
+ has_asset_tag = table_has_column("hardware_assets", "asset_tag")
+ has_customer_asset_id = table_has_column("hardware_assets", "customer_asset_id")
+ has_internal_asset_id = table_has_column("hardware_assets", "internal_asset_id")
+ has_type = table_has_column("hardware_assets", "type")
+ has_asset_type = table_has_column("hardware_assets", "asset_type")
+
+ name_expr_parts = []
+ if has_name:
+ name_expr_parts.append("NULLIF(TRIM(h.name), '')")
+ if has_brand and has_model:
+ name_expr_parts.append("NULLIF(TRIM(CONCAT_WS(' ', h.brand, h.model)), '')")
+ if has_brand:
+ name_expr_parts.append("NULLIF(TRIM(h.brand), '')")
+ if has_model:
+ name_expr_parts.append("NULLIF(TRIM(h.model), '')")
+ if has_serial:
+ name_expr_parts.append("NULLIF(TRIM(h.serial_number), '')")
+ name_expr_parts.append("CONCAT('Hardware #', h.id::text)")
+ name_expr = "COALESCE(" + ", ".join(name_expr_parts) + ")"
+
+ tag_expr_parts = []
+ if has_asset_tag:
+ tag_expr_parts.append("NULLIF(TRIM(h.asset_tag), '')")
+ if has_customer_asset_id:
+ tag_expr_parts.append("NULLIF(TRIM(h.customer_asset_id), '')")
+ if has_internal_asset_id:
+ tag_expr_parts.append("NULLIF(TRIM(h.internal_asset_id), '')")
+ tag_expr_parts.append("'-'")
+ tag_expr = "COALESCE(" + ", ".join(tag_expr_parts) + ")"
+
+ type_expr_parts = []
+ if has_type:
+ type_expr_parts.append("NULLIF(TRIM(h.type), '')")
+ if has_asset_type:
+ type_expr_parts.append("NULLIF(TRIM(h.asset_type), '')")
+ type_expr_parts.append("'ukendt'")
+ type_expr = "COALESCE(" + ", ".join(type_expr_parts) + ")"
+
+ serial_expr = "h.serial_number" if has_serial else "NULL"
+
+    hardware_id_filter = None
+    if payload and payload.hardware_id is not None:
+        try:
+            hardware_id_filter = int(payload.hardware_id)
+        except (TypeError, ValueError):
+            raise HTTPException(status_code=400, detail="Ugyldigt hardware_id")
+
+    hardware_query = f"""
+        SELECT
+            h.id,
+            {name_expr} AS label_name,
+            {serial_expr} AS serial_number,
+            {tag_expr} AS label_tag,
+            {type_expr} AS label_type
+        FROM sag_hardware sh
+        JOIN hardware_assets h ON h.id = sh.hardware_id
+        WHERE sh.sag_id = %s
+          AND sh.deleted_at IS NULL
+    """
+    params = [sag_id]
+    if hardware_id_filter is not None:
+        hardware_query += " AND sh.hardware_id = %s"
+        params.append(hardware_id_filter)
+    hardware_query += " ORDER BY label_name ASC"
+
+    hardware_rows = execute_query(hardware_query, tuple(params)) or []
+
+ if not hardware_rows:
+ if hardware_id_filter is not None:
+ raise HTTPException(status_code=404, detail="Valgt hardware er ikke knyttet til sagen")
+ raise HTTPException(status_code=400, detail="Ingen hardware er knyttet til sagen")
+
+ user_id = None
+ try:
+ user_id = _get_user_id_from_request(request)
+ except HTTPException:
+ user_id = None
+
+ jobs: List[LabelJob] = []
+ for hw in hardware_rows:
+ token = _create_document_token(
+ sag_id=sag_id,
+ token_type="hardware_label",
+ user_id=user_id,
+ hardware_id=hw.get("id"),
+ )
+ meta = (
+ f"ID: HW-{hw.get('id')} SAG-{sag_id} "
+ f"SN: {hw.get('serial_number') or '-'} Tag: {hw.get('label_tag') or '-'} Type: {hw.get('label_type') or '-'}"
+ )
+ jobs.append(
+ LabelJob(
+ name=str(hw.get("label_name") or "Ukendt enhed"),
+ meta_line=meta,
+ token=token,
+ )
+ )
+
+ service = BrotherLabelPrintService(
+ model=model,
+ host=host,
+ port=port,
+ label_size=label_size,
+ )
+
+ try:
+ printed = service.print_jobs(jobs)
+ except Exception as exc:
+ logger.error("❌ Direct label print failed for SAG-%s: %s", sag_id, exc)
+ raise HTTPException(status_code=500, detail=f"Direkte print fejlede: {exc}")
+
+ return {
+ "status": "ok",
+ "printed": printed,
+ "hardware_ids": [int(hw.get("id")) for hw in hardware_rows if hw.get("id") is not None],
+ "printer": {
+ "model": model,
+ "host": host,
+ "port": port,
+ "label_size": label_size,
+ },
+ }
+
+
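For reference, a sketch of invoking the direct-print endpoint; host, IDs and base URL are placeholders, and omitted fields fall back to the `label_printer_*` settings read via `_get_setting_value()`:

```python
import httpx

# Any field may be omitted to use the stored label_printer_* settings instead.
override = {
    "printer_host": "192.168.1.50",  # placeholder printer IP
    "printer_port": 9100,
    "printer_model": "QL-710W",
    "label_size": "62x29",
    "hardware_id": 17,  # restrict printing to a single linked asset
}
resp = httpx.post(
    "http://localhost:8000/api/v1/sag/123/labels/hardware/print-direct",  # assumed base URL
    json=override,
    timeout=30,
)
print(resp.json())  # {"status": "ok", "printed": 1, "printer": {...}, ...}
```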
# ============================================================================
# FILES - Case Files
# ============================================================================
@@ -2281,6 +3237,53 @@ async def download_sag_file(sag_id: int, file_id: int, download: bool = False):
headers=headers
)
+
+@router.get("/sag/{sag_id}/files/{file_id}/preview-image")
+async def preview_sag_pdf_as_image(sag_id: int, file_id: int, page: int = Query(1, ge=1), scale: float = Query(2.8, ge=1.0, le=5.0)):
+ """Render a PDF page as PNG for consistent in-app preview sizing."""
+ query = "SELECT * FROM sag_files WHERE id = %s AND sag_id = %s"
+ result = execute_query(query, (file_id, sag_id))
+
+ if not result:
+ raise HTTPException(status_code=404, detail="File not found")
+
+ file_data = result[0]
+ path = _resolve_attachment_path(file_data["stored_name"])
+
+ if not path.exists():
+ raise HTTPException(status_code=404, detail="File lost on server")
+
+ content_type = (file_data.get("content_type") or "").lower()
+ filename = str(file_data.get("filename") or "").lower()
+ if "pdf" not in content_type and not filename.endswith(".pdf"):
+ raise HTTPException(status_code=400, detail="Preview image is only supported for PDF files")
+
+ try:
+ import pypdfium2 as pdfium
+
+ document = pdfium.PdfDocument(str(path))
+ page_index = min(max(page - 1, 0), max(len(document) - 1, 0))
+ pdf_page = document.get_page(page_index)
+ bitmap = pdf_page.render(scale=scale)
+        pil_image = bitmap.to_pil().convert("RGB")
+
+        import io
+        buffer = io.BytesIO()
+        pil_image.save(buffer, format="PNG", optimize=True)
+ data = buffer.getvalue()
+
+ try:
+ pdf_page.close()
+ except Exception:
+ pass
+
+ return Response(content=data, media_type="image/png")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("❌ PDF preview render failed for SAG-%s file %s: %s", sag_id, file_id, e)
+ raise HTTPException(status_code=500, detail="Could not render PDF preview")
+
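Usage sketch for the preview endpoint above; base URL and IDs are placeholders:

```python
import httpx

resp = httpx.get(
    "http://localhost:8000/api/v1/sag/123/files/456/preview-image",
    params={"page": 1, "scale": 2.8},
    timeout=30,
)
resp.raise_for_status()  # 400 for non-PDF files, 404 if the file is missing
with open("preview.png", "wb") as fh:
    fh.write(resp.content)
```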
@router.delete("/sag/{sag_id}/files/{file_id}")
async def delete_sag_file(sag_id: int, file_id: int):
"""Delete a file."""
@@ -2326,6 +3329,7 @@ async def get_sag_emails(sag_id: int):
SELECT
e.*,
COALESCE(
+ NULLIF(REGEXP_REPLACE(TRIM(COALESCE(e.thread_key, '')), '[<>\\s]', '', 'g'), ''),
NULLIF(REGEXP_REPLACE((REGEXP_SPLIT_TO_ARRAY(COALESCE(e.email_references, ''), E'[\\s,]+'))[1], '[<>\\s]', '', 'g'), ''),
NULLIF(
REGEXP_REPLACE(
@@ -2336,7 +3340,6 @@ async def get_sag_emails(sag_id: int):
),
''
),
- NULLIF(REGEXP_REPLACE(TRIM(COALESCE(e.thread_key, '')), '[<>\\s]', '', 'g'), ''),
NULLIF(
REGEXP_REPLACE(
LOWER(TRIM(COALESCE(e.subject, ''))),
@@ -2576,6 +3579,10 @@ async def send_sag_email(sag_id: int, payload: SagSendEmailRequest, request: Req
)
effective_payload_thread_key = payload.thread_key or selected_thread_key
+ if not effective_payload_thread_key:
+ # Brand-new thread: assign a local key immediately so signature/BMCid
+ # can carry a resolvable thread identity from the first outbound email.
+ effective_payload_thread_key = _generate_local_thread_key_for_new_outbound(sag_id)
provisional_thread_key = _derive_thread_key_for_outbound(
effective_payload_thread_key,
@@ -2587,6 +3594,21 @@ async def send_sag_email(sag_id: int, payload: SagSendEmailRequest, request: Req
body_text = _append_signature_to_body(body_text, signature)
body_html = _append_signature_to_html(payload.body_html, signature)
+ # Inject hidden BMCid tracker into HTML body so replies can be routed back
+ bmc_id_tag = _build_case_bmc_id_tag(sag_id, provisional_thread_key)
+    # hidden wrapper approximated; the original markup was lost in extraction
+    hidden_tracker = f'<span style="display:none">BMCid: {bmc_id_tag}</span>'
+ if body_html:
+ body_html = f"{hidden_tracker}{body_html}"
+ elif body_text:
+ # Synthesize minimal HTML wrapper with tracker when only plain text exists
+ import html as _html
+ body_html = f"{hidden_tracker}{_html.escape(body_text)}"
+
+ # Ensure subject carries [SAG-XX] prefix for reliable subject-line matching
+ sag_prefix = f"[SAG-{sag_id}]"
+ if sag_prefix not in subject:
+ subject = f"{sag_prefix} {subject}"
+
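Since every outbound subject now carries the `[SAG-<id>]` prefix, inbound routing can fall back to a subject match when threading headers are lost; a minimal sketch (the helper is illustrative, not part of this diff):

```python
import re
from typing import Optional

SAG_SUBJECT_RE = re.compile(r"\[SAG-(\d+)\]")

def sag_id_from_subject(subject: str) -> Optional[int]:
    """Recover the case id from a reply subject such as 'SV: [SAG-123] Server nede'."""
    match = SAG_SUBJECT_RE.search(subject or "")
    return int(match.group(1)) if match else None
```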
email_service = EmailService()
success, send_message, generated_message_id, provider_thread_key = await email_service.send_email_with_attachments(
to_addresses=to_addresses,
diff --git a/app/modules/sag/frontend/views.py b/app/modules/sag/frontend/views.py
index f35fc8b..a2f53fc 100644
--- a/app/modules/sag/frontend/views.py
+++ b/app/modules/sag/frontend/views.py
@@ -12,6 +12,59 @@ logger = logging.getLogger(__name__)
router = APIRouter()
+def _render_api_print_bridge(api_path: str, page_title: str) -> str:
+ safe_api_path = json.dumps(api_path)
+ safe_title = json.dumps(page_title)
+ return f"""
+
+
+
+
+ Henter printvisning...
+
+
+
+"""
+
+
def _is_deadline_overdue(deadline_value) -> bool:
if not deadline_value:
return False
@@ -128,7 +181,15 @@ async def sager_liste(
COALESCE(u.full_name, u.username) AS ansvarlig_navn,
g.name AS assigned_group_name,
nt.title AS next_todo_title,
- nt.due_date AS next_todo_due_date
+ nt.due_date AS next_todo_due_date,
+ COALESCE(ec.unread_email_count, 0) AS unread_email_count,
+ ec.oldest_unread_received_date,
+ CASE
+ WHEN COALESCE(ec.unread_email_count, 0) = 0 THEN 'none'
+ WHEN ec.oldest_unread_received_date <= NOW() - INTERVAL '72 hours' THEN 'hot'
+ WHEN ec.oldest_unread_received_date <= NOW() - INTERVAL '24 hours' THEN 'warm'
+ ELSE 'fresh'
+ END AS unread_email_level
FROM sag_sager s
LEFT JOIN customers c ON s.customer_id = c.id
LEFT JOIN users u ON u.user_id = s.ansvarlig_bruger_id
@@ -157,6 +218,14 @@ async def sager_liste(
t.created_at ASC
LIMIT 1
) nt ON true
+ LEFT JOIN LATERAL (
+ SELECT
+ COUNT(*) FILTER (WHERE em.deleted_at IS NULL AND COALESCE(em.is_read, FALSE) = FALSE) AS unread_email_count,
+ MIN(em.received_date) FILTER (WHERE em.deleted_at IS NULL AND COALESCE(em.is_read, FALSE) = FALSE) AS oldest_unread_received_date
+ FROM sag_emails se
+ JOIN email_messages em ON em.id = se.email_id
+ WHERE se.sag_id = s.id
+ ) ec ON true
LEFT JOIN sag_sager ds ON ds.id = s.deferred_until_case_id
WHERE s.deleted_at IS NULL
"""
@@ -196,10 +265,26 @@ async def sager_liste(
COALESCE(u.full_name, u.username) AS ansvarlig_navn,
NULL::text AS assigned_group_name,
NULL::text AS next_todo_title,
- NULL::timestamp AS next_todo_due_date
+ NULL::timestamp AS next_todo_due_date,
+ COALESCE(ec.unread_email_count, 0) AS unread_email_count,
+ ec.oldest_unread_received_date,
+ CASE
+ WHEN COALESCE(ec.unread_email_count, 0) = 0 THEN 'none'
+ WHEN ec.oldest_unread_received_date <= NOW() - INTERVAL '72 hours' THEN 'hot'
+ WHEN ec.oldest_unread_received_date <= NOW() - INTERVAL '24 hours' THEN 'warm'
+ ELSE 'fresh'
+ END AS unread_email_level
FROM sag_sager s
LEFT JOIN customers c ON s.customer_id = c.id
LEFT JOIN users u ON u.user_id = s.ansvarlig_bruger_id
+ LEFT JOIN LATERAL (
+ SELECT
+ COUNT(*) FILTER (WHERE em.deleted_at IS NULL AND COALESCE(em.is_read, FALSE) = FALSE) AS unread_email_count,
+ MIN(em.received_date) FILTER (WHERE em.deleted_at IS NULL AND COALESCE(em.is_read, FALSE) = FALSE) AS oldest_unread_received_date
+ FROM sag_emails se
+ JOIN email_messages em ON em.id = se.email_id
+ WHERE se.sag_id = s.id
+ ) ec ON true
WHERE s.deleted_at IS NULL
"""
fallback_params = []
@@ -289,6 +374,7 @@ async def sager_liste(
"toggle_include_deferred_url": toggle_include_deferred_url,
"assignment_users": _fetch_assignment_users(),
"assignment_groups": _fetch_assignment_groups(),
+ "current_customer_id": customer_id_int,
"current_ansvarlig_bruger_id": ansvarlig_bruger_id_int,
"current_assigned_group_id": assigned_group_id_int,
})
@@ -307,6 +393,7 @@ async def sager_liste(
"toggle_include_deferred_url": str(request.url),
"assignment_users": [],
"assignment_groups": [],
+ "current_customer_id": customer_id_int,
"current_ansvarlig_bruger_id": ansvarlig_bruger_id_int,
"current_assigned_group_id": assigned_group_id_int,
})
@@ -320,6 +407,32 @@ async def opret_sag_side(request: Request):
"assignment_groups": _fetch_assignment_groups(),
})
+
+@router.get("/sag/{sag_id}/work-orders/print", response_class=HTMLResponse)
+async def sag_work_order_print_page(request: Request, sag_id: int):
+ auto_print = str(request.query_params.get("auto_print", "0")).lower() in {"1", "true", "yes", "on"}
+ api_path = f"/api/v1/sag/{sag_id}/work-orders/print"
+ if auto_print:
+ api_path = f"{api_path}?auto_print=1"
+ html = _render_api_print_bridge(
+ api_path=api_path,
+ page_title=f"Arbejdsseddel SAG-{sag_id}",
+ )
+ return HTMLResponse(content=html)
+
+
+@router.get("/sag/{sag_id}/labels/hardware/print", response_class=HTMLResponse)
+async def sag_hardware_labels_print_page(request: Request, sag_id: int):
+ auto_print = str(request.query_params.get("auto_print", "0")).lower() in {"1", "true", "yes", "on"}
+ api_path = f"/api/v1/sag/{sag_id}/labels/hardware/print"
+ if auto_print:
+ api_path = f"{api_path}?auto_print=1"
+ html = _render_api_print_bridge(
+ api_path=api_path,
+ page_title=f"Hardware labels SAG-{sag_id}",
+ )
+ return HTMLResponse(content=html)
+
@router.get("/sag/varekob-salg", response_class=HTMLResponse)
async def sag_varekob_salg(request: Request):
"""Display orders overview for all purchases and sales."""
diff --git a/app/modules/sag/templates/create.html b/app/modules/sag/templates/create.html
index f0f217b..ba5dfa9 100644
--- a/app/modules/sag/templates/create.html
+++ b/app/modules/sag/templates/create.html
@@ -124,6 +124,18 @@
[data-bs-theme="dark"] .selected-item button {
color: #a6d5fa;
}
+
+ .case-top-alerts .alert {
+ border-left: 6px solid;
+ }
+
+ .case-top-alerts .alert-warning {
+ border-left-color: #f59f00;
+ }
+
+ .case-top-alerts .alert-danger {
+ border-left-color: #e03131;
+ }
{% endblock %}
@@ -139,6 +151,8 @@
+
+
@@ -311,6 +325,79 @@
let contactSearchTimeout;
let successAlertTimeout;
let telefoniPrefill = { contactId: null, title: null, callId: null, customerId: null, description: null };
+ let topAlertLoadToken = 0;
+
+ function escapeTopAlertHtml(value) {
+ return String(value ?? '')
+      .replace(/&/g, '&amp;')
+      .replace(/</g, '&lt;')
+      .replace(/>/g, '&gt;')
+      .replace(/"/g, '&quot;')
+      .replace(/'/g, '&#39;');
+ }
+
+ async function loadCreateTopAlertsForCustomer(customerId) {
+ const container = document.getElementById('caseTopAlerts');
+ if (!container) {
+ return;
+ }
+
+ if (!customerId) {
+ container.classList.add('d-none');
+ container.innerHTML = '';
+ return;
+ }
+
+ const loadToken = ++topAlertLoadToken;
+ container.classList.remove('d-none');
+    // wrapper markup approximated; the original tags were lost in extraction
+    container.innerHTML = '<div class="text-muted small">Henter kunde-alerts...</div>';
+
+ try {
+ const response = await fetch(`/api/v1/alert-notes/check?entity_type=customer&entity_id=${customerId}`, {
+ credentials: 'include'
+ });
+
+ if (loadToken !== topAlertLoadToken) {
+ return;
+ }
+
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}`);
+ }
+
+ const data = await response.json();
+ const alerts = (data?.alerts || []).filter((alert) => ['critical', 'warning'].includes(String(alert?.severity || '').toLowerCase()));
+
+ if (!alerts.length) {
+ container.classList.add('d-none');
+ container.innerHTML = '';
+ return;
+ }
+
+ container.innerHTML = alerts.map((alert) => {
+ const isCritical = String(alert.severity || '').toLowerCase() === 'critical';
+ const klass = isCritical ? 'alert-danger' : 'alert-warning';
+ const label = isCritical ? 'KRITISK' : 'ADVARSEL';
+ const title = escapeTopAlertHtml(alert.title || 'Vigtig kundeinformation');
+ const message = escapeTopAlertHtml(alert.message || '');
+
+      // alert markup approximated; the original tags were lost in extraction
+      return `
+        <div class="alert ${klass} mb-2" role="alert">
+          <strong>${label}:</strong> ${title}
+          ${message ? `<div class="small mt-1">${message}</div>` : ''}
+        </div>
+      `;
+ }).join('');
+ container.classList.remove('d-none');
+ } catch (error) {
+ if (loadToken !== topAlertLoadToken) {
+ return;
+ }
+ console.error('Failed to load customer alerts on sag create:', error);
+      // wrapper markup approximated; the original tags were lost in extraction
+      container.innerHTML = '<div class="alert alert-warning mb-0">Advarsel: Kunde-alerts kunne ikke hentes.</div>';
+ container.classList.remove('d-none');
+ }
+ }
// Helper function to show success alert
function showSuccessAlert(message, duration = 3000) {
@@ -436,6 +523,7 @@
document.getElementById('customerSearch').value = '';
document.getElementById('customerResults').classList.add('d-none');
renderSelections();
+ loadCreateTopAlertsForCustomer(id);
// Show notification
if (!skipAlert) {
@@ -447,6 +535,7 @@
selectedCustomer = null;
document.getElementById('customer_id').value = '';
renderSelections();
+ loadCreateTopAlertsForCustomer(null);
}
async function selectContact(id, name) {
diff --git a/app/modules/sag/templates/detail.html b/app/modules/sag/templates/detail.html
index fb01c20..18fdc9c 100644
--- a/app/modules/sag/templates/detail.html
+++ b/app/modules/sag/templates/detail.html
@@ -1026,6 +1026,78 @@
min-width: 96px;
}
+ .email-column-shell {
+ border: 1px solid var(--border-color, #dbe3ea);
+ border-radius: 14px;
+ overflow: hidden;
+ background: linear-gradient(180deg, rgba(255, 255, 255, 0.96), rgba(247, 250, 253, 0.96));
+ box-shadow: 0 6px 20px rgba(15, 76, 117, 0.06);
+ }
+
+ .email-column-shell .column-header {
+ background: linear-gradient(90deg, rgba(15, 76, 117, 0.08), rgba(15, 76, 117, 0.02));
+ }
+
+ .email-thread-item .participants-line {
+ font-size: 0.78rem;
+ color: var(--text-secondary);
+ }
+
+ .email-thread-item.active .participants-line {
+ color: rgba(255, 255, 255, 0.82);
+ }
+
+ .mail-read-chip {
+ font-size: 0.72rem;
+ letter-spacing: 0.02em;
+ border-radius: 999px;
+ }
+
+ .email-tab-unread-badge {
+ display: none;
+ margin-left: 0.45rem;
+ min-width: 1.35rem;
+ height: 1.35rem;
+ padding: 0 0.38rem;
+ border-radius: 999px;
+ font-size: 0.72rem;
+ font-weight: 700;
+ line-height: 1.35rem;
+ text-align: center;
+ background: #2f9e44;
+ color: #fff;
+ box-shadow: 0 0 0 2px rgba(255, 255, 255, 0.92);
+ }
+
+ .email-tab-unread-badge.is-warm {
+ background: #f08c00;
+ }
+
+ .email-tab-unread-badge.is-hot {
+ background: #c92a2a;
+ animation: unreadPulse 1.6s ease-in-out infinite;
+ }
+
+ @keyframes unreadPulse {
+ 0% { transform: scale(1); }
+ 50% { transform: scale(1.09); }
+ 100% { transform: scale(1); }
+ }
+
+ [data-bs-theme="dark"] .email-column-shell {
+ background: linear-gradient(180deg, rgba(19, 28, 38, 0.96), rgba(17, 24, 33, 0.96));
+ border-color: rgba(117, 194, 239, 0.2);
+ box-shadow: 0 8px 24px rgba(0, 0, 0, 0.25);
+ }
+
+ [data-bs-theme="dark"] .email-column-shell .column-header {
+ background: linear-gradient(90deg, rgba(117, 194, 239, 0.16), rgba(117, 194, 239, 0.05));
+ }
+
+ [data-bs-theme="dark"] .email-tab-unread-badge {
+ box-shadow: 0 0 0 2px rgba(20, 28, 36, 0.95);
+ }
+
[data-bs-theme="dark"] .narrative-description {
border-color: rgba(117, 194, 239, 0.24);
background: linear-gradient(180deg, rgba(117, 194, 239, 0.14), rgba(117, 194, 239, 0.06));
@@ -1494,7 +1566,33 @@
.hardware-list-header,
.hardware-row {
- grid-template-columns: 1.3fr 1fr auto;
+ grid-template-columns: minmax(0, 1.5fr) minmax(110px, 1fr) 56px 56px;
+ }
+
+ .hardware-list-header span:nth-child(3),
+ .hardware-list-header span:nth-child(4),
+ .hardware-row > *:nth-child(3),
+ .hardware-row > *:nth-child(4) {
+ justify-self: end;
+ }
+
+ .hardware-row > div:first-child {
+ min-width: 0;
+ }
+
+ .hardware-row > div:first-child a {
+ display: inline-block;
+ max-width: 100%;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+ }
+
+ .hardware-row small {
+ min-width: 0;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
}
.location-list-header,
@@ -2220,6 +2318,14 @@
Registrer session
+
Arbejdsdokumenter
+
+
+
@@ -2407,6 +2515,7 @@
@@ -3183,6 +3289,7 @@
{% endblock %}
diff --git a/app/services/brother_label_print_service.py b/app/services/brother_label_print_service.py
new file mode 100644
index 0000000..feec62b
--- /dev/null
+++ b/app/services/brother_label_print_service.py
@@ -0,0 +1,263 @@
+"""Brother QL direct print service for case hardware labels."""
+
+from __future__ import annotations
+
+import logging
+import socket
+from dataclasses import dataclass
+from typing import Iterable, List, Optional
+
+from PIL import Image, ImageDraw, ImageFont
+
+# Compatibility shim: brother_ql may still reference Image.ANTIALIAS,
+# which was removed in newer Pillow releases.
+if not hasattr(Image, "ANTIALIAS") and hasattr(Image, "Resampling"):
+ Image.ANTIALIAS = Image.Resampling.LANCZOS
+
+logger = logging.getLogger(__name__)
+
+try:
+ from brother_ql.backends.helpers import send
+ from brother_ql.conversion import convert
+ from brother_ql.raster import BrotherQLRaster
+ from brother_ql.labels import ALL_LABELS
+except Exception: # pragma: no cover - handled at runtime
+ send = None
+ convert = None
+ BrotherQLRaster = None
+ ALL_LABELS = None
+
+
+_CODE39_PATTERNS = {
+ "0": "nnnwwnwnn", "1": "wnnwnnnnw", "2": "nnwwnnnnw", "3": "wnwwnnnnn",
+ "4": "nnnwwnnnw", "5": "wnnwwnnnn", "6": "nnwwwnnnn", "7": "nnnwnnwnw",
+ "8": "wnnwnnwnn", "9": "nnwwnnwnn", "A": "wnnnnwnnw", "B": "nnwnnwnnw",
+ "C": "wnwnnwnnn", "D": "nnnnwwnnw", "E": "wnnnwwnnn", "F": "nnwnwwnnn",
+ "G": "nnnnnwwnw", "H": "wnnnnwwnn", "I": "nnwnnwwnn", "J": "nnnnwwwnn",
+ "K": "wnnnnnnww", "L": "nnwnnnnww", "M": "wnwnnnnwn", "N": "nnnnwnnww",
+ "O": "wnnnwnnwn", "P": "nnwnwnnwn", "Q": "nnnnnnwww", "R": "wnnnnnwwn",
+ "S": "nnwnnnwwn", "T": "nnnnwnwwn", "U": "wwnnnnnnw", "V": "nwwnnnnnw",
+ "W": "wwwnnnnnn", "X": "nwnnwnnnw", "Y": "wwnnwnnnn", "Z": "nwwnwnnnn",
+ "-": "nwnnnnwnw", ".": "wwnnnnwnn", " ": "nwwnnnwnn", "$": "nwnwnwnnn",
+ "/": "nwnwnnnwn", "+": "nwnnnwnwn", "%": "nnnwnwnwn", "*": "nwnnwnwnn",
+}
+
+
+@dataclass
+class LabelJob:
+ name: str
+ meta_line: str
+ token: str
+
+
+class BrotherLabelPrintService:
+ def __init__(
+ self,
+ model: str,
+ host: str,
+ port: int,
+ label_size: str,
+ ) -> None:
+ self.model = (model or "QL-710W").strip()
+ self.host = (host or "").strip()
+ self.port = int(port or 9100)
+ self.label_size = self._normalize_label_size((label_size or "62").strip())
+ self.label_spec = self._resolve_label_spec(self.label_size)
+ self.printable_width = self._resolve_printable_width(self.label_size)
+ self.printable_height = self._resolve_printable_height(self.label_size)
+ self.is_die_cut = bool(self.label_spec and getattr(self.label_spec, "form_factor", None) and "DIE_CUT" in str(getattr(self.label_spec, "form_factor", "")))
+
+ @property
+ def printer_identifier(self) -> str:
+ return f"tcp://{self.host}:{self.port}"
+
+ def print_jobs(self, jobs: Iterable[LabelJob]) -> int:
+ if not self.host:
+ raise ValueError("Printer host is missing")
+ if not send or not convert or not BrotherQLRaster:
+ raise RuntimeError("brother_ql library is not installed in this environment")
+
+ send_func = send
+ convert_func = convert
+ raster_cls = BrotherQLRaster
+
+ rendered_images = [self._build_label_image(job) for job in jobs]
+ if not rendered_images:
+ return 0
+
+ qlr = raster_cls(self.model)
+ instructions = convert_func(
+ qlr=qlr,
+ images=rendered_images,
+ label=self.label_size,
+ rotate='auto' if self.is_die_cut else 0,
+ cut=True,
+ dither=False,
+ compress=False,
+ red=False,
+ dpi_600=False,
+ )
+
+ self._send_to_printer(instructions, send_func)
+ return len(rendered_images)
+
+ def _send_to_printer(self, instructions: List[bytes], send_func) -> None:
+ target = self.printer_identifier
+ # brother_ql helper changed call signature across versions.
+ try:
+ send_func(instructions, target, "network", blocking=True)
+ return
+ except TypeError:
+ pass
+
+ try:
+ send_func(instructions=instructions, printer_identifier=target, backend_identifier="network", blocking=True)
+ return
+ except TypeError:
+ pass
+
+ # Final fallback to raw socket stream for network printers.
+ payload = b"".join(instructions)
+ with socket.create_connection((self.host, self.port), timeout=10) as conn:
+ conn.sendall(payload)
+
+ def _build_label_image(self, job: LabelJob) -> Image.Image:
+ width = self.printable_width
+ height = self.printable_height if self.printable_height > 0 else 220
+ image = Image.new("RGB", (width, height), "white")
+ draw = ImageDraw.Draw(image)
+ font_title = ImageFont.load_default()
+ font_meta = ImageFont.load_default()
+ font_token = ImageFont.load_default()
+
+ title = (job.name or "Ukendt enhed")[:52]
+ meta = (job.meta_line or "-")[:88]
+ token = (job.token or "")[:64]
+
+ left = 12
+ top = 8
+ right = max(left + 1, width - 12)
+
+ # Compact layout for die-cut labels to fit exact printable area.
+ if self.is_die_cut:
+ title_y = top
+ meta_y = title_y + 18
+ barcode_y = meta_y + 16
+ token_y = min(height - 14, barcode_y + max(26, int(height * 0.28)) + 4)
+ bar_height = max(24, min(int(height * 0.28), height - barcode_y - 22))
+ else:
+ title_y = 12
+ meta_y = 34
+ barcode_y = 64
+ token_y = min(height - 16, 170)
+ bar_height = max(48, min(92, height - barcode_y - 26))
+
+ draw.text((left, title_y), title, fill="black", font=font_title)
+ draw.text((left, meta_y), meta, fill="black", font=font_meta)
+ self._draw_code39(draw, token, x=left, y=barcode_y, max_width=max(60, right - left), bar_height=bar_height)
+ draw.text((left, token_y), token, fill="black", font=font_token)
+ return image
+
+ def _normalize_label_size(self, label_size: str) -> str:
+ wanted = str(label_size or "").strip()
+ if wanted == "29":
+ # Legacy compatibility: old config often used "29" while hardware stock is 62x29 die-cut.
+ logger.warning("⚠️ Label size '29' mapped to '62x29' for Brother QL hardware labels")
+ return "62x29"
+ return wanted or "62"
+
+ @staticmethod
+ def _resolve_label_spec(label_size: str):
+ if not ALL_LABELS:
+ return None
+ wanted = str(label_size or "").strip()
+ for lbl in ALL_LABELS:
+ if getattr(lbl, "identifier", "") == wanted:
+ return lbl
+ return None
+
+ @staticmethod
+ def _resolve_printable_width(label_size: str) -> int:
+ default_width = 696 # 62mm endless printable width
+ if not ALL_LABELS:
+ return default_width
+ try:
+ wanted = str(label_size or "").strip()
+ for lbl in ALL_LABELS:
+ if getattr(lbl, "identifier", "") == wanted:
+ dots = getattr(lbl, "dots_printable", None)
+ if isinstance(dots, tuple) and len(dots) > 0 and int(dots[0]) > 0:
+ return int(dots[0])
+ except Exception:
+ return default_width
+ return default_width
+
+ @staticmethod
+ def _resolve_printable_height(label_size: str) -> int:
+ if not ALL_LABELS:
+ return 220
+ try:
+ wanted = str(label_size or "").strip()
+ for lbl in ALL_LABELS:
+ if getattr(lbl, "identifier", "") == wanted:
+ dots = getattr(lbl, "dots_printable", None)
+ if isinstance(dots, tuple) and len(dots) > 1 and int(dots[1]) > 0:
+ return int(dots[1])
+ return 220
+ except Exception:
+ return 220
+ return 220
+
+ def _draw_code39(
+ self,
+ draw: ImageDraw.ImageDraw,
+ value: str,
+ x: int,
+ y: int,
+ max_width: int,
+ bar_height: int,
+ ) -> None:
+ safe = "".join(ch for ch in (value or "").upper() if ch in _CODE39_PATTERNS and ch != "*")
+ if not safe:
+ safe = "EMPTY"
+ seq = f"*{safe}*"
+
+ # Prefer physically narrower bars first; scanners struggle when Code39
+ # modules become too wide on small die-cut labels.
+ variants = [
+ (1, 2, 0),
+ (1, 3, 1),
+ (2, 5, 1),
+ ]
+
+ narrow, wide, gap = variants[0]
+ for candidate in variants:
+ c_narrow, c_wide, c_gap = candidate
+ width = self._code39_width(seq, c_narrow, c_wide, c_gap)
+ if width <= max_width:
+ narrow, wide, gap = c_narrow, c_wide, c_gap
+ break
+
+ cursor = x
+ for ch in seq:
+ pattern = _CODE39_PATTERNS[ch]
+ for idx, code in enumerate(pattern):
+ stroke = wide if code == "w" else narrow
+ if idx % 2 == 0:
+ draw.rectangle([cursor, y, cursor + stroke - 1, y + bar_height], fill="black")
+ cursor += stroke
+ if idx < len(pattern) - 1:
+ cursor += gap
+ cursor += gap
+
+ @staticmethod
+ def _code39_width(sequence: str, narrow: int, wide: int, gap: int) -> int:
+ total = 0
+ for ch in sequence:
+ pattern = _CODE39_PATTERNS[ch]
+ for idx, code in enumerate(pattern):
+ total += wide if code == "w" else narrow
+ if idx < len(pattern) - 1:
+ total += gap
+ total += gap
+ return total
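+
+ # Worked example (assuming the usual 9-element Code39 patterns with 3 wide
+ # elements per character): with narrow=1, wide=2, gap=0 each character spans
+ # 3*2 + 6*1 = 12 dots, so the framed sequence "*ABC*" needs 5 * 12 = 60 dots,
+ # comfortably inside the 696-dot printable width of 62mm endless tape.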
diff --git a/app/services/cvr_service.py b/app/services/cvr_service.py
index 20410ef..db8201f 100644
--- a/app/services/cvr_service.py
+++ b/app/services/cvr_service.py
@@ -1,20 +1,59 @@
"""
-CVR.dk API service for looking up Danish company information
-Free public API - no authentication required
-Adapted from OmniSync for BMC Hub
+CVR service for looking up Danish company information.
+
+Primary provider: FirmaAPI (authenticated).
+Legacy fallback: cvrapi.dk when no FirmaAPI key is configured.
"""
import asyncio
import aiohttp
import logging
from typing import Optional, Dict
+from app.core.config import settings
+
logger = logging.getLogger(__name__)
class CVRService:
- """Service for CVR.dk API lookups"""
-
- BASE_URL = "https://cvrapi.dk/api"
+ """Service for CVR lookups using FirmaAPI (or legacy fallback)."""
+
+ LEGACY_BASE_URL = "https://cvrapi.dk/api"
+
+ @property
+ def firmaapi_base_url(self) -> str:
+ return settings.FIRMAAPI_BASE_URL.rstrip("/")
+
+ @property
+ def firmaapi_timeout(self) -> aiohttp.ClientTimeout:
+ return aiohttp.ClientTimeout(total=settings.FIRMAAPI_TIMEOUT_SECONDS)
+
+ @property
+ def has_firmaapi_key(self) -> bool:
+ return bool((settings.FIRMAAPI_API_KEY or "").strip())
+
+ def _firmaapi_headers(self) -> Dict[str, str]:
+ api_key = (settings.FIRMAAPI_API_KEY or "").strip()
+ return {
+ "Authorization": f"Bearer {api_key}",
+ "Accept": "application/json",
+ }
+
+ @staticmethod
+ def _normalize_payload(payload: Dict) -> Dict:
+ return {
+ "cvr": payload.get("cvr") or payload.get("vat"),
+ "name": payload.get("name"),
+ "address": payload.get("address"),
+ "city": payload.get("city"),
+ "zipcode": payload.get("zipcode"),
+ "postal_code": payload.get("zipcode") or payload.get("postal_code"),
+ "country": payload.get("country") or "DK",
+ "phone": payload.get("phone"),
+ "email": payload.get("email"),
+ "website": payload.get("website"),
+ "status": payload.get("status"),
+ "source": "firmaapi" if payload.get("meta", {}).get("source") == "FirmaAPI" else payload.get("source", "firmaapi"),
+ }
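+
+ # Example (illustrative payloads): a legacy cvrapi.dk response
+ # {"vat": 12345678, "name": "BMC", "zipcode": "8000"} and a FirmaAPI response
+ # {"cvr": 12345678, "name": "BMC", "zipcode": "8000", "meta": {"source": "FirmaAPI"}}
+ # both normalize to the same flat shape, with "postal_code" aliasing
+ # "zipcode" and "country" defaulting to "DK".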
async def lookup_by_name(self, company_name: str) -> Optional[Dict]:
"""
@@ -33,43 +72,44 @@ class CVRService:
clean_name = company_name.strip()
try:
- params = {
- 'search': clean_name,
- 'country': 'dk'
- }
-
+ if self.has_firmaapi_key:
+ async with aiohttp.ClientSession() as session:
+ async with session.get(
+ f"{self.firmaapi_base_url}/company/search",
+ params={"q": clean_name, "limit": 1},
+ headers=self._firmaapi_headers(),
+ timeout=self.firmaapi_timeout,
+ ) as response:
+ if response.status == 200:
+ data = await response.json()
+ results = data.get("results") or []
+ if results:
+ match = results[0]
+ logger.info("✅ Found CVR %s for '%s' via FirmaAPI", match.get("cvr"), company_name)
+ return self._normalize_payload(match)
+ return None
+
+ if response.status == 404:
+ return None
+
+ detail = await response.text()
+ logger.error("❌ FirmaAPI name lookup error %s for '%s': %s", response.status, company_name, detail[:240])
+ return None
+
+ # Legacy fallback without API key
+ params = {"search": clean_name, "country": "dk"}
async with aiohttp.ClientSession() as session:
async with session.get(
- f"{self.BASE_URL}",
+ f"{self.LEGACY_BASE_URL}",
params=params,
- timeout=aiohttp.ClientTimeout(total=10)
+ timeout=aiohttp.ClientTimeout(total=10),
) as response:
if response.status == 200:
data = await response.json()
-
- if data and 'vat' in data:
- logger.info(f"✅ Found CVR {data['vat']} for '{company_name}'")
- return {
- 'cvr': data.get('vat'),
- 'name': data.get('name'),
- 'address': data.get('address'),
- 'city': data.get('city'),
- 'zipcode': data.get('zipcode'),
- 'country': data.get('country'),
- 'phone': data.get('phone'),
- 'email': data.get('email'),
- 'vat': data.get('vat'),
- 'status': data.get('status')
- }
-
- elif response.status == 404:
- logger.warning(f"⚠️ No CVR found for '{company_name}'")
- return None
-
- else:
- logger.error(f"❌ CVR API error {response.status} for '{company_name}'")
- return None
-
+ if data and "vat" in data:
+ return self._normalize_payload(data)
+ return None
+
except asyncio.TimeoutError:
logger.error(f"⏱️ CVR API timeout for '{company_name}'")
return None
@@ -99,33 +139,39 @@ class CVRService:
return None
try:
+ if self.has_firmaapi_key:
+ async with aiohttp.ClientSession() as session:
+ async with session.get(
+ f"{self.firmaapi_base_url}/company/{cvr_clean}",
+ headers=self._firmaapi_headers(),
+ timeout=self.firmaapi_timeout,
+ ) as response:
+ if response.status == 200:
+ data = await response.json()
+ logger.info("✅ Validated CVR %s via FirmaAPI", cvr_clean)
+ return self._normalize_payload(data)
+
+ if response.status in (400, 404):
+ return None
+
+ detail = await response.text()
+ logger.error("❌ FirmaAPI CVR lookup error %s for %s: %s", response.status, cvr_clean, detail[:240])
+ return None
+
+ # Legacy fallback without API key
async with aiohttp.ClientSession() as session:
async with session.get(
- f"{self.BASE_URL}",
- params={'vat': cvr_clean, 'country': 'dk'},
- timeout=aiohttp.ClientTimeout(total=10)
+ f"{self.LEGACY_BASE_URL}",
+ params={"vat": cvr_clean, "country": "dk"},
+ timeout=aiohttp.ClientTimeout(total=10),
) as response:
if response.status == 200:
data = await response.json()
-
- if data and 'vat' in data:
- logger.info(f"✅ Validated CVR {cvr_clean}")
- return {
- 'cvr': data.get('vat'),
- 'name': data.get('name'),
- 'address': data.get('address'),
- 'city': data.get('city'),
- 'zipcode': data.get('zipcode'),
- 'postal_code': data.get('zipcode'), # Alias for consistency
- 'country': data.get('country'),
- 'phone': data.get('phone'),
- 'email': data.get('email'),
- 'vat': data.get('vat'),
- 'status': data.get('status')
- }
-
+ if data and "vat" in data:
+ logger.info("✅ Validated CVR %s via legacy CVR API", cvr_clean)
+ return self._normalize_payload(data)
return None
-
+
except Exception as e:
logger.error(f"❌ CVR validation error for {cvr_number}: {e}")
return None
diff --git a/app/services/email_service.py b/app/services/email_service.py
index bc04425..0697306 100644
--- a/app/services/email_service.py
+++ b/app/services/email_service.py
@@ -766,12 +766,100 @@ class EmailService:
query = "SELECT id FROM email_messages WHERE message_id = %s AND deleted_at IS NULL"
result = execute_query(query, (message_id,))
return len(result) > 0
+
+ def _adopt_parent_thread_key(self, email_data: Dict, derived_thread_key: Optional[str]) -> Optional[str]:
+ """Look up parent emails by References/In-Reply-To and adopt their thread_key
+ so outgoing+incoming emails share the same canonical group key."""
+
+ # Strategy 1: If the email has an explicit provider thread key (e.g. Graph
+ # conversationId), check if ANY existing email in the DB already uses it as
+ # its thread_key. ConversationId is the most reliable stable identifier
+ # across all emails in an Exchange conversation.
+ explicit_thread_key = self._normalize_message_id_value(email_data.get("thread_key"))
+ if explicit_thread_key:
+ try:
+ rows = execute_query(
+ """
+ SELECT thread_key
+ FROM email_messages
+ WHERE deleted_at IS NULL
+ AND LOWER(REGEXP_REPLACE(COALESCE(thread_key, ''), '[<>\\s]', '', 'g')) = %s
+ LIMIT 1
+ """,
+ (explicit_thread_key,),
+ )
+ if rows:
+ logger.info(
+ "🧵 Adopted conversationId thread_key '%s' for incoming email (derived was '%s')",
+ explicit_thread_key,
+ derived_thread_key,
+ )
+ return explicit_thread_key
+ except Exception as e:
+ logger.warning("⚠️ Failed conversationId thread_key lookup: %s", e)
+
+ # Strategy 2: Look up parent emails by message_id matching our
+ # References/In-Reply-To headers.
+ parent_ids: List[str] = []
+ ref_ids = self._extract_reference_ids(email_data.get("email_references"))
+ parent_ids.extend(ref_ids)
+ in_reply = self._normalize_message_id_value(email_data.get("in_reply_to"))
+ if in_reply and in_reply not in parent_ids:
+ parent_ids.append(in_reply)
+
+ if not parent_ids:
+ # Strategy 3: No thread headers at all — try conversationId as thread_key
+ # even if no existing email has it yet (new conversation from Graph).
+ if explicit_thread_key:
+ return explicit_thread_key
+ return derived_thread_key
+
+ # Query parent emails that already have a thread_key stored
+ placeholders = ",".join(["%s"] * len(parent_ids))
+ try:
+ rows = execute_query(
+ f"""
+ SELECT thread_key
+ FROM email_messages
+ WHERE deleted_at IS NULL
+ AND thread_key IS NOT NULL
+ AND TRIM(thread_key) != ''
+ AND LOWER(REGEXP_REPLACE(COALESCE(message_id, ''), '[<>\\s]', '', 'g')) IN ({placeholders})
+ ORDER BY received_date ASC
+ LIMIT 1
+ """,
+ tuple(parent_ids),
+ )
+ if rows and rows[0].get("thread_key"):
+ adopted = self._normalize_message_id_value(rows[0]["thread_key"])
+ if adopted:
+ logger.info(
+ "🧵 Adopted parent thread_key '%s' for incoming email (derived was '%s')",
+ adopted,
+ derived_thread_key,
+ )
+ return adopted
+ except Exception as e:
+ logger.warning("⚠️ Failed to adopt parent thread_key: %s", e)
+
+ # Fallback: prefer the explicit conversationId over derived References[0]
+ # since the References message-id often doesn't match any stored message_id
+ if explicit_thread_key:
+ return explicit_thread_key
+
+ return derived_thread_key
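+
+ # Illustrative flow (hypothetical ids): if another stored email already
+ # uses the incoming mail's conversationId as its thread_key, that key is
+ # adopted first; otherwise a reply carrying References "<abc@contoso>"
+ # adopts the thread_key of the stored email whose message_id normalizes to
+ # "abc@contoso"; if no parent row matches, the explicit conversationId
+ # (when present) wins over the derived References[0] key.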
async def save_email(self, email_data: Dict) -> Optional[int]:
"""Save email to database"""
try:
thread_key = self._derive_thread_key(email_data)
+ # When this email is a reply, look up the parent email(s) by
+ # message_id matching our References/In-Reply-To. If the parent
+ # already has a thread_key stored, adopt it so both emails share the
+ # same canonical key and are grouped in the same visual thread.
+ thread_key = self._adopt_parent_thread_key(email_data, thread_key)
+
try:
query = """
INSERT INTO email_messages
diff --git a/app/services/email_workflow_service.py b/app/services/email_workflow_service.py
index 888abf4..e3a788c 100644
--- a/app/services/email_workflow_service.py
+++ b/app/services/email_workflow_service.py
@@ -11,10 +11,12 @@ import re
import json
import hashlib
import shutil
+import io
from pathlib import Path
from decimal import Decimal
+from uuid import uuid4
-from app.core.database import execute_query, execute_insert, execute_update
+from app.core.database import execute_query, execute_insert, execute_update, table_has_column
from app.core.config import settings
from app.services.email_activity_logger import email_activity_logger
@@ -37,6 +39,8 @@ class EmailWorkflowService:
'bankruptcy',
'recording'
}
+
+ _SCAN_TOKEN_PATTERN = re.compile(r'\bBMCSCAN-[A-Z0-9-]{10,100}\b', re.IGNORECASE)
async def execute_workflows(self, email_data: Dict) -> Dict:
"""
@@ -91,11 +95,16 @@ class EmailWorkflowService:
logger.info("✅ Bankruptcy system workflow executed successfully")
# Special System Workflow: Helpdesk SAG routing
- # - If SAG/tråd-hint findes => forsøg altid routing til eksisterende sag
+ # - If a SAG/thread hint is present => try routing to the existing case
+ # - Newsletters/spam skip routing ENTIRELY (even with thread hints)
+ # - Without hints: use classification gating as before
+ HARD_SKIP = {'newsletter', 'spam'}
should_try_helpdesk = (
- classification not in self.HELPDESK_SKIP_CLASSIFICATIONS
- or has_hint
+ classification not in HARD_SKIP
+ and (
+ classification not in self.HELPDESK_SKIP_CLASSIFICATIONS
+ or has_hint
+ )
)
if should_try_helpdesk:
@@ -223,12 +232,16 @@ class EmailWorkflowService:
return domain or None
def has_helpdesk_routing_hint(self, email_data: Dict) -> bool:
- """Return True when email has explicit routing hints (SAG or thread headers/key)."""
- if self._extract_sag_id(email_data):
+ """Return True when email has explicit routing hints (SAG tag, BMCid, or reply headers).
+
+ NOTE: A bare thread_key (Graph conversationId) is NOT a routing hint
+ because every Graph email has one, including newsletters and spam.
+ Only actual reply indicators (In-Reply-To, References), explicit
+ SAG tags, or BMCid markers count as hints."""
+ if self._extract_bmc_id(email_data):
return True
- explicit_thread_key = self._normalize_message_id(email_data.get('thread_key'))
- if explicit_thread_key:
+ if self._extract_sag_id(email_data):
return True
if self._normalize_message_id(email_data.get('in_reply_to')):
@@ -239,7 +252,33 @@ class EmailWorkflowService:
return False
+ def _extract_bmc_id(self, email_data: Dict) -> Optional[Dict[str, Any]]:
+ """Extract structured BMCid from email body/subject.
+
+ Returns dict with 'sag_id' (int) and 'thread_suffix' (str, e.g. '472193')
+ or None if no BMCid is found.
+ """
+ candidates = [
+ email_data.get('body_html') or '',
+ email_data.get('body_text') or '',
+ email_data.get('subject') or '',
+ ]
+ pattern = r'\bBMCid\s*:\s*s(\d+)t(\d+)\b'
+ for value in candidates:
+ match = re.search(pattern, value, re.IGNORECASE)
+ if match:
+ return {
+ 'sag_id': int(match.group(1)),
+ 'thread_suffix': match.group(2),
+ }
+ return None
+
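+ # Example: a body containing "BMCid: s53t472193" (hypothetical ids) yields
+ # {'sag_id': 53, 'thread_suffix': '472193'}; matching is case-insensitive
+ # and tolerates whitespace around the colon.
+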
def _extract_sag_id(self, email_data: Dict) -> Optional[int]:
+ # First try structured BMCid (most reliable)
+ bmc_id = self._extract_bmc_id(email_data)
+ if bmc_id:
+ return bmc_id['sag_id']
+
candidates = [
email_data.get('subject') or '',
email_data.get('in_reply_to') or '',
@@ -249,14 +288,15 @@ class EmailWorkflowService:
]
# Accept both strict and human variants used in real subjects, e.g.:
+ # - [SAG-53] (hidden/subject prefix)
# - SAG-53
# - SAG #53
# - Sag 53
sag_patterns = [
+ r'\[SAG-(\d+)\]',
r'\bSAG-(\d+)\b',
r'\bSAG\s*#\s*(\d+)\b',
r'\bSAG\s+(\d+)\b',
- r'\bBMCid\s*:\s*s(\d+)t\d+\b',
]
for value in candidates:
@@ -327,11 +367,14 @@ class EmailWorkflowService:
FROM sag_emails se
JOIN email_messages em ON em.id = se.email_id
WHERE em.deleted_at IS NULL
- AND LOWER(REGEXP_REPLACE(COALESCE(em.thread_key, ''), '[<>\\s]', '', 'g')) = %s
+ AND (
+ LOWER(REGEXP_REPLACE(COALESCE(em.thread_key, ''), '[<>\\s]', '', 'g')) = %s
+ OR LOWER(REGEXP_REPLACE(COALESCE(em.message_id, ''), '[<>\\s]', '', 'g')) = %s
+ )
ORDER BY se.created_at DESC
LIMIT 1
""",
- (thread_key,)
+ (thread_key, thread_key)
)
return rows[0]['sag_id'] if rows else None
except Exception:
@@ -357,11 +400,23 @@ class EmailWorkflowService:
)
return rows[0]['sag_id'] if rows else None
+ # Sender domains that should never trigger customer-domain SAG creation.
+ # Includes own sending domain and common automated senders.
+ _IGNORED_SENDER_DOMAINS = {
+ 'bmcnetworks.dk',
+ 'bmchub.local',
+ }
+
def _find_customer_by_domain(self, domain: str) -> Optional[Dict[str, Any]]:
if not domain:
return None
domain = domain.lower().strip()
+
+ # Never match the system's own sending domain as a customer
+ if domain in self._IGNORED_SENDER_DOMAINS:
+ return None
+
domain_alt = domain[4:] if domain.startswith('www.') else f"www.{domain}"
query = """
@@ -378,6 +433,114 @@ class EmailWorkflowService:
rows = execute_query(query, (domain, domain_alt))
return rows[0] if rows else None
+ def _find_thread_key_by_bmc_suffix(self, sag_id: int, thread_suffix: str) -> Optional[str]:
+ """Find the thread_key of an outgoing email whose BMCid matches s{sag_id}t{thread_suffix}."""
+ try:
+ # Legacy compatibility: older outbound emails used t001 when the
+ # provisional thread key was unknown. In that case, pick the most
+ # recent outbound thread key in the same case, as a best effort.
+ if str(thread_suffix) == '001':
+ fallback = execute_query(
+ """
+ SELECT em.thread_key
+ FROM sag_emails se
+ JOIN email_messages em ON em.id = se.email_id
+ WHERE se.sag_id = %s
+ AND em.deleted_at IS NULL
+ AND em.thread_key IS NOT NULL
+ AND TRIM(em.thread_key) != ''
+ AND LOWER(COALESCE(em.sender_email, '')) = %s
+ ORDER BY em.received_date DESC
+ LIMIT 1
+ """,
+ (sag_id, 'noreply@bmcnetworks.dk'),
+ )
+ if fallback and fallback[0].get('thread_key'):
+ return fallback[0]['thread_key']
+
+ rows = execute_query(
+ """
+ SELECT em.thread_key
+ FROM sag_emails se
+ JOIN email_messages em ON em.id = se.email_id
+ WHERE se.sag_id = %s
+ AND em.deleted_at IS NULL
+ AND em.thread_key IS NOT NULL
+ AND TRIM(em.thread_key) != ''
+ ORDER BY em.received_date DESC
+ """,
+ (sag_id,),
+ )
+ if not rows:
+ return None
+
+ # Rebuild the BMCid suffix for each candidate thread_key
+ # and return the one that matches our target suffix.
+ for row in rows:
+ tk = row['thread_key']
+ normalized = re.sub(r"[^a-z0-9]+", "", str(tk).lower())
+ if not normalized:
+ continue
+ digest = hashlib.sha1(normalized.encode("utf-8")).hexdigest()
+ candidate_suffix = str((int(digest[:8], 16) % 900000) + 100000)
+ if candidate_suffix == thread_suffix:
+ return tk
+ return None
+ except Exception as e:
+ logger.warning("⚠️ Failed BMCid thread_key lookup: %s", e)
+ return None
+
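+ # Suffix derivation used above (illustrative): the candidate thread_key is
+ # lower-cased and stripped to [a-z0-9], SHA-1 hashed, and the first 8 hex
+ # digits are folded via `% 900000 + 100000` into the 6-digit range
+ # 100000..999999 -- presumably mirroring how outbound mail stamps
+ # "BMCid: s{sag_id}t{suffix}".
+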
+ def _update_email_thread_key(self, email_id: int, thread_key: str) -> None:
+ """Set the thread_key on an email so it groups correctly."""
+ execute_update(
+ "UPDATE email_messages SET thread_key = %s, updated_at = CURRENT_TIMESTAMP WHERE id = %s",
+ (thread_key, email_id),
+ )
+
+ async def _finalize_sag_routing(
+ self, email_id: int, email_data: Dict, sag_id: int, routing_source: str
+ ) -> Dict[str, Any]:
+ """Link an email to an existing SAG and mark as processed."""
+ case_rows = execute_query(
+ "SELECT id, customer_id, titel FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
+ (sag_id,),
+ )
+ if not case_rows:
+ logger.warning("⚠️ Email %s referenced SAG-%s but case was not found", email_id, sag_id)
+ return {'status': 'skipped', 'action': 'sag_id_not_found', 'sag_id': sag_id}
+
+ case = case_rows[0]
+ self._add_helpdesk_comment(sag_id, email_data)
+ self._link_email_to_sag(sag_id, email_id)
+
+ execute_update(
+ """
+ UPDATE email_messages
+ SET linked_case_id = %s,
+ customer_id = COALESCE(customer_id, %s),
+ status = 'processed',
+ folder = 'Processed',
+ processed_at = CURRENT_TIMESTAMP,
+ auto_processed = true
+ WHERE id = %s
+ """,
+ (sag_id, case.get('customer_id'), email_id),
+ )
+
+ token_for_attach = None
+ token_route = self._resolve_scan_token_route(email_id, email_data)
+ if token_route:
+ token_for_attach = token_route.get('token')
+ self._auto_attach_scanner_email(email_id, sag_id, token_for_attach)
+
+ return {
+ 'status': 'completed',
+ 'action': 'updated_existing_sag',
+ 'sag_id': sag_id,
+ 'customer_id': case.get('customer_id'),
+ 'routing_source': routing_source,
+ }
+
def _link_email_to_sag(self, sag_id: int, email_id: int) -> None:
execute_update(
"""
@@ -390,6 +553,379 @@ class EmailWorkflowService:
(sag_id, email_id, sag_id, email_id)
)
+ def _extract_scan_tokens(self, *values: Optional[str]) -> List[str]:
+ tokens: List[str] = []
+ for value in values:
+ if not value:
+ continue
+ found = self._SCAN_TOKEN_PATTERN.findall(str(value))
+ if found:
+ tokens.extend(token.upper() for token in found)
+ return list(dict.fromkeys(tokens))
+
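+ # Example: "Scan BMCSCAN-WO-S53-AB12CD34 today" (hypothetical token) yields
+ # ["BMCSCAN-WO-S53-AB12CD34"]; dict.fromkeys() dedupes across subject/body
+ # while preserving first-seen order.
+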
+ def _resolve_scan_token_route(self, email_id: int, email_data: Dict) -> Optional[Dict[str, Any]]:
+ text_tokens = self._extract_scan_tokens(
+ email_data.get('subject'),
+ email_data.get('body_text'),
+ email_data.get('body_html'),
+ email_data.get('in_reply_to'),
+ email_data.get('email_references'),
+ )
+
+ filename_tokens: List[str] = []
+ attachment_content_tokens: List[str] = []
+ try:
+ attachment_rows = execute_query(
+ """
+ SELECT filename, content_type, content_data, file_path
+ FROM email_attachments
+ WHERE email_id = %s
+ ORDER BY id ASC
+ """,
+ (email_id,),
+ ) or []
+ for row in attachment_rows:
+ filename_tokens.extend(self._extract_scan_tokens(row.get('filename')))
+ attachment_content_tokens.extend(
+ self._extract_scan_tokens_from_attachment(
+ filename=row.get('filename'),
+ content_type=row.get('content_type'),
+ content_data=row.get('content_data'),
+ file_path=row.get('file_path'),
+ )
+ )
+ except Exception as exc:
+ logger.warning("⚠️ Failed to inspect attachment filenames for scan token: %s", exc)
+
+ all_tokens = list(dict.fromkeys(text_tokens + filename_tokens + attachment_content_tokens))
+ if not all_tokens:
+ return self._resolve_scan_route_from_scanner_headers(email_data)
+
+ placeholders = ','.join(['%s'] * len(all_tokens))
+ try:
+ rows = execute_query(
+ f"""
+ SELECT token, sag_id, token_type
+ FROM sag_document_tokens
+ WHERE token IN ({placeholders})
+ AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
+ ORDER BY consumed_at IS NULL DESC, created_at DESC
+ LIMIT 1
+ """,
+ tuple(all_tokens),
+ )
+ if rows:
+ return rows[0]
+
+ # Fallback for scanner workflows where token only exists in barcode image
+ # and therefore not in plain text metadata.
+ return self._resolve_scan_route_from_scanner_headers(email_data)
+ except Exception as exc:
+ logger.warning("⚠️ Scan token lookup failed: %s", exc)
+ return self._resolve_scan_route_from_scanner_headers(email_data)
+
+ def _extract_scan_tokens_from_attachment(
+ self,
+ filename: Optional[str],
+ content_type: Optional[str],
+ content_data: Optional[Any],
+ file_path: Optional[str],
+ ) -> List[str]:
+ tokens: List[str] = []
+
+ payload: Optional[bytes] = None
+ if content_data is not None:
+ try:
+ payload = bytes(content_data)
+ except Exception:
+ payload = None
+
+ if payload is None and file_path:
+ try:
+ payload = Path(file_path).read_bytes()
+ except Exception:
+ payload = None
+
+ if not payload:
+ return tokens
+
+ # 1) Cheap text extraction directly from bytes catches tokens in OCR-layer PDFs,
+ # plain text files, or metadata-rich attachments.
+ try:
+ sample = payload[:1_500_000]
+ tokens.extend(self._extract_scan_tokens(sample.decode('utf-8', errors='ignore')))
+ tokens.extend(self._extract_scan_tokens(sample.decode('latin-1', errors='ignore')))
+ except Exception:
+ pass
+
+ ext = (Path(str(filename or '')).suffix or '').lower().strip('.')
+ ctype = (content_type or '').lower()
+
+ # 2) PDF text-layer extraction (when available) for scanned documents with OCR.
+ if ext == 'pdf' or 'pdf' in ctype:
+ try:
+ from pypdf import PdfReader # type: ignore
+
+ reader = PdfReader(io.BytesIO(payload))
+ text_chunks: List[str] = []
+ for page in reader.pages[:5]:
+ extracted = page.extract_text() or ''
+ if extracted:
+ text_chunks.append(extracted)
+ if text_chunks:
+ tokens.extend(self._extract_scan_tokens("\n".join(text_chunks)))
+ except Exception:
+ pass
+
+ # 3) Decode barcode directly from scanned attachments.
+ # This catches cases where BMCSCAN exists only as a barcode image.
+ try:
+ if ext == 'pdf' or 'pdf' in ctype:
+ tokens.extend(self._extract_scan_tokens_from_pdf_barcode(payload))
+ else:
+ tokens.extend(self._extract_scan_tokens_from_image_barcode(payload))
+ except Exception:
+ pass
+
+ return list(dict.fromkeys(token.upper() for token in tokens if token))
+
+ def _extract_scan_tokens_from_image_barcode(self, payload: bytes) -> List[str]:
+ try:
+ from PIL import Image # type: ignore
+ from pyzbar.pyzbar import decode as zbar_decode # type: ignore
+ except Exception:
+ return []
+
+ try:
+ image = Image.open(io.BytesIO(payload))
+ except Exception:
+ return []
+
+ decoded_tokens: List[str] = []
+ variants = [image]
+ try:
+ variants.append(image.convert('L'))
+ variants.append(image.convert('L').point(lambda p: 255 if p > 140 else 0))
+ except Exception:
+ pass
+
+ for variant in variants:
+ try:
+ for item in zbar_decode(variant):
+ raw = item.data.decode('utf-8', errors='ignore')
+ decoded_tokens.extend(self._extract_scan_tokens(raw))
+ except Exception:
+ continue
+
+ return list(dict.fromkeys(decoded_tokens))
+
+ def _extract_scan_tokens_from_pdf_barcode(self, payload: bytes) -> List[str]:
+ try:
+ import pypdfium2 as pdfium # type: ignore
+ from pyzbar.pyzbar import decode as zbar_decode # type: ignore
+ except Exception:
+ return []
+
+ decoded_tokens: List[str] = []
+
+ try:
+ doc = pdfium.PdfDocument(io.BytesIO(payload))
+ except Exception:
+ return []
+
+ page_count = min(len(doc), 3)
+ for page_index in range(page_count):
+ page = None
+ try:
+ page = doc.get_page(page_index)
+ bitmap = page.render(scale=2.2)
+ pil_image = bitmap.to_pil()
+
+ for variant in (pil_image, pil_image.convert('L')):
+ for item in zbar_decode(variant):
+ raw = item.data.decode('utf-8', errors='ignore')
+ decoded_tokens.extend(self._extract_scan_tokens(raw))
+ except Exception:
+ continue
+ finally:
+ try:
+ if page is not None:
+ page.close()
+ except Exception:
+ pass
+
+ return list(dict.fromkeys(decoded_tokens))
+
+ def _resolve_scan_route_from_scanner_headers(self, email_data: Dict) -> Optional[Dict[str, Any]]:
+ """Infer case route from scanner-generated message-id timestamps.
+
+ Some scanner/MFP flows only include the barcode token inside the attached image/PDF,
+ while headers contain a timestamped local message-id such as
+ `<1.20260401075731@172.16.31.35>`. We map that timestamp to the nearest recent,
+ unconsumed document token.
+ """
+
+ header_values = [
+ email_data.get('in_reply_to'),
+ email_data.get('email_references'),
+ email_data.get('message_id'),
+ email_data.get('thread_key'),
+ ]
+
+ candidates: List[datetime] = []
+ ts_pattern = re.compile(r'(20\d{12})')
+
+ for raw in header_values:
+ if not raw:
+ continue
+ for match in ts_pattern.findall(str(raw)):
+ try:
+ candidates.append(datetime.strptime(match, "%Y%m%d%H%M%S"))
+ except ValueError:
+ continue
+
+ if not candidates:
+ return None
+
+ for ts in candidates:
+ try:
+ rows = execute_query(
+ """
+ SELECT token, sag_id, token_type, created_at
+ FROM sag_document_tokens
+ WHERE consumed_at IS NULL
+ AND created_at BETWEEN %s::timestamp - INTERVAL '90 minutes'
+ AND %s::timestamp + INTERVAL '20 minutes'
+ ORDER BY ABS(EXTRACT(EPOCH FROM (created_at - %s::timestamp))) ASC,
+ CASE WHEN token_type = 'work_order' THEN 0 ELSE 1 END,
+ id DESC
+ LIMIT 1
+ """,
+ (ts, ts, ts),
+ ) or []
+ if rows:
+ row = rows[0]
+ logger.info(
+ "🔎 Inferred scanner route via header timestamp %s -> SAG-%s (%s)",
+ ts.isoformat(),
+ row.get('sag_id'),
+ row.get('token'),
+ )
+ return {
+ 'token': row.get('token'),
+ 'sag_id': row.get('sag_id'),
+ 'token_type': row.get('token_type'),
+ }
+ except Exception as exc:
+ logger.warning("⚠️ Scanner header timestamp route lookup failed: %s", exc)
+
+ return None
+
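+ # Example (hypothetical): a scanner message-id "<1.20260401075731@172.16.31.35>"
+ # yields ts 2026-04-01 07:57:31, so the nearest unconsumed token created
+ # between 06:27:31 and 08:17:31 wins, with work_order tokens preferred on ties.
+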
+ def _copy_email_attachments_to_case(self, email_id: int, sag_id: int, source_token: Optional[str]) -> int:
+ attachments = execute_query(
+ """
+ SELECT filename, content_type, size_bytes, file_path, content_data
+ FROM email_attachments
+ WHERE email_id = %s
+ ORDER BY id ASC
+ """,
+ (email_id,),
+ ) or []
+ if not attachments:
+ return 0
+
+ upload_base = Path(settings.UPLOAD_DIR).resolve()
+ (upload_base / "sag_files").mkdir(parents=True, exist_ok=True)
+
+ has_source_email = table_has_column("sag_files", "source_email_id")
+ has_source_type = table_has_column("sag_files", "source_type")
+ has_source_token = table_has_column("sag_files", "source_token")
+
+ copied = 0
+ for attachment in attachments:
+ filename = Path(attachment.get('filename') or 'scanned-document.bin').name
+
+ if has_source_email:
+ existing = execute_query(
+ """
+ SELECT 1
+ FROM sag_files
+ WHERE sag_id = %s
+ AND source_email_id = %s
+ AND filename = %s
+ LIMIT 1
+ """,
+ (sag_id, email_id, filename),
+ ) or []
+ if existing:
+ continue
+
+ payload = attachment.get('content_data')
+ if payload is None and attachment.get('file_path'):
+ try:
+ payload = Path(attachment['file_path']).read_bytes()
+ except Exception as exc:
+ logger.warning("⚠️ Could not read attachment file (%s): %s", filename, exc)
+ continue
+
+ if payload is None:
+ continue
+
+ raw_payload = bytes(payload)
+ stored_name = f"sag_files/{uuid4().hex}_{filename}"
+ target_path = upload_base / stored_name
+
+ try:
+ target_path.write_bytes(raw_payload)
+ except Exception as exc:
+ logger.warning("⚠️ Could not write case file from attachment (%s): %s", filename, exc)
+ continue
+
+ columns = ["sag_id", "filename", "content_type", "size_bytes", "stored_name"]
+ values: List[Any] = [
+ sag_id,
+ filename,
+ attachment.get('content_type') or 'application/octet-stream',
+ attachment.get('size_bytes') or len(raw_payload),
+ stored_name,
+ ]
+ if has_source_email:
+ columns.append("source_email_id")
+ values.append(email_id)
+ if has_source_type:
+ columns.append("source_type")
+ values.append("scanner_email")
+ if has_source_token:
+ columns.append("source_token")
+ values.append(source_token)
+
+ execute_insert(
+ f"INSERT INTO sag_files ({', '.join(columns)}) VALUES ({', '.join(['%s'] * len(values))})",
+ tuple(values),
+ )
+ copied += 1
+
+ return copied
+
+ def _auto_attach_scanner_email(self, email_id: int, sag_id: int, token: Optional[str]) -> None:
+ try:
+ copied = self._copy_email_attachments_to_case(email_id, sag_id, token)
+ if copied > 0:
+ logger.info("📎 Auto-attached %s attachment(s) from email %s to SAG-%s", copied, email_id, sag_id)
+
+ if token:
+ execute_update(
+ """
+ UPDATE sag_document_tokens
+ SET consumed_at = COALESCE(consumed_at, CURRENT_TIMESTAMP),
+ consumed_email_id = COALESCE(consumed_email_id, %s)
+ WHERE token = %s
+ """,
+ (email_id, token),
+ )
+ except Exception as exc:
+ logger.warning("⚠️ Scanner auto-attach failed for email %s: %s", email_id, exc)
+
def _strip_quoted_email_text(self, body_text: str) -> str:
"""Return only the newest reply content (remove quoted history/signatures)."""
if not body_text:
@@ -491,6 +1027,41 @@ class EmailWorkflowService:
sag_id_from_thread_key = self._find_sag_id_from_thread_key(derived_thread_key)
sag_id_from_thread = self._find_sag_id_from_thread_headers(email_data)
sag_id_from_tag = self._extract_sag_id(email_data)
+ scan_token_route = self._resolve_scan_token_route(email_id, email_data)
+
+ if scan_token_route and scan_token_route.get('sag_id'):
+ matched_sag_id = int(scan_token_route['sag_id'])
+ logger.info("🔎 Scan token matched email %s to SAG-%s", email_id, matched_sag_id)
+ return await self._finalize_sag_routing(email_id, email_data, matched_sag_id, 'scan_token')
+
+ # Priority 0: BMCid is the most reliable signal — it's our own hidden
+ # marker embedded in every outgoing case email. When present, it
+ # provides the sag_id directly and the thread_suffix lets us adopt
+ # the correct thread_key for multi-thread SAGs.
+ bmc_id = self._extract_bmc_id(email_data)
+ if bmc_id:
+ bmc_sag_id = bmc_id['sag_id']
+ bmc_thread_suffix = bmc_id['thread_suffix']
+ # Look up the thread_key of the outgoing email whose BMCid matches
+ bmc_thread_key = self._find_thread_key_by_bmc_suffix(bmc_sag_id, bmc_thread_suffix)
+ if bmc_thread_key:
+ # Adopt the outgoing email's thread_key so reply groups correctly
+ self._update_email_thread_key(email_id, bmc_thread_key)
+ logger.info(
+ "🔖 BMCid s%st%s matched → SAG-%s (thread_key=%s)",
+ bmc_sag_id, bmc_thread_suffix, bmc_sag_id, bmc_thread_key,
+ )
+ sag_id = bmc_sag_id
+ routing_source = 'bmc_id'
+ # Skip the remaining priority chain — BMCid is authoritative
+ return await self._finalize_sag_routing(email_id, email_data, sag_id, routing_source)
+
+ # Fallback: try the explicit provider thread key (e.g. Graph conversationId)
+ # separately when the derived key (References[0]) differs from it.
+ provider_thread_key = self._normalize_message_id(email_data.get('thread_key'))
+ sag_id_from_provider = None
+ if provider_thread_key and provider_thread_key != derived_thread_key:
+ sag_id_from_provider = self._find_sag_id_from_thread_key(provider_thread_key)
routing_source = None
sag_id = None
@@ -513,6 +1084,11 @@ class EmailWorkflowService:
routing_source = 'thread_headers'
logger.info("🔗 Matched email %s to SAG-%s via thread headers", email_id, sag_id)
+ if sag_id_from_provider and not sag_id:
+ sag_id = sag_id_from_provider
+ routing_source = 'provider_thread_key'
+ logger.info("🧵 Matched email %s to SAG-%s via provider thread key (conversationId)", email_id, sag_id)
+
if sag_id_from_tag:
if sag_id and sag_id != sag_id_from_tag:
logger.warning(
@@ -528,40 +1104,7 @@ class EmailWorkflowService:
# 1) Existing SAG via subject/headers
if sag_id:
- case_rows = execute_query(
- "SELECT id, customer_id, titel FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
- (sag_id,)
- )
-
- if not case_rows:
- logger.warning("⚠️ Email %s referenced SAG-%s but case was not found", email_id, sag_id)
- return {'status': 'skipped', 'action': 'sag_id_not_found', 'sag_id': sag_id}
-
- case = case_rows[0]
- self._add_helpdesk_comment(sag_id, email_data)
- self._link_email_to_sag(sag_id, email_id)
-
- execute_update(
- """
- UPDATE email_messages
- SET linked_case_id = %s,
- customer_id = COALESCE(customer_id, %s),
- status = 'processed',
- folder = 'Processed',
- processed_at = CURRENT_TIMESTAMP,
- auto_processed = true
- WHERE id = %s
- """,
- (sag_id, case.get('customer_id'), email_id)
- )
-
- return {
- 'status': 'completed',
- 'action': 'updated_existing_sag',
- 'sag_id': sag_id,
- 'customer_id': case.get('customer_id'),
- 'routing_source': routing_source
- }
+ return await self._finalize_sag_routing(email_id, email_data, sag_id, routing_source)
# 2) No SAG id -> create only if sender domain belongs to known customer
sender_domain = self._extract_sender_domain(email_data)
@@ -589,6 +1132,7 @@ class EmailWorkflowService:
(case['id'], customer['id'], email_id)
)
+ self._auto_attach_scanner_email(email_id, case['id'], None)
logger.info("✅ Created SAG-%s from email %s for customer %s", case['id'], email_id, customer['id'])
return {
'status': 'completed',
diff --git a/app/services/reminder_notification_service.py b/app/services/reminder_notification_service.py
index f70d267..ee1e98d 100644
--- a/app/services/reminder_notification_service.py
+++ b/app/services/reminder_notification_service.py
@@ -102,7 +102,7 @@ class ReminderNotificationService:
)
# Get user email
- user_query = "SELECT email FROM users WHERE id = %s"
+ user_query = "SELECT email FROM users WHERE user_id = %s"
user = execute_query(user_query, (user_id,))
user_email = user[0]['email'] if user else None
diff --git a/app/services/vaultwarden_service.py b/app/services/vaultwarden_service.py
new file mode 100644
index 0000000..f2b9595
--- /dev/null
+++ b/app/services/vaultwarden_service.py
@@ -0,0 +1,185 @@
+import logging
+from typing import Any, Dict, List, Optional
+from urllib.parse import quote
+
+import httpx
+
+from app.core.config import settings
+
+logger = logging.getLogger(__name__)
+
+
+class VaultwardenServiceError(Exception):
+ pass
+
+
+def _is_configured() -> bool:
+ return bool((settings.VAULTWARDEN_BASE_URL or "").strip()) and bool((settings.VAULTWARDEN_API_TOKEN or "").strip())
+
+
+def _base_url() -> str:
+ return (settings.VAULTWARDEN_BASE_URL or "").strip().rstrip("/")
+
+
+def _headers() -> Dict[str, str]:
+ token = (settings.VAULTWARDEN_API_TOKEN or "").strip()
+ return {
+ "Authorization": f"Bearer {token}",
+ "X-API-Token": token,
+ "Accept": "application/json",
+ }
+
+
+def _extract_from_cipher(payload: dict) -> Optional[dict]:
+ if not isinstance(payload, dict):
+ return None
+
+ login = payload.get("login") or payload.get("Login") or {}
+ if not isinstance(login, dict):
+ login = {}
+
+ username = login.get("username") or login.get("Username")
+ password = login.get("password") or login.get("Password")
+ totp = login.get("totp") or login.get("Totp")
+
+ uris = login.get("uris") or login.get("Uris") or []
+ url = None
+ if isinstance(uris, list) and uris:
+ first = uris[0] or {}
+ if isinstance(first, dict):
+ url = first.get("uri") or first.get("Uri")
+
+ if not any([username, password, totp, url, payload.get("notes") or payload.get("Notes")]):
+ return None
+
+ return {
+ "item_id": str(payload.get("id") or payload.get("Id") or "") or None,
+ "item_name": payload.get("name") or payload.get("Name"),
+ "username": username,
+ "password": password,
+ "totp": totp,
+ "notes": payload.get("notes") or payload.get("Notes"),
+ "url": url,
+ }
+
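+# Example (illustrative): both {"login": {"username": "ops", "password": "pw"}}
+# and the PascalCase variant {"Login": {"Username": "ops", "Password": "pw"}}
+# reduce to the same flat credential dict -- the dual lookups tolerate both
+# casings seen in Vaultwarden/Bitwarden-style payloads.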
+
+def _extract_from_custom_payload(payload: Any) -> Optional[dict]:
+ if isinstance(payload, dict):
+ direct = {
+ "item_id": payload.get("item_id") or payload.get("id"),
+ "item_name": payload.get("item_name") or payload.get("name"),
+ "username": payload.get("username"),
+ "password": payload.get("password"),
+ "totp": payload.get("totp") or payload.get("otp"),
+ "notes": payload.get("notes"),
+ "url": payload.get("url"),
+ }
+ if any(direct.values()):
+ return direct
+
+ nested = payload.get("data")
+ if isinstance(nested, dict):
+ nested_res = _extract_from_custom_payload(nested)
+ if nested_res:
+ return nested_res
+
+ cipher_res = _extract_from_cipher(payload)
+ if cipher_res:
+ return cipher_res
+
+ if isinstance(payload, list):
+ for item in payload:
+ extracted = _extract_from_custom_payload(item)
+ if extracted:
+ return extracted
+
+ return None
+
+
+async def _get_json(client: httpx.AsyncClient, url: str) -> Any:
+ response = await client.get(url)
+ if response.status_code == 404:
+ return None
+ response.raise_for_status()
+ if not response.content:
+ return None
+ return response.json()
+
+
+async def resolve_vault_credentials(
+ *,
+ preferred_item_id: Optional[str],
+ fallback_item_ids: List[str],
+ search_hint: Optional[str],
+) -> dict:
+ if not _is_configured():
+ return {
+ "status": "unavailable",
+ "configured": False,
+ "message": "Vaultwarden er ikke konfigureret.",
+ "checked_item_ids": [],
+ "credential": None,
+ }
+
+ checked_item_ids: List[str] = []
+ item_id_candidates = [preferred_item_id] + list(fallback_item_ids)
+ deduped_candidates: List[str] = []
+ seen = set()
+ for item_id in item_id_candidates:
+ candidate = (item_id or "").strip()
+ if not candidate or candidate in seen:
+ continue
+ seen.add(candidate)
+ deduped_candidates.append(candidate)
+
+ timeout = httpx.Timeout(connect=6.0, read=10.0, write=10.0, pool=6.0)
+ async with httpx.AsyncClient(timeout=timeout, headers=_headers(), follow_redirects=True) as client:
+ base = _base_url()
+
+ for item_id in deduped_candidates:
+ checked_item_ids.append(item_id)
+ try:
+ payload = await _get_json(client, f"{base}/api/ciphers/{quote(item_id)}")
+ extracted = _extract_from_custom_payload(payload)
+ if extracted:
+ return {
+ "status": "ok",
+ "configured": True,
+ "message": "Vault-opslag gennemfoert.",
+ "checked_item_ids": checked_item_ids,
+ "credential": extracted,
+ }
+ except httpx.HTTPError as exc:
+ logger.warning("Vaultwarden item lookup failed for id=%s: %s", item_id, exc)
+
+ hint = (search_hint or "").strip()
+ if hint:
+ encoded_hint = quote(hint)
+ search_endpoints = [
+ f"{base}/api/links/credentials?search={encoded_hint}",
+ f"{base}/api/ciphers?search={encoded_hint}",
+ f"{base}/api/ciphers?url={encoded_hint}",
+ ]
+
+ for endpoint in search_endpoints:
+ try:
+ payload = await _get_json(client, endpoint)
+ extracted = _extract_from_custom_payload(payload)
+ if extracted:
+ return {
+ "status": "ok",
+ "configured": True,
+ "message": "Vault-opslag gennemfoert.",
+ "checked_item_ids": checked_item_ids,
+ "credential": extracted,
+ }
+ except httpx.HTTPError as exc:
+ logger.info("Vaultwarden search endpoint failed (%s): %s", endpoint, exc)
+
+ return {
+ "status": "not_found",
+ "configured": True,
+ "message": "Ingen vault credentials fundet for linket.",
+ "checked_item_ids": checked_item_ids,
+ "credential": None,
+ }
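+
+
+# Usage sketch (hypothetical values):
+#   result = await resolve_vault_credentials(
+#       preferred_item_id="3fa85f64-0000-0000-0000-000000000000",
+#       fallback_item_ids=[],
+#       search_hint="uisp.bmcnetworks.dk",
+#   )
+#   if result["status"] == "ok":
+#       username = result["credential"]["username"]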
diff --git a/app/settings/backend/router.py b/app/settings/backend/router.py
index bc442e9..4fee5a9 100644
--- a/app/settings/backend/router.py
+++ b/app/settings/backend/router.py
@@ -242,6 +242,26 @@ async def update_setting(key: str, setting: SettingUpdate):
(key, setting.value, category, description, value_type, is_public),
)
+ _label_printer_keys = {
+ "label_printer_enabled": ("integrations", "Enable direct label printing", "boolean", True),
+ "label_printer_model": ("integrations", "Brother printer model for direct labels", "string", True),
+ "label_printer_host": ("integrations", "Brother printer host/IP", "string", True),
+ "label_printer_port": ("integrations", "Brother printer TCP port", "integer", True),
+ "label_printer_label_size": ("integrations", "Brother label size code", "string", True),
+ }
+ if not result and key in _label_printer_keys:
+ category, description, value_type, is_public = _label_printer_keys[key]
+ result = execute_query(
+ """
+ INSERT INTO settings (key, value, category, description, value_type, is_public)
+ VALUES (%s, %s, %s, %s, %s, %s)
+ ON CONFLICT (key)
+ DO UPDATE SET value = EXCLUDED.value, updated_at = CURRENT_TIMESTAMP
+ RETURNING *
+ """,
+ (key, setting.value, category, description, value_type, is_public),
+ )
+
# Mission camera settings may not exist on older hubs before migration.
if not result and key in {"mission_camera_enabled", "mission_camera_name", "mission_camera_feed_url", "mission_camera_spotlight_seconds", "mission_access_pin"}:
defaults = {
diff --git a/app/settings/frontend/settings.html b/app/settings/frontend/settings.html
index 6957499..0ec9fef 100644
--- a/app/settings/frontend/settings.html
+++ b/app/settings/frontend/settings.html
@@ -259,6 +259,48 @@
+
+<div class="card mb-4">
+  <div class="card-header">
+    <h5 class="mb-0"><i class="bi bi-printer"></i> Brother Label Printer (Direkte print)</h5>
+  </div>
+  <div class="card-body">
+    <div class="form-check form-switch mb-3">
+      <input class="form-check-input" type="checkbox" id="labelPrinterEnabled">
+      <label class="form-check-label" for="labelPrinterEnabled">Aktiver direkte label-print</label>
+    </div>
+    <div class="row g-3">
+      <div class="col-md-3">
+        <label class="form-label" for="labelPrinterModel">Model</label>
+        <input type="text" class="form-control" id="labelPrinterModel" placeholder="QL-710W">
+      </div>
+      <div class="col-md-3">
+        <label class="form-label" for="labelPrinterHost">Host/IP</label>
+        <input type="text" class="form-control" id="labelPrinterHost" placeholder="172.16.31.32">
+      </div>
+      <div class="col-md-3">
+        <label class="form-label" for="labelPrinterPort">Port</label>
+        <input type="text" class="form-control" id="labelPrinterPort" placeholder="9100">
+      </div>
+      <div class="col-md-3">
+        <label class="form-label" for="labelPrinterSize">Label-størrelse</label>
+        <input type="text" class="form-control" id="labelPrinterSize" placeholder="62">
+      </div>
+    </div>
+    <p class="small text-muted mt-2">Tip: QL-710W bruger typisk port 9100. Label-størrelse kan fx være 62.</p>
+    <button type="button" class="btn btn-primary" onclick="saveLabelPrinterSettings()">Gem</button>
+    <span id="labelPrinterSaveStatus" class="small text-muted ms-2"></span>
+  </div>
+</div>
+
@@ -2046,6 +2088,7 @@ async function loadSettings() {
await loadTagsManagement();
await loadNextcloudInstances();
await loadAnydeskSettings();
+ await loadLabelPrinterSettings();
} catch (error) {
console.error('Error loading settings:', error);
}
@@ -2162,6 +2205,83 @@ async function saveAnydeskSettings() {
}
}
+async function loadLabelPrinterSettings() {
+ const keys = [
+ 'label_printer_enabled',
+ 'label_printer_model',
+ 'label_printer_host',
+ 'label_printer_port',
+ 'label_printer_label_size'
+ ];
+ try {
+ const results = await Promise.allSettled(
+ keys.map(k => fetch(`/api/v1/settings/${k}`, { credentials: 'include' }).then(r => r.ok ? r.json() : null))
+ );
+ const vals = {};
+ results.forEach((r, i) => { if (r.status === 'fulfilled' && r.value) vals[keys[i]] = r.value.value; });
+
+ document.getElementById('labelPrinterEnabled').checked = vals.label_printer_enabled === 'true';
+ document.getElementById('labelPrinterModel').value = vals.label_printer_model || 'QL-710W';
+ document.getElementById('labelPrinterHost').value = vals.label_printer_host || '172.16.31.32';
+ document.getElementById('labelPrinterPort').value = vals.label_printer_port || '9100';
+ document.getElementById('labelPrinterSize').value = vals.label_printer_label_size || '62';
+ } catch (e) {
+ console.warn('Label printer settings load failed:', e);
+ }
+}
+
+async function saveLabelPrinterSettings() {
+ const enabled = document.getElementById('labelPrinterEnabled').checked;
+ const model = (document.getElementById('labelPrinterModel').value || '').trim() || 'QL-710W';
+ const host = (document.getElementById('labelPrinterHost').value || '').trim();
+ const port = (document.getElementById('labelPrinterPort').value || '').trim() || '9100';
+ const size = (document.getElementById('labelPrinterSize').value || '').trim() || '62';
+ const statusEl = document.getElementById('labelPrinterSaveStatus');
+
+ if (enabled && !host) {
+ showNotification('Angiv printer IP/host', 'error');
+ return;
+ }
+
+ if (!/^\d{1,5}$/.test(port) || Number(port) < 1 || Number(port) > 65535) {
+ showNotification('Ugyldig port', 'error');
+ return;
+ }
+
+ statusEl.textContent = 'Gemmer...';
+ statusEl.className = 'small text-muted';
+
+ const putSettingStrict = async (key, value) => {
+ const response = await fetch(`/api/v1/settings/${key}`, {
+ method: 'PUT',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ value: String(value) })
+ });
+ if (!response.ok) {
+ throw new Error(await getErrorMessage(response, `Kunne ikke gemme ${key}`));
+ }
+ };
+
+ try {
+ await Promise.all([
+ putSettingStrict('label_printer_enabled', enabled ? 'true' : 'false'),
+ putSettingStrict('label_printer_model', model),
+ putSettingStrict('label_printer_host', host),
+ putSettingStrict('label_printer_port', String(port)),
+ putSettingStrict('label_printer_label_size', size),
+ ]);
+
+ statusEl.textContent = '✅ Gemt';
+ statusEl.className = 'small text-success';
+ setTimeout(() => { statusEl.textContent = ''; }, 3000);
+ showNotification('Label printer indstillinger gemt', 'success');
+ } catch (error) {
+ statusEl.textContent = '❌ Kunne ikke gemme';
+ statusEl.className = 'small text-danger';
+ showNotification('Kunne ikke gemme label printer indstillinger', 'error');
+ }
+}
+
async function loadNextcloudInstances() {
try {
const response = await fetch('/api/v1/nextcloud/instances');
diff --git a/app/shared/frontend/base.html b/app/shared/frontend/base.html
index 662f781..26002f7 100644
--- a/app/shared/frontend/base.html
+++ b/app/shared/frontend/base.html
@@ -220,6 +220,7 @@
+
+ <!-- Email results -->
+ <div id="emailResults" style="display: none;">
+   <h6 class="text-muted"><i class="bi bi-envelope"></i> Email</h6>
+   <div class="result-items"></div>
+   <a href="/email" class="small">Åbn Email</a>
+ </div>
+
+
+
@@ -560,7 +581,51 @@
document.addEventListener('DOMContentLoaded', () => {
const searchModal = new bootstrap.Modal(document.getElementById('globalSearchModal'));
+ const searchBubbleBtn = document.getElementById('globalSearchBtn');
+ const remindersBubbleBtn = document.getElementById('globalRemindersBtn');
+ const profileModalEl = document.getElementById('profileModal');
+ const profileModalInstance = profileModalEl ? new bootstrap.Modal(profileModalEl) : null;
const globalSearchInput = document.getElementById('globalSearchInput');
+
+ function openGlobalSearchModal() {
+ searchModal.show();
+ setTimeout(() => {
+ if (globalSearchInput) {
+ globalSearchInput.focus();
+ }
+ loadLiveStats();
+ loadRecentActivity();
+ }, 300);
+ }
+
+ function openRemindersModalTab() {
+ if (!profileModalInstance || !profileModalEl) {
+ return;
+ }
+ profileModalInstance.show();
+ setTimeout(() => {
+ const remindersTabBtn = document.getElementById('profile-reminders-tab');
+ if (remindersTabBtn) {
+ bootstrap.Tab.getOrCreateInstance(remindersTabBtn).show();
+ }
+ loadReminderPreferences();
+ loadProfileReminders();
+ }, 220);
+ }
+
+ if (searchBubbleBtn) {
+ searchBubbleBtn.addEventListener('click', (e) => {
+ e.preventDefault();
+ openGlobalSearchModal();
+ });
+ }
+
+ if (remindersBubbleBtn) {
+ remindersBubbleBtn.addEventListener('click', (e) => {
+ e.preventDefault();
+ openRemindersModalTab();
+ });
+ }
// Search input listener with debounce
let searchTimeout;
@@ -583,6 +648,9 @@
navigateResults(-1);
} else if (e.key === 'Enter') {
e.preventDefault();
+ if (navigateToSagFromScan(e.target.value)) {
+ return;
+ }
selectCurrentResult();
}
});
@@ -593,15 +661,7 @@
// Cmd+K / Ctrl+K for global search
if ((e.metaKey || e.ctrlKey) && e.key === 'k') {
e.preventDefault();
- console.log('Cmd+K pressed - opening search modal'); // Debug
- searchModal.show();
- setTimeout(() => {
- if (globalSearchInput) {
- globalSearchInput.focus();
- }
- loadLiveStats();
- loadRecentActivity();
- }, 300);
+ openGlobalSearchModal();
}
// '+' key for QuickCreate (not in input fields)
@@ -651,6 +711,7 @@
document.getElementById('workflowActions').style.display = 'none';
document.getElementById('crmResults').style.display = 'none';
document.getElementById('supportResults').style.display = 'none';
+ if (document.getElementById('emailResults')) document.getElementById('emailResults').style.display = 'none';
if (document.getElementById('salesResults')) document.getElementById('salesResults').style.display = 'none';
if (document.getElementById('financeResults')) document.getElementById('financeResults').style.display = 'none';
});
@@ -811,12 +872,41 @@
}
}
+ function extractSagIdFromScanToken(value) {
+ const cleaned = String(value || '').toUpperCase().replace(/\s+/g, ' ').trim();
+ if (!cleaned) return null;
+
+ // Scanner tokens from work order and hardware labels
+ const workOrderMatch = cleaned.match(/\bBMCSCAN-WO-S(\d+)\b/);
+ if (workOrderMatch) return parseInt(workOrderMatch[1], 10);
+
+ const hardwareMatch = cleaned.match(/\bBMCSCAN-HW-(\d+)\b/);
+ if (hardwareMatch) return parseInt(hardwareMatch[1], 10);
+
+ return null;
+ }
+
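+ // Examples (illustrative tokens): "BMCSCAN-WO-S53-7F2A" -> 53,
+ // "bmcscan-hw-12" -> 12 (input is upper-cased first); plain search text
+ // like "server room" -> null, so normal search proceeds.
+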
+ function navigateToSagFromScan(value) {
+ const sagId = extractSagIdFromScanToken(value);
+ if (!sagId || Number.isNaN(sagId)) {
+ return false;
+ }
+
+ window.location.href = `/sag/${sagId}`;
+ return true;
+ }
+
// Global search function
async function performGlobalSearch(query) {
+ if (navigateToSagFromScan(query)) {
+ return;
+ }
+
if (!query || query.trim().length < 2) {
document.getElementById('emptyState').style.display = 'block';
document.getElementById('crmResults').style.display = 'none';
document.getElementById('supportResults').style.display = 'none';
+ if (document.getElementById('emailResults')) document.getElementById('emailResults').style.display = 'none';
if (document.getElementById('salesResults')) document.getElementById('salesResults').style.display = 'none';
if (document.getElementById('financeResults')) document.getElementById('financeResults').style.display = 'none';
return;
@@ -887,6 +977,51 @@
} catch (e) {
console.log('Contacts search not available');
}
+
+ // Search emails
+ try {
+ const emailsResponse = await fetch(`/api/v1/emails?q=${encodeURIComponent(query)}&limit=5`);
+ const emailsData = await emailsResponse.json();
+
+ if (Array.isArray(emailsData) && emailsData.length > 0) {
+ hasResults = true;
+ const emailResults = document.getElementById('emailResults');
+ if (emailResults) {
+ emailResults.style.display = 'block';
+ const emailList = emailResults.querySelector('.result-items');
+ if (emailList) {
+ emailList.innerHTML = emailsData.map(mail => {
+ const received = mail.received_date
+ ? new Date(mail.received_date).toLocaleString('da-DK')
+ : '-';
+ const sender = mail.sender_name || mail.sender_email || '-';
+ const isUnread = !Boolean(mail.is_read);
+ return `
+   <div class="result-item ${isUnread ? 'fw-bold' : ''}" onclick="window.location.href='/email'">
+     <div class="text-truncate">${mail.subject || '(ingen emne)'}</div>
+     <small class="text-muted">${sender} · ${received}</small>
+   </div>
+ `;
+ }).join('');
+ }
+ }
+ } else {
+ const emailResults = document.getElementById('emailResults');
+ if (emailResults) emailResults.style.display = 'none';
+ }
+ } catch (e) {
+ console.log('Email search not available');
+ const emailResults = document.getElementById('emailResults');
+ if (emailResults) emailResults.style.display = 'none';
+ }
// Search hardware
try {
diff --git a/check_threads.sql b/check_threads.sql
new file mode 100644
index 0000000..de6184b
--- /dev/null
+++ b/check_threads.sql
@@ -0,0 +1,20 @@
+-- Check thread fragmentation per SAG
+WITH resolved AS (
+ SELECT
+ se.sag_id,
+ em.id,
+ em.thread_key,
+ em.folder,
+ COALESCE(
+ NULLIF(REGEXP_REPLACE(TRIM(COALESCE(em.thread_key, '')), '[<>\s]', '', 'g'), ''),
+ CONCAT('email-', em.id::text)
+ ) AS resolved_key
+ FROM sag_emails se
+ JOIN email_messages em ON em.id = se.email_id
+ WHERE em.deleted_at IS NULL
+)
+SELECT sag_id, COUNT(DISTINCT resolved_key) as thread_count, COUNT(*) as email_count
+FROM resolved
+GROUP BY sag_id
+HAVING COUNT(DISTINCT resolved_key) > 1
+ORDER BY thread_count DESC;
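+
+-- Reading the output (hypothetical row): (sag_id=53, thread_count=3,
+-- email_count=7) means SAG-53's 7 linked emails resolve to 3 distinct
+-- thread keys, i.e. the case renders as 3 separate visual threads.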
diff --git a/docker-compose.yml b/docker-compose.yml
index 37b38cb..fe76184 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -50,7 +50,7 @@ services:
environment:
# Override database URL to point to postgres service
- DATABASE_URL=postgresql://${POSTGRES_USER:-bmc_hub}:${POSTGRES_PASSWORD:-bmc_hub}@postgres:5432/${POSTGRES_DB:-bmc_hub}
- - ENABLE_RELOAD=false
+ - ENABLE_RELOAD=${ENABLE_RELOAD:-true}
- APIGW_TOKEN=${APIGW_TOKEN}
- APIGATEWAY_URL=${APIGATEWAY_URL}
- APIGW_TIMEOUT_SECONDS=${APIGW_TIMEOUT_SECONDS}
diff --git a/migrations/156_backfill_email_thread_keys.sql b/migrations/156_backfill_email_thread_keys.sql
new file mode 100644
index 0000000..48e0e3e
--- /dev/null
+++ b/migrations/156_backfill_email_thread_keys.sql
@@ -0,0 +1,48 @@
+-- Migration 156: Backfill email thread_keys from parent emails
+-- Ensures replies inherit the same thread_key as their parent so they group together visually.
+
+-- Step 1: For emails that have in_reply_to or email_references pointing to an existing
+-- email with a thread_key, adopt the parent's thread_key.
+UPDATE email_messages child
+SET thread_key = parent.thread_key,
+ updated_at = CURRENT_TIMESTAMP
+FROM email_messages parent
+WHERE child.deleted_at IS NULL
+ AND parent.deleted_at IS NULL
+ AND parent.thread_key IS NOT NULL
+ AND TRIM(parent.thread_key) != ''
+ AND (
+ -- Match via in_reply_to -> parent message_id
+ (
+ child.in_reply_to IS NOT NULL
+ AND TRIM(child.in_reply_to) != ''
+ AND LOWER(REGEXP_REPLACE(parent.message_id, '[<>\s]', '', 'g'))
+ = LOWER(REGEXP_REPLACE(
+ (REGEXP_SPLIT_TO_ARRAY(TRIM(child.in_reply_to), E'[\\s,]+'))[1],
+ '[<>\s]', '', 'g'
+ ))
+ )
+ OR
+ -- Match via first reference -> parent message_id
+ (
+ child.email_references IS NOT NULL
+ AND TRIM(child.email_references) != ''
+ AND LOWER(REGEXP_REPLACE(parent.message_id, '[<>\s]', '', 'g'))
+ = LOWER(REGEXP_REPLACE(
+ (REGEXP_SPLIT_TO_ARRAY(TRIM(child.email_references), E'[\\s,]+'))[1],
+ '[<>\s]', '', 'g'
+ ))
+ )
+ )
+ -- Only update if the thread_key would actually change
+ AND (
+ child.thread_key IS NULL
+ OR TRIM(child.thread_key) = ''
+ OR LOWER(REGEXP_REPLACE(child.thread_key, '[<>\s]', '', 'g'))
+ != LOWER(REGEXP_REPLACE(parent.thread_key, '[<>\s]', '', 'g'))
+ );
+
+-- Step 2: REMOVED - was incorrectly forcing all emails in a SAG to share one thread_key.
+-- Each SAG can have multiple independent email threads (different recipients/subjects).
+-- Thread grouping is based on actual RFC 5322 threading headers, not SAG membership.
+-- See migration 157 for the fix.
diff --git a/migrations/157_fix_thread_keys_multi_thread.sql b/migrations/157_fix_thread_keys_multi_thread.sql
new file mode 100644
index 0000000..f6bb8ff
--- /dev/null
+++ b/migrations/157_fix_thread_keys_multi_thread.sql
@@ -0,0 +1,57 @@
+-- Migration 157: Fix thread_keys - restore correct per-conversation grouping
+-- Migration 156 Step 2 incorrectly forced ALL emails in a SAG to share one thread_key.
+-- This migration restores the correct thread_key based on actual email conversation headers.
+
+-- Step 1: Restore thread_key for emails that have a Graph conversationId stored
+-- (these were overwritten by the dominant-thread backfill).
+-- The conversationId is the most reliable conversation identifier from Exchange/Graph.
+
+-- Step 2: Re-derive thread_keys from actual email headers.
+-- Priority: conversationId (from the provider) > parent's thread_key > References[0] > In-Reply-To > message_id
+-- We re-derive for ALL emails to undo the forced unification.
+
+-- First, recalculate based on actual References/In-Reply-To parent chain.
+-- For emails that are replies (have in_reply_to or email_references), adopt the
+-- thread_key of the ACTUAL parent email (matched by message_id), not just any email in the SAG.
+UPDATE email_messages child
+SET thread_key = parent.thread_key,
+ updated_at = CURRENT_TIMESTAMP
+FROM email_messages parent
+WHERE child.deleted_at IS NULL
+ AND parent.deleted_at IS NULL
+ AND parent.thread_key IS NOT NULL
+ AND TRIM(parent.thread_key) != ''
+ AND (
+ -- Match via in_reply_to -> parent message_id
+ (
+ child.in_reply_to IS NOT NULL
+ AND TRIM(child.in_reply_to) != ''
+ AND LOWER(REGEXP_REPLACE(parent.message_id, '[<>\s]', '', 'g'))
+ = LOWER(REGEXP_REPLACE(
+ (REGEXP_SPLIT_TO_ARRAY(TRIM(child.in_reply_to), E'[\\s,]+'))[1],
+ '[<>\s]', '', 'g'
+ ))
+ )
+ OR
+ -- Match via first reference -> parent message_id
+ (
+ child.email_references IS NOT NULL
+ AND TRIM(child.email_references) != ''
+ AND LOWER(REGEXP_REPLACE(parent.message_id, '[<>\s]', '', 'g'))
+ = LOWER(REGEXP_REPLACE(
+ (REGEXP_SPLIT_TO_ARRAY(TRIM(child.email_references), E'[\\s,]+'))[1],
+ '[<>\s]', '', 'g'
+ ))
+ )
+ );
+
+-- For emails that are conversation starters (no in_reply_to, no references),
+-- reset thread_key to their own message_id so they start their own thread.
+UPDATE email_messages
+SET thread_key = LOWER(REGEXP_REPLACE(COALESCE(message_id, ''), '[<>\s]', '', 'g')),
+ updated_at = CURRENT_TIMESTAMP
+WHERE deleted_at IS NULL
+ AND (in_reply_to IS NULL OR TRIM(in_reply_to) = '')
+ AND (email_references IS NULL OR TRIM(email_references) = '')
+ AND message_id IS NOT NULL
+ AND TRIM(message_id) != '';
diff --git a/migrations/158_sag_work_orders_and_scan_tokens.sql b/migrations/158_sag_work_orders_and_scan_tokens.sql
new file mode 100644
index 0000000..5001fb7
--- /dev/null
+++ b/migrations/158_sag_work_orders_and_scan_tokens.sql
@@ -0,0 +1,32 @@
+-- Migration 158: SAG work-order scan tokens and file provenance
+-- Enables token-based auto-linking of scanned documents to cases.
+
+CREATE TABLE IF NOT EXISTS sag_document_tokens (
+ id SERIAL PRIMARY KEY,
+ sag_id INTEGER NOT NULL REFERENCES sag_sager(id) ON DELETE CASCADE,
+ token VARCHAR(120) NOT NULL UNIQUE,
+ token_type VARCHAR(40) NOT NULL,
+ hardware_id INTEGER REFERENCES hardware_assets(id) ON DELETE SET NULL,
+ created_by_user_id INTEGER REFERENCES users(user_id) ON DELETE SET NULL,
+ expires_at TIMESTAMP,
+ consumed_at TIMESTAMP,
+ consumed_email_id INTEGER REFERENCES email_messages(id) ON DELETE SET NULL,
+ created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT sag_document_tokens_type_check CHECK (token_type IN ('work_order', 'hardware_label'))
+);
+
+CREATE INDEX IF NOT EXISTS idx_sag_document_tokens_sag_id ON sag_document_tokens(sag_id);
+CREATE INDEX IF NOT EXISTS idx_sag_document_tokens_token_type ON sag_document_tokens(token_type);
+CREATE INDEX IF NOT EXISTS idx_sag_document_tokens_consumed ON sag_document_tokens(consumed_at);
+
+ALTER TABLE sag_files
+ ADD COLUMN IF NOT EXISTS source_email_id INTEGER REFERENCES email_messages(id) ON DELETE SET NULL,
+ ADD COLUMN IF NOT EXISTS source_type VARCHAR(40),
+ ADD COLUMN IF NOT EXISTS source_token VARCHAR(120);
+
+UPDATE sag_files
+SET source_type = 'upload'
+WHERE source_type IS NULL;
+
+CREATE INDEX IF NOT EXISTS idx_sag_files_source_email_id ON sag_files(source_email_id);
+CREATE INDEX IF NOT EXISTS idx_sag_files_source_token ON sag_files(source_token);
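+
+-- Illustrative lifecycle (not executed by this migration; ids and token value
+-- are made up): a token is issued when a work order is printed, then marked
+-- consumed when the matching scan arrives and the file is linked to the SAG.
+--   INSERT INTO sag_document_tokens (sag_id, token, token_type)
+--   VALUES (42, 'SAG-42-WO-7F3A', 'work_order');
+--   UPDATE sag_document_tokens
+--   SET consumed_at = CURRENT_TIMESTAMP, consumed_email_id = 1001
+--   WHERE token = 'SAG-42-WO-7F3A' AND consumed_at IS NULL;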
diff --git a/requirements.txt b/requirements.txt
index 363702c..67f977f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -20,3 +20,6 @@ APScheduler==3.10.4
pdfplumber==0.11.4
av==13.1.0
Pillow==11.0.0
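+# Scan/label tooling: brother_ql (Brother QL label printers), pyzbar (barcode
+# decoding; requires the zbar system library), pypdfium2 (PDF rasterization)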
+brother_ql==0.9.4
+pyzbar==0.1.9
+pypdfium2==4.30.0
diff --git a/tmp/links_import.sql b/tmp/links_import.sql
new file mode 100644
index 0000000..3cfbb9b
--- /dev/null
+++ b/tmp/links_import.sql
@@ -0,0 +1,134 @@
+BEGIN;
+
+CREATE TEMP TABLE tmp_links_import (
+ name TEXT NOT NULL,
+ url TEXT NOT NULL,
+ category_name TEXT NOT NULL
+);
+
+INSERT INTO tmp_links_import (name, url, category_name) VALUES
+('Guacamole','https://rdp-dash.bmcnetworks.dk/guacamole/#/','Interne systemer (Admin)'),
+('MailChimp','https://login.mailchimp.com','Interne systemer (Admin)'),
+('Plesk','https://isp.bmcnetworks.dk:8443/login_up.php','Interne systemer (Admin)'),
+('Speedtest (admin)','http://speedtest.bmcnetworks.dk/results/stats.php','Interne systemer (Admin)'),
+('Uisp','https://uisp.bmcnetworks.dk','Interne systemer (Admin)'),
+('s3 Admin','http://172.16.30.13:9001','Interne systemer (Admin)'),
+('Mailarkiv admin','https://arkiv.bmcmailarkiv.dk/','Interne systemer (Admin)'),
+('Ducky Mail admin','https://mailadmin.bmcdenmark.com','BMC Mail server'),
+('Webmail','https://mail.bmcdenmark.com','BMC Mail server'),
+('BMC Anydesk','https://get.anydesk.com/0RRDdvHP/BMCsupport.exe','Public links'),
+('Ninite Std software','https://ninite.com/.net4.8-.net5-.net6-.net7-.netx5-.netx6-.netx7-adoptjava8-adoptjavax11-adoptjavax17-adoptjavax8-firefox-vlc/ninite.exe','Public links'),
+('Norva24 Nextcloud','https://norva24tv.acdu.dk/login','Norva24'),
+('SFTP Nextcloud liste','https://bmcdenmark.sharepoint.com','Norva24'),
+('Maskinsikkerhed Nextcloud','https://ms.docs.bmcnetworks.dk/login?redirect_url=/apps/dashboard/','Maskinsikkerhed'),
+('Android Kiosk','https://downloads.pronestor.com','PFA'),
+('Anydesk PFA','https://my.anydesk.com','PFA'),
+('Clickshare barco','http://xms.cloud.barco.com','PFA'),
+('Clickshare guide','https://theunion.dk','PFA'),
+('Meraki PFA','https://n717.meraki.com','PFA'),
+('The Union Planner','https://the-union.pronestor.com','PFA'),
+('care.oniadea','https://care.oniadea.com','PFA'),
+('BMC Nextcloud','https://nc.bmcnetworks.dk','Interne systemer'),
+('BMC Sharepoint','https://bmcdenmark.sharepoint.com','Interne systemer'),
+('2fAuth','https://2f.bmcnetworks.dk/','Interne systemer'),
+('Seafile','https://docs.bmcnetworks.dk','Interne systemer'),
+('Vaultwarden','https://bw.bmcnetworks.dk/#/','Interne systemer'),
+('BMC mail arkiv','https://bmcnetworks.bmcmailarkiv.dk','Interne systemer'),
+('Uptime Kuma','https://kuma.bmcnetworks.dk/dashboard','Interne systemer'),
+('uISP OLD','https://unms-pri.bmcnetworks.dk','Interne systemer'),
+('Smokeping','https://smokeping.bmcnetworks.dk','Interne systemer'),
+('Teknik WIKI','https://wiki.bmcnetworks.dk','Interne systemer'),
+('Unifi','https://unifi.bmcnetworks.dk:8443','Interne systemer'),
+('Unifi old','https://unifi-sdn.bmcnetworks.dk:8443/','Interne systemer'),
+('BMC Office install','http://software.bmcnetworks.dk','Externe systemer'),
+('BMC Speakonline','https://phone-wizard.com','Externe systemer'),
+('Cloudfactory Portal','http://portal.cloudfactory.dk','Externe systemer'),
+('Eset MSP','https://msp.eset.com','Externe systemer'),
+('Minside Telefoni','https://minside.bmcnetworks.dk','Externe systemer'),
+('My Globalconnect','https://my.globalconnect.dk','Externe systemer'),
+('SentinelOne','https://euce1-teamblue.sentinelone.net','Externe systemer'),
+('Simply CRM portal','https://tickets.simply-crm.com','Externe systemer'),
+('Globalconnect','https://nn.globalconnect.dk','Externe systemer'),
+('Simply CRM','https://bmcnetworks.simply-crm.dk','Externe systemer'),
+('Portal admin','https://mit.bmcnetworks.dk','Externe systemer'),
+('Jira','https://bmcdenmark.atlassian.net','Externe systemer'),
+('Avast hub','http://businesshub.avast.com','Externe systemer'),
+('Booking mødelokale','https://3048.torvekoekken.dk','Externe systemer'),
+('CP SMS','https://www.cpsms.dk','Externe systemer'),
+('Curanet','https://reseller.curanet.dk','Externe systemer'),
+('Mit GC','https://nn.globalconnect.dk','Externe systemer'),
+('Shipmondo','https://app.shipmondo.com','Externe systemer'),
+('e-conomic','https://secure.e-conomic.com','Externe systemer'),
+('Provision Yealink','https://dm.yealink.com','Externe systemer'),
+('Carl-Ras','https://www.carl-ras.dk','Grosister'),
+('Deltaco','https://www.deltaco.dk','Grosister'),
+('Serverschmiede','https://www.serverschmiede.com','Grosister'),
+('DCS','http://dcs.dk','Grosister'),
+('Also','https://www.also.com','Grosister'),
+('EET','https://www.eetgroup.com','Grosister'),
+('Farnell','https://dk.farnell.com','Grosister'),
+('Lemvigh-Müller','https://www.lemu.dk','Grosister'),
+('Lan-Com','https://lan-com.dk','Grosister'),
+('Clerk','https://my.clerk.io','ITvarer.dk'),
+('OnPay Manager','https://manage.onpay.io','ITvarer.dk'),
+('Stedger','https://dashboard.stedger.com','ITvarer.dk'),
+('Webshop admin','https://itvarer.bmcnetworks.dk','ITvarer.dk'),
+('3 Erhverv','https://www.3.dk','Tele sites'),
+('ICH','http://ich01.supertel.dk','Tele sites'),
+('Mastedatabasen','https://www.mastedatabasen.dk','Tele sites'),
+('BMCnas','https://172.16.20.28/cgi-bin/','Hardware'),
+('HP Officejet','http://172.16.20.187','Hardware'),
+('TrueNAS','https://172.16.30.9','Hardware'),
+('Flame search tips','https://github.com/pawelmalak/flame/wiki/Search-bar','Diverse'),
+('Mentech','http://mentech.dk','Diverse'),
+('BMCnet.dk','http://bmcnet.dk','bmcnet.dk'),
+('bmcnet admin','https://reseller.curanet.dk','bmcnet.dk'),
+('Power DNS','http://172.16.20.25','Old links'),
+('SugarCRM','http://sugar.intranet.bmc','Old links'),
+('Teknik intra','http://teknik.intranet.bmc','Old links');
+
+INSERT INTO link_categories (name, icon, sort_order)
+SELECT DISTINCT category_name, 'bi-link-45deg', 100
+FROM tmp_links_import
+ON CONFLICT (name) DO NOTHING;
+
+INSERT INTO links (name, type, url, environment, is_critical, is_favorite)
+SELECT t.name, 'http', t.url, 'prod', FALSE, FALSE
+FROM tmp_links_import t
+WHERE NOT EXISTS (
+ SELECT 1
+ FROM links l
+ WHERE l.deleted_at IS NULL
+ AND l.name = t.name
+ AND l.url = t.url
+);
+
+INSERT INTO link_category_map (link_id, category_id)
+SELECT l.id, c.id
+FROM tmp_links_import t
+JOIN link_categories c ON c.name = t.category_name
+JOIN LATERAL (
+ SELECT id
+ FROM links
+ WHERE deleted_at IS NULL
+ AND name = t.name
+ AND url = t.url
+ ORDER BY id ASC
+ LIMIT 1
+) l ON TRUE
+ON CONFLICT DO NOTHING;
+
+COMMIT;
+
+SELECT
+ (SELECT COUNT(*) FROM tmp_links_import) AS source_rows,
+ (SELECT COUNT(*) FROM link_categories WHERE name IN (SELECT DISTINCT category_name FROM tmp_links_import)) AS matched_categories,
+ (SELECT COUNT(*) FROM links WHERE deleted_at IS NULL AND (name, url) IN (SELECT name, url FROM tmp_links_import)) AS matched_links,
+ (SELECT COUNT(*)
+ FROM link_category_map lcm
+ JOIN links l ON l.id = lcm.link_id
+ JOIN link_categories c ON c.id = lcm.category_id
+ WHERE l.deleted_at IS NULL
+ AND (l.name, l.url) IN (SELECT name, url FROM tmp_links_import)
+ AND c.name IN (SELECT DISTINCT category_name FROM tmp_links_import)
+ ) AS matched_mappings;
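+
+-- Note (illustrative, not part of the import): the ON CONFLICT (name) clause
+-- above assumes a unique constraint (or unique index) on link_categories.name.
+-- Pre-flight check:
+--   SELECT conname FROM pg_constraint
+--   WHERE conrelid = 'link_categories'::regclass AND contype IN ('u', 'p');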
+
+
+ ${escapeHtml(mail.subject || '(Ingen emne)')}
+
+ ${escapeHtml(sender)}
+ ${mail.linked_case_id ? ` • Sag #${mail.linked_case_id}` : ''}
+ ${isUnread ? ' • Ulæst' : ''}
+ • ${escapeHtml(received)}
+
+ | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||