@@ -924,6 +932,8 @@ async function loadCustomer() {
customerData = await response.json();
displayCustomer(customerData);
+ await loadUtilityCompany();
+
// Check data consistency
await checkDataConsistency();
} catch (error) {
@@ -1015,6 +1025,78 @@ function displayCustomer(customer) {
document.getElementById('createdAt').textContent = new Date(customer.created_at).toLocaleString('da-DK');
}
/**
 * Fetch the grid/utility company for the current customer and render it into
 * the #utilityCompanyName / #utilityCompanyContact elements.
 * Reads the globals `customerData` (for the address guard) and `customerId`.
 * Silently returns when the target elements are absent from the page.
 */
async function loadUtilityCompany() {
    const nameEl = document.getElementById('utilityCompanyName');
    const contactEl = document.getElementById('utilityCompanyContact');
    if (!nameEl || !contactEl) return;

    // No address on the customer -> lookup cannot succeed, show guidance.
    if (!customerData?.address) {
        nameEl.textContent = 'Ingen adresse angivet';
        contactEl.textContent = 'Tilføj adresse for at hente netselskab';
        return;
    }

    // Loading state. The previous innerHTML assignment contained a broken
    // string literal (markup lost); a static string belongs in textContent,
    // which also rules out markup injection.
    nameEl.textContent = 'Henter netselskab...';
    contactEl.textContent = '';

    try {
        const response = await fetch(`/api/v1/customers/${customerId}/utility-company`);

        // Parse the body defensively: error responses may not be JSON.
        let payload = {};
        try {
            payload = await response.json();
        } catch (err) {
            console.warn('Utility payload could not be parsed', err);
        }

        if (!response.ok) {
            throw new Error(payload.detail || 'Fejl ved netselskabsopslag');
        }

        if (!payload.found) {
            nameEl.textContent = 'Netselskab ikke fundet';
            contactEl.textContent = payload.message || 'Kontroller adressen og prøv igen';
            return;
        }

        displayUtilityCompany(payload);
    } catch (error) {
        console.error('Error fetching utility company:', error);
        nameEl.textContent = 'Kunne ikke hente netselskab';
        contactEl.textContent = 'Prøv igen senere';
    }
}
+
/**
 * Render a utility-company payload ({ supplier, message }) into the
 * #utilityCompanyName / #utilityCompanyContact elements.
 * Phone and website are escaped with the page's escapeHtml() helper before
 * being joined into contactEl.innerHTML.
 */
function displayUtilityCompany(payload) {
    const nameEl = document.getElementById('utilityCompanyName');
    const contactEl = document.getElementById('utilityCompanyContact');
    if (!nameEl || !contactEl) return;

    const supplier = payload?.supplier;
    if (!supplier) {
        nameEl.textContent = 'Netselskab ikke fundet';
        contactEl.textContent = payload?.message || 'Ingen data fra API';
        return;
    }

    nameEl.textContent = supplier.name || 'Ukendt netselskab';

    const contactPieces = [];
    if (supplier.phone) {
        contactPieces.push(`Tlf. ${escapeHtml(supplier.phone)}`);
    }

    if (supplier.website) {
        // Prefix a scheme when the API returns a bare domain.
        const normalized = supplier.website.toLowerCase().startsWith('http')
            ? supplier.website
            : `https://${supplier.website}`;
        const href = escapeHtml(normalized);
        const label = escapeHtml(supplier.website);
        // Reconstructed anchor (the original markup was lost in extraction):
        // open in a new tab without leaking the opener.
        contactPieces.push(`<a href="${href}" target="_blank" rel="noopener noreferrer">${label}</a>`);
    }

    contactEl.innerHTML = contactPieces.length > 0 ? contactPieces.join(' • ') : 'Ingen kontaktinfo';
}
+
async function loadContacts() {
const container = document.getElementById('contactsContainer');
container.innerHTML = '
';
diff --git a/app/emails/backend/router.py b/app/emails/backend/router.py
index 24cf82e..3263f24 100644
--- a/app/emails/backend/router.py
+++ b/app/emails/backend/router.py
@@ -177,6 +177,7 @@ async def list_emails(
em.id, em.message_id, em.subject, em.sender_email, em.sender_name,
em.received_date, em.classification, em.confidence_score, em.status,
em.is_read, em.has_attachments, em.attachment_count,
+ em.body_text, em.body_html,
er.name as rule_name,
v.name as supplier_name,
NULL as customer_name
diff --git a/app/emails/frontend/emails.html b/app/emails/frontend/emails.html
index 282d975..d5b1dac 100644
--- a/app/emails/frontend/emails.html
+++ b/app/emails/frontend/emails.html
@@ -1740,10 +1740,15 @@ function renderEmailDetail(email) {
- ${email.body_html ? `
` :
+ ${email.body_html ? `
` :
`
${escapeHtml(email.body_text || 'Ingen indhold')}`}
`;
+ // If HTML, inject it as innerHTML after rendering
+ if (email.body_html) {
+ const htmlDiv = pane.querySelector('.email-html-body');
+ if (htmlDiv) htmlDiv.innerHTML = email.body_html;
+ }
}
function renderEmailAnalysis(email) {
diff --git a/app/opportunities/backend/router.py b/app/opportunities/backend/router.py
index 3ecef46..e1f24e4 100644
--- a/app/opportunities/backend/router.py
+++ b/app/opportunities/backend/router.py
@@ -1,21 +1,327 @@
+
"""
Opportunities (Pipeline) Router
Hub-local sales pipeline
"""
-from fastapi import APIRouter, HTTPException, Query
+from pathlib import Path
+from uuid import uuid4
+
+from fastapi import APIRouter, HTTPException, Query, UploadFile, File, Form, Request
+from fastapi.responses import FileResponse
from pydantic import BaseModel
-from typing import Optional, List, Dict
+from typing import Optional, List, Dict, Any, Tuple
from datetime import date, datetime
import json
import logging
+import os
+import shutil
+from app.core.config import settings
from app.core.database import execute_query, execute_query_single, execute_update
from app.services.opportunity_service import handle_stage_change
+from app.services.email_service import EmailService
+import email
+from email.header import decode_header
+try:
+ import extract_msg
+except ImportError:
+ extract_msg = None
logger = logging.getLogger(__name__)
router = APIRouter()
@router.post("/opportunities/{opportunity_id}/email-links", tags=["Opportunities"])
async def add_opportunity_email_link(opportunity_id: int, payload: dict):
    """Link an existing email to an opportunity.

    Body: ``{"email_id": <positive int>}``.
    Returns the refreshed opportunity dict; 404 when the opportunity does not
    exist, 400 on an invalid email_id, 500 when the insert fails.
    """
    email_id = payload.get("email_id")
    # bool is a subclass of int, so a bare isinstance(..., int) check would
    # accept True/False; reject those and non-positive ids explicitly.
    if not isinstance(email_id, int) or isinstance(email_id, bool) or email_id <= 0:
        raise HTTPException(status_code=400, detail="Invalid email_id")

    # _get_opportunity already raises HTTPException(404) when missing; no need
    # to catch and re-raise it.
    _get_opportunity(opportunity_id)

    try:
        execute_query(
            "INSERT INTO pipeline_opportunity_emails (opportunity_id, email_id) VALUES (%s, %s) ON CONFLICT DO NOTHING",
            (opportunity_id, email_id)
        )
    except Exception as e:
        logger.error(f"Failed to add email link: {e}")
        raise HTTPException(status_code=500, detail="Kunne ikke tilføje email-link")

    return _get_opportunity(opportunity_id)
+
@router.delete("/opportunities/{opportunity_id}/email-links/{email_id}", tags=["Opportunities"])
async def remove_opportunity_email_link(opportunity_id: int, email_id: int):
    """Remove a linked email from an opportunity"""
    delete_sql = (
        "DELETE FROM pipeline_opportunity_emails "
        "WHERE opportunity_id = %s AND email_id = %s"
    )
    try:
        execute_query(delete_sql, (opportunity_id, email_id))
    except Exception as e:
        logger.error(f"Failed to remove email link: {e}")
        raise HTTPException(status_code=500, detail="Kunne ikke fjerne email-link")

    return {"success": True}
+
@router.patch("/opportunities/{opportunity_id}/email-link", tags=["Opportunities"])
async def update_opportunity_email_link(opportunity_id: int, payload: dict):
    """Legacy endpoint: Update the linked email (single) -> Redirects to add link.

    Kept for clients that still PATCH a single linked email; the model is now
    many-to-many, so this simply delegates to the add-link endpoint.
    """
    # For backward compatibility, we treat this as "add link" for now
    return await add_opportunity_email_link(opportunity_id, payload)
+
+
+def _decode_header_str(header_val):
+ if not header_val:
+ return ""
+ try:
+ decoded_list = decode_header(header_val)
+ result = ""
+ for content, encoding in decoded_list:
+ if isinstance(content, bytes):
+ if encoding:
+ try:
+ result += content.decode(encoding)
+ except LookupError:
+ result += content.decode('utf-8', errors='ignore')
+ except Exception:
+ result += content.decode('utf-8', errors='ignore')
+ else:
+ result += content.decode('utf-8', errors='ignore')
+ else:
+ result += str(content)
+ return result
+ except Exception:
+ return str(header_val)
+
async def _process_uploaded_email(file: UploadFile, opportunity_id: int) -> dict:
    """Parse an uploaded .eml/.msg file, persist it via EmailService, and link
    it to the opportunity.

    Returns the refreshed opportunity dict (including linked emails/contacts).
    Raises HTTPException 400 for unsupported extensions, 500 when extract-msg
    is missing, the save fails, or the link insert fails.
    """
    content = await file.read()
    filename = file.filename.lower()
    email_data = {}

    # Generate a unique message ID if one doesn't exist to prevent collisions/logic errors
    temp_id = str(uuid4())

    if filename.endswith('.msg'):
        if not extract_msg:
            raise HTTPException(status_code=500, detail="Library 'extract-msg' not installed")

        # extract-msg needs a file-like object or path. BytesIO works.
        import io
        msg = extract_msg.Message(io.BytesIO(content))

        # Map Outlook .msg fields onto the email_messages schema.
        email_data = {
            'message_id': msg.messageId or f"msg-{temp_id}",
            'subject': msg.subject or "No Subject",
            'sender_email': msg.sender or "",
            'sender_name': msg.sender or "",  # msg.sender is often "Name <address>" or just the address
            'recipient_email': msg.to or "",
            'cc': msg.cc or "",
            'body_text': msg.body,
            'body_html': msg.htmlBody,  # might be None
            'received_date': msg.date or datetime.now(),
            'folder': 'Imported',
            'attachment_count': len(msg.attachments),
            'has_attachments': len(msg.attachments) > 0,
            'attachments': []
        }

        # Handle msg attachments (simplified, might need more work for full fidelity)
        for att in msg.attachments:
            # Binary attachments in msg
            if hasattr(att, 'data'):
                email_data['attachments'].append({
                    'filename': att.longFilename or att.shortFilename or 'attachment',
                    'content': att.data,
                    'size': len(att.data),
                    'content_type': 'application/octet-stream'
                })

    elif filename.endswith('.eml'):
        msg = email.message_from_bytes(content)

        # Body extraction: first text/plain and first text/html part win.
        body_text = ""
        body_html = ""
        if msg.is_multipart():
            for part in msg.walk():
                ctype = part.get_content_type()
                if ctype == "text/plain" and not body_text:
                    body_text = part.get_payload(decode=True).decode('utf-8', errors='ignore')
                elif ctype == "text/html" and not body_html:
                    body_html = part.get_payload(decode=True).decode('utf-8', errors='ignore')
        else:
            body_text = msg.get_payload(decode=True).decode('utf-8', errors='ignore')

        # Attachments: every non-multipart, non-body leaf part with a filename.
        attachments = []
        for part in msg.walk():
            if part.get_content_maintype() == 'multipart': continue
            if part.get_content_type() in ['text/plain', 'text/html']: continue
            fname = part.get_filename()
            if fname:
                payload = part.get_payload(decode=True)
                if payload:
                    attachments.append({
                        'filename': _decode_header_str(fname),
                        'content': payload,
                        'size': len(payload),
                        'content_type': part.get_content_type()
                    })

        email_data = {
            'message_id': msg.get('Message-ID', f"eml-{temp_id}"),
            'subject': _decode_header_str(msg.get('Subject', 'No Subject')),
            'sender_email': _decode_header_str(msg.get('From', '')),
            'sender_name': _decode_header_str(msg.get('From', '')),
            'recipient_email': _decode_header_str(msg.get('To', '')),
            'cc': _decode_header_str(msg.get('Cc', '')),
            'body_text': body_text,
            'body_html': body_html,
            'received_date': datetime.now(),  # EML date parsing is complex, default to now for import
            'folder': 'Imported',
            'has_attachments': len(attachments) > 0,
            'attachment_count': len(attachments),
            'attachments': attachments
        }

        # Try parse date
        if msg.get('Date'):
            try:
                from email.utils import parsedate_to_datetime
                email_data['received_date'] = parsedate_to_datetime(msg.get('Date'))
            # NOTE(review): bare except swallows everything (deliberate
            # best-effort: a malformed Date header falls back to now()).
            except: pass

    else:
        raise HTTPException(status_code=400, detail="Unsupported file format. Use .eml or .msg")

    # Persist via EmailService. save_email does a plain INSERT, so dedupe on
    # message_id first and reuse the existing row when the same email was
    # already imported.
    svc = EmailService()

    existing = execute_query_single("SELECT id FROM email_messages WHERE message_id = %s", (email_data['message_id'],))
    if existing:
        email_id = existing['id']
    else:
        email_id = await svc.save_email(email_data)
        if not email_id:
            raise HTTPException(status_code=500, detail="Failed to save imported email")

    # Link to opportunity (idempotent thanks to ON CONFLICT DO NOTHING).
    try:
        execute_query(
            "INSERT INTO pipeline_opportunity_emails (opportunity_id, email_id) VALUES (%s, %s) ON CONFLICT DO NOTHING",
            (opportunity_id, email_id)
        )
    except Exception as e:
        logger.error(f"Failed to link imported email: {e}")
        raise HTTPException(status_code=500, detail="Failed to link email")

    return _get_opportunity(opportunity_id)
+
+
@router.post("/opportunities/{opportunity_id}/upload-email", tags=["Opportunities"])
async def upload_opportunity_email(opportunity_id: int, file: UploadFile = File(...)):
    """Upload an .eml or .msg file and link it to the opportunity.

    Thin endpoint wrapper; all parsing/persisting/linking happens in
    _process_uploaded_email, which also returns the refreshed opportunity.
    """
    return await _process_uploaded_email(file, opportunity_id)
+
+
@router.post("/opportunities/{opportunity_id}/contacts", tags=["Opportunities"])
async def add_opportunity_contact_link(opportunity_id: int, payload: dict):
    """Link a contact to an opportunity"""
    contact_id = payload.get("contact_id")
    role = payload.get("role")

    if not contact_id:
        raise HTTPException(status_code=400, detail="Invalid contact_id")

    # Upsert: re-linking an already-linked contact just refreshes its role.
    upsert_sql = (
        "INSERT INTO pipeline_opportunity_contacts (opportunity_id, contact_id, role) "
        "VALUES (%s, %s, %s) ON CONFLICT (opportunity_id, contact_id) DO UPDATE SET role = EXCLUDED.role"
    )
    try:
        execute_query(upsert_sql, (opportunity_id, contact_id, role))
    except Exception as e:
        logger.error(f"Failed to add contact link: {e}")
        raise HTTPException(status_code=500, detail="Kunne ikke tilføje kontaktperson")

    return _get_opportunity(opportunity_id)
+
+
@router.delete("/opportunities/{opportunity_id}/contacts/{contact_id}", tags=["Opportunities"])
async def remove_opportunity_contact_link(opportunity_id: int, contact_id: int):
    """Remove a linked contact from an opportunity"""
    delete_sql = (
        "DELETE FROM pipeline_opportunity_contacts "
        "WHERE opportunity_id = %s AND contact_id = %s"
    )
    try:
        execute_query(delete_sql, (opportunity_id, contact_id))
    except Exception as e:
        logger.error(f"Failed to remove contact link: {e}")
        raise HTTPException(status_code=500, detail="Kunne ikke fjerne kontaktperson")

    return _get_opportunity(opportunity_id)
+
+
# Attachment storage configuration. All uploads live under settings.UPLOAD_DIR
# in two dedicated sub-folders, which are created eagerly at import time.
UPLOAD_BASE_PATH = Path(settings.UPLOAD_DIR).resolve()
COMMENT_ATTACHMENT_SUBDIR = "opportunity_comments"
CONTRACT_ATTACHMENT_SUBDIR = "opportunity_contract_files"
for subdir in (COMMENT_ATTACHMENT_SUBDIR, CONTRACT_ATTACHMENT_SUBDIR):
    (UPLOAD_BASE_PATH / subdir).mkdir(parents=True, exist_ok=True)
# Allowed upload extensions (lower-cased, without the leading dot) and the
# maximum accepted attachment size in bytes.
ALLOWED_EXTENSIONS = {ext.lower() for ext in settings.ALLOWED_EXTENSIONS}
MAX_ATTACHMENT_SIZE = settings.EMAIL_MAX_UPLOAD_SIZE_MB * 1024 * 1024
+
+
def _is_attachment_allowed(filename: str) -> bool:
    """Return True when the file's extension is in ALLOWED_EXTENSIONS."""
    suffix = Path(filename).suffix
    return suffix.lower().lstrip(".") in ALLOWED_EXTENSIONS
+
+
def _validate_attachment(upload_file: UploadFile) -> None:
    """Reject uploads with a missing/disallowed extension or an oversized body.

    Raises HTTPException 400 on violation; on success the file cursor is left
    rewound to position 0 for the subsequent copy.
    """
    # filename can be None on a malformed multipart request; Path(None) would
    # raise TypeError and surface as a 500 instead of a clean 400.
    if not upload_file.filename or not _is_attachment_allowed(upload_file.filename):
        raise HTTPException(400, detail="Unsupported attachment type")

    # Measure size without reading the body into memory: seek-to-end, tell,
    # rewind.
    upload_file.file.seek(0, os.SEEK_END)
    size = upload_file.file.tell()
    upload_file.file.seek(0)
    if size > MAX_ATTACHMENT_SIZE:
        raise HTTPException(
            400,
            detail=f"Attachment exceeds size limit of {settings.EMAIL_MAX_UPLOAD_SIZE_MB} MB",
        )
+
+
+def _generate_stored_name(filename: str, subdir: str) -> str:
+ cleaned = Path(filename).name
+ unique = f"{uuid4().hex}_{cleaned}"
+ return f"{subdir}/{unique}"
+
+
def _resolve_attachment_path(stored_name: str) -> Path:
    """Map a stored name (as produced by _generate_stored_name) to its absolute
    path under UPLOAD_BASE_PATH.

    NOTE(review): stored_name is joined without traversal checks; callers only
    pass values read back from the DB, which _generate_stored_name sanitized —
    confirm no other call sites pass user input directly.
    """
    return UPLOAD_BASE_PATH / stored_name
+
+
def _store_upload_file(upload_file: UploadFile, subdir: str) -> Tuple[str, int]:
    """Validate an upload and write it to disk under UPLOAD_BASE_PATH/subdir.

    Returns (stored_name, size_bytes). Raises HTTPException 400 from
    _validate_attachment on disallowed type or oversize.
    """
    _validate_attachment(upload_file)
    stored_name = _generate_stored_name(upload_file.filename, subdir)
    destination = _resolve_attachment_path(stored_name)
    destination.parent.mkdir(parents=True, exist_ok=True)
    # _validate_attachment already rewound the cursor, but be explicit before
    # streaming the body to disk.
    upload_file.file.seek(0)
    with destination.open("wb") as buffer:
        shutil.copyfileobj(upload_file.file, buffer)
    return stored_name, destination.stat().st_size
+
class PipelineStageBase(BaseModel):
name: str
@@ -102,6 +408,33 @@ class OpportunityCommentCreate(OpportunityCommentBase):
pass
class OpportunityCommentAttachment(BaseModel):
    """File attached directly to a pipeline comment (API response shape)."""
    id: int
    filename: str
    content_type: Optional[str] = None
    size_bytes: Optional[int] = None
    created_at: datetime
    download_url: str
+
+
class OpportunityEmailAttachment(BaseModel):
    """Attachment belonging to an email linked from a comment (API response shape)."""
    id: int
    filename: str
    content_type: Optional[str] = None
    size_bytes: Optional[int] = None
    created_at: datetime
    download_url: str
+
+
class OpportunityContractFile(BaseModel):
    """Contract document uploaded on an opportunity (API response shape)."""
    id: int
    filename: str
    content_type: Optional[str] = None
    size_bytes: Optional[int] = None
    created_at: datetime
    download_url: str
+
+
class OpportunityCommentResponse(BaseModel):
id: int
opportunity_id: int
@@ -119,6 +452,8 @@ class OpportunityCommentResponse(BaseModel):
metadata: Optional[Dict] = None
created_at: datetime
updated_at: datetime
+ attachments: List[OpportunityCommentAttachment] = []
+ email_attachments: List[OpportunityEmailAttachment] = []
def _get_stage(stage_id: int):
@@ -152,6 +487,29 @@ def _get_opportunity(opportunity_id: int):
opportunity = execute_query_single(query, (opportunity_id,))
if not opportunity:
raise HTTPException(status_code=404, detail="Opportunity not found")
+
+ # Fetch linked emails
+ email_query = """
+ SELECT e.id, e.subject, e.sender_email, e.received_date, e.body_text, e.body_html
+ FROM email_messages e
+ JOIN pipeline_opportunity_emails poe ON e.id = poe.email_id
+ WHERE poe.opportunity_id = %s
+ ORDER BY e.received_date DESC
+ """
+ linked_emails = execute_query(email_query, (opportunity_id,))
+ opportunity["linked_emails"] = linked_emails or []
+
+ # Fetch linked contacts
+ contacts_query = """
+ SELECT c.id, c.first_name, c.last_name, c.email, c.phone, c.mobile_phone, poc.role
+ FROM contacts c
+ JOIN pipeline_opportunity_contacts poc ON c.id = poc.contact_id
+ WHERE poc.opportunity_id = %s
+ ORDER BY c.first_name, c.last_name
+ """
+ linked_contacts = execute_query(contacts_query, (opportunity_id,))
+ opportunity["linked_contacts"] = linked_contacts or []
+
return opportunity
@@ -176,7 +534,23 @@ def _fetch_opportunity_comments(opportunity_id: int):
WHERE c.opportunity_id = %s
ORDER BY c.created_at DESC
"""
- return execute_query(query, (opportunity_id,)) or []
+ comments = execute_query(query, (opportunity_id,)) or []
+ if not comments:
+ return []
+
+ comment_ids = [comment["id"] for comment in comments]
+ attachments_map = _fetch_comment_attachments_map(comment_ids)
+ email_ids = list({comment["email_id"] for comment in comments if comment.get("email_id")})
+ email_attachment_map = _fetch_email_attachments_map(email_ids)
+
+ for comment in comments:
+ comment["attachments"] = attachments_map.get(comment["id"], [])
+ if comment.get("email_id"):
+ comment["email_attachments"] = email_attachment_map.get(comment["email_id"], [])
+ else:
+ comment["email_attachments"] = []
+
+ return comments
def _fetch_comment(comment_id: int):
@@ -189,7 +563,169 @@ def _fetch_comment(comment_id: int):
WHERE c.id = %s
"""
result = execute_query(query, (comment_id,))
- return result[0] if result else None
+ if not result:
+ return None
+
+ comment = result[0]
+ attachments = _fetch_comment_attachments_map([comment_id])
+ comment["attachments"] = attachments.get(comment_id, [])
+ if comment.get("email_id"):
+ email_attachments = _fetch_email_attachments_map([comment["email_id"]])
+ comment["email_attachments"] = email_attachments.get(comment["email_id"], [])
+ else:
+ comment["email_attachments"] = []
+
+ return comment
+
+
+def _comment_attachment_download_url(opportunity_id: int, attachment_id: int) -> str:
+ return f"/api/v1/opportunities/{opportunity_id}/comment-attachments/{attachment_id}"
+
+
+def _email_attachment_download_url(email_id: int, attachment_id: int) -> str:
+ return f"/api/v1/emails/{email_id}/attachments/{attachment_id}"
+
+
def _fetch_comment_attachments_map(comment_ids: List[int]) -> Dict[int, List[Dict[str, Any]]]:
    """Load attachments for the given comment ids, grouped by comment_id."""
    if not comment_ids:
        return {}

    query = """
    SELECT a.id, a.comment_id, a.opportunity_id, a.filename, a.content_type, a.size_bytes, a.created_at
    FROM pipeline_opportunity_comment_attachments a
    WHERE a.comment_id = ANY(%s)
    ORDER BY a.created_at DESC
    """
    rows = execute_query(query, (comment_ids,)) or []

    grouped: Dict[int, List[Dict[str, Any]]] = {}
    for row in rows:
        entry = {
            "id": row["id"],
            "filename": row["filename"],
            "content_type": row.get("content_type"),
            "size_bytes": row.get("size_bytes"),
            "created_at": row.get("created_at"),
            "download_url": _comment_attachment_download_url(row["opportunity_id"], row["id"])
        }
        grouped.setdefault(row["comment_id"], []).append(entry)
    return grouped
+
+
def _fetch_email_attachments_map(email_ids: List[int]) -> Dict[int, List[Dict[str, Any]]]:
    """Load attachments for the given email ids, grouped by email_id."""
    if not email_ids:
        return {}

    query = """
    SELECT id, email_id, filename, content_type, size_bytes, created_at
    FROM email_attachments
    WHERE email_id = ANY(%s)
    ORDER BY id ASC
    """
    rows = execute_query(query, (email_ids,)) or []

    grouped: Dict[int, List[Dict[str, Any]]] = {}
    for row in rows:
        entry = {
            "id": row["id"],
            "filename": row["filename"],
            "content_type": row.get("content_type"),
            "size_bytes": row.get("size_bytes"),
            "created_at": row.get("created_at"),
            "download_url": _email_attachment_download_url(row["email_id"], row["id"])
        }
        grouped.setdefault(row["email_id"], []).append(entry)
    return grouped
+
+
+def _contract_file_download_url(opportunity_id: int, file_id: int) -> str:
+ return f"/api/v1/opportunities/{opportunity_id}/contract-files/{file_id}"
+
+
def _fetch_contract_files(opportunity_id: int) -> List[Dict[str, Any]]:
    """List all contract files on an opportunity, newest first."""
    query = """
    SELECT id, filename, content_type, size_bytes, stored_name, created_at
    FROM pipeline_opportunity_contract_files
    WHERE opportunity_id = %s
    ORDER BY created_at DESC
    """
    rows = execute_query(query, (opportunity_id,)) or []

    files: List[Dict[str, Any]] = []
    for row in rows:
        files.append({
            "id": row["id"],
            "filename": row["filename"],
            "content_type": row.get("content_type"),
            "size_bytes": row.get("size_bytes"),
            "created_at": row.get("created_at"),
            "download_url": _contract_file_download_url(opportunity_id, row["id"]),
        })
    return files
+
+
def _save_contract_files(opportunity_id: int, files: List[UploadFile], uploaded_by_user_id: Optional[int] = None) -> List[Dict[str, Any]]:
    """Validate, store on disk, and register each uploaded contract file.

    Files without a filename are skipped. Returns the list of saved-file dicts
    (id, filename, content_type, size_bytes, created_at, download_url).
    Raises HTTPException 400 from _store_upload_file on invalid uploads.
    """
    if not files:
        return []

    insert_query = """
    INSERT INTO pipeline_opportunity_contract_files
    (opportunity_id, filename, content_type, size_bytes, stored_name, uploaded_by_user_id)
    VALUES (%s, %s, %s, %s, %s, %s)
    RETURNING id, filename, content_type, size_bytes, created_at
    """

    saved_files = []
    for upload_file in files:
        if not upload_file or not upload_file.filename:
            continue

        # Disk write first, then DB row referencing the stored name.
        stored_name, size_bytes = _store_upload_file(upload_file, CONTRACT_ATTACHMENT_SUBDIR)
        result = execute_query(
            insert_query,
            (
                opportunity_id,
                upload_file.filename,
                upload_file.content_type,
                size_bytes,
                stored_name,
                uploaded_by_user_id,
            )
        )
        if result:
            saved = result[0]
            saved_files.append({
                "id": saved["id"],
                "filename": saved["filename"],
                "content_type": saved.get("content_type"),
                "size_bytes": saved.get("size_bytes"),
                "created_at": saved.get("created_at"),
                "download_url": _contract_file_download_url(opportunity_id, saved["id"]),
            })

    return saved_files
def _save_comment_attachments(opportunity_id: int, comment_id: int, files: List[UploadFile], uploaded_by_user_id: Optional[int] = None) -> None:
    """Validate, store on disk, and register each attachment of a comment.

    Files without a filename are skipped. Raises HTTPException 400 from
    _store_upload_file on invalid uploads. Returns nothing; callers re-fetch
    the comment to get the attachment list.
    """
    if not files:
        return

    insert_query = """
    INSERT INTO pipeline_opportunity_comment_attachments
    (opportunity_id, comment_id, filename, content_type, size_bytes, stored_name, uploaded_by_user_id)
    VALUES (%s, %s, %s, %s, %s, %s, %s)
    """

    for upload_file in files:
        if not upload_file or not upload_file.filename:
            continue

        # Disk write first, then DB row referencing the stored name.
        stored_name, size_bytes = _store_upload_file(upload_file, COMMENT_ATTACHMENT_SUBDIR)
        execute_query(
            insert_query,
            (
                opportunity_id,
                comment_id,
                upload_file.filename,
                upload_file.content_type,
                size_bytes,
                stored_name,
                uploaded_by_user_id,
            )
        )
# ============================
@@ -445,11 +981,64 @@ async def list_opportunity_comments(opportunity_id: int):
response_model=OpportunityCommentResponse,
tags=["Opportunities"]
)
-async def add_opportunity_comment(opportunity_id: int, comment: OpportunityCommentCreate):
+async def add_opportunity_comment(
+ opportunity_id: int,
+ request: Request,
+ content: Optional[str] = Form(None),
+ author_name: Optional[str] = Form(None),
+ user_id: Optional[int] = Form(None),
+ email_id: Optional[int] = Form(None),
+ contract_number: Optional[str] = Form(None),
+ contract_context: Optional[str] = Form(None),
+ contract_link: Optional[str] = Form(None),
+ metadata: Optional[str] = Form(None),
+ files: Optional[List[UploadFile]] = File(None),
+):
_get_opportunity(opportunity_id)
- author_name = comment.author_name or 'Hub Bruger'
- metadata_json = json.dumps(comment.metadata) if comment.metadata else None
+ if request.headers.get("content-type", "").startswith("application/json"):
+ payload: Dict[str, Any] = await request.json()
+ else:
+ payload = {
+ "content": content,
+ "author_name": author_name,
+ "user_id": user_id,
+ "email_id": email_id,
+ "contract_number": contract_number,
+ "contract_context": contract_context,
+ "contract_link": contract_link,
+ "metadata": metadata,
+ }
+
+ content_value = payload.get("content")
+ if not content_value:
+ raise HTTPException(status_code=400, detail="Kommentar er påkrævet")
+
+ resolved_author = payload.get("author_name") or 'Hub Bruger'
+ resolved_user_id = payload.get("user_id")
+ if isinstance(resolved_user_id, str):
+ try:
+ resolved_user_id = int(resolved_user_id)
+ except ValueError:
+ resolved_user_id = None
+
+ resolved_email_id = payload.get("email_id")
+ if isinstance(resolved_email_id, str):
+ try:
+ resolved_email_id = int(resolved_email_id)
+ except ValueError:
+ resolved_email_id = None
+ metadata_payload = payload.get("metadata")
+ metadata_obj = None
+ if metadata_payload:
+ if isinstance(metadata_payload, str):
+ try:
+ metadata_obj = json.loads(metadata_payload)
+ except json.JSONDecodeError:
+ metadata_obj = None
+ elif isinstance(metadata_payload, dict):
+ metadata_obj = metadata_payload
+ metadata_json = json.dumps(metadata_obj) if metadata_obj else None
query = """
INSERT INTO pipeline_opportunity_comments
@@ -463,13 +1052,13 @@ async def add_opportunity_comment(opportunity_id: int, comment: OpportunityComme
query,
(
opportunity_id,
- comment.user_id,
- author_name,
- comment.content,
- comment.email_id,
- comment.contract_number,
- comment.contract_context,
- comment.contract_link,
+ resolved_user_id,
+ resolved_author,
+ content_value,
+ resolved_email_id,
+ payload.get("contract_number"),
+ payload.get("contract_context"),
+ payload.get("contract_link"),
metadata_json
)
)
@@ -478,9 +1067,39 @@ async def add_opportunity_comment(opportunity_id: int, comment: OpportunityComme
raise HTTPException(status_code=500, detail="Kunne ikke oprette kommentar")
comment_id = result[0]["id"]
+ attachment_files = files or []
+ if attachment_files:
+ _save_comment_attachments(opportunity_id, comment_id, attachment_files, resolved_user_id)
+
return _fetch_comment(comment_id)
@router.get("/opportunities/{opportunity_id}/comment-attachments/{attachment_id}", tags=["Opportunities"])
async def download_comment_attachment(opportunity_id: int, attachment_id: int):
    """Stream a stored comment attachment back to the client.

    404 when the DB row or the on-disk file is missing, 500 when the row has
    no stored_name (inconsistent data).
    """
    query = """
    SELECT * FROM pipeline_opportunity_comment_attachments
    WHERE id = %s AND opportunity_id = %s
    """
    result = execute_query(query, (attachment_id, opportunity_id))
    if not result:
        raise HTTPException(status_code=404, detail="Vedhæftet fil ikke fundet")

    attachment = result[0]
    stored_name = attachment.get("stored_name")
    if not stored_name:
        raise HTTPException(status_code=500, detail="Vedhæftet fil mangler sti")

    file_path = _resolve_attachment_path(stored_name)
    if not file_path.exists():
        raise HTTPException(status_code=404, detail="Filen findes ikke på serveren")

    return FileResponse(
        path=file_path,
        filename=attachment.get("filename"),
        media_type=attachment.get("content_type") or "application/octet-stream"
    )
+
+
@router.get(
"/contracts/search",
tags=["Opportunities"],
@@ -502,3 +1121,59 @@ async def search_contracts(query: str = Query(..., min_length=2), limit: int = Q
params = (f"%{query}%", limit)
results = execute_query(sql, params)
return results or []
+
+
@router.get(
    "/opportunities/{opportunity_id}/contract-files",
    tags=["Opportunities"],
    response_model=List[OpportunityContractFile]
)
async def list_contract_files(opportunity_id: int):
    """List contract files on an opportunity (404 when it does not exist)."""
    _get_opportunity(opportunity_id)
    return _fetch_contract_files(opportunity_id)
+
+
@router.post(
    "/opportunities/{opportunity_id}/contract-files",
    tags=["Opportunities"],
    response_model=List[OpportunityContractFile]
)
async def upload_contract_files(opportunity_id: int, files: List[UploadFile] = File(...)):
    """Upload one or more contract files to an opportunity.

    400 when the list is empty, 500 when nothing could be saved (e.g. every
    file lacked a filename), 404 when the opportunity does not exist.
    """
    _get_opportunity(opportunity_id)
    if not files:
        raise HTTPException(status_code=400, detail="Ingen filer at uploade")

    saved = _save_contract_files(opportunity_id, files)
    if not saved:
        raise HTTPException(status_code=500, detail="Kunne ikke gemme filer")
    return saved
+
+
@router.get(
    "/opportunities/{opportunity_id}/contract-files/{file_id}",
    tags=["Opportunities"]
)
async def download_contract_file(opportunity_id: int, file_id: int):
    """Stream a stored contract file, preferring inline display.

    404 when the DB row or the on-disk file is missing, 500 when the row has
    no stored_name (inconsistent data).
    """
    query = """
    SELECT * FROM pipeline_opportunity_contract_files
    WHERE id = %s AND opportunity_id = %s
    """
    result = execute_query(query, (file_id, opportunity_id))
    if not result:
        raise HTTPException(status_code=404, detail="Filen ikke fundet")

    row = result[0]
    stored_name = row.get("stored_name")
    if not stored_name:
        raise HTTPException(status_code=500, detail="Filen mangler lagring")

    path = _resolve_attachment_path(stored_name)
    if not path.exists():
        raise HTTPException(status_code=404, detail="Filen findes ikke på serveren")

    # The filename is user-supplied; strip CR/LF and quotes before embedding
    # it in the hand-built Content-Disposition header to prevent header
    # injection / broken quoting.
    raw_filename = row.get("filename") or "download"
    safe_filename = raw_filename.replace("\r", "").replace("\n", "").replace('"', "")

    return FileResponse(
        path=path,
        filename=safe_filename,
        media_type=row.get("content_type") or "application/octet-stream",
        headers={"Content-Disposition": f'inline; filename="{safe_filename}"'}
    )
diff --git a/app/opportunities/frontend/opportunities.html b/app/opportunities/frontend/opportunities.html
index 1466e7f..5971147 100644
--- a/app/opportunities/frontend/opportunities.html
+++ b/app/opportunities/frontend/opportunities.html
@@ -222,12 +222,11 @@ function renderOpportunities() {
}
tbody.innerHTML = filtered.map(o => `
-
+
| ${escapeHtml(o.title)} |
- ${escapeHtml(o.customer_name || '-')}
- |
+ ${escapeHtml(o.customer_name || '-')} |
${formatCurrency(o.amount, o.currency)} |
- ${o.expected_close_date ? formatDate(o.expected_close_date) : '-'} |
+ ${o.expected_close_date ? formatDate(o.expected_close_date) : '-'} |
@@ -236,9 +235,7 @@ function renderOpportunities() {
|
${o.probability || 0}% |
-
+
|
`).join('');
diff --git a/app/opportunities/frontend/opportunity_detail.html b/app/opportunities/frontend/opportunity_detail.html
index f9c3b44..2fd43ba 100644
--- a/app/opportunities/frontend/opportunity_detail.html
+++ b/app/opportunities/frontend/opportunity_detail.html
@@ -1,89 +1,178 @@
-{% extends "shared/frontend/base.html" %}
-{% block title %}Mulighed - BMC Hub{% endblock %}
+{% extends "shared/frontend/base.html" %}
{% block extra_css %}
{% endblock %}
@@ -106,6 +195,40 @@
+
+
+
Pipeline‑status
+
+ Kunde
+ -
+
+
+ Stage
+ -
+
+
+ Værdi
+ -
+
+
+ Sandsynlighed
+ -
+
+
+
+
+
+
Kontaktpersoner
+
+
+
+
+
+
+
+
Grundoplysninger
@@ -162,30 +285,31 @@
Tilbud & Kontrakt
-
Felt til dokumentlink og kontraktstatus kommer i næste version.
+
Vedhæft relevante dokumenter og se filer fra valgte emails.
+
+
+ Træk filer hertil eller klik for at vælge
+ Støtter dokumenter, regneark og billeder
+
+
+
+
-
-
Pipeline‑status
-
- Kunde
- -
-
-
- Stage
- -
-
-
- Værdi
- -
-
-
- Sandsynlighed
- -
-
-
+
+
Linket email
+
+
+
+
+
+
+ Træk emails hertil (.msg, .eml)
+
+
+
Næste aktivitet
Aktivitetsmodul kommer senere.
@@ -239,24 +363,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
@@ -305,11 +411,27 @@
+
+
+
{% endblock %}
{% block extra_js %}