Release v2.1.0

This commit is contained in:
Christian 2026-01-29 00:36:32 +01:00
parent f059cb6c95
commit 4b467aeeec
15 changed files with 1760 additions and 186 deletions

24
RELEASE_NOTES_v2.1.0.md Normal file
View File

@ -0,0 +1,24 @@
# Release Notes v2.1.0
## New Features
- **Email Drag-and-Drop Upload**: Upload .msg and .eml files directly to opportunities by dragging them onto the email drop zone
- **Multiple Email Linking**: Link multiple emails to a single opportunity with search and persistent storage
- **Contact Persons Management**: Add, link, and manage contact persons for opportunities with roles and search functionality
- **File Uploads**: Upload files to opportunity comments and contract sections with drag-and-drop support
- **Utility Company Lookup**: Automatically lookup electricity suppliers for customer addresses via Elnet API
- **UI Reorganization**: Moved pipeline status to top-left for better visibility in opportunity detail view
- **Email HTML Rendering**: Display HTML email bodies in the email viewer
## Technical Changes
- Added Many-to-Many relationships for opportunity emails and contacts
- New database tables: pipeline_opportunity_emails, pipeline_opportunity_contacts, pipeline_opportunity_comment_attachments, pipeline_opportunity_contract_files
- Enhanced email processing to support .msg and .eml file uploads
- Improved file handling with size limits and type validation
- Updated customer detail page with utility company information
## Fixes
- Fixed API_BASE path issues in opportunity detail page
- Improved email attachment handling and display
---
Release Date: 29. januar 2026

View File

@ -1 +1 @@
2.0.6
2.1.0

View File

@ -21,6 +21,10 @@ class Settings(BaseSettings):
API_RELOAD: bool = False
ENABLE_RELOAD: bool = False # Added to match docker-compose.yml
# Elnet supplier lookup
ELNET_API_BASE_URL: str = "https://api.elnet.greenpowerdenmark.dk/api"
ELNET_TIMEOUT_SECONDS: int = 12
# Security
SECRET_KEY: str = "dev-secret-key-change-in-production"
ALLOWED_ORIGINS: List[str] = ["http://localhost:8000", "http://localhost:3000"]
@ -70,7 +74,7 @@ class Settings(BaseSettings):
EMAIL_PROCESS_INTERVAL_MINUTES: int = 5
EMAIL_WORKFLOWS_ENABLED: bool = True
EMAIL_MAX_UPLOAD_SIZE_MB: int = 50 # Max file size for email uploads
ALLOWED_EXTENSIONS: List[str] = [".pdf", ".jpg", ".jpeg", ".png", ".gif", ".doc", ".docx", ".xls", ".xlsx", ".zip"] # Allowed file extensions for uploads
ALLOWED_EXTENSIONS: List[str] = ["pdf", "jpg", "jpeg", "png", "gif", "doc", "docx", "xls", "xlsx", "zip"] # Allowed file extensions for uploads
# vTiger Cloud Integration
VTIGER_ENABLED: bool = False

View File

@ -8,8 +8,12 @@ from fastapi import APIRouter, HTTPException, Query
from typing import List, Optional, Dict
from pydantic import BaseModel
import logging
import asyncio
import aiohttp
from urllib.parse import quote
from app.core.database import execute_query, execute_query_single, execute_update
from app.core.config import settings
from app.services.cvr_service import get_cvr_service
from app.services.customer_activity_logger import CustomerActivityLogger
from app.services.customer_consistency import CustomerConsistencyService
@ -403,6 +407,85 @@ async def get_customer(customer_id: int):
@router.get("/customers/{customer_id}/utility-company")
async def get_customer_utility_company(customer_id: int):
    """Lookup the netselskab for a customer's address via the Elnet API.

    Builds a "street, postal city" search string from the customer row,
    then queries the Elnet supplier-lookup endpoint.

    Raises:
        HTTPException: 404 if the customer does not exist, 400 if the
            customer lacks a usable address, 502/504 on upstream errors.
    """
    customer = execute_query_single(
        "SELECT address, city, postal_code FROM customers WHERE id = %s",
        (customer_id,)
    )
    if not customer:
        raise HTTPException(status_code=404, detail="Customer not found")
    address = customer.get('address')
    if not address:
        raise HTTPException(status_code=400, detail="Kunde har ikke en adresse")
    # Compose "street, postal city" — skip any missing components.
    components = [address.strip()]
    city_parts = []
    if customer.get('postal_code'):
        city_parts.append(customer['postal_code'].strip())
    if customer.get('city'):
        city_parts.append(customer['city'].strip())
    if city_parts:
        components.append(' '.join(city_parts))
    search_address = ", ".join([comp for comp in components if comp])
    if not search_address:
        raise HTTPException(status_code=400, detail="Ugyldig adressedata")
    base_url = settings.ELNET_API_BASE_URL.rstrip('/')
    # safe='' so every reserved character in the address is percent-encoded.
    lookup_url = f"{base_url}/supplierlookup/{quote(search_address, safe='')}"
    timeout_seconds = settings.ELNET_TIMEOUT_SECONDS
    try:
        timeout = aiohttp.ClientTimeout(total=timeout_seconds)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(lookup_url) as response:
                if response.status == 404:
                    return {
                        "customer_id": customer_id,
                        "address": search_address,
                        "found": False,
                        "message": "Ingen netselskab matchede adressen"
                    }
                if response.status != 200:
                    detail = await response.text()
                    logger.warning(
                        "⚠️ Elnet returned %s for %s (%s)",
                        response.status,
                        customer_id,
                        detail[:200]
                    )
                    raise HTTPException(status_code=502, detail="Fejl fra netselskabs-API")
                payload = await response.json()
    except HTTPException:
        # Bug fix: the 502 raised above was previously caught by the generic
        # Exception handler below and re-surfaced as a 500 with the wrong detail.
        raise
    except asyncio.TimeoutError as exc:
        logger.error("❌ Elnet request timed out for customer %s: %s", customer_id, exc)
        raise HTTPException(status_code=504, detail="Timeout ved forespørgsel til netselskabet")
    except aiohttp.ClientError as exc:
        logger.error("❌ Elnet request failed for customer %s: %s", customer_id, exc)
        raise HTTPException(status_code=502, detail="Kunne ikke kontakte netselskabets API")
    except Exception as exc:
        logger.error("❌ Unexpected error fetching netselskab for %s: %s", customer_id, exc)
        raise HTTPException(status_code=500, detail="Fejl ved forespørgsel til netselskabet")
    supplier = {
        "def": payload.get("def"),
        "name": payload.get("name"),
        "phone": payload.get("phone"),
        "website": payload.get("website")
    }
    return {
        "customer_id": customer_id,
        "address": search_address,
        "found": bool(payload and payload.get("name")),
        "supplier": supplier
    }
@router.post("/customers")
async def create_customer(customer: CustomerCreate):
"""Create a new customer"""

View File

@ -398,6 +398,14 @@
<div class="col-12">
<div class="info-card">
<h5 class="fw-bold mb-3">Integration</h5>
<div class="info-row">
<span class="info-label">El-selskab</span>
<span class="info-value" id="utilityCompanyName">-</span>
</div>
<div class="info-row">
<span class="info-label">Kontakt</span>
<span class="info-value text-muted small" id="utilityCompanyContact">-</span>
</div>
<div class="row">
<div class="col-md-6">
<div class="info-row">
@ -924,6 +932,8 @@ async function loadCustomer() {
customerData = await response.json();
displayCustomer(customerData);
await loadUtilityCompany();
// Check data consistency
await checkDataConsistency();
} catch (error) {
@ -1015,6 +1025,78 @@ function displayCustomer(customer) {
document.getElementById('createdAt').textContent = new Date(customer.created_at).toLocaleString('da-DK');
}
// Fetch the customer's netselskab (utility company) from the backend and
// drive the UI through its three states: loading spinner, result, and
// error/fallback text. Reads the page-level `customerData` and `customerId`.
async function loadUtilityCompany() {
    const nameEl = document.getElementById('utilityCompanyName');
    const contactEl = document.getElementById('utilityCompanyContact');
    if (!nameEl || !contactEl) return;
    // Without an address the backend rejects the lookup, so short-circuit
    // with a prompt instead of issuing a doomed request.
    if (!customerData?.address) {
        nameEl.textContent = 'Ingen adresse angivet';
        contactEl.textContent = 'Tilføj adresse for at hente netselskab';
        return;
    }
    // Loading state while the request is in flight.
    nameEl.innerHTML = '<span class="spinner-border spinner-border-sm text-primary me-2" role="status" aria-hidden="true"></span>Henter netselskab...';
    contactEl.textContent = '';
    try {
        const response = await fetch(`/api/v1/customers/${customerId}/utility-company`);
        let payload = {};
        // Error responses may carry an empty or non-JSON body; tolerate that
        // and fall back to a generic message below.
        try {
            payload = await response.json();
        } catch (err) {
            console.warn('Utility payload could not be parsed', err);
        }
        if (!response.ok) {
            throw new Error(payload.detail || 'Fejl ved netselskabsopslag');
        }
        if (!payload.found) {
            nameEl.textContent = 'Netselskab ikke fundet';
            contactEl.textContent = payload.message || 'Kontroller adressen og prøv igen';
            return;
        }
        displayUtilityCompany(payload);
    } catch (error) {
        console.error('Error fetching utility company:', error);
        nameEl.textContent = 'Kunne ikke hente netselskab';
        contactEl.textContent = 'Prøv igen senere';
    }
}
/**
 * Render supplier details (name, phone, website link) into the
 * utility-company info fields. Shows "not found" messaging when the
 * payload carries no supplier object.
 */
function displayUtilityCompany(payload) {
    const nameField = document.getElementById('utilityCompanyName');
    const contactField = document.getElementById('utilityCompanyContact');
    if (!nameField || !contactField) return;

    const supplier = payload?.supplier;
    if (!supplier) {
        nameField.textContent = 'Netselskab ikke fundet';
        contactField.textContent = payload?.message || 'Ingen data fra API';
        return;
    }

    nameField.textContent = supplier.name || 'Ukendt netselskab';

    const pieces = [];
    if (supplier.phone) {
        pieces.push(`Tlf. ${escapeHtml(supplier.phone)}`);
    }
    if (supplier.website) {
        const site = supplier.website;
        // Prefix a scheme when the stored website lacks one.
        const url = site.toLowerCase().startsWith('http') ? site : `https://${site}`;
        pieces.push(`<a href="${escapeHtml(url)}" target="_blank" rel="noreferrer noopener">${escapeHtml(site)}</a>`);
    }
    contactField.innerHTML = pieces.length > 0 ? pieces.join(' • ') : 'Ingen kontaktinfo';
}
async function loadContacts() {
const container = document.getElementById('contactsContainer');
container.innerHTML = '<div class="col-12 text-center py-5"><div class="spinner-border text-primary"></div></div>';

View File

@ -177,6 +177,7 @@ async def list_emails(
em.id, em.message_id, em.subject, em.sender_email, em.sender_name,
em.received_date, em.classification, em.confidence_score, em.status,
em.is_read, em.has_attachments, em.attachment_count,
em.body_text, em.body_html,
er.name as rule_name,
v.name as supplier_name,
NULL as customer_name

View File

@ -1740,10 +1740,15 @@ function renderEmailDetail(email) {
</div>
<div class="email-body">
${email.body_html ? `<iframe srcdoc="${email.body_html.replace(/"/g, '&quot;')}"></iframe>` :
${email.body_html ? `<div class="email-html-body"></div>` :
`<pre style="white-space: pre-wrap; font-family: inherit;">${escapeHtml(email.body_text || 'Ingen indhold')}</pre>`}
</div>
`;
// If HTML, inject it as innerHTML after rendering
if (email.body_html) {
const htmlDiv = pane.querySelector('.email-html-body');
if (htmlDiv) htmlDiv.innerHTML = email.body_html;
}
}
function renderEmailAnalysis(email) {

View File

@ -1,21 +1,327 @@
"""
Opportunities (Pipeline) Router
Hub-local sales pipeline
"""
from fastapi import APIRouter, HTTPException, Query
from pathlib import Path
from uuid import uuid4
from fastapi import APIRouter, HTTPException, Query, UploadFile, File, Form, Request
from fastapi.responses import FileResponse
from pydantic import BaseModel
from typing import Optional, List, Dict
from typing import Optional, List, Dict, Any, Tuple
from datetime import date, datetime
import json
import logging
import os
import shutil
from app.core.config import settings
from app.core.database import execute_query, execute_query_single, execute_update
from app.services.opportunity_service import handle_stage_change
from app.services.email_service import EmailService
import email
from email.header import decode_header
try:
import extract_msg
except ImportError:
extract_msg = None
logger = logging.getLogger(__name__)
router = APIRouter()
@router.post("/opportunities/{opportunity_id}/email-links", tags=["Opportunities"])
async def add_opportunity_email_link(opportunity_id: int, payload: dict):
    """Add a linked email to an opportunity"""
    email_id = payload.get("email_id")
    # Payload arrives as untyped JSON; insist on a truthy integer id.
    if not (isinstance(email_id, int) and email_id):
        raise HTTPException(status_code=400, detail="Invalid email_id")
    # Normalize any lookup failure to a plain 404.
    try:
        _get_opportunity(opportunity_id)
    except HTTPException:
        raise HTTPException(status_code=404, detail="Opportunity not found")
    link_sql = (
        "INSERT INTO pipeline_opportunity_emails (opportunity_id, email_id) "
        "VALUES (%s, %s) ON CONFLICT DO NOTHING"
    )
    try:
        execute_query(link_sql, (opportunity_id, email_id))
    except Exception as exc:
        logger.error(f"Failed to add email link: {exc}")
        raise HTTPException(status_code=500, detail="Kunne ikke tilføje email-link")
    # Return the refreshed opportunity including its linked emails.
    return _get_opportunity(opportunity_id)
@router.delete("/opportunities/{opportunity_id}/email-links/{email_id}", tags=["Opportunities"])
async def remove_opportunity_email_link(opportunity_id: int, email_id: int):
    """Remove a linked email from an opportunity"""
    unlink_sql = "DELETE FROM pipeline_opportunity_emails WHERE opportunity_id = %s AND email_id = %s"
    try:
        execute_query(unlink_sql, (opportunity_id, email_id))
    except Exception as exc:
        logger.error(f"Failed to remove email link: {exc}")
        raise HTTPException(status_code=500, detail="Kunne ikke fjerne email-link")
    # Deleting a non-existent link is treated as success (idempotent).
    return {"success": True}
@router.patch("/opportunities/{opportunity_id}/email-link", tags=["Opportunities"])
async def update_opportunity_email_link(opportunity_id: int, payload: dict):
    """Legacy endpoint: Update the linked email (single) -> Redirects to add link"""
    # For backward compatibility, we treat this as "add link" for now.
    # Delegates all validation (email_id type, opportunity existence) to
    # add_opportunity_email_link and returns its refreshed opportunity dict.
    return await add_opportunity_email_link(opportunity_id, payload)
def _decode_header_str(header_val):
if not header_val:
return ""
try:
decoded_list = decode_header(header_val)
result = ""
for content, encoding in decoded_list:
if isinstance(content, bytes):
if encoding:
try:
result += content.decode(encoding)
except LookupError:
result += content.decode('utf-8', errors='ignore')
except Exception:
result += content.decode('utf-8', errors='ignore')
else:
result += content.decode('utf-8', errors='ignore')
else:
result += str(content)
return result
except Exception:
return str(header_val)
async def _process_uploaded_email(file: UploadFile, opportunity_id: int) -> dict:
    """Parse an uploaded .msg or .eml file, persist it as an email record,
    and link it to the given opportunity.

    Deduplicates on message_id: an already-imported email is only linked,
    not re-saved. Returns the refreshed opportunity dict.

    Raises:
        HTTPException: 400 for unsupported file formats, 500 when the
            optional extract-msg library is missing, the save fails, or
            the link insert fails.
    """
    content = await file.read()
    filename = file.filename.lower()
    email_data = {}
    # Generate a unique message ID if one doesn't exist to prevent collisions/logic errors
    temp_id = str(uuid4())
    if filename.endswith('.msg'):
        if not extract_msg:
            # extract_msg is an optional dependency, import-guarded at module top.
            raise HTTPException(status_code=500, detail="Library 'extract-msg' not installed")
        # extract-msg needs a file-like object or path. BytesIO works.
        import io
        msg = extract_msg.Message(io.BytesIO(content))
        # Map Outlook message fields onto the email_messages schema.
        # NOTE(review): assumes msg.date is datetime-compatible — confirm against extract-msg docs.
        email_data = {
            'message_id': msg.messageId or f"msg-{temp_id}",
            'subject': msg.subject or "No Subject",
            'sender_email': msg.sender or "",
            'sender_name': msg.sender or "",  # msg.sender is often "Name <email>" or just email
            'recipient_email': msg.to or "",
            'cc': msg.cc or "",
            'body_text': msg.body,
            'body_html': msg.htmlBody,  # might be None
            'received_date': msg.date or datetime.now(),
            'folder': 'Imported',
            'attachment_count': len(msg.attachments),
            'has_attachments': len(msg.attachments) > 0,
            'attachments': []
        }
        # Handle msg attachments (simplified, might need more work for full fidelity)
        for att in msg.attachments:
            # Binary attachments in msg
            if hasattr(att, 'data'):
                email_data['attachments'].append({
                    'filename': att.longFilename or att.shortFilename or 'attachment',
                    'content': att.data,
                    'size': len(att.data),
                    'content_type': 'application/octet-stream'
                })
    elif filename.endswith('.eml'):
        msg = email.message_from_bytes(content)
        # Extract the first text/plain and first text/html parts as the body.
        body_text = ""
        body_html = ""
        if msg.is_multipart():
            for part in msg.walk():
                ctype = part.get_content_type()
                if ctype == "text/plain" and not body_text:
                    body_text = part.get_payload(decode=True).decode('utf-8', errors='ignore')
                elif ctype == "text/html" and not body_html:
                    body_html = part.get_payload(decode=True).decode('utf-8', errors='ignore')
        else:
            body_text = msg.get_payload(decode=True).decode('utf-8', errors='ignore')
        # Collect non-body parts with a filename as attachments.
        attachments = []
        for part in msg.walk():
            if part.get_content_maintype() == 'multipart': continue
            if part.get_content_type() in ['text/plain', 'text/html']: continue
            fname = part.get_filename()
            if fname:
                payload = part.get_payload(decode=True)
                if payload:
                    attachments.append({
                        'filename': _decode_header_str(fname),
                        'content': payload,
                        'size': len(payload),
                        'content_type': part.get_content_type()
                    })
        email_data = {
            'message_id': msg.get('Message-ID', f"eml-{temp_id}"),
            'subject': _decode_header_str(msg.get('Subject', 'No Subject')),
            'sender_email': _decode_header_str(msg.get('From', '')),
            'sender_name': _decode_header_str(msg.get('From', '')),
            'recipient_email': _decode_header_str(msg.get('To', '')),
            'cc': _decode_header_str(msg.get('Cc', '')),
            'body_text': body_text,
            'body_html': body_html,
            'received_date': datetime.now(),  # EML date parsing is complex, default to now for import
            'folder': 'Imported',
            'has_attachments': len(attachments) > 0,
            'attachment_count': len(attachments),
            'attachments': attachments
        }
        # Best-effort: replace the datetime.now() fallback with the real Date header.
        if msg.get('Date'):
            try:
                from email.utils import parsedate_to_datetime
                email_data['received_date'] = parsedate_to_datetime(msg.get('Date'))
            except: pass  # keep the datetime.now() fallback on any parse failure
    else:
        raise HTTPException(status_code=400, detail="Unsupported file format. Use .eml or .msg")
    # Persist via EmailService, but search first: save_email does a plain
    # INSERT, so re-importing the same message would otherwise duplicate it.
    svc = EmailService()
    existing = execute_query_single("SELECT id FROM email_messages WHERE message_id = %s", (email_data['message_id'],))
    if existing:
        email_id = existing['id']
    else:
        email_id = await svc.save_email(email_data)
    if not email_id:
        raise HTTPException(status_code=500, detail="Failed to save imported email")
    # Link to opportunity (idempotent via ON CONFLICT DO NOTHING).
    try:
        execute_query(
            "INSERT INTO pipeline_opportunity_emails (opportunity_id, email_id) VALUES (%s, %s) ON CONFLICT DO NOTHING",
            (opportunity_id, email_id)
        )
    except Exception as e:
        logger.error(f"Failed to link imported email: {e}")
        raise HTTPException(status_code=500, detail="Failed to link email")
    return _get_opportunity(opportunity_id)
@router.post("/opportunities/{opportunity_id}/upload-email", tags=["Opportunities"])
async def upload_opportunity_email(opportunity_id: int, file: UploadFile = File(...)):
    """Upload an .eml or .msg file and link it to the opportunity"""
    # Thin wrapper: parsing, dedup, persistence and linking all live in
    # _process_uploaded_email, which returns the refreshed opportunity dict.
    return await _process_uploaded_email(file, opportunity_id)
@router.post("/opportunities/{opportunity_id}/contacts", tags=["Opportunities"])
async def add_opportunity_contact_link(opportunity_id: int, payload: dict):
    """Link a contact to an opportunity, upserting the optional role.

    Consistency fix (mirrors add_opportunity_email_link): validate that
    contact_id is an integer and confirm the opportunity exists so a
    missing opportunity yields a 404 instead of a FK-violation 500.
    """
    contact_id = payload.get("contact_id")
    role = payload.get("role")
    if not contact_id or not isinstance(contact_id, int):
        raise HTTPException(status_code=400, detail="Invalid contact_id")
    # Raises 404 early when the opportunity does not exist.
    _get_opportunity(opportunity_id)
    try:
        execute_query(
            "INSERT INTO pipeline_opportunity_contacts (opportunity_id, contact_id, role) VALUES (%s, %s, %s) ON CONFLICT (opportunity_id, contact_id) DO UPDATE SET role = EXCLUDED.role",
            (opportunity_id, contact_id, role)
        )
    except Exception as e:
        logger.error(f"Failed to add contact link: {e}")
        raise HTTPException(status_code=500, detail="Kunne ikke tilføje kontaktperson")
    return _get_opportunity(opportunity_id)
@router.delete("/opportunities/{opportunity_id}/contacts/{contact_id}", tags=["Opportunities"])
async def remove_opportunity_contact_link(opportunity_id: int, contact_id: int):
    """Remove a linked contact from an opportunity"""
    unlink_sql = "DELETE FROM pipeline_opportunity_contacts WHERE opportunity_id = %s AND contact_id = %s"
    try:
        execute_query(unlink_sql, (opportunity_id, contact_id))
    except Exception as exc:
        logger.error(f"Failed to remove contact link: {exc}")
        raise HTTPException(status_code=500, detail="Kunne ikke fjerne kontaktperson")
    # Return the refreshed opportunity including its remaining contacts.
    return _get_opportunity(opportunity_id)
# Root directory for all opportunity file uploads, resolved to an absolute path.
UPLOAD_BASE_PATH = Path(settings.UPLOAD_DIR).resolve()
# Sub-directories (relative to UPLOAD_BASE_PATH) per attachment category.
COMMENT_ATTACHMENT_SUBDIR = "opportunity_comments"
CONTRACT_ATTACHMENT_SUBDIR = "opportunity_contract_files"
# Create the upload directories at import time so writes never fail on a
# missing parent.
for subdir in (COMMENT_ATTACHMENT_SUBDIR, CONTRACT_ATTACHMENT_SUBDIR):
    (UPLOAD_BASE_PATH / subdir).mkdir(parents=True, exist_ok=True)
# Allowed upload extensions, lowercased; config stores them without a leading dot.
ALLOWED_EXTENSIONS = {ext.lower() for ext in settings.ALLOWED_EXTENSIONS}
# Size ceiling for a single uploaded attachment, in bytes.
MAX_ATTACHMENT_SIZE = settings.EMAIL_MAX_UPLOAD_SIZE_MB * 1024 * 1024
def _is_attachment_allowed(filename: str) -> bool:
    """Return True when the file's extension is on the configured allow-list."""
    suffix = Path(filename).suffix.lower()
    # ALLOWED_EXTENSIONS stores extensions without the leading dot.
    return suffix.lstrip(".") in ALLOWED_EXTENSIONS
def _validate_attachment(upload_file: UploadFile) -> None:
    """Reject uploads with a disallowed extension or an oversized payload.

    Measures the size by seeking to the end of the underlying file object,
    then rewinds so the caller can still read the content from the start.
    """
    if not _is_attachment_allowed(upload_file.filename):
        raise HTTPException(400, detail="Unsupported attachment type")
    fileobj = upload_file.file
    fileobj.seek(0, os.SEEK_END)
    total_bytes = fileobj.tell()
    fileobj.seek(0)
    if total_bytes > MAX_ATTACHMENT_SIZE:
        raise HTTPException(
            400,
            detail=f"Attachment exceeds size limit of {settings.EMAIL_MAX_UPLOAD_SIZE_MB} MB",
        )
def _generate_stored_name(filename: str, subdir: str) -> str:
cleaned = Path(filename).name
unique = f"{uuid4().hex}_{cleaned}"
return f"{subdir}/{unique}"
def _resolve_attachment_path(stored_name: str) -> Path:
    """Map a stored relative name to its absolute path under the upload root."""
    return UPLOAD_BASE_PATH.joinpath(stored_name)
def _store_upload_file(upload_file: UploadFile, subdir: str) -> Tuple[str, int]:
    """Validate an upload and persist it under *subdir*.

    Returns (stored_name, size_bytes) where stored_name is the path
    relative to the upload root.
    """
    _validate_attachment(upload_file)
    stored_name = _generate_stored_name(upload_file.filename, subdir)
    target = _resolve_attachment_path(stored_name)
    target.parent.mkdir(parents=True, exist_ok=True)
    # _validate_attachment rewound the stream, but be defensive anyway.
    upload_file.file.seek(0)
    with target.open("wb") as sink:
        shutil.copyfileobj(upload_file.file, sink)
    return stored_name, target.stat().st_size
class PipelineStageBase(BaseModel):
name: str
@ -102,6 +408,33 @@ class OpportunityCommentCreate(OpportunityCommentBase):
pass
class OpportunityCommentAttachment(BaseModel):
    """File attached directly to a pipeline comment; download_url points at
    this router's comment-attachments endpoint."""
    id: int
    filename: str
    content_type: Optional[str] = None
    size_bytes: Optional[int] = None
    created_at: datetime
    download_url: str
class OpportunityEmailAttachment(BaseModel):
    """Attachment belonging to an email linked to the opportunity;
    download_url points at the emails router's attachment endpoint."""
    id: int
    filename: str
    content_type: Optional[str] = None
    size_bytes: Optional[int] = None
    created_at: datetime
    download_url: str
class OpportunityContractFile(BaseModel):
    """Contract file uploaded against an opportunity; download_url points at
    this router's contract-files endpoint."""
    id: int
    filename: str
    content_type: Optional[str] = None
    size_bytes: Optional[int] = None
    created_at: datetime
    download_url: str
class OpportunityCommentResponse(BaseModel):
id: int
opportunity_id: int
@ -119,6 +452,8 @@ class OpportunityCommentResponse(BaseModel):
metadata: Optional[Dict] = None
created_at: datetime
updated_at: datetime
attachments: List[OpportunityCommentAttachment] = []
email_attachments: List[OpportunityEmailAttachment] = []
def _get_stage(stage_id: int):
@ -152,6 +487,29 @@ def _get_opportunity(opportunity_id: int):
opportunity = execute_query_single(query, (opportunity_id,))
if not opportunity:
raise HTTPException(status_code=404, detail="Opportunity not found")
# Fetch linked emails
email_query = """
SELECT e.id, e.subject, e.sender_email, e.received_date, e.body_text, e.body_html
FROM email_messages e
JOIN pipeline_opportunity_emails poe ON e.id = poe.email_id
WHERE poe.opportunity_id = %s
ORDER BY e.received_date DESC
"""
linked_emails = execute_query(email_query, (opportunity_id,))
opportunity["linked_emails"] = linked_emails or []
# Fetch linked contacts
contacts_query = """
SELECT c.id, c.first_name, c.last_name, c.email, c.phone, c.mobile_phone, poc.role
FROM contacts c
JOIN pipeline_opportunity_contacts poc ON c.id = poc.contact_id
WHERE poc.opportunity_id = %s
ORDER BY c.first_name, c.last_name
"""
linked_contacts = execute_query(contacts_query, (opportunity_id,))
opportunity["linked_contacts"] = linked_contacts or []
return opportunity
@ -176,7 +534,23 @@ def _fetch_opportunity_comments(opportunity_id: int):
WHERE c.opportunity_id = %s
ORDER BY c.created_at DESC
"""
return execute_query(query, (opportunity_id,)) or []
comments = execute_query(query, (opportunity_id,)) or []
if not comments:
return []
comment_ids = [comment["id"] for comment in comments]
attachments_map = _fetch_comment_attachments_map(comment_ids)
email_ids = list({comment["email_id"] for comment in comments if comment.get("email_id")})
email_attachment_map = _fetch_email_attachments_map(email_ids)
for comment in comments:
comment["attachments"] = attachments_map.get(comment["id"], [])
if comment.get("email_id"):
comment["email_attachments"] = email_attachment_map.get(comment["email_id"], [])
else:
comment["email_attachments"] = []
return comments
def _fetch_comment(comment_id: int):
@ -189,7 +563,169 @@ def _fetch_comment(comment_id: int):
WHERE c.id = %s
"""
result = execute_query(query, (comment_id,))
return result[0] if result else None
if not result:
return None
comment = result[0]
attachments = _fetch_comment_attachments_map([comment_id])
comment["attachments"] = attachments.get(comment_id, [])
if comment.get("email_id"):
email_attachments = _fetch_email_attachments_map([comment["email_id"]])
comment["email_attachments"] = email_attachments.get(comment["email_id"], [])
else:
comment["email_attachments"] = []
return comment
def _comment_attachment_download_url(opportunity_id: int, attachment_id: int) -> str:
return f"/api/v1/opportunities/{opportunity_id}/comment-attachments/{attachment_id}"
def _email_attachment_download_url(email_id: int, attachment_id: int) -> str:
return f"/api/v1/emails/{email_id}/attachments/{attachment_id}"
def _fetch_comment_attachments_map(comment_ids: List[int]) -> Dict[int, List[Dict[str, Any]]]:
    """Group comment-attachment rows by comment_id, adding download URLs.

    Returns an empty dict for an empty id list without touching the DB.
    """
    if not comment_ids:
        return {}
    query = """
        SELECT a.id, a.comment_id, a.opportunity_id, a.filename, a.content_type, a.size_bytes, a.created_at
        FROM pipeline_opportunity_comment_attachments a
        WHERE a.comment_id = ANY(%s)
        ORDER BY a.created_at DESC
    """
    grouped: Dict[int, List[Dict[str, Any]]] = {}
    for record in execute_query(query, (comment_ids,)) or []:
        entry = {
            "id": record["id"],
            "filename": record["filename"],
            "content_type": record.get("content_type"),
            "size_bytes": record.get("size_bytes"),
            "created_at": record.get("created_at"),
            "download_url": _comment_attachment_download_url(record["opportunity_id"], record["id"]),
        }
        grouped.setdefault(record["comment_id"], []).append(entry)
    return grouped
def _fetch_email_attachments_map(email_ids: List[int]) -> Dict[int, List[Dict[str, Any]]]:
    """Group email-attachment rows by email_id, adding download URLs.

    Returns an empty dict for an empty id list without touching the DB.
    """
    if not email_ids:
        return {}
    query = """
        SELECT id, email_id, filename, content_type, size_bytes, created_at
        FROM email_attachments
        WHERE email_id = ANY(%s)
        ORDER BY id ASC
    """
    grouped: Dict[int, List[Dict[str, Any]]] = {}
    for record in execute_query(query, (email_ids,)) or []:
        entry = {
            "id": record["id"],
            "filename": record["filename"],
            "content_type": record.get("content_type"),
            "size_bytes": record.get("size_bytes"),
            "created_at": record.get("created_at"),
            "download_url": _email_attachment_download_url(record["email_id"], record["id"]),
        }
        grouped.setdefault(record["email_id"], []).append(entry)
    return grouped
def _contract_file_download_url(opportunity_id: int, file_id: int) -> str:
return f"/api/v1/opportunities/{opportunity_id}/contract-files/{file_id}"
def _fetch_contract_files(opportunity_id: int) -> List[Dict[str, Any]]:
    """Return contract-file metadata for the opportunity, newest first."""
    query = """
        SELECT id, filename, content_type, size_bytes, stored_name, created_at
        FROM pipeline_opportunity_contract_files
        WHERE opportunity_id = %s
        ORDER BY created_at DESC
    """
    files: List[Dict[str, Any]] = []
    for record in execute_query(query, (opportunity_id,)) or []:
        files.append({
            "id": record["id"],
            "filename": record["filename"],
            "content_type": record.get("content_type"),
            "size_bytes": record.get("size_bytes"),
            "created_at": record.get("created_at"),
            "download_url": _contract_file_download_url(opportunity_id, record["id"]),
        })
    return files
def _save_contract_files(opportunity_id: int, files: List[UploadFile], uploaded_by_user_id: Optional[int] = None) -> List[Dict[str, Any]]:
    """Store uploaded contract files on disk and register them in the DB.

    Skips empty form fields. Returns one metadata dict (including a
    download_url) per row produced by the INSERT ... RETURNING.
    """
    if not files:
        return []
    insert_query = """
        INSERT INTO pipeline_opportunity_contract_files
        (opportunity_id, filename, content_type, size_bytes, stored_name, uploaded_by_user_id)
        VALUES (%s, %s, %s, %s, %s, %s)
        RETURNING id, filename, content_type, size_bytes, created_at
    """
    saved_files = []
    for upload_file in files:
        if not upload_file or not upload_file.filename:
            continue
        # Validates extension/size, then writes the file under the contract subdir.
        stored_name, size_bytes = _store_upload_file(upload_file, CONTRACT_ATTACHMENT_SUBDIR)
        result = execute_query(
            insert_query,
            (
                opportunity_id,
                upload_file.filename,
                upload_file.content_type,
                size_bytes,
                stored_name,
                uploaded_by_user_id,
            )
        )
        if result:
            saved = result[0]
            saved_files.append({
                "id": saved["id"],
                "filename": saved["filename"],
                "content_type": saved.get("content_type"),
                "size_bytes": saved.get("size_bytes"),
                "created_at": saved.get("created_at"),
                "download_url": _contract_file_download_url(opportunity_id, saved["id"]),
            })
    return saved_files
def _save_comment_attachments(opportunity_id: int, comment_id: int, files: List[UploadFile], uploaded_by_user_id: Optional[int] = None) -> None:
    """Persist each uploaded file to disk and record it against the comment."""
    if not files:
        return
    insert_query = """
        INSERT INTO pipeline_opportunity_comment_attachments
        (opportunity_id, comment_id, filename, content_type, size_bytes, stored_name, uploaded_by_user_id)
        VALUES (%s, %s, %s, %s, %s, %s, %s)
    """
    for item in files:
        # Skip empty form fields.
        if not item or not item.filename:
            continue
        stored_name, size_bytes = _store_upload_file(item, COMMENT_ATTACHMENT_SUBDIR)
        execute_query(
            insert_query,
            (
                opportunity_id,
                comment_id,
                item.filename,
                item.content_type,
                size_bytes,
                stored_name,
                uploaded_by_user_id,
            )
        )
# ============================
@ -445,11 +981,64 @@ async def list_opportunity_comments(opportunity_id: int):
response_model=OpportunityCommentResponse,
tags=["Opportunities"]
)
async def add_opportunity_comment(opportunity_id: int, comment: OpportunityCommentCreate):
async def add_opportunity_comment(
opportunity_id: int,
request: Request,
content: Optional[str] = Form(None),
author_name: Optional[str] = Form(None),
user_id: Optional[int] = Form(None),
email_id: Optional[int] = Form(None),
contract_number: Optional[str] = Form(None),
contract_context: Optional[str] = Form(None),
contract_link: Optional[str] = Form(None),
metadata: Optional[str] = Form(None),
files: Optional[List[UploadFile]] = File(None),
):
_get_opportunity(opportunity_id)
author_name = comment.author_name or 'Hub Bruger'
metadata_json = json.dumps(comment.metadata) if comment.metadata else None
if request.headers.get("content-type", "").startswith("application/json"):
payload: Dict[str, Any] = await request.json()
else:
payload = {
"content": content,
"author_name": author_name,
"user_id": user_id,
"email_id": email_id,
"contract_number": contract_number,
"contract_context": contract_context,
"contract_link": contract_link,
"metadata": metadata,
}
content_value = payload.get("content")
if not content_value:
raise HTTPException(status_code=400, detail="Kommentar er påkrævet")
resolved_author = payload.get("author_name") or 'Hub Bruger'
resolved_user_id = payload.get("user_id")
if isinstance(resolved_user_id, str):
try:
resolved_user_id = int(resolved_user_id)
except ValueError:
resolved_user_id = None
resolved_email_id = payload.get("email_id")
if isinstance(resolved_email_id, str):
try:
resolved_email_id = int(resolved_email_id)
except ValueError:
resolved_email_id = None
metadata_payload = payload.get("metadata")
metadata_obj = None
if metadata_payload:
if isinstance(metadata_payload, str):
try:
metadata_obj = json.loads(metadata_payload)
except json.JSONDecodeError:
metadata_obj = None
elif isinstance(metadata_payload, dict):
metadata_obj = metadata_payload
metadata_json = json.dumps(metadata_obj) if metadata_obj else None
query = """
INSERT INTO pipeline_opportunity_comments
@ -463,13 +1052,13 @@ async def add_opportunity_comment(opportunity_id: int, comment: OpportunityComme
query,
(
opportunity_id,
comment.user_id,
author_name,
comment.content,
comment.email_id,
comment.contract_number,
comment.contract_context,
comment.contract_link,
resolved_user_id,
resolved_author,
content_value,
resolved_email_id,
payload.get("contract_number"),
payload.get("contract_context"),
payload.get("contract_link"),
metadata_json
)
)
@ -478,9 +1067,39 @@ async def add_opportunity_comment(opportunity_id: int, comment: OpportunityComme
raise HTTPException(status_code=500, detail="Kunne ikke oprette kommentar")
comment_id = result[0]["id"]
attachment_files = files or []
if attachment_files:
_save_comment_attachments(opportunity_id, comment_id, attachment_files, resolved_user_id)
return _fetch_comment(comment_id)
@router.get("/opportunities/{opportunity_id}/comment-attachments/{attachment_id}", tags=["Opportunities"])
async def download_comment_attachment(opportunity_id: int, attachment_id: int):
    """Stream a stored comment attachment back to the client."""
    lookup_sql = """
        SELECT * FROM pipeline_opportunity_comment_attachments
        WHERE id = %s AND opportunity_id = %s
    """
    rows = execute_query(lookup_sql, (attachment_id, opportunity_id))
    if not rows:
        raise HTTPException(status_code=404, detail="Vedhæftet fil ikke fundet")
    record = rows[0]
    stored_name = record.get("stored_name")
    if not stored_name:
        # Row exists but has no disk path — data inconsistency.
        raise HTTPException(status_code=500, detail="Vedhæftet fil mangler sti")
    disk_path = _resolve_attachment_path(stored_name)
    if not disk_path.exists():
        raise HTTPException(status_code=404, detail="Filen findes ikke på serveren")
    return FileResponse(
        path=disk_path,
        filename=record.get("filename"),
        media_type=record.get("content_type") or "application/octet-stream",
    )
@router.get(
"/contracts/search",
tags=["Opportunities"],
@ -502,3 +1121,59 @@ async def search_contracts(query: str = Query(..., min_length=2), limit: int = Q
params = (f"%{query}%", limit)
results = execute_query(sql, params)
return results or []
@router.get(
    "/opportunities/{opportunity_id}/contract-files",
    tags=["Opportunities"],
    response_model=List[OpportunityContractFile]
)
async def list_contract_files(opportunity_id: int):
    """Return all contract files stored for the given opportunity."""
    # Guard first: _get_opportunity validates that the opportunity exists
    # before we fetch its files.
    _get_opportunity(opportunity_id)
    files = _fetch_contract_files(opportunity_id)
    return files
@router.post(
    "/opportunities/{opportunity_id}/contract-files",
    tags=["Opportunities"],
    response_model=List[OpportunityContractFile]
)
async def upload_contract_files(opportunity_id: int, files: List[UploadFile] = File(...)):
    """Attach one or more uploaded files to the opportunity's contract card.

    Returns the newly stored file records; 400 on an empty upload and 500
    when nothing could be persisted.
    """
    # Validate the opportunity before touching storage.
    _get_opportunity(opportunity_id)

    if not files:
        raise HTTPException(status_code=400, detail="Ingen filer at uploade")

    stored_files = _save_contract_files(opportunity_id, files)
    if not stored_files:
        raise HTTPException(status_code=500, detail="Kunne ikke gemme filer")
    return stored_files
@router.get(
    "/opportunities/{opportunity_id}/contract-files/{file_id}",
    tags=["Opportunities"]
)
async def download_contract_file(opportunity_id: int, file_id: int):
    """Serve a contract file inline for the given opportunity.

    The file is looked up scoped to the opportunity (404 otherwise) and
    streamed with an inline Content-Disposition so browsers can preview it.
    """
    query = """
        SELECT * FROM pipeline_opportunity_contract_files
        WHERE id = %s AND opportunity_id = %s
    """
    result = execute_query(query, (file_id, opportunity_id))
    if not result:
        raise HTTPException(status_code=404, detail="Filen ikke fundet")
    row = result[0]
    stored_name = row.get("stored_name")
    if not stored_name:
        # Row exists but has no backing storage name — server-side data error.
        raise HTTPException(status_code=500, detail="Filen mangler lagring")
    path = _resolve_attachment_path(stored_name)
    if not path.exists():
        raise HTTPException(status_code=404, detail="Filen findes ikke på serveren")
    # Sanitize the filename before embedding it in the Content-Disposition
    # header: a raw double quote would terminate the quoted-string early, and
    # CR/LF must never reach a header value (header injection). Also fall back
    # to a generic name instead of rendering the literal string "None".
    display_name = row.get("filename") or "download"
    safe_name = (
        display_name.replace("\r", "").replace("\n", "").replace('"', "'")
    )
    return FileResponse(
        path=path,
        filename=display_name,
        media_type=row.get("content_type") or "application/octet-stream",
        headers={"Content-Disposition": f"inline; filename=\"{safe_name}\""}
    )

View File

@ -222,12 +222,11 @@ function renderOpportunities() {
}
tbody.innerHTML = filtered.map(o => `
<tr>
<tr class="opportunity-row" style="cursor:pointer" onclick="goToDetail(${o.id})">
<td class="fw-semibold">${escapeHtml(o.title)}</td>
<td>${escapeHtml(o.customer_name || '-')}
</td>
<td>${escapeHtml(o.customer_name || '-')}</td>
<td>${formatCurrency(o.amount, o.currency)}</td>
<td>${o.expected_close_date ? formatDate(o.expected_close_date) : '<span class="text-muted">-</span>'}</td>
<td>${o.expected_close_date ? formatDate(o.expected_close_date) : '<span class=\"text-muted\">-</span>'}</td>
<td>
<span class="stage-pill">
<span class="stage-dot" style="background:${o.stage_color || '#0f4c75'}"></span>
@ -236,9 +235,7 @@ function renderOpportunities() {
</td>
<td>${o.probability || 0}%</td>
<td class="text-end">
<button class="btn btn-sm btn-outline-primary" onclick="goToDetail(${o.id})">
<i class="bi bi-arrow-right"></i>
</button>
<i class="bi bi-arrow-right"></i>
</td>
</tr>
`).join('');

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,20 @@
-- Migration 019: Opportunity comment attachments
-- Captures uploaded files that belong to a pipeline comment thread
CREATE TABLE IF NOT EXISTS pipeline_opportunity_comment_attachments (
    id SERIAL PRIMARY KEY,
    -- Both parents cascade: deleting the opportunity or the comment removes its attachments
    opportunity_id INTEGER NOT NULL REFERENCES pipeline_opportunities(id) ON DELETE CASCADE,
    comment_id INTEGER NOT NULL REFERENCES pipeline_opportunity_comments(id) ON DELETE CASCADE,
    filename VARCHAR(255) NOT NULL, -- original filename, presented again on download
    content_type VARCHAR(100), -- MIME type; the API falls back to application/octet-stream when NULL
    size_bytes INTEGER,
    stored_name TEXT NOT NULL, -- server-side storage name, resolved to a filesystem path when serving
    uploaded_by_user_id INTEGER,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    -- Keep the attachment row even if the uploading user account is removed
    FOREIGN KEY (uploaded_by_user_id) REFERENCES users(user_id) ON DELETE SET NULL
);

-- Support lookups by comment and by opportunity
CREATE INDEX IF NOT EXISTS idx_pipeline_comment_attachments_comment_id ON pipeline_opportunity_comment_attachments(comment_id);
CREATE INDEX IF NOT EXISTS idx_pipeline_comment_attachments_opportunity_id ON pipeline_opportunity_comment_attachments(opportunity_id);

COMMENT ON TABLE pipeline_opportunity_comment_attachments IS 'User uploaded files stored alongside pipeline opportunity comments';

View File

@ -0,0 +1,18 @@
-- Migration 020: Opportunity contract files
-- Stores documents that belong to the offer/contract card and can be downloaded independently of comments
CREATE TABLE IF NOT EXISTS pipeline_opportunity_contract_files (
    id SERIAL PRIMARY KEY,
    -- Files are removed together with their opportunity
    opportunity_id INTEGER NOT NULL REFERENCES pipeline_opportunities(id) ON DELETE CASCADE,
    filename VARCHAR(255) NOT NULL, -- original filename, presented again on download
    content_type VARCHAR(100), -- MIME type; the API falls back to application/octet-stream when NULL
    size_bytes INTEGER,
    stored_name TEXT NOT NULL, -- server-side storage name, resolved to a filesystem path when serving
    uploaded_by_user_id INTEGER,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    -- Keep the file record even if the uploading user account is removed
    FOREIGN KEY (uploaded_by_user_id) REFERENCES users(user_id) ON DELETE SET NULL
);

-- Supports listing all contract files for a single opportunity
CREATE INDEX IF NOT EXISTS idx_pipeline_opportunity_contract_files_opportunity_id ON pipeline_opportunity_contract_files(opportunity_id);

COMMENT ON TABLE pipeline_opportunity_contract_files IS 'Files uploaded directly to the opportunity (Tilbud & Kontrakt) card.';

View File

@ -0,0 +1,4 @@
-- Migration 021: Add email_id to pipeline_opportunities for direct email linking
-- ON DELETE SET NULL: removing the referenced email keeps the opportunity and clears the link
ALTER TABLE pipeline_opportunities
    ADD COLUMN IF NOT EXISTS email_id INTEGER REFERENCES email_messages(id) ON DELETE SET NULL;

-- Supports reverse lookups (opportunities by linked email) and the FK's SET NULL path
CREATE INDEX IF NOT EXISTS idx_pipeline_opportunities_email_id ON pipeline_opportunities(email_id);

View File

@ -0,0 +1,10 @@
-- Link table for Opportunity <-> Email (many-to-many); both sides cascade
-- so a link row disappears with either its opportunity or its email.
CREATE TABLE IF NOT EXISTS pipeline_opportunity_emails (
    opportunity_id INTEGER REFERENCES pipeline_opportunities(id) ON DELETE CASCADE,
    email_id INTEGER REFERENCES email_messages(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ DEFAULT NOW(),
    PRIMARY KEY (opportunity_id, email_id)
);

-- The composite PK only serves lookups that lead with opportunity_id; index
-- email_id separately so email-side lookups and the ON DELETE CASCADE from
-- email_messages do not require a full table scan.
CREATE INDEX IF NOT EXISTS idx_pipeline_opportunity_emails_email_id ON pipeline_opportunity_emails(email_id);

-- Backfill: copy the legacy single-link column (pipeline_opportunities.email_id)
-- into the new link table. ON CONFLICT DO NOTHING keeps the migration idempotent.
INSERT INTO pipeline_opportunity_emails (opportunity_id, email_id)
SELECT id, email_id FROM pipeline_opportunities WHERE email_id IS NOT NULL
ON CONFLICT DO NOTHING;

View File

@ -0,0 +1,11 @@
-- Create link table for Opportunity <-> Contacts (Many-to-Many)
CREATE TABLE IF NOT EXISTS pipeline_opportunity_contacts (
    -- Both sides cascade: a link row disappears with its opportunity or contact
    opportunity_id INTEGER NOT NULL REFERENCES pipeline_opportunities(id) ON DELETE CASCADE,
    contact_id INTEGER NOT NULL REFERENCES contacts(id) ON DELETE CASCADE,
    role VARCHAR(100), -- optional role in this opportunity (e.g. "Decision Maker", "Influencer")
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    -- One link per (opportunity, contact) pair; the pair itself is the identity
    PRIMARY KEY (opportunity_id, contact_id)
);

-- Index for faster lookups
-- NOTE(review): the composite primary key already indexes opportunity_id as its
-- leading column, so this extra index is likely redundant — confirm before
-- removing; IF NOT EXISTS makes it harmless either way.
CREATE INDEX IF NOT EXISTS idx_pipeline_opportunity_contacts_opp ON pipeline_opportunity_contacts(opportunity_id);