diff --git a/.env.example b/.env.example index 5e124c5..7043df1 100644 --- a/.env.example +++ b/.env.example @@ -45,77 +45,18 @@ ECONOMIC_AGREEMENT_GRANT_TOKEN=your_agreement_grant_token_here # 🚨 SAFETY SWITCHES - Beskytter mod utilsigtede ændringer ECONOMIC_READ_ONLY=true # Set to false ONLY after testing ECONOMIC_DRY_RUN=true # Set to false ONLY when ready for production writes - # ===================================================== -# vTiger CRM Integration (Optional) +# vTiger Cloud Integration (Required for Subscriptions) # ===================================================== +# Get credentials from vTiger Cloud -> Settings -> Integration -> Webservices VTIGER_URL=https://your-instance.od2.vtiger.com -VTIGER_USERNAME=your_username@yourdomain.com -VTIGER_API_KEY=your_api_key_or_access_key -VTIGER_PASSWORD=your_password_if_using_basic_auth +VTIGER_USERNAME=your_vtiger_username +VTIGER_API_KEY=your_vtiger_api_key # ===================================================== -# TIME TRACKING MODULE - Isolated Settings +# Simply-CRM / Old vTiger On-Premise (Legacy) # ===================================================== - -# vTiger Integration Safety Flags -TIMETRACKING_VTIGER_READ_ONLY=true # 🚨 Bloker ALLE skrivninger til vTiger -TIMETRACKING_VTIGER_DRY_RUN=true # 🚨 Log uden at synkronisere - -# e-conomic Integration Safety Flags -TIMETRACKING_ECONOMIC_READ_ONLY=true # 🚨 Bloker ALLE skrivninger til e-conomic -TIMETRACKING_ECONOMIC_DRY_RUN=true # 🚨 Log uden at eksportere -TIMETRACKING_EXPORT_TYPE=draft # draft|booked (draft er sikrest) - -# Business Logic Settings -TIMETRACKING_DEFAULT_HOURLY_RATE=850.00 # DKK pr. 
time (fallback hvis kunde ikke har rate) -TIMETRACKING_AUTO_ROUND=true # Auto-afrund til nærmeste interval -TIMETRACKING_ROUND_INCREMENT=0.5 # Afrundingsinterval (0.25, 0.5, 1.0) -TIMETRACKING_ROUND_METHOD=up # up (op til), nearest (nærmeste), down (ned til) -TIMETRACKING_REQUIRE_APPROVAL=true # Kræv manuel godkendelse - -# Order Management Security -TIMETRACKING_ADMIN_UNLOCK_CODE= # 🔐 Admin kode til at låse eksporterede ordrer op (sæt en stærk kode!) - -# ===================================================== -# OLLAMA AI Integration (Optional - for document extraction) -# ===================================================== -OLLAMA_ENDPOINT=http://ai_direct.cs.blaahund.dk -OLLAMA_MODEL=qwen2.5-coder:7b - -# ===================================================== -# COMPANY INFO -# ===================================================== -OWN_CVR=29522790 # BMC Denmark ApS - ignore when detecting vendors - -# ===================================================== -# FILE UPLOAD -# ===================================================== -UPLOAD_DIR=uploads -MAX_FILE_SIZE_MB=50 - -# ===================================================== -# MODULE SYSTEM - Dynamic Feature Loading -# ===================================================== -# Enable/disable entire module system -MODULES_ENABLED=true - -# Directory for dynamic modules (default: app/modules) -MODULES_DIR=app/modules - -# Auto-reload modules on changes (dev only, requires restart) -MODULES_AUTO_RELOAD=true - -# ===================================================== -# MODULE-SPECIFIC CONFIGURATION -# ===================================================== -# Pattern: MODULES__{MODULE_NAME}__{KEY} -# Example module configuration: - -# MODULES__INVOICE_OCR__READ_ONLY=true -# MODULES__INVOICE_OCR__DRY_RUN=true -# MODULES__INVOICE_OCR__API_KEY=secret123 - -# MODULES__MY_FEATURE__READ_ONLY=false -# MODULES__MY_FEATURE__DRY_RUN=false -# MODULES__MY_FEATURE__SOME_SETTING=value +# Old vTiger installation (if different from 
cloud) +OLD_VTIGER_URL=http://your-old-vtiger-server.com +OLD_VTIGER_USERNAME=your_old_username +OLD_VTIGER_API_KEY=your_old_api_key \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 85839ee..c505fdb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,10 +8,6 @@ RUN apt-get update && apt-get install -y \ git \ libpq-dev \ gcc \ - postgresql-client \ - tesseract-ocr \ - tesseract-ocr-dan \ - tesseract-ocr-eng \ && rm -rf /var/lib/apt/lists/* # Build arguments for GitHub release deployment diff --git a/README.md b/README.md index a05a00a..b026b6a 100644 --- a/README.md +++ b/README.md @@ -7,12 +7,6 @@ Et centralt management system til BMC Networks - håndterer kunder, services, ha ## 🌟 Features - **Customer Management**: Komplet kundedatabase med CRM integration -- **Time Tracking Module**: vTiger integration med tidsregistrering og fakturering - - Automatisk sync fra vTiger (billable timelogs) - - Step-by-step godkendelses-wizard - - Auto-afrunding til 0.5 timer - - Klippekort-funktionalitet - - e-conomic export (draft orders) - **Hardware Tracking**: Registrering og sporing af kundeudstyr - **Service Management**: Håndtering af services og abonnementer - **Billing Integration**: Automatisk fakturering via e-conomic @@ -129,43 +123,12 @@ bmc_hub/ ## 🔌 API Endpoints -### Main API - `GET /api/v1/customers` - List customers - `GET /api/v1/hardware` - List hardware - `GET /api/v1/billing/invoices` - List invoices - `GET /health` - Health check -### Time Tracking Module -- `POST /api/v1/timetracking/sync` - Sync from vTiger (read-only) -- `GET /api/v1/timetracking/wizard/next` - Get next pending timelog -- `POST /api/v1/timetracking/wizard/approve/{id}` - Approve timelog -- `POST /api/v1/timetracking/orders/generate` - Generate invoice order -- `POST /api/v1/timetracking/export` - Export to e-conomic (with safety flags) -- `GET /api/v1/timetracking/export/test-connection` - Test e-conomic connection - -Se fuld dokumentation: http://localhost:8001/api/docs 
- -## 🚨 e-conomic Write Mode - -Time Tracking modulet kan eksportere ordrer til e-conomic med **safety-first approach**: - -### Safety Flags (default: SAFE) -```bash -TIMETRACKING_ECONOMIC_READ_ONLY=true # Block all writes -TIMETRACKING_ECONOMIC_DRY_RUN=true # Simulate writes (log only) -``` - -### Enable Write Mode -Se detaljeret guide: [docs/ECONOMIC_WRITE_MODE.md](docs/ECONOMIC_WRITE_MODE.md) - -**Quick steps:** -1. Test connection: `GET /api/v1/timetracking/export/test-connection` -2. Test dry-run: Set `READ_ONLY=false`, keep `DRY_RUN=true` -3. Export test order: `POST /api/v1/timetracking/export` -4. Enable production: Set **both** flags to `false` -5. Verify first order in e-conomic before bulk operations - -**CRITICAL**: All customers must have `economic_customer_number` (synced from vTiger `cf_854` field). +Se fuld dokumentation: http://localhost:8000/api/docs ## 🧪 Testing diff --git a/app/backups/backend/router.py b/app/backups/backend/router.py index 3e94a91..fb87635 100644 --- a/app/backups/backend/router.py +++ b/app/backups/backend/router.py @@ -161,7 +161,7 @@ async def list_backups( query += " ORDER BY created_at DESC LIMIT %s OFFSET %s" params.extend([limit, offset]) - backups = execute_query(query, tuple(params)) + backups = execute_query_single(query, tuple(params)) return backups if backups else [] @@ -171,9 +171,7 @@ async def get_backup(job_id: int): """Get details of a specific backup job""" backup = execute_query( "SELECT * FROM backup_jobs WHERE id = %s", - (job_id,), - fetchone=True - ) + (job_id,)) if not backup: raise HTTPException(status_code=404, detail=f"Backup job {job_id} not found") @@ -305,11 +303,9 @@ async def restore_backup(job_id: int, request: RestoreRequest): ) # Get backup job - backup = execute_query( + backup = execute_query_single( "SELECT * FROM backup_jobs WHERE id = %s", - (job_id,), - fetchone=True - ) + (job_id,)) if not backup: raise HTTPException(status_code=404, detail=f"Backup job {job_id} not found") @@ -359,11 
+355,9 @@ async def delete_backup(job_id: int): Delete a backup job and its associated file """ # Get backup job - backup = execute_query( + backup = execute_query_single( "SELECT * FROM backup_jobs WHERE id = %s", - (job_id,), - fetchone=True - ) + (job_id,)) if not backup: raise HTTPException(status_code=404, detail=f"Backup job {job_id} not found") @@ -419,10 +413,8 @@ async def get_maintenance_status(): Used by frontend to display maintenance overlay """ - status = execute_query( - "SELECT * FROM system_status WHERE id = 1", - fetchone=True - ) + status = execute_query_single( + "SELECT * FROM system_status WHERE id = 1") if not status: # Return default status if not found diff --git a/app/backups/backend/scheduler.py b/app/backups/backend/scheduler.py index b654182..6963702 100644 --- a/app/backups/backend/scheduler.py +++ b/app/backups/backend/scheduler.py @@ -158,11 +158,9 @@ class BackupScheduler: db_job_id, files_job_id, duration) # Send success notification for database backup - db_backup = execute_query( + db_backup = execute_query_single( "SELECT * FROM backup_jobs WHERE id = %s", - (db_job_id,), - fetchone=True - ) + (db_job_id,)) if db_backup: await notifications.send_backup_success( @@ -217,11 +215,9 @@ class BackupScheduler: db_job_id, files_job_id, duration) # Send success notification for database backup - db_backup = execute_query( + db_backup = execute_query_single( "SELECT * FROM backup_jobs WHERE id = %s", - (db_job_id,), - fetchone=True - ) + (db_job_id,)) if db_backup: await notifications.send_backup_success( @@ -259,7 +255,7 @@ class BackupScheduler: try: # Find all completed backups not yet uploaded - pending_backups = execute_query( + pending_backups = execute_query_single( """SELECT * FROM backup_jobs WHERE status = 'completed' AND offsite_uploaded_at IS NULL @@ -295,9 +291,7 @@ class BackupScheduler: # Get updated retry count updated_backup = execute_query( "SELECT offsite_retry_count FROM backup_jobs WHERE id = %s", - (backup['id'],), 
- fetchone=True - ) + (backup['id'],)) # Send failure notification await notifications.send_offsite_failed( diff --git a/app/backups/backend/service.py b/app/backups/backend/service.py index 50c7dd8..b0e203d 100644 --- a/app/backups/backend/service.py +++ b/app/backups/backend/service.py @@ -285,7 +285,7 @@ class BackupService: logger.info("🔄 Starting backup rotation") # Find expired backups - expired_backups = execute_query( + expired_backups = execute_query_single( """SELECT id, file_path, is_monthly, retention_until FROM backup_jobs WHERE status = 'completed' @@ -333,9 +333,7 @@ class BackupService: # Get backup job backup = execute_query( "SELECT * FROM backup_jobs WHERE id = %s AND job_type = 'database'", - (job_id,), - fetchone=True - ) + (job_id,)) if not backup: logger.error("❌ Backup job not found: %s", job_id) @@ -442,11 +440,9 @@ class BackupService: return False # Get backup job - backup = execute_query( + backup = execute_query_single( "SELECT * FROM backup_jobs WHERE id = %s AND job_type = 'files'", - (job_id,), - fetchone=True - ) + (job_id,)) if not backup: logger.error("❌ Backup job not found: %s", job_id) @@ -516,11 +512,9 @@ class BackupService: return False # Get backup job - backup = execute_query( + backup = execute_query_single( "SELECT * FROM backup_jobs WHERE id = %s", - (job_id,), - fetchone=True - ) + (job_id,)) if not backup: logger.error("❌ Backup job not found: %s", job_id) diff --git a/app/backups/templates/index.html b/app/backups/templates/index.html index d53650b..e005944 100644 --- a/app/backups/templates/index.html +++ b/app/backups/templates/index.html @@ -380,6 +380,12 @@ // Load backups list async function loadBackups() { + // TODO: Implement /api/v1/backups/jobs endpoint + console.warn('⚠️ Backups API ikke implementeret endnu'); + document.getElementById('backups-table').innerHTML = 'Backup API er ikke implementeret endnu'; + return; + + /* Disabled until API implemented: try { const response = await 
fetch('/api/v1/backups/jobs?limit=50'); const backups = await response.json(); @@ -433,6 +439,10 @@ // Load storage stats async function loadStorageStats() { + // TODO: Implement /api/v1/backups/storage endpoint + return; + + /* Disabled until API implemented: try { const response = await fetch('/api/v1/backups/storage'); const stats = await response.json(); @@ -464,6 +474,10 @@ // Load notifications async function loadNotifications() { + // TODO: Implement /api/v1/backups/notifications endpoint + return; + + /* Disabled until API implemented: try { const response = await fetch('/api/v1/backups/notifications?limit=10'); const notifications = await response.json(); @@ -493,6 +507,10 @@ // Load scheduler status async function loadSchedulerStatus() { + // TODO: Implement /api/v1/backups/scheduler/status endpoint + return; + + /* Disabled until API implemented: try { const response = await fetch('/api/v1/backups/scheduler/status'); const status = await response.json(); @@ -528,9 +546,13 @@ async function createBackup(event) { event.preventDefault(); + const resultDiv = document.getElementById('backup-result'); + resultDiv.innerHTML = '
Backup API er ikke implementeret endnu
'; + return; + + /* Disabled until API implemented: const type = document.getElementById('backup-type').value; const isMonthly = document.getElementById('is-monthly').checked; - const resultDiv = document.getElementById('backup-result'); resultDiv.innerHTML = '
Creating backup...
'; @@ -558,10 +580,14 @@ async function uploadBackup(event) { event.preventDefault(); + const resultDiv = document.getElementById('upload-result'); + resultDiv.innerHTML = '
Backup upload API er ikke implementeret endnu
'; + return; + + /* Disabled until API implemented: const fileInput = document.getElementById('backup-file'); const type = document.getElementById('upload-type').value; const isMonthly = document.getElementById('upload-monthly').checked; - const resultDiv = document.getElementById('upload-result'); if (!fileInput.files || fileInput.files.length === 0) { resultDiv.innerHTML = '
Please select a file
'; @@ -613,6 +639,10 @@ // Confirm restore async function confirmRestore() { + alert('⚠️ Restore API er ikke implementeret endnu'); + return; + + /* Disabled until API implemented: if (!selectedJobId) return; try { @@ -639,6 +669,10 @@ // Upload to offsite async function uploadOffsite(jobId) { + alert('⚠️ Offsite upload API er ikke implementeret endnu'); + return; + + /* Disabled until API implemented: if (!confirm('Upload this backup to offsite storage?')) return; try { @@ -658,6 +692,10 @@ // Delete backup async function deleteBackup(jobId) { + alert('⚠️ Delete backup API er ikke implementeret endnu'); + return; + + /* Disabled until API implemented: if (!confirm('Delete this backup? This cannot be undone.')) return; try { @@ -676,6 +714,10 @@ // Acknowledge notification async function acknowledgeNotification(notificationId) { + console.warn('⚠️ Notification API ikke implementeret'); + return; + + /* Disabled until API implemented: try { await fetch(`/api/v1/backups/notifications/${notificationId}/acknowledge`, {method: 'POST'}); loadNotifications(); diff --git a/app/billing/backend/supplier_invoices.py b/app/billing/backend/supplier_invoices.py index 4817c43..47db4b6 100644 --- a/app/billing/backend/supplier_invoices.py +++ b/app/billing/backend/supplier_invoices.py @@ -203,7 +203,7 @@ async def list_supplier_invoices( query += " ORDER BY si.due_date ASC, si.invoice_date DESC" - invoices = execute_query(query, tuple(params) if params else ()) + invoices = execute_query_single(query, tuple(params) if params else ()) # Add lines to each invoice for invoice in invoices: @@ -324,9 +324,7 @@ async def get_file_pdf_text(file_id: int): # Get file info file_info = execute_query( "SELECT file_path, filename FROM incoming_files WHERE file_id = %s", - (file_id,), - fetchone=True - ) + (file_id,)) if not file_info: raise HTTPException(status_code=404, detail="Fil ikke fundet") @@ -357,21 +355,17 @@ async def get_file_extracted_data(file_id: int): """Hent AI-extracted data 
fra en uploaded fil""" try: # Get file info - file_info = execute_query( + file_info = execute_query_single( "SELECT * FROM incoming_files WHERE file_id = %s", - (file_id,), - fetchone=True - ) + (file_id,)) if not file_info: raise HTTPException(status_code=404, detail="Fil ikke fundet") # Get extraction results if exists - extraction = execute_query( + extraction = execute_query_single( "SELECT * FROM extractions WHERE file_id = %s ORDER BY created_at DESC LIMIT 1", - (file_id,), - fetchone=True - ) + (file_id,)) # Parse llm_response_json if it exists (from AI or template extraction) llm_json_data = None @@ -386,7 +380,7 @@ async def get_file_extracted_data(file_id: int): # Get extraction lines if exist extraction_lines = [] if extraction: - extraction_lines = execute_query( + extraction_lines = execute_query_single( """SELECT * FROM extraction_lines WHERE extraction_id = %s ORDER BY line_number""", @@ -493,9 +487,7 @@ async def download_pending_file(file_id: int): # Get file info file_info = execute_query( "SELECT * FROM incoming_files WHERE file_id = %s", - (file_id,), - fetchone=True - ) + (file_id,)) if not file_info: raise HTTPException(status_code=404, detail="Fil ikke fundet") @@ -533,21 +525,17 @@ async def link_vendor_to_extraction(file_id: int, data: dict): raise HTTPException(status_code=400, detail="vendor_id is required") # Verify vendor exists - vendor = execute_query( + vendor = execute_query_single( "SELECT id, name FROM vendors WHERE id = %s", - (vendor_id,), - fetchone=True - ) + (vendor_id,)) if not vendor: raise HTTPException(status_code=404, detail="Leverandør ikke fundet") # Get latest extraction for this file - extraction = execute_query( + extraction = execute_query_single( "SELECT extraction_id FROM extractions WHERE file_id = %s ORDER BY created_at DESC LIMIT 1", - (file_id,), - fetchone=True - ) + (file_id,)) if not extraction: raise HTTPException(status_code=404, detail="Ingen extraction fundet for denne fil") @@ -583,23 +571,19 @@ 
async def delete_pending_file_endpoint(file_id: int): try: # Get file info - file_info = execute_query( + file_info = execute_query_single( "SELECT * FROM incoming_files WHERE file_id = %s", - (file_id,), - fetchone=True - ) + (file_id,)) if not file_info: raise HTTPException(status_code=404, detail="Fil ikke fundet") # Check if already converted to invoice - invoice_exists = execute_query( + invoice_exists = execute_query_single( """SELECT si.id FROM supplier_invoices si JOIN extractions e ON si.extraction_id = e.extraction_id WHERE e.file_id = %s""", - (file_id,), - fetchone=True - ) + (file_id,)) if invoice_exists: raise HTTPException( @@ -665,21 +649,17 @@ async def link_vendor_to_extraction(file_id: int, data: dict): raise HTTPException(status_code=400, detail="vendor_id er påkrævet") # Verify vendor exists - vendor = execute_query( + vendor = execute_query_single( "SELECT id, name FROM vendors WHERE id = %s", - (vendor_id,), - fetchone=True - ) + (vendor_id,)) if not vendor: raise HTTPException(status_code=404, detail=f"Leverandør {vendor_id} ikke fundet") # Get latest extraction for this file - extraction = execute_query( + extraction = execute_query_single( "SELECT extraction_id FROM extractions WHERE file_id = %s ORDER BY created_at DESC LIMIT 1", - (file_id,), - fetchone=True - ) + (file_id,)) if not extraction: raise HTTPException(status_code=404, detail="Ingen extraction fundet for denne fil") @@ -711,16 +691,14 @@ async def create_invoice_from_extraction(file_id: int): """Opret leverandørfaktura fra extraction data""" try: # Get latest extraction for this file - extraction = execute_query( + extraction = execute_query_single( """SELECT e.*, v.name as vendor_name FROM extractions e LEFT JOIN vendors v ON v.id = e.vendor_matched_id WHERE e.file_id = %s ORDER BY e.created_at DESC LIMIT 1""", - (file_id,), - fetchone=True - ) + (file_id,)) if not extraction: raise HTTPException(status_code=404, detail="Ingen extraction fundet for denne fil") @@ -733,17 
+711,15 @@ async def create_invoice_from_extraction(file_id: int): ) # Check if invoice already exists - existing = execute_query( + existing = execute_query_single( "SELECT id FROM supplier_invoices WHERE extraction_id = %s", - (extraction['extraction_id'],), - fetchone=True - ) + (extraction['extraction_id'],)) if existing: raise HTTPException(status_code=400, detail="Faktura er allerede oprettet fra denne extraction") # Get extraction lines - lines = execute_query( + lines = execute_query_single( """SELECT * FROM extraction_lines WHERE extraction_id = %s ORDER BY line_number""", @@ -892,9 +868,7 @@ async def list_templates(): if vendor_cvr: vendor = execute_query( "SELECT id, name FROM vendors WHERE cvr_number = %s", - (vendor_cvr,), - fetchone=True - ) + (vendor_cvr,)) if vendor: vendor_id = vendor['id'] vendor_name = vendor['name'] @@ -935,7 +909,7 @@ async def get_template(template_id: int): LEFT JOIN vendors v ON t.vendor_id = v.id WHERE t.template_id = %s AND t.is_active = true """ - template = execute_query(query, (template_id,), fetchone=True) + template = execute_query_single(query, (template_id,)) if not template: raise HTTPException(status_code=404, detail="Template not found") @@ -969,11 +943,9 @@ async def search_vendor_by_info(request: Dict): # Search by CVR first (most accurate) if vendor_cvr: - vendor = execute_query( + vendor = execute_query_single( "SELECT id, name, cvr_number FROM vendors WHERE cvr_number = %s", - (vendor_cvr,), - fetchone=True - ) + (vendor_cvr,)) if vendor: return { "found": True, @@ -984,7 +956,7 @@ async def search_vendor_by_info(request: Dict): # Search by name (fuzzy) if vendor_name: - vendors = execute_query( + vendors = execute_query_single( "SELECT id, name, cvr_number FROM vendors WHERE LOWER(name) LIKE LOWER(%s) LIMIT 5", (f"%{vendor_name}%",) ) @@ -1178,15 +1150,13 @@ async def get_supplier_invoice(invoice_id: int): FROM supplier_invoices si LEFT JOIN vendors v ON si.vendor_id = v.id WHERE si.id = %s""", - 
(invoice_id,), - fetchone=True - ) + (invoice_id,)) if not invoice: raise HTTPException(status_code=404, detail=f"Invoice {invoice_id} not found") # Get lines - lines = execute_query( + lines = execute_query_single( "SELECT * FROM supplier_invoice_lines WHERE supplier_invoice_id = %s ORDER BY line_number", (invoice_id,) ) @@ -1313,9 +1283,7 @@ async def update_supplier_invoice(invoice_id: int, data: Dict): # Check if invoice exists existing = execute_query( "SELECT id, status FROM supplier_invoices WHERE id = %s", - (invoice_id,), - fetchone=True - ) + (invoice_id,)) if not existing: raise HTTPException(status_code=404, detail=f"Invoice {invoice_id} not found") @@ -1368,11 +1336,9 @@ async def update_supplier_invoice(invoice_id: int, data: Dict): async def delete_supplier_invoice(invoice_id: int): """Delete supplier invoice (soft delete if integrated with e-conomic)""" try: - invoice = execute_query( + invoice = execute_query_single( "SELECT id, invoice_number, economic_voucher_number FROM supplier_invoices WHERE id = %s", - (invoice_id,), - fetchone=True - ) + (invoice_id,)) if not invoice: raise HTTPException(status_code=404, detail=f"Invoice {invoice_id} not found") @@ -1410,11 +1376,9 @@ class ApproveRequest(BaseModel): async def approve_supplier_invoice(invoice_id: int, request: ApproveRequest): """Approve supplier invoice for payment""" try: - invoice = execute_query( + invoice = execute_query_single( "SELECT id, invoice_number, status FROM supplier_invoices WHERE id = %s", - (invoice_id,), - fetchone=True - ) + (invoice_id,)) if not invoice: raise HTTPException(status_code=404, detail=f"Faktura {invoice_id} ikke fundet") @@ -1448,14 +1412,12 @@ async def send_to_economic(invoice_id: int): """ try: # Get invoice with lines - invoice = execute_query( + invoice = execute_query_single( """SELECT si.*, v.economic_supplier_number as vendor_economic_id, v.name as vendor_full_name FROM supplier_invoices si LEFT JOIN vendors v ON si.vendor_id = v.id WHERE si.id = 
%s""", - (invoice_id,), - fetchone=True - ) + (invoice_id,)) if not invoice: raise HTTPException(status_code=404, detail=f"Invoice {invoice_id} not found") @@ -1467,7 +1429,7 @@ async def send_to_economic(invoice_id: int): raise HTTPException(status_code=400, detail="Invoice already sent to e-conomic") # Get lines - lines = execute_query( + lines = execute_query_single( "SELECT * FROM supplier_invoice_lines WHERE supplier_invoice_id = %s ORDER BY line_number", (invoice_id,) ) @@ -1505,9 +1467,7 @@ async def send_to_economic(invoice_id: int): # Get default journal number from settings journal_setting = execute_query( - "SELECT setting_value FROM supplier_invoice_settings WHERE setting_key = 'economic_default_journal'", - fetchone=True - ) + "SELECT setting_value FROM supplier_invoice_settings WHERE setting_key = 'economic_default_journal'") journal_number = int(journal_setting['setting_value']) if journal_setting else 1 # Build VAT breakdown from lines @@ -1634,7 +1594,7 @@ async def get_payment_overview(): try: today = date.today().isoformat() - stats = execute_query(""" + stats = execute_query_single(""" SELECT COUNT(*) as total_count, SUM(CASE WHEN paid_date IS NOT NULL THEN 1 ELSE 0 END) as paid_count, @@ -1647,7 +1607,7 @@ async def get_payment_overview(): SUM(CASE WHEN paid_date IS NULL AND due_date < %s THEN total_amount ELSE 0 END) as overdue_amount FROM supplier_invoices WHERE status != 'cancelled' - """, (today, today, today, today, today), fetchone=True) + """, (today, today, today, today, today)) return { "total_invoices": stats.get('total_count', 0) if stats else 0, @@ -1670,7 +1630,7 @@ async def get_payment_overview(): async def get_stats_by_vendor(): """Get supplier invoice statistics grouped by vendor""" try: - stats = execute_query(""" + stats = execute_query_single(""" SELECT v.id as vendor_id, v.name as vendor_name, @@ -1762,22 +1722,18 @@ async def upload_supplier_invoice(file: UploadFile = File(...)): # Check for duplicate file existing_file = 
execute_query( "SELECT file_id, status FROM incoming_files WHERE checksum = %s", - (checksum,), - fetchone=True - ) + (checksum,)) if existing_file: temp_path.unlink(missing_ok=True) logger.warning(f"⚠️ Duplicate file detected: {checksum[:16]}...") # Get existing invoice if linked - existing_invoice = execute_query( + existing_invoice = execute_query_single( """SELECT si.* FROM supplier_invoices si JOIN extractions e ON si.extraction_id = e.extraction_id WHERE e.file_id = %s""", - (existing_file['file_id'],), - fetchone=True - ) + (existing_file['file_id'],)) return { "status": "duplicate", @@ -1797,14 +1753,12 @@ async def upload_supplier_invoice(file: UploadFile = File(...)): logger.info(f"💾 Saved file as: {final_path.name}") # Insert file record - file_record = execute_query( + file_record = execute_query_single( """INSERT INTO incoming_files (filename, original_filename, file_path, file_size, mime_type, checksum, status) VALUES (%s, %s, %s, %s, %s, %s, 'processing') RETURNING file_id""", (final_path.name, file.filename, str(final_path), total_size, - ollama_service._get_mime_type(final_path), checksum), - fetchone=True - ) + ollama_service._get_mime_type(final_path), checksum)) file_id = file_record['file_id'] # Extract text from file @@ -1843,16 +1797,14 @@ async def upload_supplier_invoice(file: UploadFile = File(...)): logger.info(f"🔍 Checking for duplicate invoice number: {document_number}") # Check 1: Search in local database (supplier_invoices table) - existing_invoice = execute_query( + existing_invoice = execute_query_single( """SELECT si.id, si.invoice_number, si.created_at, v.name as vendor_name FROM supplier_invoices si LEFT JOIN vendors v ON v.id = si.vendor_id WHERE si.invoice_number = %s ORDER BY si.created_at DESC LIMIT 1""", - (document_number,), - fetchone=True - ) + (document_number,)) if existing_invoice: # DUPLICATE FOUND IN DATABASE @@ -2055,11 +2007,9 @@ async def reprocess_uploaded_file(file_id: int): try: # Get file record - file_record 
= execute_query( + file_record = execute_query_single( "SELECT * FROM incoming_files WHERE file_id = %s", - (file_id,), - fetchone=True - ) + (file_id,)) if not file_record: raise HTTPException(status_code=404, detail=f"Fil {file_id} ikke fundet") @@ -2120,11 +2070,9 @@ async def reprocess_uploaded_file(file_id: int): logger.info(f"📋 Using invoice2data template") # Try to find vendor from extracted CVR if extracted_fields.get('vendor_vat'): - vendor = execute_query( + vendor = execute_query_single( "SELECT id FROM vendors WHERE cvr_number = %s", - (extracted_fields['vendor_vat'],), - fetchone=True - ) + (extracted_fields['vendor_vat'],)) if vendor: vendor_id = vendor['id'] @@ -2134,11 +2082,9 @@ async def reprocess_uploaded_file(file_id: int): # Fallback: match by issuer name if vendor_id is None and extracted_fields.get('issuer'): - vendor = execute_query( + vendor = execute_query_single( "SELECT id FROM vendors WHERE name ILIKE %s ORDER BY id LIMIT 1", - (extracted_fields['issuer'],), - fetchone=True - ) + (extracted_fields['issuer'],)) if vendor: vendor_id = vendor['id'] @@ -2301,11 +2247,9 @@ async def reprocess_uploaded_file(file_id: int): # Add warning if no template exists if not template_id and vendor_id: - vendor = execute_query( + vendor = execute_query_single( "SELECT name FROM vendors WHERE id = %s", - (vendor_id,), - fetchone=True - ) + (vendor_id,)) if vendor: result["warning"] = f"⚠️ Ingen template fundet for {vendor['name']} - brugte AI extraction (langsommere)" diff --git a/app/contacts/backend/router.py b/app/contacts/backend/router.py index 51a59b1..b65b152 100644 --- a/app/contacts/backend/router.py +++ b/app/contacts/backend/router.py @@ -49,7 +49,7 @@ async def get_contacts( FROM contacts c {where_sql} """ - count_result = execute_query(count_query, tuple(params), fetchone=True) + count_result = execute_query_single(count_query, tuple(params)) total = count_result['count'] if count_result else 0 # Get contacts with company count @@ -71,7 +71,7 
@@ async def get_contacts( """ params.extend([limit, offset]) - contacts = execute_query(query, tuple(params)) # Default is fetchall + contacts = execute_query_single(query, tuple(params)) # Default is fetchall return { "contacts": contacts or [], @@ -99,7 +99,7 @@ async def get_contact(contact_id: int): FROM contacts WHERE id = %s """ - contact = execute_query(contact_query, (contact_id,), fetchone=True) + contact = execute_query(contact_query, (contact_id,)) if not contact: raise HTTPException(status_code=404, detail="Contact not found") @@ -114,7 +114,7 @@ async def get_contact(contact_id: int): WHERE cc.contact_id = %s ORDER BY cc.is_primary DESC, cu.name """ - companies = execute_query(companies_query, (contact_id,)) # Default is fetchall + companies = execute_query_single(companies_query, (contact_id,)) # Default is fetchall contact['companies'] = companies or [] return contact @@ -171,7 +171,7 @@ async def update_contact(contact_id: int, contact: ContactUpdate): """ try: # Check if contact exists - existing = execute_query("SELECT id FROM contacts WHERE id = %s", (contact_id,), fetchone=True) + existing = execute_query("SELECT id FROM contacts WHERE id = %s", (contact_id,)) if not existing: raise HTTPException(status_code=404, detail="Contact not found") @@ -258,12 +258,12 @@ async def link_contact_to_company(contact_id: int, link: ContactCompanyLink): """ try: # Check if contact exists - contact = execute_query("SELECT id FROM contacts WHERE id = %s", (contact_id,), fetchone=True) + contact = execute_query_single("SELECT id FROM contacts WHERE id = %s", (contact_id,)) if not contact: raise HTTPException(status_code=404, detail="Contact not found") # Check if company exists - customer = execute_query("SELECT id FROM customers WHERE id = %s", (link.customer_id,), fetchone=True) + customer = execute_query_single("SELECT id FROM customers WHERE id = %s", (link.customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") diff 
--git a/app/core/auth_dependencies.py b/app/core/auth_dependencies.py index 7bc1031..84f4bea 100644 --- a/app/core/auth_dependencies.py +++ b/app/core/auth_dependencies.py @@ -47,11 +47,9 @@ async def get_current_user( # Get additional user details from database from app.core.database import execute_query - user_details = execute_query( + user_details = execute_query_single( "SELECT email, full_name FROM users WHERE id = %s", - (user_id,), - fetchone=True - ) + (user_id,)) return { "id": user_id, diff --git a/app/core/auth_service.py b/app/core/auth_service.py index eaf3054..bc4e399 100644 --- a/app/core/auth_service.py +++ b/app/core/auth_service.py @@ -85,11 +85,9 @@ class AuthService: # Check if token is revoked jti = payload.get('jti') if jti: - session = execute_query( + session = execute_query_single( "SELECT revoked FROM sessions WHERE token_jti = %s", - (jti,), - fetchone=True - ) + (jti,)) if session and session.get('revoked'): logger.warning(f"⚠️ Revoked token used: {jti[:10]}...") return None @@ -117,14 +115,12 @@ class AuthService: User dict if successful, None otherwise """ # Get user - user = execute_query( + user = execute_query_single( """SELECT id, username, email, password_hash, full_name, is_active, is_superadmin, failed_login_attempts, locked_until FROM users WHERE username = %s OR email = %s""", - (username, username), - fetchone=True - ) + (username, username)) if not user: logger.warning(f"❌ Login failed: User not found - {username}") @@ -213,15 +209,13 @@ class AuthService: List of permission codes """ # Check if user is superadmin first - user = execute_query( + user = execute_query_single( "SELECT is_superadmin FROM users WHERE id = %s", - (user_id,), - fetchone=True - ) + (user_id,)) # Superadmins have all permissions if user and user['is_superadmin']: - all_perms = execute_query("SELECT code FROM permissions") + all_perms = execute_query_single("SELECT code FROM permissions") return [p['code'] for p in all_perms] if all_perms else [] # 
Get permissions through groups @@ -250,21 +244,19 @@ class AuthService: # Superadmins have all permissions user = execute_query( "SELECT is_superadmin FROM users WHERE id = %s", - (user_id,), - fetchone=True - ) + (user_id,)) if user and user['is_superadmin']: return True # Check if user has permission through groups - result = execute_query(""" + result = execute_query_single(""" SELECT COUNT(*) as cnt FROM permissions p JOIN group_permissions gp ON p.id = gp.permission_id JOIN user_groups ug ON gp.group_id = ug.group_id WHERE ug.user_id = %s AND p.code = %s - """, (user_id, permission_code), fetchone=True) + """, (user_id, permission_code)) return bool(result and result['cnt'] > 0) diff --git a/app/core/config.py b/app/core/config.py index 74607c5..90c6451 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -33,169 +33,28 @@ class Settings(BaseSettings): ECONOMIC_READ_ONLY: bool = True ECONOMIC_DRY_RUN: bool = True - # vTiger CRM Integration + # Ollama LLM + OLLAMA_ENDPOINT: str = "http://localhost:11434" + OLLAMA_MODEL: str = "llama3.2:3b" + + # vTiger Cloud Integration VTIGER_URL: str = "" VTIGER_USERNAME: str = "" VTIGER_API_KEY: str = "" - VTIGER_PASSWORD: str = "" # Fallback hvis API key ikke virker - # Simply-CRM Integration (Legacy System med CVR data) - OLD_VTIGER_URL: str = "https://bmcnetworks.simply-crm.dk" - OLD_VTIGER_USERNAME: str = "ct" - OLD_VTIGER_ACCESS_KEY: str = "b00ff2b7c08d591" + # Simply-CRM (Old vTiger On-Premise) + OLD_VTIGER_URL: str = "" + OLD_VTIGER_USERNAME: str = "" + OLD_VTIGER_API_KEY: str = "" - # Time Tracking Module - vTiger Integration (Isoleret) - TIMETRACKING_VTIGER_READ_ONLY: bool = True # 🚨 SAFETY: Bloker ALLE skrivninger til vTiger - TIMETRACKING_VTIGER_DRY_RUN: bool = True # 🚨 SAFETY: Log uden at synkronisere - - # Time Tracking Module - Order Management - TIMETRACKING_ADMIN_UNLOCK_CODE: str = "" # Kode for at låse eksporterede ordrer op - - # Time Tracking Module - e-conomic Integration (Isoleret) - 
TIMETRACKING_ECONOMIC_READ_ONLY: bool = True # 🚨 SAFETY: Bloker ALLE skrivninger til e-conomic - TIMETRACKING_ECONOMIC_DRY_RUN: bool = True # 🚨 SAFETY: Log uden at eksportere - TIMETRACKING_EXPORT_TYPE: str = "draft" # draft|booked (draft er sikrest) - - # Time Tracking Module - Business Logic - TIMETRACKING_DEFAULT_HOURLY_RATE: float = 850.00 # DKK pr. time (fallback) - TIMETRACKING_AUTO_ROUND: bool = True # Auto-afrund til nærmeste 0.5 time - TIMETRACKING_ROUND_INCREMENT: float = 0.5 # Afrundingsinterval (0.25, 0.5, 1.0) - TIMETRACKING_ROUND_METHOD: str = "up" # up (op til), nearest (nærmeste), down (ned til) - TIMETRACKING_REQUIRE_APPROVAL: bool = True # Kræv manuel godkendelse (ikke auto-approve) - - # Ollama AI Integration - OLLAMA_ENDPOINT: str = "http://ai_direct.cs.blaahund.dk" - OLLAMA_MODEL: str = "qwen2.5-coder:7b" # qwen2.5-coder fungerer bedre til JSON udtrækning - - # Ticket System Module - TICKET_ENABLED: bool = True - TICKET_EMAIL_INTEGRATION: bool = False # 🚨 SAFETY: Disable email-to-ticket until configured - TICKET_AUTO_ASSIGN: bool = False # Auto-assign tickets based on rules - TICKET_DEFAULT_PRIORITY: str = "normal" # low|normal|high|urgent - TICKET_REQUIRE_CUSTOMER: bool = False # Allow tickets without customer link - TICKET_NOTIFICATION_ENABLED: bool = False # Notify on status changes - - # Ticket System - e-conomic Integration - TICKET_ECONOMIC_READ_ONLY: bool = True # 🚨 SAFETY: Block all writes to e-conomic - TICKET_ECONOMIC_DRY_RUN: bool = True # 🚨 SAFETY: Log without executing - TICKET_ECONOMIC_AUTO_EXPORT: bool = False # Auto-export billable worklog - - # Email System Configuration - EMAIL_TO_TICKET_ENABLED: bool = False # 🚨 SAFETY: Disable auto-processing until configured - - # Email Fetching (IMAP) - USE_GRAPH_API: bool = False # Use Microsoft Graph API instead of IMAP (preferred) - IMAP_SERVER: str = "outlook.office365.com" - IMAP_PORT: int = 993 - IMAP_USE_SSL: bool = True - IMAP_USERNAME: str = "" - IMAP_PASSWORD: str = "" - 
IMAP_FOLDER: str = "INBOX" - IMAP_READ_ONLY: bool = True # 🚨 SAFETY: Never mark emails as read or modify mailbox - - # Microsoft Graph API (OAuth2) - GRAPH_TENANT_ID: str = "" - GRAPH_CLIENT_ID: str = "" - GRAPH_CLIENT_SECRET: str = "" - GRAPH_USER_EMAIL: str = "" # Email account to monitor - - # Email Processing - EMAIL_PROCESS_INTERVAL_MINUTES: int = 5 # Background job frequency - EMAIL_MAX_FETCH_PER_RUN: int = 50 # Limit emails per processing cycle - EMAIL_RETENTION_DAYS: int = 90 # Days to keep emails before soft delete - - # Email Classification (AI) - EMAIL_AI_ENABLED: bool = True - EMAIL_AI_CONFIDENCE_THRESHOLD: float = 0.7 # Minimum confidence for auto-processing - EMAIL_AUTO_CLASSIFY: bool = True # Run AI classification on new emails - - # Email Rules Engine (DEPRECATED - Use workflows instead) - EMAIL_RULES_ENABLED: bool = False # 🚨 LEGACY: Disabled by default, use EMAIL_WORKFLOWS_ENABLED instead - EMAIL_RULES_AUTO_PROCESS: bool = False # 🚨 SAFETY: Require manual approval initially - - # Email Workflows (RECOMMENDED) - EMAIL_WORKFLOWS_ENABLED: bool = True # Enable automated workflows based on classification (replaces rules) - - # Company Info - OWN_CVR: str = "29522790" # BMC Denmark ApS - ignore when detecting vendors - - # File Upload - UPLOAD_DIR: str = "uploads" - MAX_FILE_SIZE_MB: int = 50 - ALLOWED_EXTENSIONS: List[str] = [".pdf", ".png", ".jpg", ".jpeg", ".txt", ".csv"] - - # Module System Configuration - MODULES_ENABLED: bool = True # Enable/disable entire module system - MODULES_DIR: str = "app/modules" # Directory for dynamic modules - MODULES_AUTO_RELOAD: bool = True # Hot-reload modules on changes (dev only) - - # Backup System Configuration - # Safety switches (default to safe mode) - BACKUP_ENABLED: bool = False # 🚨 SAFETY: Disable backups until explicitly enabled - BACKUP_DRY_RUN: bool = True # 🚨 SAFETY: Log operations without executing - BACKUP_READ_ONLY: bool = True # 🚨 SAFETY: Allow reads but block destructive operations - - # Backup 
formats - DB_DAILY_FORMAT: str = "dump" # dump (compressed) or sql (plain text) - DB_MONTHLY_FORMAT: str = "sql" # Monthly backups use plain SQL for readability - - # Backup scope - BACKUP_INCLUDE_UPLOADS: bool = True # Include uploads/ directory - BACKUP_INCLUDE_LOGS: bool = True # Include logs/ directory - BACKUP_INCLUDE_DATA: bool = True # Include data/ directory (templates, configs) - - # Storage configuration - BACKUP_STORAGE_PATH: str = "/opt/backups" # Production: /opt/backups, Dev: ./backups - BACKUP_MAX_SIZE_GB: int = 50 # Maximum total backup storage size - STORAGE_WARNING_THRESHOLD_PCT: int = 80 # Warn when storage exceeds this percentage - - # Rotation policy - RETENTION_DAYS: int = 30 # Keep daily backups for 30 days - MONTHLY_KEEP_MONTHS: int = 12 # Keep monthly backups for 12 months - - # Offsite configuration (SFTP/SSH) - OFFSITE_ENABLED: bool = False # 🚨 SAFETY: Disable offsite uploads until configured - OFFSITE_WEEKLY_DAY: str = "sunday" # Day for weekly offsite upload (monday-sunday) - OFFSITE_RETRY_MAX_ATTEMPTS: int = 3 # Maximum retry attempts for failed uploads - OFFSITE_RETRY_DELAY_HOURS: int = 1 # Hours between retry attempts - SFTP_HOST: str = "" # SFTP server hostname or IP - SFTP_PORT: int = 22 # SFTP server port - SFTP_USER: str = "" # SFTP username - SFTP_PASSWORD: str = "" # SFTP password (if not using SSH key) - SSH_KEY_PATH: str = "" # Path to SSH private key (preferred over password) - SFTP_REMOTE_PATH: str = "/backups/bmc_hub" # Remote directory for backups - - # Notification configuration (Mattermost) - MATTERMOST_ENABLED: bool = False # 🚨 SAFETY: Disable until webhook configured - MATTERMOST_WEBHOOK_URL: str = "" # Mattermost incoming webhook URL - MATTERMOST_CHANNEL: str = "backups" # Channel name for backup notifications - NOTIFY_ON_FAILURE: bool = True # Send notification on backup/offsite failures - NOTIFY_ON_SUCCESS_OFFSITE: bool = True # Send notification on successful offsite upload + # Simply-CRM (Separate System) + 
SIMPLYCRM_URL: str = "" + SIMPLYCRM_USERNAME: str = "" + SIMPLYCRM_API_KEY: str = "" class Config: env_file = ".env" case_sensitive = True - extra = "ignore" # Ignore extra fields from .env settings = Settings() - - -def get_module_config(module_name: str, key: str, default=None): - """ - Hent modul-specifik konfiguration fra miljøvariabel - - Pattern: MODULES__{MODULE_NAME}__{KEY} - Eksempel: MODULES__MY_MODULE__API_KEY - - Args: - module_name: Navn på modul (fx "my_module") - key: Config key (fx "API_KEY") - default: Default værdi hvis ikke sat - - Returns: - Konfigurationsværdi eller default - """ - import os - env_key = f"MODULES__{module_name.upper()}__{key.upper()}" - return os.getenv(env_key, default) diff --git a/app/core/database.py b/app/core/database.py index ca51828..f29e4d2 100644 --- a/app/core/database.py +++ b/app/core/database.py @@ -55,37 +55,21 @@ def get_db(): release_db_connection(conn) -def execute_query(query: str, params: Optional[tuple] = None, fetchone: bool = False): - """ - Execute a SQL query and return results - - Args: - query: SQL query string - params: Query parameters tuple - fetchone: If True, return single row dict, otherwise list of dicts - - Returns: - Single dict if fetchone=True, otherwise list of dicts - """ +def execute_query(query: str, params: tuple = None, fetch: bool = True): + """Execute a SQL query and return results""" conn = get_db_connection() try: with conn.cursor(cursor_factory=RealDictCursor) as cursor: - cursor.execute(query, params or ()) + cursor.execute(query, params) - # Check if this is a write operation (INSERT, UPDATE, DELETE) + # Auto-detect write operations and commit query_upper = query.strip().upper() - is_write = any(query_upper.startswith(cmd) for cmd in ['INSERT', 'UPDATE', 'DELETE']) + if query_upper.startswith(('INSERT', 'UPDATE', 'DELETE')): + conn.commit() - if fetchone: - row = cursor.fetchone() - if is_write: - conn.commit() - return dict(row) if row else None - else: - rows = 
cursor.fetchall() - if is_write: - conn.commit() - return [dict(row) for row in rows] + if fetch: + return cursor.fetchall() + return cursor.rowcount except Exception as e: conn.rollback() logger.error(f"Query error: {e}") @@ -94,35 +78,15 @@ def execute_query(query: str, params: Optional[tuple] = None, fetchone: bool = F release_db_connection(conn) -def execute_insert(query: str, params: tuple = ()) -> Optional[int]: - """ - Execute an INSERT query and return last row id - - Args: - query: SQL INSERT query (will add RETURNING id if not present) - params: Query parameters tuple - - Returns: - Last inserted row ID or None - """ +def execute_insert(query: str, params: tuple = None): + """Execute INSERT query and return new ID""" conn = get_db_connection() try: with conn.cursor(cursor_factory=RealDictCursor) as cursor: - # PostgreSQL requires RETURNING clause - if "RETURNING" not in query.upper(): - query = query.rstrip(";") + " RETURNING id" cursor.execute(query, params) - result = cursor.fetchone() conn.commit() - - # If result exists, return the first column value (typically ID) - if result: - # If it's a dict, get first value - if isinstance(result, dict): - return list(result.values())[0] - # If it's a tuple/list, get first element - return result[0] - return None + result = cursor.fetchone() + return result['id'] if result and 'id' in result else None except Exception as e: conn.rollback() logger.error(f"Insert error: {e}") @@ -131,24 +95,14 @@ def execute_insert(query: str, params: tuple = ()) -> Optional[int]: release_db_connection(conn) -def execute_update(query: str, params: tuple = ()) -> int: - """ - Execute an UPDATE/DELETE query and return affected rows - - Args: - query: SQL UPDATE/DELETE query - params: Query parameters tuple - - Returns: - Number of affected rows - """ +def execute_update(query: str, params: tuple = None): + """Execute UPDATE/DELETE query and return affected rows""" conn = get_db_connection() try: with 
conn.cursor(cursor_factory=RealDictCursor) as cursor: cursor.execute(query, params) - rowcount = cursor.rowcount conn.commit() - return rowcount + return cursor.rowcount except Exception as e: conn.rollback() logger.error(f"Update error: {e}") @@ -157,66 +111,7 @@ def execute_update(query: str, params: tuple = ()) -> int: release_db_connection(conn) -def execute_module_migration(module_name: str, migration_sql: str) -> bool: - """ - Kør en migration for et specifikt modul - - Args: - module_name: Navn på modulet - migration_sql: SQL migration kode - - Returns: - True hvis success, False ved fejl - """ - conn = get_db_connection() - try: - with conn.cursor(cursor_factory=RealDictCursor) as cursor: - # Sikr at module_migrations tabel eksisterer - cursor.execute(""" - CREATE TABLE IF NOT EXISTS module_migrations ( - id SERIAL PRIMARY KEY, - module_name VARCHAR(100) NOT NULL, - migration_name VARCHAR(255) NOT NULL, - applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - success BOOLEAN DEFAULT TRUE, - error_message TEXT, - UNIQUE(module_name, migration_name) - ) - """) - - # Kør migration - cursor.execute(migration_sql) - conn.commit() - - logger.info(f"✅ Migration for {module_name} success") - return True - - except Exception as e: - conn.rollback() - logger.error(f"❌ Migration failed for {module_name}: {e}") - return False - finally: - release_db_connection(conn) - - -def check_module_table_exists(table_name: str) -> bool: - """ - Check om en modul tabel eksisterer - - Args: - table_name: Tabel navn (fx "my_module_customers") - - Returns: - True hvis tabellen eksisterer - """ - query = """ - SELECT EXISTS ( - SELECT FROM information_schema.tables - WHERE table_schema = 'public' - AND table_name = %s - ) - """ - result = execute_query(query, (table_name,), fetchone=True) - if result and isinstance(result, dict): - return result.get('exists', False) - return False +def execute_query_single(query: str, params: tuple = None): + """Execute query and return single row 
(backwards compatibility for fetchone=True)""" + result = execute_query(query, params) + return result[0] if result and len(result) > 0 else None diff --git a/app/customers/backend/router.py b/app/customers/backend/router.py index 9b76962..2dbc32b 100644 --- a/app/customers/backend/router.py +++ b/app/customers/backend/router.py @@ -9,7 +9,7 @@ from typing import List, Optional, Dict from pydantic import BaseModel import logging -from app.core.database import execute_query, execute_insert, execute_update +from app.core.database import execute_query, execute_query_single from app.services.cvr_service import get_cvr_service logger = logging.getLogger(__name__) @@ -82,13 +82,24 @@ async def list_customers( source: Filter by source ('vtiger' or 'local') is_active: Filter by active status """ - # Build query + # Build query with primary contact info query = """ SELECT c.*, - COUNT(DISTINCT cc.contact_id) as contact_count + COUNT(DISTINCT cc.contact_id) as contact_count, + CONCAT(pc.first_name, ' ', pc.last_name) as contact_name, + pc.email as contact_email, + COALESCE(pc.mobile, pc.phone) as contact_phone FROM customers c LEFT JOIN contact_companies cc ON cc.customer_id = c.id + LEFT JOIN LATERAL ( + SELECT con.first_name, con.last_name, con.email, con.phone, con.mobile + FROM contact_companies ccomp + JOIN contacts con ON ccomp.contact_id = con.id + WHERE ccomp.customer_id = c.id + ORDER BY ccomp.is_primary DESC, con.id ASC + LIMIT 1 + ) pc ON true WHERE 1=1 """ params = [] @@ -117,7 +128,7 @@ async def list_customers( params.append(is_active) query += """ - GROUP BY c.id + GROUP BY c.id, pc.first_name, pc.last_name, pc.email, pc.phone, pc.mobile ORDER BY c.name LIMIT %s OFFSET %s """ @@ -148,7 +159,7 @@ async def list_customers( count_query += " AND is_active = %s" count_params.append(is_active) - count_result = execute_query(count_query, tuple(count_params), fetchone=True) + count_result = execute_query_single(count_query, tuple(count_params)) total = 
count_result['total'] if count_result else 0 return { @@ -163,21 +174,17 @@ async def list_customers( async def get_customer(customer_id: int): """Get single customer by ID with contact count and vTiger BMC Låst status""" # Get customer - customer = execute_query( + customer = execute_query_single( "SELECT * FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") # Get contact count - contact_count_result = execute_query( + contact_count_result = execute_query_single( "SELECT COUNT(*) as count FROM contact_companies WHERE customer_id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) contact_count = contact_count_result['count'] if contact_count_result else 0 @@ -230,11 +237,9 @@ async def create_customer(customer: CustomerCreate): logger.info(f"✅ Created customer {customer_id}: {customer.name}") # Fetch and return created customer - created = execute_query( + created = execute_query_single( "SELECT * FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) return created except Exception as e: @@ -246,11 +251,9 @@ async def create_customer(customer: CustomerCreate): async def update_customer(customer_id: int, update: CustomerUpdate): """Update customer information""" # Verify customer exists - existing = execute_query( + existing = execute_query_single( "SELECT id FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not existing: raise HTTPException(status_code=404, detail="Customer not found") @@ -275,11 +278,9 @@ async def update_customer(customer_id: int, update: CustomerUpdate): logger.info(f"✅ Updated customer {customer_id}") # Fetch and return updated customer - updated = execute_query( + updated = execute_query_single( "SELECT * FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) return updated except Exception as e: @@ -294,11 
+295,9 @@ async def lock_customer_subscriptions(customer_id: int, lock_request: dict): locked = lock_request.get('locked', False) # Get customer - customer = execute_query( + customer = execute_query_single( "SELECT id, name FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") @@ -327,7 +326,7 @@ async def lock_customer_subscriptions(customer_id: int, lock_request: dict): @router.get("/customers/{customer_id}/contacts") async def get_customer_contacts(customer_id: int): """Get all contacts for a specific customer""" - rows = execute_query(""" + rows = execute_query_single(""" SELECT c.*, cc.is_primary, @@ -348,9 +347,7 @@ async def create_customer_contact(customer_id: int, contact: ContactCreate): # Verify customer exists customer = execute_query( "SELECT id FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") @@ -383,11 +380,9 @@ async def create_customer_contact(customer_id: int, contact: ContactCreate): logger.info(f"✅ Created contact {contact_id} for customer {customer_id}") # Fetch and return created contact - created = execute_query( + created = execute_query_single( "SELECT * FROM contacts WHERE id = %s", - (contact_id,), - fetchone=True - ) + (contact_id,)) return created except Exception as e: @@ -420,11 +415,9 @@ async def get_customer_subscriptions(customer_id: int): from app.services.vtiger_service import get_vtiger_service # Get customer with vTiger ID - customer = execute_query( + customer = execute_query_single( "SELECT id, name, vtiger_id FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") @@ -506,20 +499,29 @@ async def get_customer_subscriptions(customer_id: int): # Note: Simply-CRM returns one row per 
line item, so we need to group them query = f"SELECT * FROM SalesOrder WHERE account_id='{simplycrm_account_id}';" all_simplycrm_orders = await simplycrm.query(query) + logger.info(f"🔍 Simply-CRM raw query returned {len(all_simplycrm_orders or [])} orders for account {simplycrm_account_id}") # Group line items by order ID # Filter: Only include orders with recurring_frequency (otherwise not subscription) orders_dict = {} + filtered_closed = 0 + filtered_no_freq = 0 for row in (all_simplycrm_orders or []): status = row.get('sostatus', '').lower() if status in ['closed', 'cancelled']: + filtered_closed += 1 + logger.debug(f" ⏭️ Skipping closed order: {row.get('subject', 'N/A')} ({status})") continue # MUST have recurring_frequency to be a subscription recurring_frequency = row.get('recurring_frequency', '').strip() if not recurring_frequency: + filtered_no_freq += 1 + logger.debug(f" ⏭️ Skipping order without frequency: {row.get('subject', 'N/A')}") continue + logger.info(f" ✅ Including order: {row.get('subject', 'N/A')} - {recurring_frequency} ({status})") + order_id = row.get('id') if order_id not in orders_dict: # First occurrence - create order object @@ -548,7 +550,7 @@ async def get_customer_subscriptions(customer_id: int): }) simplycrm_sales_orders = list(orders_dict.values()) - logger.info(f"📥 Found {len(simplycrm_sales_orders)} unique open sales orders in Simply-CRM") + logger.info(f"📥 Found {len(simplycrm_sales_orders)} unique recurring orders in Simply-CRM (filtered out: {filtered_closed} closed, {filtered_no_freq} without frequency)") else: logger.info(f"ℹ️ No Simply-CRM account found for '{customer_name}'") except Exception as e: @@ -608,9 +610,7 @@ async def create_subscription(customer_id: int, subscription: SubscriptionCreate # Get customer's vTiger ID customer = execute_query( "SELECT vtiger_id FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer or not customer.get('vtiger_id'): raise 
HTTPException(status_code=404, detail="Customer not linked to vTiger") @@ -686,11 +686,9 @@ async def delete_subscription(subscription_id: str, customer_id: int = None): try: # Check if subscriptions are locked for this customer (if customer_id provided) if customer_id: - customer = execute_query( + customer = execute_query_single( "SELECT subscriptions_locked FROM customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if customer and customer.get('subscriptions_locked'): raise HTTPException( status_code=403, diff --git a/app/customers/backend/views.py b/app/customers/backend/views.py index d7026cd..1e8bd41 100644 --- a/app/customers/backend/views.py +++ b/app/customers/backend/views.py @@ -8,7 +8,7 @@ templates = Jinja2Templates(directory="app") @router.get("/customers", response_class=HTMLResponse) async def customers_page(request: Request): """ - Render the customers list page + Render the customers page """ return templates.TemplateResponse("customers/frontend/customers.html", {"request": request}) diff --git a/app/customers/frontend/customer_detail.html b/app/customers/frontend/customer_detail.html index 509a513..6f0de4b 100644 --- a/app/customers/frontend/customer_detail.html +++ b/app/customers/frontend/customer_detail.html @@ -215,7 +215,7 @@
  • Godkend Worklog
  • Ny Ticket
  • -
  • Klippekort
  • +
  • Prepaid Cards
  • Knowledge Base
  • @@ -1054,8 +1054,16 @@ function checkMaintenanceMode() { fetch('/api/v1/backups/maintenance') - .then(response => response.json()) + .then(response => { + if (!response.ok) { + // Silently ignore 404 - maintenance endpoint not implemented yet + return null; + } + return response.json(); + }) .then(data => { + if (!data) return; // Skip if endpoint doesn't exist + const overlay = document.getElementById('maintenance-overlay'); const messageEl = document.getElementById('maintenance-message'); const etaEl = document.getElementById('maintenance-eta'); @@ -1092,11 +1100,11 @@ } }) .catch(error => { - console.error('Maintenance check error:', error); + // Silently ignore errors - maintenance check is not critical }); } - // Check on page load + // Check on page load (optional feature, don't block if not available) checkMaintenanceMode(); // Check periodically (every 30 seconds when not in maintenance) diff --git a/app/ticket/backend/economic_export.py b/app/ticket/backend/economic_export.py index 8d3bb5b..cd3ce37 100644 --- a/app/ticket/backend/economic_export.py +++ b/app/ticket/backend/economic_export.py @@ -19,7 +19,7 @@ from typing import Dict, List, Optional from datetime import date, datetime from decimal import Decimal -from app.core.database import execute_query, execute_update +from app.core.database import execute_query, execute_update, execute_query_single from app.core.config import settings from app.services.economic_service import EconomicService from psycopg2.extras import Json @@ -164,7 +164,7 @@ class TicketEconomicExportService: WHERE id = %s """ - customer = execute_query(query, (customer_id,), fetchone=True) + customer = execute_query_single(query, (customer_id,)) if not customer: logger.error(f"❌ Customer {customer_id} not found") diff --git a/app/ticket/backend/email_integration.py b/app/ticket/backend/email_integration.py index 10e1701..4cf2f9e 100644 --- a/app/ticket/backend/email_integration.py +++ b/app/ticket/backend/email_integration.py @@ 
-14,7 +14,7 @@ import re from typing import Dict, Any, Optional, List from datetime import datetime -from app.core.database import execute_query, execute_insert +from app.core.database import execute_query, execute_insert, execute_query_single from app.ticket.backend.ticket_service import TicketService from app.ticket.backend.models import TTicketCreate, TicketPriority from psycopg2.extras import Json @@ -122,7 +122,7 @@ class EmailTicketIntegration: # Find ticket by ticket_number query = "SELECT id FROM tticket_tickets WHERE ticket_number = %s" - result = execute_query(query, (ticket_number,), fetchone=True) + result = execute_query_single(query, (ticket_number,)) if not result: logger.warning(f"⚠️ Ticket {ticket_number} not found - creating new ticket instead") diff --git a/app/ticket/backend/klippekort_service.py b/app/ticket/backend/klippekort_service.py index 9a13297..7e242f5 100644 --- a/app/ticket/backend/klippekort_service.py +++ b/app/ticket/backend/klippekort_service.py @@ -12,7 +12,7 @@ from datetime import datetime from decimal import Decimal from typing import Optional, Dict, Any, List -from app.core.database import execute_query, execute_insert, execute_update +from app.core.database import execute_query, execute_insert, execute_update, execute_query_single from app.ticket.backend.models import ( TPrepaidCard, TPrepaidCardCreate, @@ -54,14 +54,12 @@ class KlippekortService: from psycopg2.extras import Json # Check if customer already has an active card - existing = execute_query( + existing = execute_query_single( """ SELECT id, card_number FROM tticket_prepaid_cards WHERE customer_id = %s AND status = 'active' """, - (card_data.customer_id,), - fetchone=True - ) + (card_data.customer_id,)) if existing: raise ValueError( @@ -113,11 +111,9 @@ class KlippekortService: ) # Fetch created card - card = execute_query( + card = execute_query_single( "SELECT * FROM tticket_prepaid_cards WHERE id = %s", - (card_id,), - fetchone=True - ) + (card_id,)) 
logger.info(f"✅ Created prepaid card {card['card_number']} (ID: {card_id})") return card @@ -125,20 +121,16 @@ class KlippekortService: @staticmethod def get_card(card_id: int) -> Optional[Dict[str, Any]]: """Get prepaid card by ID""" - return execute_query( + return execute_query_single( "SELECT * FROM tticket_prepaid_cards WHERE id = %s", - (card_id,), - fetchone=True - ) + (card_id,)) @staticmethod def get_card_with_stats(card_id: int) -> Optional[Dict[str, Any]]: """Get prepaid card with usage statistics""" - return execute_query( + return execute_query_single( "SELECT * FROM tticket_prepaid_balances WHERE id = %s", - (card_id,), - fetchone=True - ) + (card_id,)) @staticmethod def get_active_card_for_customer(customer_id: int) -> Optional[Dict[str, Any]]: @@ -147,14 +139,12 @@ class KlippekortService: Returns None if no active card exists. """ - return execute_query( + return execute_query_single( """ SELECT * FROM tticket_prepaid_cards WHERE customer_id = %s AND status = 'active' """, - (customer_id,), - fetchone=True - ) + (customer_id,)) @staticmethod def check_balance(customer_id: int) -> Dict[str, Any]: @@ -299,11 +289,9 @@ class KlippekortService: logger.warning(f"💳 Card {card['card_number']} is now depleted") # Fetch transaction - transaction = execute_query( + transaction = execute_query_single( "SELECT * FROM tticket_prepaid_transactions WHERE id = %s", - (transaction_id,), - fetchone=True - ) + (transaction_id,)) logger.info(f"✅ Deducted {hours}h from card {card['card_number']}, new balance: {new_balance}h") return transaction @@ -368,11 +356,9 @@ class KlippekortService: ) ) - transaction = execute_query( + transaction = execute_query_single( "SELECT * FROM tticket_prepaid_transactions WHERE id = %s", - (transaction_id,), - fetchone=True - ) + (transaction_id,)) logger.info(f"✅ Topped up card {card['card_number']} with {hours}h, new balance: {new_balance}h") return transaction @@ -392,7 +378,7 @@ class KlippekortService: Returns: List of transaction 
dicts """ - transactions = execute_query( + transactions = execute_query( """ SELECT * FROM tticket_prepaid_transactions WHERE card_id = %s @@ -496,9 +482,7 @@ class KlippekortService: # Fetch updated card - updated = execute_query( + updated = execute_query_single( "SELECT * FROM tticket_prepaid_cards WHERE id = %s", - (card_id,), - fetchone=True - ) + (card_id,)) logger.info(f"✅ Cancelled card {card['card_number']}") return updated diff --git a/app/ticket/backend/models.py b/app/ticket/backend/models.py index b84ef9a..7255b1e 100644 --- a/app/ticket/backend/models.py +++ b/app/ticket/backend/models.py @@ -495,3 +495,289 @@ class PrepaidCardDeductRequest(BaseModel): """Request model for deducting hours from prepaid card""" worklog_id: int = Field(..., gt=0, description="Worklog ID der skal trækkes fra kort") hours: Decimal = Field(..., gt=0, description="Timer at trække") + + +# ============================================================================ +# TICKET RELATIONS MODELS (Migration 026) +# ============================================================================ + +class TicketRelationType(str, Enum): + """Ticket relation types""" + MERGED_INTO = "merged_into" + SPLIT_FROM = "split_from" + PARENT_OF = "parent_of" + CHILD_OF = "child_of" + RELATED_TO = "related_to" + + +class TTicketRelationBase(BaseModel): + """Base model for ticket relation""" + ticket_id: int + related_ticket_id: int + relation_type: TicketRelationType + reason: Optional[str] = None + + +class TTicketRelationCreate(TTicketRelationBase): + """Create ticket relation""" + pass + + +class TTicketRelation(TTicketRelationBase): + """Full ticket relation model""" + id: int + created_by_user_id: Optional[int] = None + created_at: datetime + + class Config: + from_attributes = True + + +# ============================================================================ +# CALENDAR EVENTS MODELS +# ============================================================================ + +class CalendarEventType(str, Enum): + """Calendar 
event types""" + APPOINTMENT = "appointment" + DEADLINE = "deadline" + MILESTONE = "milestone" + REMINDER = "reminder" + FOLLOW_UP = "follow_up" + + +class CalendarEventStatus(str, Enum): + """Calendar event status""" + PENDING = "pending" + CONFIRMED = "confirmed" + COMPLETED = "completed" + CANCELLED = "cancelled" + + +class TTicketCalendarEventBase(BaseModel): + """Base model for calendar event""" + ticket_id: int + title: str = Field(..., min_length=1, max_length=200) + description: Optional[str] = None + event_type: CalendarEventType = Field(default=CalendarEventType.APPOINTMENT) + event_date: date + event_time: Optional[str] = None + duration_minutes: Optional[int] = None + all_day: bool = False + status: CalendarEventStatus = Field(default=CalendarEventStatus.PENDING) + + +class TTicketCalendarEventCreate(TTicketCalendarEventBase): + """Create calendar event""" + suggested_by_ai: bool = False + ai_confidence: Optional[Decimal] = None + ai_source_text: Optional[str] = None + + +class TTicketCalendarEvent(TTicketCalendarEventBase): + """Full calendar event model""" + id: int + suggested_by_ai: bool = False + ai_confidence: Optional[Decimal] = None + ai_source_text: Optional[str] = None + created_by_user_id: Optional[int] = None + created_at: datetime + updated_at: Optional[datetime] = None + completed_at: Optional[datetime] = None + + class Config: + from_attributes = True + + +# ============================================================================ +# TEMPLATES MODELS +# ============================================================================ + +class TTicketTemplateBase(BaseModel): + """Base model for template""" + name: str = Field(..., min_length=1, max_length=200) + description: Optional[str] = None + category: Optional[str] = None + subject_template: Optional[str] = Field(None, max_length=500) + body_template: str = Field(..., min_length=1) + available_placeholders: Optional[List[str]] = None + default_attachments: Optional[dict] = None + 
is_active: bool = True + requires_approval: bool = False + + +class TTicketTemplateCreate(TTicketTemplateBase): + """Create template""" + pass + + +class TTicketTemplate(TTicketTemplateBase): + """Full template model""" + id: int + created_by_user_id: Optional[int] = None + created_at: datetime + updated_at: Optional[datetime] = None + last_used_at: Optional[datetime] = None + usage_count: int = 0 + + class Config: + from_attributes = True + + +class TemplateRenderRequest(BaseModel): + """Request to render template with data""" + template_id: int + ticket_id: int + custom_data: Optional[dict] = None + + +class TemplateRenderResponse(BaseModel): + """Rendered template""" + subject: Optional[str] = None + body: str + placeholders_used: List[str] + + +# ============================================================================ +# AI SUGGESTIONS MODELS +# ============================================================================ + +class AISuggestionType(str, Enum): + """AI suggestion types""" + CONTACT_UPDATE = "contact_update" + NEW_CONTACT = "new_contact" + CATEGORY = "category" + TAG = "tag" + PRIORITY = "priority" + DEADLINE = "deadline" + CALENDAR_EVENT = "calendar_event" + TEMPLATE = "template" + MERGE = "merge" + RELATED_TICKET = "related_ticket" + + +class AISuggestionStatus(str, Enum): + """AI suggestion status""" + PENDING = "pending" + ACCEPTED = "accepted" + REJECTED = "rejected" + AUTO_EXPIRED = "auto_expired" + + +class TTicketAISuggestionBase(BaseModel): + """Base model for AI suggestion""" + ticket_id: int + suggestion_type: AISuggestionType + suggestion_data: dict # Struktureret data om forslaget + confidence: Optional[Decimal] = None + reasoning: Optional[str] = None + source_text: Optional[str] = None + source_comment_id: Optional[int] = None + + +class TTicketAISuggestionCreate(TTicketAISuggestionBase): + """Create AI suggestion""" + expires_at: Optional[datetime] = None + + +class TTicketAISuggestion(TTicketAISuggestionBase): + """Full AI 
suggestion model""" + id: int + status: AISuggestionStatus = Field(default=AISuggestionStatus.PENDING) + reviewed_by_user_id: Optional[int] = None + reviewed_at: Optional[datetime] = None + created_at: datetime + expires_at: Optional[datetime] = None + + class Config: + from_attributes = True + + +class AISuggestionReviewRequest(BaseModel): + """Request to accept/reject AI suggestion""" + action: str = Field(..., pattern="^(accept|reject)$") + note: Optional[str] = None + + +# ============================================================================ +# EMAIL METADATA MODELS +# ============================================================================ + +class TTicketEmailMetadataBase(BaseModel): + """Base model for email metadata""" + ticket_id: int + message_id: Optional[str] = None + in_reply_to: Optional[str] = None + references: Optional[str] = None + from_email: str + from_name: Optional[str] = None + from_signature: Optional[str] = None + + +class TTicketEmailMetadataCreate(TTicketEmailMetadataBase): + """Create email metadata""" + matched_contact_id: Optional[int] = None + match_confidence: Optional[Decimal] = None + match_method: Optional[str] = None + suggested_contacts: Optional[dict] = None + extracted_phone: Optional[str] = None + extracted_address: Optional[str] = None + extracted_company: Optional[str] = None + extracted_title: Optional[str] = None + + +class TTicketEmailMetadata(TTicketEmailMetadataCreate): + """Full email metadata model""" + id: int + created_at: datetime + updated_at: Optional[datetime] = None + + class Config: + from_attributes = True + + +# ============================================================================ +# AUDIT LOG MODELS +# ============================================================================ + +class TTicketAuditLog(BaseModel): + """Audit log entry""" + id: int + ticket_id: int + action: str + field_name: Optional[str] = None + old_value: Optional[str] = None + new_value: Optional[str] = None + 
user_id: Optional[int] = None + performed_at: datetime + reason: Optional[str] = None + metadata: Optional[dict] = None + + class Config: + from_attributes = True + + +# ============================================================================ +# EXTENDED REQUEST MODELS +# ============================================================================ + +class TicketMergeRequest(BaseModel): + """Request to merge tickets""" + source_ticket_ids: List[int] = Field(..., min_length=1, description="Tickets at lægge sammen") + target_ticket_id: int = Field(..., description="Primær ticket der skal beholdes") + reason: Optional[str] = Field(None, description="Hvorfor lægges de sammen") + + +class TicketSplitRequest(BaseModel): + """Request to split ticket""" + source_ticket_id: int = Field(..., description="Ticket at splitte") + comment_ids: List[int] = Field(..., min_length=1, description="Kommentarer til ny ticket") + new_subject: str = Field(..., min_length=1, description="Emne på ny ticket") + new_description: Optional[str] = Field(None, description="Beskrivelse på ny ticket") + reason: Optional[str] = Field(None, description="Hvorfor splittes ticketen") + + +class TicketDeadlineUpdateRequest(BaseModel): + """Request to update ticket deadline""" + deadline: Optional[datetime] = None + reason: Optional[str] = None diff --git a/app/ticket/backend/router.py b/app/ticket/backend/router.py index ce30937..5d08c82 100644 --- a/app/ticket/backend/router.py +++ b/app/ticket/backend/router.py @@ -26,9 +26,28 @@ from app.ticket.backend.models import ( TicketListResponse, TicketStatusUpdateRequest, WorklogReviewResponse, - WorklogBillingRequest + WorklogBillingRequest, + # Migration 026 models + TTicketRelation, + TTicketRelationCreate, + TTicketCalendarEvent, + TTicketCalendarEventCreate, + CalendarEventStatus, + TTicketTemplate, + TTicketTemplateCreate, + TemplateRenderRequest, + TemplateRenderResponse, + TTicketAISuggestion, + TTicketAISuggestionCreate, + AISuggestionStatus, + 
AISuggestionType, + AISuggestionReviewRequest, + TTicketAuditLog, + TicketMergeRequest, + TicketSplitRequest, + TicketDeadlineUpdateRequest ) -from app.core.database import execute_query, execute_insert, execute_update +from app.core.database import execute_query, execute_insert, execute_update, execute_query_single from datetime import date logger = logging.getLogger(__name__) @@ -81,7 +100,7 @@ async def list_tickets( total_query += " AND customer_id = %s" params.append(customer_id) - total_result = execute_query(total_query, tuple(params), fetchone=True) + total_result = execute_query_single(total_query, tuple(params)) total = total_result['count'] if total_result else 0 return TicketListResponse( @@ -217,7 +236,7 @@ async def list_comments(ticket_id: int): List all comments for a ticket """ try: - comments = execute_query( + comments = execute_query( "SELECT * FROM tticket_comments WHERE ticket_id = %s ORDER BY created_at ASC", (ticket_id,) ) @@ -322,9 +341,7 @@ async def create_worklog( - worklog = execute_query( + worklog = execute_query_single( "SELECT * FROM tticket_worklog WHERE id = %s", - (worklog_id,), - fetchone=True - ) + (worklog_id,)) logger.info(f"✅ Created worklog entry {worklog_id} for ticket {ticket_id}") return worklog @@ -347,11 +364,9 @@ async def update_worklog( """ try: # Get current worklog - current = execute_query( + current = execute_query_single( "SELECT * FROM tticket_worklog WHERE id = %s", - (worklog_id,), - fetchone=True - ) + (worklog_id,)) if not current: raise HTTPException(status_code=404, detail=f"Worklog {worklog_id} not found") @@ -384,11 +399,9 @@ async def update_worklog( ) # Fetch updated - worklog = execute_query( + worklog = execute_query_single( "SELECT * FROM tticket_worklog WHERE id = %s", - (worklog_id,), - fetchone=True - ) + (worklog_id,)) return worklog @@ -427,7 +440,7 @@ async def review_worklog( query += " ORDER BY w.work_date DESC, t.customer_id" - worklogs = execute_query(query, tuple(params)) + worklogs = execute_query(query, 
tuple(params)) # Calculate totals total_hours = Decimal('0') @@ -467,9 +480,7 @@ async def mark_worklog_billable( # Get worklog - worklog = execute_query( + worklog = execute_query_single( "SELECT * FROM tticket_worklog WHERE id = %s", - (worklog_id,), - fetchone=True - ) + (worklog_id,)) if not worklog: logger.warning(f"⚠️ Worklog {worklog_id} not found, skipping") @@ -700,7 +711,7 @@ async def get_stats_by_status(): Get ticket statistics grouped by status """ try: - stats = execute_query( + stats = execute_query( "SELECT * FROM tticket_stats_by_status ORDER BY status" ) return stats or [] @@ -725,9 +736,7 @@ async def get_open_tickets_stats(): COUNT(*) FILTER (WHERE priority = 'urgent') as urgent_count, AVG(age_hours) as avg_age_hours FROM tticket_open_tickets - """, - fetchone=True - ) + """) return stats or {} @@ -816,3 +825,586 @@ async def execute_economic_export( except Exception as e: logger.error(f"❌ Error executing export: {e}") raise HTTPException(status_code=500, detail=str(e)) + + +# ============================================================================ +# TICKET RELATIONS ENDPOINTS (Migration 026) +# ============================================================================ + +@router.post("/tickets/{ticket_id}/merge", tags=["Ticket Relations"]) +async def merge_tickets(ticket_id: int, request: TicketMergeRequest): + """ + Flet flere tickets sammen til én primær ticket + + **Process**: + 1. Validerer at alle source tickets eksisterer + 2. Kopierer kommentarer og worklogs til target ticket + 3. Opretter relation records + 4. Markerer source tickets som merged + 5. 
Logger i audit trail + """ + try: + # Validate target ticket exists + target_ticket = execute_query_single( + "SELECT id, ticket_number, subject FROM tticket_tickets WHERE id = %s", + (request.target_ticket_id,) + ) + if not target_ticket: + raise HTTPException(status_code=404, detail=f"Target ticket {request.target_ticket_id} not found") + + merged_count = 0 + for source_id in request.source_ticket_ids: + # Validate source ticket + source_ticket = execute_query_single( + "SELECT id, ticket_number FROM tticket_tickets WHERE id = %s", + (source_id,) + ) + if not source_ticket: + logger.warning(f"⚠️ Source ticket {source_id} not found, skipping") + continue + + # Create relation + execute_query( + """INSERT INTO tticket_relations (ticket_id, related_ticket_id, relation_type, reason, created_by_user_id) + VALUES (%s, %s, 'merged_into', %s, 1) + ON CONFLICT (ticket_id, related_ticket_id, relation_type) DO NOTHING""", + (source_id, request.target_ticket_id, request.reason) + ) + + # Mark source as merged + execute_query( + """UPDATE tticket_tickets + SET is_merged = true, merged_into_ticket_id = %s, status = 'closed' + WHERE id = %s""", + (request.target_ticket_id, source_id), + fetch=False + ) + + # Log audit + execute_query( + """INSERT INTO tticket_audit_log (ticket_id, action, new_value, reason) + VALUES (%s, 'merged_into', %s, %s)""", + (source_id, str(request.target_ticket_id), request.reason) + ) + + merged_count += 1 + logger.info(f"✅ Merged ticket {source_id} into {request.target_ticket_id}") + + return { + "status": "success", + "merged_count": merged_count, + "target_ticket": target_ticket, + "message": f"Successfully merged {merged_count} ticket(s)" + } + + except HTTPException: + raise + except Exception as e: + logger.error(f"❌ Error merging tickets: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/tickets/{ticket_id}/split", tags=["Ticket Relations"]) +async def split_ticket(ticket_id: int, request: TicketSplitRequest): + 
""" + Opdel en ticket i to - flyt kommentarer til ny ticket + + **Process**: + 1. Opretter ny ticket med nyt subject + 2. Flytter valgte kommentarer til ny ticket + 3. Opretter relation + 4. Logger i audit trail + """ + try: + # Validate source ticket + source_ticket = execute_query_single( + "SELECT * FROM tticket_tickets WHERE id = %s", + (request.source_ticket_id,) + ) + if not source_ticket: + raise HTTPException(status_code=404, detail=f"Source ticket {request.source_ticket_id} not found") + + # Create new ticket (inherit customer, contact, priority) + new_ticket_id = execute_insert( + """INSERT INTO tticket_tickets + (subject, description, status, priority, customer_id, contact_id, source, created_by_user_id) + VALUES (%s, %s, 'open', %s, %s, %s, 'manual', 1) + RETURNING id""", + (request.new_subject, request.new_description, source_ticket['priority'], + source_ticket['customer_id'], source_ticket['contact_id']) + ) + + new_ticket_number = execute_query_single( + "SELECT ticket_number FROM tticket_tickets WHERE id = %s", + (new_ticket_id,) + )['ticket_number'] + + # Move comments + moved_comments = 0 + for comment_id in request.comment_ids: + result = execute_query( + "UPDATE tticket_comments SET ticket_id = %s WHERE id = %s AND ticket_id = %s", + (new_ticket_id, comment_id, request.source_ticket_id), + fetch=False + ) + if result: + moved_comments += 1 + + # Create relation + execute_query( + """INSERT INTO tticket_relations (ticket_id, related_ticket_id, relation_type, reason, created_by_user_id) + VALUES (%s, %s, 'split_from', %s, 1)""", + (new_ticket_id, request.source_ticket_id, request.reason) + ) + + # Log audit + execute_query( + """INSERT INTO tticket_audit_log (ticket_id, action, new_value, reason) + VALUES (%s, 'split_into', %s, %s)""", + (request.source_ticket_id, str(new_ticket_id), request.reason) + ) + + logger.info(f"✅ Split ticket {request.source_ticket_id} into {new_ticket_id}, moved {moved_comments} comments") + + return { + "status": 
"success", + "new_ticket_id": new_ticket_id, + "new_ticket_number": new_ticket_number, + "moved_comments": moved_comments, + "message": f"Successfully split ticket into {new_ticket_number}" + } + + except HTTPException: + raise + except Exception as e: + logger.error(f"❌ Error splitting ticket: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/tickets/{ticket_id}/relations", tags=["Ticket Relations"]) +async def get_ticket_relations(ticket_id: int): + """Hent alle relationer for en ticket (begge retninger)""" + try: + relations = execute_query( + """SELECT r.*, + t.ticket_number as related_ticket_number, + t.subject as related_subject, + t.status as related_status + FROM tticket_all_relations r + LEFT JOIN tticket_tickets t ON r.related_ticket_id = t.id + WHERE r.ticket_id = %s + ORDER BY r.created_at DESC""", + (ticket_id,) + ) + + return {"relations": relations, "total": len(relations)} + + except Exception as e: + logger.error(f"❌ Error fetching relations: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/tickets/{ticket_id}/relations", tags=["Ticket Relations"]) +async def create_ticket_relation(ticket_id: int, relation: TTicketRelationCreate): + """Opret en relation mellem to tickets""" + try: + # Validate both tickets exist + for tid in [relation.ticket_id, relation.related_ticket_id]: + ticket = execute_query_single("SELECT id FROM tticket_tickets WHERE id = %s", (tid,)) + if not ticket: + raise HTTPException(status_code=404, detail=f"Ticket {tid} not found") + + execute_query( + """INSERT INTO tticket_relations (ticket_id, related_ticket_id, relation_type, reason, created_by_user_id) + VALUES (%s, %s, %s, %s, 1)""", + (relation.ticket_id, relation.related_ticket_id, relation.relation_type, relation.reason) + ) + + return {"status": "success", "message": "Relation created"} + + except HTTPException: + raise + except Exception as e: + logger.error(f"❌ Error creating relation: {e}") + raise 
HTTPException(status_code=500, detail=str(e)) + + +# ============================================================================ +# CALENDAR EVENTS ENDPOINTS +# ============================================================================ + +@router.get("/tickets/{ticket_id}/calendar-events", tags=["Calendar"]) +async def get_calendar_events(ticket_id: int): + """Hent alle kalender events for en ticket""" + try: + events = execute_query( + """SELECT * FROM tticket_calendar_events + WHERE ticket_id = %s + ORDER BY event_date DESC, event_time DESC NULLS LAST""", + (ticket_id,) + ) + + return {"events": events, "total": len(events)} + + except Exception as e: + logger.error(f"❌ Error fetching calendar events: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/tickets/{ticket_id}/calendar-events", tags=["Calendar"]) +async def create_calendar_event(ticket_id: int, event: TTicketCalendarEventCreate): + """Opret kalender event (manual eller AI-foreslået)""" + try: + event_id = execute_insert( + """INSERT INTO tticket_calendar_events + (ticket_id, title, description, event_type, event_date, event_time, + duration_minutes, all_day, status, suggested_by_ai, ai_confidence, + ai_source_text, created_by_user_id) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 1) + RETURNING id""", + (ticket_id, event.title, event.description, event.event_type, + event.event_date, event.event_time, event.duration_minutes, + event.all_day, event.status, event.suggested_by_ai, + event.ai_confidence, event.ai_source_text) + ) + + logger.info(f"✅ Created calendar event {event_id} for ticket {ticket_id}") + + return {"status": "success", "event_id": event_id, "message": "Calendar event created"} + + except Exception as e: + logger.error(f"❌ Error creating calendar event: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.put("/tickets/{ticket_id}/calendar-events/{event_id}", tags=["Calendar"]) +async def update_calendar_event(ticket_id: 
int, event_id: int, status: CalendarEventStatus): + """Opdater calendar event status""" + try: + execute_query( + """UPDATE tticket_calendar_events + SET status = %s, updated_at = CURRENT_TIMESTAMP, + completed_at = CASE WHEN %s = 'completed' THEN CURRENT_TIMESTAMP ELSE completed_at END + WHERE id = %s AND ticket_id = %s""", + (status, status, event_id, ticket_id), + fetch=False + ) + + return {"status": "success", "message": "Event updated"} + + except Exception as e: + logger.error(f"❌ Error updating calendar event: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.delete("/tickets/{ticket_id}/calendar-events/{event_id}", tags=["Calendar"]) +async def delete_calendar_event(ticket_id: int, event_id: int): + """Slet calendar event""" + try: + execute_query( + "DELETE FROM tticket_calendar_events WHERE id = %s AND ticket_id = %s", + (event_id, ticket_id), + fetch=False + ) + + return {"status": "success", "message": "Event deleted"} + + except Exception as e: + logger.error(f"❌ Error deleting calendar event: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +# ============================================================================ +# TEMPLATES ENDPOINTS +# ============================================================================ + +@router.get("/templates", response_model=List[TTicketTemplate], tags=["Templates"]) +async def list_templates( + category: Optional[str] = Query(None, description="Filter by category"), + active_only: bool = Query(True, description="Only show active templates") +): + """List alle tilgængelige templates""" + try: + query = "SELECT * FROM tticket_templates WHERE 1=1" + params = [] + + if category: + query += " AND category = %s" + params.append(category) + + if active_only: + query += " AND is_active = true" + + query += " ORDER BY category, name" + + templates = execute_query(query, tuple(params) if params else None) + + return templates + + except Exception as e: + logger.error(f"❌ Error 
listing templates: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/templates", tags=["Templates"]) +async def create_template(template: TTicketTemplateCreate): + """Opret ny template""" + try: + template_id = execute_insert( + """INSERT INTO tticket_templates + (name, description, category, subject_template, body_template, + available_placeholders, default_attachments, is_active, + requires_approval, created_by_user_id) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, 1) + RETURNING id""", + (template.name, template.description, template.category, + template.subject_template, template.body_template, + template.available_placeholders, template.default_attachments, + template.is_active, template.requires_approval) + ) + + logger.info(f"✅ Created template {template_id}: {template.name}") + + return {"status": "success", "template_id": template_id, "message": "Template created"} + + except Exception as e: + logger.error(f"❌ Error creating template: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/tickets/{ticket_id}/render-template", response_model=TemplateRenderResponse, tags=["Templates"]) +async def render_template(ticket_id: int, request: TemplateRenderRequest): + """ + Render template med ticket data + + Erstatter placeholders med faktiske værdier: + - {{ticket_number}} + - {{ticket_subject}} + - {{customer_name}} + - {{contact_name}} + - etc. 
+ """ + try: + # Get template + template = execute_query_single( + "SELECT * FROM tticket_templates WHERE id = %s", + (request.template_id,) + ) + if not template: + raise HTTPException(status_code=404, detail="Template not found") + + # Get ticket with customer and contact data + ticket_data = execute_query_single( + """SELECT t.*, + c.name as customer_name, + con.name as contact_name, + con.email as contact_email + FROM tticket_tickets t + LEFT JOIN customers c ON t.customer_id = c.id + LEFT JOIN contacts con ON t.contact_id = con.id + WHERE t.id = %s""", + (ticket_id,) + ) + if not ticket_data: + raise HTTPException(status_code=404, detail="Ticket not found") + + # Build replacement dict + replacements = { + '{{ticket_number}}': ticket_data.get('ticket_number', ''), + '{{ticket_subject}}': ticket_data.get('subject', ''), + '{{customer_name}}': ticket_data.get('customer_name', ''), + '{{contact_name}}': ticket_data.get('contact_name', ''), + '{{contact_email}}': ticket_data.get('contact_email', ''), + } + + # Add custom data + if request.custom_data: + for key, value in request.custom_data.items(): + replacements[f'{{{{{key}}}}}'] = str(value) + + # Render subject and body + rendered_subject = template['subject_template'] + rendered_body = template['body_template'] + placeholders_used = [] + + for placeholder, value in replacements.items(): + if placeholder in rendered_body or (rendered_subject and placeholder in rendered_subject): + placeholders_used.append(placeholder) + if rendered_subject: + rendered_subject = rendered_subject.replace(placeholder, value) + rendered_body = rendered_body.replace(placeholder, value) + + return TemplateRenderResponse( + subject=rendered_subject, + body=rendered_body, + placeholders_used=placeholders_used + ) + + except HTTPException: + raise + except Exception as e: + logger.error(f"❌ Error rendering template: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +# 
============================================================================ +# AI SUGGESTIONS ENDPOINTS +# ============================================================================ + +@router.get("/tickets/{ticket_id}/suggestions", response_model=List[TTicketAISuggestion], tags=["AI Suggestions"]) +async def get_ai_suggestions( + ticket_id: int, + status: Optional[AISuggestionStatus] = Query(None, description="Filter by status"), + suggestion_type: Optional[AISuggestionType] = Query(None, description="Filter by type") +): + """Hent AI forslag for ticket""" + try: + query = "SELECT * FROM tticket_ai_suggestions WHERE ticket_id = %s" + params = [ticket_id] + + if status: + query += " AND status = %s" + params.append(status) + + if suggestion_type: + query += " AND suggestion_type = %s" + params.append(suggestion_type) + + query += " ORDER BY created_at DESC" + + suggestions = execute_query(query, tuple(params)) + + return suggestions + + except Exception as e: + logger.error(f"❌ Error fetching AI suggestions: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/tickets/{ticket_id}/suggestions/{suggestion_id}/review", tags=["AI Suggestions"]) +async def review_ai_suggestion(ticket_id: int, suggestion_id: int, review: AISuggestionReviewRequest): + """ + Accepter eller afvis AI forslag + + **VIGTIGT**: Denne endpoint ændrer KUN suggestion status. + Den udfører IKKE automatisk den foreslåede handling. + Brugeren skal selv implementere ændringen efter accept. 
+ """ + try: + # Get suggestion + suggestion = execute_query_single( + "SELECT * FROM tticket_ai_suggestions WHERE id = %s AND ticket_id = %s", + (suggestion_id, ticket_id) + ) + if not suggestion: + raise HTTPException(status_code=404, detail="Suggestion not found") + + if suggestion['status'] != 'pending': + raise HTTPException(status_code=400, detail=f"Suggestion already {suggestion['status']}") + + # Update status + new_status = 'accepted' if review.action == 'accept' else 'rejected' + execute_query( + """UPDATE tticket_ai_suggestions + SET status = %s, reviewed_by_user_id = 1, reviewed_at = CURRENT_TIMESTAMP + WHERE id = %s""", + (new_status, suggestion_id), + fetch=False + ) + + # Log audit + execute_query( + """INSERT INTO tticket_audit_log (ticket_id, action, new_value, reason) + VALUES (%s, %s, %s, %s)""", + (ticket_id, f'ai_suggestion_{review.action}ed', + f"{suggestion['suggestion_type']}: {suggestion_id}", review.note) + ) + + logger.info(f"✅ AI suggestion {suggestion_id} {review.action}ed for ticket {ticket_id}") + + return { + "status": "success", + "action": review.action, + "suggestion_type": suggestion['suggestion_type'], + "message": f"Suggestion {review.action}ed. Manual implementation required if accepted." 
+ } + + except HTTPException: + raise + except Exception as e: + logger.error(f"❌ Error reviewing AI suggestion: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +# ============================================================================ +# DEADLINE ENDPOINT +# ============================================================================ + +@router.put("/tickets/{ticket_id}/deadline", tags=["Tickets"]) +async def update_ticket_deadline(ticket_id: int, request: TicketDeadlineUpdateRequest): + """Opdater ticket deadline""" + try: + # Get current deadline + current = execute_query_single( + "SELECT deadline FROM tticket_tickets WHERE id = %s", + (ticket_id,) + ) + if not current: + raise HTTPException(status_code=404, detail="Ticket not found") + + # Update deadline + execute_query( + "UPDATE tticket_tickets SET deadline = %s WHERE id = %s", + (request.deadline, ticket_id), + fetch=False + ) + + # Log audit (handled by trigger automatically) + if request.reason: + execute_query( + """INSERT INTO tticket_audit_log (ticket_id, action, field_name, old_value, new_value, reason) + VALUES (%s, 'deadline_change', 'deadline', %s, %s, %s)""", + (ticket_id, str(current.get('deadline')), str(request.deadline), request.reason) + ) + + logger.info(f"✅ Updated deadline for ticket {ticket_id}: {request.deadline}") + + return { + "status": "success", + "old_deadline": current.get('deadline'), + "new_deadline": request.deadline, + "message": "Deadline updated" + } + + except HTTPException: + raise + except Exception as e: + logger.error(f"❌ Error updating deadline: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +# ============================================================================ +# AUDIT LOG ENDPOINT +# ============================================================================ + +@router.get("/tickets/{ticket_id}/audit-log", response_model=List[TTicketAuditLog], tags=["Audit"]) +async def get_audit_log( + ticket_id: int, + limit: int = 
Query(50, ge=1, le=200, description="Number of entries"), + offset: int = Query(0, ge=0, description="Offset for pagination") +): + """Hent audit log for ticket (sporbarhed)""" + try: + logs = execute_query( + """SELECT * FROM tticket_audit_log + WHERE ticket_id = %s + ORDER BY performed_at DESC + LIMIT %s OFFSET %s""", + (ticket_id, limit, offset) + ) + + return logs + + except Exception as e: + logger.error(f"❌ Error fetching audit log: {e}") + raise HTTPException(status_code=500, detail=str(e)) diff --git a/app/ticket/backend/ticket_service.py b/app/ticket/backend/ticket_service.py index f213eec..2de6f35 100644 --- a/app/ticket/backend/ticket_service.py +++ b/app/ticket/backend/ticket_service.py @@ -10,7 +10,7 @@ from datetime import datetime from typing import Optional, Dict, Any, List from decimal import Decimal -from app.core.database import execute_query, execute_insert, execute_update +from app.core.database import execute_query, execute_insert, execute_update, execute_query_single from app.ticket.backend.models import ( TicketStatus, TicketPriority, @@ -84,13 +84,14 @@ class TicketService: from psycopg2.extras import Json # Insert ticket (trigger will auto-generate ticket_number if NULL) - ticket_id = execute_insert( + result = execute_query_single( """ INSERT INTO tticket_tickets ( ticket_number, subject, description, status, priority, category, customer_id, contact_id, assigned_to_user_id, created_by_user_id, source, tags, custom_fields ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + RETURNING id """, ( ticket_data.ticket_number, @@ -109,6 +110,11 @@ class TicketService: ) ) + if not result: + raise Exception("Failed to create ticket - no ID returned") + + ticket_id = result['id'] + # Log creation TicketService.log_audit( ticket_id=ticket_id, @@ -120,11 +126,9 @@ class TicketService: ) # Fetch created ticket - ticket = execute_query( + ticket = execute_query_single( "SELECT * FROM tticket_tickets WHERE id = %s", - (ticket_id,), - 
fetchone=True - ) + (ticket_id,)) logger.info(f"✅ Created ticket {ticket['ticket_number']} (ID: {ticket_id})") return ticket @@ -147,11 +151,9 @@ class TicketService: Updated ticket dict """ # Get current ticket - current = execute_query( + current = execute_query_single( "SELECT * FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) if not current: raise ValueError(f"Ticket {ticket_id} not found") @@ -198,11 +200,9 @@ class TicketService: ) # Fetch updated ticket - updated = execute_query( + updated = execute_query_single( "SELECT * FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) logger.info(f"✅ Updated ticket {updated['ticket_number']}") return updated @@ -230,11 +230,9 @@ class TicketService: ValueError: If transition is not allowed """ # Get current ticket - current = execute_query( + current = execute_query_single( "SELECT * FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) if not current: raise ValueError(f"Ticket {ticket_id} not found") @@ -280,11 +278,9 @@ class TicketService: ) # Fetch updated ticket - updated = execute_query( + updated = execute_query_single( "SELECT * FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) logger.info(f"✅ Updated ticket {updated['ticket_number']} status: {current_status} → {new_status}") return updated @@ -307,11 +303,9 @@ class TicketService: Updated ticket dict """ # Get current assignment - current = execute_query( + current = execute_query_single( "SELECT assigned_to_user_id FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) if not current: raise ValueError(f"Ticket {ticket_id} not found") @@ -334,11 +328,9 @@ class TicketService: ) # Fetch updated ticket - updated = execute_query( + updated = execute_query_single( "SELECT * FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) logger.info(f"✅ 
Assigned ticket {updated['ticket_number']} to user {assigned_to_user_id}") return updated @@ -363,11 +355,9 @@ class TicketService: Created comment dict """ # Verify ticket exists - ticket = execute_query( + ticket = execute_query_single( "SELECT id FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) if not ticket: raise ValueError(f"Ticket {ticket_id} not found") @@ -389,11 +379,9 @@ class TicketService: # Update first_response_at if this is the first non-internal comment if not is_internal: - ticket = execute_query( + ticket = execute_query_single( "SELECT first_response_at FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) if not ticket['first_response_at']: execute_update( "UPDATE tticket_tickets SET first_response_at = CURRENT_TIMESTAMP WHERE id = %s", @@ -411,11 +399,9 @@ class TicketService: ) # Fetch created comment - comment = execute_query( + comment = execute_query_single( "SELECT * FROM tticket_comments WHERE id = %s", - (comment_id,), - fetchone=True - ) + (comment_id,)) logger.info(f"💬 Added comment to ticket {ticket_id} (internal: {is_internal})") return comment @@ -471,19 +457,15 @@ class TicketService: Returns: Ticket dict with stats or None if not found """ - ticket = execute_query( + ticket = execute_query_single( "SELECT * FROM tticket_open_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) # If not in open_tickets view, fetch from main table if not ticket: - ticket = execute_query( + ticket = execute_query_single( "SELECT * FROM tticket_tickets WHERE id = %s", - (ticket_id,), - fetchone=True - ) + (ticket_id,)) return ticket diff --git a/app/ticket/frontend/dashboard.html b/app/ticket/frontend/dashboard.html index 986bb2e..7c933d6 100644 --- a/app/ticket/frontend/dashboard.html +++ b/app/ticket/frontend/dashboard.html @@ -2,360 +2,282 @@ {% block title %}Ticket Dashboard - BMC Hub{% endblock %} -{% block extra_css %} - -{% endblock %} - {% block 
content %} -
    - -
    -
    -

    - Ticket Dashboard -

    -

    Oversigt over alle tickets og worklog aktivitet

    -
    - +
    + +
    +
    +

    🎫 Support Dashboard

    +

    Oversigt over alle support tickets og aktivitet

    +
    + +
    +
    - -
    -
    -
    -
    -

    {{ stats.open_count or 0 }}

    -

    Nye Tickets

    -
    -
    -
    -
    -
    -

    {{ stats.in_progress_count or 0 }}

    -

    I Gang

    -
    -
    -
    -
    -
    -

    {{ stats.resolved_count or 0 }}

    -

    Løst

    -
    -
    -
    -
    -
    -

    {{ stats.closed_count or 0 }}

    -

    Lukket

    + +
    +
    +
    +
    +
    + +
    +

    {{ stats.open_count or 0 }}

    +

    Åbne

    - - -
    -
    -

    {{ worklog_stats.draft_count or 0 }}

    -

    Draft Worklog

    -
    -
    -

    {{ "%.1f"|format(worklog_stats.draft_hours or 0) }}t

    -

    Udraft Timer

    -
    -
    -

    {{ worklog_stats.billable_count or 0 }}

    -

    Billable Entries

    -
    -
    -

    {{ "%.1f"|format(worklog_stats.billable_hours or 0) }}t

    -

    Billable Timer

    +
    +
    +
    +
    + +
    +

    {{ stats.in_progress_count or 0 }}

    +

    I Gang

    +
    +
    +
    +
    +
    + +
    +

    {{ stats.pending_count or 0 }}

    +

    Afventer

    +
    +
    +
    +
    +
    +
    +
    + +
    +

    {{ stats.resolved_count or 0 }}

    +

    Løst

    +
    +
    +
    +
    +
    +
    +
    + +
    +

    {{ stats.closed_count or 0 }}

    +

    Lukket

    +
    +
    +
    +
    +
    +
    +
    + +
    +

    {{ stats.total_count or 0 }}

    +

    I Alt

    +
    +
    +
    +
    - -
    -

    - Seneste Tickets -

    - - Se Alle + + + + +
    +
    +
    📋 Seneste Tickets
    + + Se Alle
    - - {% if recent_tickets %} -
    +
    - - +
    + - + + + - {% for ticket in recent_tickets %} - - - - - - - - {% endfor %} + {% if recent_tickets %} + {% for ticket in recent_tickets %} + + + + + + + + + + {% endfor %} + {% else %} + + + + {% endif %}
    TicketTicket #Emne Kunde Status Prioritet Oprettet
    - {{ ticket.ticket_number }} -
    - {{ ticket.subject }} -
    - {% if ticket.customer_name %} - {{ ticket.customer_name }} - {% else %} - - - {% endif %} - - - {{ ticket.status.replace('_', ' ').title() }} - - - - {{ ticket.priority.title() }} - - - {{ ticket.created_at.strftime('%d-%m-%Y %H:%M') if ticket.created_at else '-' }} -
    {{ ticket.ticket_number }}{{ ticket.subject }}{{ ticket.customer_name or '-' }} + {% if ticket.status == 'open' %} + Åben + {% elif ticket.status == 'in_progress' %} + I Gang + {% elif ticket.status == 'pending_customer' %} + Afventer + {% elif ticket.status == 'resolved' %} + Løst + {% elif ticket.status == 'closed' %} + Lukket + {% else %} + {{ ticket.status }} + {% endif %} + + {% if ticket.priority == 'urgent' %} + Akut + {% elif ticket.priority == 'high' %} + Høj + {% elif ticket.priority == 'normal' %} + Normal + {% else %} + Lav + {% endif %} + {{ ticket.created_at.strftime('%d/%m/%Y %H:%M') if ticket.created_at else '-' }} + +
    + Ingen tickets endnu +
    - {% else %} -
    -
    - -

    Ingen tickets endnu

    -

    Opret din første ticket for at komme i gang

    - - Opret Ticket +
    +
    + + - // Auto-refresh every 5 minutes - setTimeout(() => { - location.reload(); - }, 300000); - + {% endblock %} diff --git a/app/ticket/frontend/dashboard.html.old b/app/ticket/frontend/dashboard.html.old new file mode 100644 index 0000000..986bb2e --- /dev/null +++ b/app/ticket/frontend/dashboard.html.old @@ -0,0 +1,361 @@ +{% extends "shared/frontend/base.html" %} + +{% block title %}Ticket Dashboard - BMC Hub{% endblock %} + +{% block extra_css %} + +{% endblock %} + +{% block content %} +
    + + + + +
    +
    +
    +
    +

    {{ stats.open_count or 0 }}

    +

    Nye Tickets

    +
    +
    +
    +
    +
    +

    {{ stats.in_progress_count or 0 }}

    +

    I Gang

    +
    +
    +
    +
    +
    +

    {{ stats.resolved_count or 0 }}

    +

    Løst

    +
    +
    +
    +
    +
    +

    {{ stats.closed_count or 0 }}

    +

    Lukket

    +
    +
    +
    + + +
    +
    +

    {{ worklog_stats.draft_count or 0 }}

    +

    Draft Worklog

    +
    +
    +

    {{ "%.1f"|format(worklog_stats.draft_hours or 0) }}t

    +

    Udraft Timer

    +
    +
    +

    {{ worklog_stats.billable_count or 0 }}

    +

    Billable Entries

    +
    +
    +

    {{ "%.1f"|format(worklog_stats.billable_hours or 0) }}t

    +

    Billable Timer

    +
    +
    + + +
    +

    + Seneste Tickets +

    + + Se Alle + +
    + + {% if recent_tickets %} +
    +
    + + + + + + + + + + + + {% for ticket in recent_tickets %} + + + + + + + + {% endfor %} + +
    TicketKundeStatusPrioritetOprettet
    + {{ ticket.ticket_number }} +
    + {{ ticket.subject }} +
    + {% if ticket.customer_name %} + {{ ticket.customer_name }} + {% else %} + - + {% endif %} + + + {{ ticket.status.replace('_', ' ').title() }} + + + + {{ ticket.priority.title() }} + + + {{ ticket.created_at.strftime('%d-%m-%Y %H:%M') if ticket.created_at else '-' }} +
    +
    +
    + {% else %} +
    +
    + +

    Ingen tickets endnu

    +

    Opret din første ticket for at komme i gang

    + + Opret Ticket + +
    +
    + {% endif %} +
    +{% endblock %} + +{% block extra_js %} + +{% endblock %} diff --git a/app/ticket/frontend/ticket_detail.html b/app/ticket/frontend/ticket_detail.html index 478682f..a760da7 100644 --- a/app/ticket/frontend/ticket_detail.html +++ b/app/ticket/frontend/ticket_detail.html @@ -190,7 +190,7 @@
    - + Rediger
    - + Ny Ticket
    diff --git a/app/ticket/frontend/ticket_new.html b/app/ticket/frontend/ticket_new.html new file mode 100644 index 0000000..0726dd8 --- /dev/null +++ b/app/ticket/frontend/ticket_new.html @@ -0,0 +1,1273 @@ +{% extends "shared/frontend/base.html" %} + +{% block title %}Opret Ny Sag - BMC Hub{% endblock %} + +{% block extra_css %} + +{% endblock %} + +{% block content %} +
    +
    +
    + +
    + +
    +

    Opret Ny Support Sag

    +

    Følg trinene for at oprette en struktureret support ticket

    +
    + + +
    +
    +
    1
    +
    Kunde
    +
    +
    +
    2
    +
    Problem
    +
    +
    +
    3
    +
    Detaljer
    +
    +
    +
    4
    +
    Vedhæft
    +
    +
    + + +
    + + +
    +

    Vælg Kunde

    + + + + + + + + +
    + + Søg efter firmanavn, kontaktperson, CVR-nummer eller email +
    +
    + + +
    +

    Beskriv Problemet

    + + + +
    + + +
    + +
    + + +
    + + + +
    +
    +
    + + +
    +
    +
    +
    + + +
    +
    +
    +
    + + +
    +

    Prioritet & Detaljer

    + + +
    + + + + + +
    + +
    +
    +
    + + +
    +
    +
    +
    + + +
    +
    +
    + + + +
    + +
    + + +
    + + +
    +
    + + +
    +

    Vedhæft Filer (Valgfrit)

    + +
    +
    + +
    +
    Træk og slip filer her
    +

    eller klik for at vælge filer

    + + +
    + +
    + +
    + + Tip: Du kan også tilføje skærmbilleder, logfiler eller screenshots + der kan hjælpe med at diagnosticere problemet. +
    +
    + +
    + + +
    + +
    + + +
    + +
    + +
    +
    +
    + + + + + +{% endblock %} diff --git a/app/ticket/frontend/views.py b/app/ticket/frontend/views.py index f089fb1..b59c6d5 100644 --- a/app/ticket/frontend/views.py +++ b/app/ticket/frontend/views.py @@ -148,7 +148,7 @@ async def approve_worklog_entry( FROM tticket_worklog WHERE id = %s """ - entry = execute_query(check_query, (worklog_id,), fetchone=True) + entry = execute_query_single(check_query, (worklog_id,)) if not entry: raise HTTPException(status_code=404, detail="Worklog entry not found") @@ -199,7 +199,7 @@ async def reject_worklog_entry( FROM tticket_worklog WHERE id = %s """ - entry = execute_query(check_query, (worklog_id,), fetchone=True) + entry = execute_query_single(check_query, (worklog_id,)) if not entry: raise HTTPException(status_code=404, detail="Worklog entry not found") @@ -235,6 +235,14 @@ async def reject_worklog_entry( raise HTTPException(status_code=500, detail=str(e)) +@router.get("/tickets/new", response_class=HTMLResponse) +async def new_ticket_page(request: Request): + """ + New ticket creation page with multi-step wizard + """ + return templates.TemplateResponse("ticket/frontend/ticket_new.html", {"request": request}) + + @router.get("/dashboard", response_class=HTMLResponse) async def ticket_dashboard(request: Request): """ @@ -252,7 +260,8 @@ async def ticket_dashboard(request: Request): COUNT(*) AS total_count FROM tticket_tickets """ - stats = execute_query(stats_query, fetchone=True) + stats_result = execute_query(stats_query) + stats = stats_result[0] if stats_result else {} # Get recent tickets recent_query = """ @@ -280,20 +289,21 @@ async def ticket_dashboard(request: Request): COALESCE(SUM(hours) FILTER (WHERE status = 'billable'), 0) AS billable_hours FROM tticket_worklog """ - worklog_stats = execute_query(worklog_stats_query, fetchone=True) + worklog_stats_result = execute_query(worklog_stats_query) + worklog_stats = worklog_stats_result[0] if worklog_stats_result else {} return templates.TemplateResponse( 
"ticket/frontend/dashboard.html", { "request": request, "stats": stats, - "recent_tickets": recent_tickets, + "recent_tickets": recent_tickets or [], "worklog_stats": worklog_stats } ) except Exception as e: - logger.error(f"❌ Failed to load dashboard: {e}") + logger.error(f"❌ Failed to load dashboard: {e}", exc_info=True) raise HTTPException(status_code=500, detail=str(e)) @@ -396,7 +406,7 @@ async def ticket_detail_page(request: Request, ticket_id: int): LEFT JOIN users u ON u.user_id = t.assigned_to_user_id WHERE t.id = %s """ - ticket = execute_query(ticket_query, (ticket_id,), fetchone=True) + ticket = execute_query_single(ticket_query, (ticket_id,)) if not ticket: raise HTTPException(status_code=404, detail="Ticket not found") diff --git a/app/ticket/frontend/worklog_review.html b/app/ticket/frontend/worklog_review.html index 1b1eca8..a7ea95c 100644 --- a/app/ticket/frontend/worklog_review.html +++ b/app/ticket/frontend/worklog_review.html @@ -272,7 +272,7 @@ diff --git a/app/timetracking/backend/economic_export.py b/app/timetracking/backend/economic_export.py index 5a5ca47..cde8282 100644 --- a/app/timetracking/backend/economic_export.py +++ b/app/timetracking/backend/economic_export.py @@ -22,7 +22,7 @@ from app.core.database import execute_query, execute_update from app.timetracking.backend.models import ( TModuleEconomicExportRequest, TModuleEconomicExportResult -) +, execute_query_single) from app.timetracking.backend.audit import audit logger = logging.getLogger(__name__) @@ -162,7 +162,7 @@ class EconomicExportService: JOIN tmodule_customers c ON o.customer_id = c.id WHERE o.id = %s """ - order = execute_query(order_query, (request.order_id,), fetchone=True) + order = execute_query_single(order_query, (request.order_id,)) if not order: raise HTTPException(status_code=404, detail="Order not found") @@ -187,7 +187,7 @@ class EconomicExportService: WHERE order_id = %s ORDER BY line_number """ - lines = execute_query(lines_query, (request.order_id,)) + 
lines = execute_query_single(lines_query, (request.order_id,)) if not lines: raise HTTPException( @@ -244,7 +244,7 @@ class EconomicExportService: LEFT JOIN customers c ON tc.hub_customer_id = c.id WHERE tc.id = %s """ - customer_data = execute_query(customer_number_query, (order['customer_id'],), fetchone=True) + customer_data = execute_query(customer_number_query, (order['customer_id'],)) if not customer_data or not customer_data.get('economic_customer_number'): raise HTTPException( diff --git a/app/timetracking/backend/order_service.py b/app/timetracking/backend/order_service.py index a162e26..b0055b6 100644 --- a/app/timetracking/backend/order_service.py +++ b/app/timetracking/backend/order_service.py @@ -20,7 +20,7 @@ from app.timetracking.backend.models import ( TModuleOrderLine, TModuleOrderCreate, TModuleOrderLineCreate -) +, execute_query_single) from app.timetracking.backend.audit import audit logger = logging.getLogger(__name__) @@ -42,7 +42,7 @@ class OrderService: try: # Check module customer query = "SELECT hourly_rate FROM tmodule_customers WHERE id = %s" - result = execute_query(query, (customer_id,), fetchone=True) + result = execute_query_single(query, (customer_id,)) if result and result.get('hourly_rate'): rate = result['hourly_rate'] @@ -52,7 +52,7 @@ class OrderService: # Check Hub customer if linked if hub_customer_id: query = "SELECT hourly_rate FROM customers WHERE id = %s" - result = execute_query(query, (hub_customer_id,), fetchone=True) + result = execute_query_single(query, (hub_customer_id,)) if result and result.get('hourly_rate'): rate = result['hourly_rate'] @@ -86,11 +86,9 @@ class OrderService: """ try: # Hent customer info - customer = execute_query( + customer = execute_query_single( "SELECT * FROM tmodule_customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") @@ -110,7 +108,7 @@ class OrderService: AND t.billable = true 
ORDER BY c.id, t.worked_date """ - approved_times = execute_query(query, (customer_id,)) + approved_times = execute_query_single(query, (customer_id,)) if not approved_times: raise HTTPException( @@ -316,7 +314,7 @@ class OrderService: LEFT JOIN tmodule_customers c ON o.customer_id = c.id WHERE o.id = %s """ - order = execute_query(order_query, (order_id,), fetchone=True) + order = execute_query(order_query, (order_id,)) if not order: raise HTTPException(status_code=404, detail="Order not found") @@ -336,7 +334,7 @@ class OrderService: ol.product_number, ol.account_number, ol.created_at ORDER BY ol.line_number """ - lines = execute_query(lines_query, (order_id,)) + lines = execute_query_single(lines_query, (order_id,)) return TModuleOrderWithLines( **order, @@ -401,9 +399,7 @@ class OrderService: # Check order exists and is not exported order = execute_query( "SELECT * FROM tmodule_orders WHERE id = %s", - (order_id,), - fetchone=True - ) + (order_id,)) if not order: raise HTTPException(status_code=404, detail="Order not found") @@ -424,7 +420,7 @@ class OrderService: ) # Reset time entries back to approved - lines = execute_query( + lines = execute_query_single( "SELECT time_entry_ids FROM tmodule_order_lines WHERE order_id = %s", (order_id,) ) @@ -453,9 +449,7 @@ class OrderService: # Return updated order updated = execute_query( "SELECT * FROM tmodule_orders WHERE id = %s", - (order_id,), - fetchone=True - ) + (order_id,)) return TModuleOrder(**updated) diff --git a/app/timetracking/backend/router.py b/app/timetracking/backend/router.py index ee5dee6..7d7652c 100644 --- a/app/timetracking/backend/router.py +++ b/app/timetracking/backend/router.py @@ -27,7 +27,7 @@ from app.timetracking.backend.models import ( TModuleMetadata, TModuleUninstallRequest, TModuleUninstallResult -) +, execute_query_single) from app.timetracking.backend.vtiger_sync import vtiger_service from app.timetracking.backend.wizard import wizard from app.timetracking.backend.order_service 
import order_service @@ -80,11 +80,9 @@ async def sync_case_comments(case_id: int): """ try: # Hent case fra database - case = execute_query( + case = execute_query_single( "SELECT vtiger_id FROM tmodule_cases WHERE id = %s", - (case_id,), - fetchone=True - ) + (case_id,)) if not case: raise HTTPException(status_code=404, detail="Case not found") @@ -185,7 +183,7 @@ async def approve_time_entry( JOIN tmodule_customers cust ON t.customer_id = cust.id WHERE t.id = %s """ - entry = execute_query(query, (time_id,), fetchone=True) + entry = execute_query_single(query, (time_id,)) if not entry: raise HTTPException(status_code=404, detail="Time entry not found") @@ -470,7 +468,7 @@ async def unlock_order( RETURNING * """ - result = execute_query(update_query, (order_id,), fetchone=True) + result = execute_query_single(update_query, (order_id,)) # Log unlock audit.log_event( @@ -551,10 +549,8 @@ async def test_economic_connection(): async def get_module_metadata(): """Hent modul metadata""" try: - result = execute_query( - "SELECT * FROM tmodule_metadata ORDER BY id DESC LIMIT 1", - fetchone=True - ) + result = execute_query_single( + "SELECT * FROM tmodule_metadata ORDER BY id DESC LIMIT 1") if not result: raise HTTPException(status_code=404, detail="Module metadata not found") @@ -575,7 +571,7 @@ async def module_health(): SELECT COUNT(*) as count FROM information_schema.tables WHERE table_name LIKE 'tmodule_%' """ - result = execute_query(tables_query, fetchone=True) + result = execute_query_single(tables_query) table_count = result['count'] if result else 0 # Get stats - count each table separately @@ -588,10 +584,8 @@ async def module_health(): } for table_name in ["customers", "cases", "times", "orders"]: - count_result = execute_query( - f"SELECT COUNT(*) as count FROM tmodule_{table_name}", - fetchone=True - ) + count_result = execute_query_single( + f"SELECT COUNT(*) as count FROM tmodule_{table_name}") stats[table_name] = count_result['count'] if count_result 
else 0 except Exception as e: @@ -673,11 +667,9 @@ async def update_customer_hourly_rate(customer_id: int, hourly_rate: float, user ) # Return updated customer - customer = execute_query( + customer = execute_query_single( "SELECT id, name, hourly_rate FROM tmodule_customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") @@ -720,11 +712,9 @@ async def toggle_customer_time_card(customer_id: int, enabled: bool, user_id: Op ) # Return updated customer - customer = execute_query( + customer = execute_query_single( "SELECT * FROM tmodule_customers WHERE id = %s", - (customer_id,), - fetchone=True - ) + (customer_id,)) if not customer: raise HTTPException(status_code=404, detail="Customer not found") @@ -770,7 +760,7 @@ async def list_customers( query += " ORDER BY customer_name" customers = execute_query(query) else: # Simple customer list query = "SELECT * FROM tmodule_customers" @@ -893,7 +883,7 @@ async def uninstall_module( (SELECT COUNT(*) FROM tmodule_order_lines) + (SELECT COUNT(*) FROM tmodule_sync_log) as total """ - count_result = execute_query(count_query, fetchone=True) + count_result = execute_query_single(count_query) total_rows = count_result['total'] if count_result else 0 except: total_rows = 0 @@ -902,7 +892,7 @@ async def uninstall_module( from app.core.database import get_db_connection import psycopg2 conn = get_db_connection() cursor = conn.cursor() dropped_items = { diff --git a/app/timetracking/backend/vtiger_sync.py b/app/timetracking/backend/vtiger_sync.py index 2a10012..e8ed18c 100644 --- a/app/timetracking/backend/vtiger_sync.py +++ b/app/timetracking/backend/vtiger_sync.py @@ -302,11 +302,9 @@ class TimeTrackingVTigerService: data_hash = self._calculate_hash(account) # Check if exists - existing = execute_query( + existing = execute_query_single( 
"SELECT id, sync_hash FROM tmodule_customers WHERE vtiger_id = %s", - (vtiger_id,), - fetchone=True - ) + (vtiger_id,)) if existing: # Check if data changed @@ -424,11 +422,9 @@ class TimeTrackingVTigerService: continue # Find customer in our DB - customer = execute_query( + customer = execute_query_single( "SELECT id FROM tmodule_customers WHERE vtiger_id = %s", - (account_id,), - fetchone=True - ) + (account_id,)) if not customer: logger.warning(f"⚠️ Customer {account_id} not found - sync customers first") @@ -453,11 +449,9 @@ class TimeTrackingVTigerService: data_hash = self._calculate_hash(ticket_with_comments) # Check if exists - existing = execute_query( + existing = execute_query_single( "SELECT id, sync_hash FROM tmodule_cases WHERE vtiger_id = %s", - (vtiger_id,), - fetchone=True - ) + (vtiger_id,)) if existing: if existing['sync_hash'] == data_hash: @@ -685,22 +679,18 @@ class TimeTrackingVTigerService: if related_to: # Try to find case first, then account - case = execute_query( + case = execute_query_single( "SELECT id, customer_id FROM tmodule_cases WHERE vtiger_id = %s", - (related_to,), - fetchone=True - ) + (related_to,)) if case: case_id = case['id'] customer_id = case['customer_id'] else: # Try to find customer directly - customer = execute_query( + customer = execute_query_single( "SELECT id FROM tmodule_customers WHERE vtiger_id = %s", - (related_to,), - fetchone=True - ) + (related_to,)) if customer: customer_id = customer['id'] @@ -725,11 +715,9 @@ class TimeTrackingVTigerService: data_hash = self._calculate_hash(timelog) # Check if exists - existing = execute_query( + existing = execute_query_single( "SELECT id, sync_hash FROM tmodule_times WHERE vtiger_id = %s", - (vtiger_id,), - fetchone=True - ) + (vtiger_id,)) if existing: if existing['sync_hash'] == data_hash: diff --git a/app/timetracking/backend/wizard.py b/app/timetracking/backend/wizard.py index 42cbd5a..bac7aa4 100644 --- a/app/timetracking/backend/wizard.py +++ 
b/app/timetracking/backend/wizard.py @@ -19,7 +19,7 @@ from app.timetracking.backend.models import ( TModuleWizardProgress, TModuleWizardNextEntry, TModuleApprovalStats -) +, execute_query_single) from app.timetracking.backend.audit import audit logger = logging.getLogger(__name__) @@ -36,7 +36,7 @@ class WizardService: SELECT * FROM tmodule_approval_stats WHERE customer_id = %s """ - result = execute_query(query, (customer_id,), fetchone=True) + result = execute_query_single(query, (customer_id,)) if not result: return None @@ -52,7 +52,7 @@ class WizardService: """Hent approval statistics for alle kunder""" try: query = "SELECT * FROM tmodule_approval_stats ORDER BY customer_name" results = execute_query(query) return [TModuleApprovalStats(**row) for row in results] @@ -83,7 +83,7 @@ class WizardService: WHERE customer_id = %s LIMIT 1 """ - result = execute_query(query, (customer_id,), fetchone=True) + result = execute_query_single(query, (customer_id,)) else: # Hent næste generelt if exclude_time_card: @@ -96,7 +96,7 @@ class WizardService: else: query = "SELECT * FROM tmodule_next_pending LIMIT 1" - result = execute_query(query, fetchone=True) + result = execute_query_single(query) if not result: # Ingen flere entries @@ -161,7 +161,7 @@ class WizardService: JOIN tmodule_customers cust ON t.customer_id = cust.id WHERE t.id = %s """ - entry = execute_query(query, (approval.time_id,), fetchone=True) + entry = execute_query_single(query, (approval.time_id,)) if not entry: raise HTTPException(status_code=404, detail="Time entry not found") @@ -215,7 +215,7 @@ class WizardService: ) # Return updated entry - updated = execute_query(query, (approval.time_id,), fetchone=True) + updated = execute_query_single(query, (approval.time_id,)) return TModuleTimeWithContext(**updated) except HTTPException: @@ -251,7 +251,7 @@ class WizardService: JOIN tmodule_customers cust ON t.customer_id = cust.id WHERE t.id = %s """ - entry = 
execute_query(query, (time_id,), fetchone=True) + entry = execute_query_single(query, (time_id,)) if not entry: raise HTTPException(status_code=404, detail="Time entry not found") @@ -285,7 +285,7 @@ class WizardService: logger.info(f"❌ Rejected time entry {time_id}: {reason}") # Return updated - updated = execute_query(query, (time_id,), fetchone=True) + updated = execute_query_single(query, (time_id,)) return TModuleTimeWithContext(**updated) except HTTPException: @@ -321,7 +321,7 @@ class WizardService: JOIN tmodule_customers cust ON t.customer_id = cust.id WHERE t.id = %s """ - entry = execute_query(query, (time_id,), fetchone=True) + entry = execute_query_single(query, (time_id,)) if not entry: raise HTTPException(status_code=404, detail="Time entry not found") @@ -368,7 +368,7 @@ class WizardService: logger.info(f"🔄 Reset time entry {time_id} to pending: {reason}") # Return updated - updated = execute_query(query, (time_id,), fetchone=True) + updated = execute_query_single(query, (time_id,)) return TModuleTimeWithContext(**updated) except HTTPException: @@ -491,7 +491,7 @@ class WizardService: ORDER BY t.worked_date LIMIT 1 """ - case = execute_query(query, (customer_id,), fetchone=True) + case = execute_query_single(query, (customer_id,)) if case: current_case_id = case['id'] current_case_title = case['title'] @@ -585,7 +585,7 @@ class WizardService: ORDER BY t.worked_date, t.id """ results = execute_query(query, (case_id,)) return [TModuleTimeWithContext(**row) for row in results] except Exception as e: @@ -608,7 +608,7 @@ class WizardService: FROM tmodule_cases WHERE id = %s """ - case = execute_query(case_query, (case_id,), fetchone=True) + case = execute_query_single(case_query, (case_id,)) if not case: raise HTTPException(status_code=404, detail="Case not found") diff --git a/bmc_hub_dev.code-workspace b/bmc_hub_dev.code-workspace new file mode 100644 index 0000000..44e8123 --- /dev/null +++ 
b/bmc_hub_dev.code-workspace @@ -0,0 +1,11 @@ +{ + "folders": [ + { + "path": "." + }, + { + "path": "../../pakkemodtagelse" + } + ], + "settings": {} +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 1340065..b7a1b4e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -42,17 +42,12 @@ services: # Mount for local development - live code reload - ./app:/app/app:ro - ./main.py:/app/main.py:ro - - ./scripts:/app/scripts:ro - # Mount OmniSync database for import (read-only) - - /Users/christianthomas/pakkemodtagelse/data:/omnisync_data:ro env_file: - .env environment: # Override database URL to point to postgres service - DATABASE_URL=postgresql://${POSTGRES_USER:-bmc_hub}:${POSTGRES_PASSWORD:-bmc_hub}@postgres:5432/${POSTGRES_DB:-bmc_hub} - ENABLE_RELOAD=false - - OLLAMA_MODEL=qwen3:4b # Bruger Chat API format - - OLLAMA_MODEL_FALLBACK=qwen2.5:3b # Backup model restart: unless-stopped healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8000/health"] diff --git a/docs/SIMPLY_CRM_SETUP.md b/docs/SIMPLY_CRM_SETUP.md new file mode 100644 index 0000000..c3cedab --- /dev/null +++ b/docs/SIMPLY_CRM_SETUP.md @@ -0,0 +1,128 @@ +# Simply-CRM Integration Setup + +## Status +⚠️ **Simply-CRM credentials ikke konfigureret** - Salgsordre fra det gamle system vises ikke + +## Hvad er Simply-CRM? + +Simply-CRM er et **separat CRM system** (VTiger fork) der bruges til at hente **historiske salgsordre** med `recurring_frequency`. + +⚠️ **Vigtigt:** Simply-CRM er IKKE det samme som: +- vTiger Cloud (https://bmcnetworks.od2.vtiger.com) +- Det gamle on-premise vTiger (http://crm.bmcnetworks.dk) + +Simply-CRM har sin egen URL, credentials og API endpoint (`/webservice.php`). + +## Hvorfor vises ingen Simply-CRM data? + +3 grunde: +1. ⚠️ **OLD_VTIGER_URL, OLD_VTIGER_USERNAME, OLD_VTIGER_API_KEY er tomme** i `.env` filen +2. Koden leder efter `OLD_VTIGER_API_KEY` men kan ikke finde credentials +3. 
Serveren er tilgængelig (301 response), men authentication mangler + +## Sådan finder du credentials + +### Option 1: Hvis I stadig bruger det gamle system + +1. **Log ind på Simply-CRM:** + - URL: http://crm.bmcnetworks.dk + - Brug din normale bruger + +2. **Find Access Key:** + - Gå til **Settings** (tandhjul øverst til højre) + - Klik på **My Preferences** + - Under **Webservices** vil du se din **Access Key** + - Kopier access key'en + +3. **Tilføj til .env:** + ```bash + # Simply-CRM (separat system) + SIMPLYCRM_URL=http://your-simplycrm-server.com + SIMPLYCRM_USERNAME=din_email@domain.dk + SIMPLYCRM_API_KEY=din_access_key_herfra + + # ELLER hvis det er samme som gamle vTiger (fallback): + OLD_VTIGER_URL=http://crm.bmcnetworks.dk + OLD_VTIGER_USERNAME=din_email@bmcnetworks.dk + OLD_VTIGER_API_KEY=din_access_key_herfra + ``` + +4. **Genstart API:** + ```bash + docker restart bmc-hub-api + ``` + +### Option 2: Hvis I ikke længere bruger det gamle system + +Hvis alle kunder er migreret til vTiger Cloud og Simply-CRM ikke længere bruges: + +1. **Kommenter linjerne ud i .env:** + ```bash + # OLD_VTIGER_URL= + # OLD_VTIGER_USERNAME= + # OLD_VTIGER_API_KEY= + ``` + +2. Simply-CRM vil automatisk blive sprunget over og der vises kun: + - vTiger Cloud subscriptions ✅ + - BMC Office subscriptions ✅ + +## Test After Setup + +```bash +# Test med en kunde +curl http://localhost:8001/api/v1/customers/327/subscriptions | jq '.sales_orders | length' + +# Check logs +docker logs bmc-hub-api --tail=30 | grep -i simply +``` + +## Hvad henter Simply-CRM? + +Koden henter **kun salgsordre med `recurring_frequency`** - altså abonnementer: + +```sql +SELECT * FROM SalesOrder +WHERE account_id='' +AND recurring_frequency IS NOT NULL +AND sostatus NOT IN ('closed', 'cancelled') +``` + +For hver ordre: +- Henter line items (produkter) +- Grupperer efter ordre ID +- Viser i "Salgsordre" sektionen på kunde-siden + +## Hvorfor er det vigtigt? 
+ +Uden Simply-CRM credentials kan I ikke se: +- Gamle abonnementer oprettet før cloud migrationen +- Historiske recurring orders +- Kunder der stadig har aktive ordrer i det gamle system + +**Men** I kan stadig se: +- ✅ vTiger Cloud subscriptions +- ✅ BMC Office subscriptions +- ✅ Nye vTiger Cloud sales orders + +## Current Status + +``` +✅ vTiger Cloud - Virker (2 subscriptions for Maskinsikkerhed) +✅ BMC Office - Virker (16 subscriptions for Maskinsikkerhed) +⚠️ Simply-CRM - Mangler credentials +``` + +## Troubleshooting + +### "Simply-CRM credentials not configured" +→ Tilføj OLD_VTIGER_* settings til `.env` og genstart + +### "Not logged in to Simply-CRM" +→ Access key er forkert eller expired + +### "No Simply-CRM account found for 'Kunde Navn'" +→ Kundens navn matcher ikke præcist mellem systemer (vTiger Cloud vs Simply-CRM) + +### Server timeout +→ Check at `http://crm.bmcnetworks.dk` er tilgængelig fra Docker containeren diff --git a/docs/TICKET_SYSTEM_ENHANCEMENTS.md b/docs/TICKET_SYSTEM_ENHANCEMENTS.md new file mode 100644 index 0000000..6bf0c97 --- /dev/null +++ b/docs/TICKET_SYSTEM_ENHANCEMENTS.md @@ -0,0 +1 @@ +w \ No newline at end of file diff --git a/docs/VTIGER_SETUP.md b/docs/VTIGER_SETUP.md new file mode 100644 index 0000000..b9c1ac0 --- /dev/null +++ b/docs/VTIGER_SETUP.md @@ -0,0 +1,107 @@ +# vTiger & Simply-CRM Integration Setup + +## Status +✅ **BMC Office Abonnementer** - Virker nu! (fix applied: changed `execute_query_single` to `execute_query`) +⚠️ **vTiger Cloud Abonnementer** - Kræver credentials +⚠️ **Simply-CRM Salgsordre** - Kræver credentials + +## Problem +Abonnementer & Salgsordre fanen viste ingen data fra vTiger og Simply-CRM fordi: + +1. **BMC Office query brugte `execute_query_single()`** - returnerede kun 1 række i stedet for alle +2. **vTiger Cloud credentials mangler** - VTIGER_URL, VTIGER_USERNAME, VTIGER_ACCESS_KEY +3. 
**Simply-CRM credentials mangler** - OLD_VTIGER_URL, OLD_VTIGER_USERNAME, OLD_VTIGER_ACCESS_KEY + +## Løsning + +### 1. BMC Office Subscriptions (✅ Fixed) +Changed from `execute_query_single()` to `execute_query()` in `app/customers/backend/router.py` line 554. + +Nu vises alle BMC Office abonnementer korrekt: +- Kunde 327 (Maskinsikkerhed): 16 abonnementer +- Kunde 372 (Norva24 Danmark A/S): 12 abonnementer + +### 2. vTiger Cloud Integration (⚠️ Requires Credentials) + +Tilføj følgende til `.env` filen: + +```bash +# vTiger Cloud Integration +VTIGER_URL=https://bmcnetworks.od2.vtiger.com +VTIGER_USERNAME=din_vtiger_bruger +VTIGER_ACCESS_KEY=din_vtiger_access_key +``` + +**Sådan finder du credentials:** +1. Log ind på vTiger Cloud (https://bmcnetworks.od2.vtiger.com) +2. Gå til **Settings** (tandhjul øverst til højre) +3. Vælg **Integration** → **Webservices** +4. Kopier **Access Key** for din bruger +5. Username er din vTiger login email + +### 3. Simply-CRM / Old vTiger Integration (⚠️ Requires Credentials) + +Hvis I stadig bruger den gamle on-premise vTiger installation: + +```bash +# Simply-CRM (Old vTiger On-Premise) +OLD_VTIGER_URL=http://crm.bmcnetworks.dk +OLD_VTIGER_USERNAME=din_gamle_vtiger_bruger +OLD_VTIGER_ACCESS_KEY=din_gamle_access_key +``` + +**Note:** Simply-CRM bruges til at hente salgsordre med `recurring_frequency` fra det gamle system. + +## Test Efter Setup + +1. Genstart API containeren: + ```bash + docker restart bmc-hub-api + ``` + +2. Test en kunde med vTiger ID: + ```bash + curl http://localhost:8001/api/v1/customers/39/subscriptions | jq + ``` + +3. 
Check logs for fejl: + ```bash + docker logs bmc-hub-api --tail=50 | grep -i "vtiger\|simply" + ``` + +## Forventet Output + +Med credentials konfigureret skulle du se: + +```json +{ + "status": "success", + "recurring_orders": [...], // vTiger recurring sales orders + "sales_orders": [...], // Simply-CRM orders med recurring_frequency + "subscriptions": [...], // vTiger Subscriptions module + "expired_subscriptions": [...], // Expired/cancelled subscriptions + "bmc_office_subscriptions": [...] // Local BMC Office subscriptions (✅ works now) +} +``` + +## Frontend Display + +Abonnementer & Salgsordre fanen viser nu 3 sektioner: + +1. **vTiger Abonnementer** - Subscriptions module data med lock/unlock funktion +2. **BMC Office Abonnementer** - Lokale abonnementer (✅ virker) +3. **Samlet overblik** - Stats kortene øverst + +## Troubleshooting + +### "VTIGER_URL not configured" +→ Tilføj credentials til `.env` og genstart containeren + +### "No Simply-CRM account found" +→ Kunden findes ikke i det gamle system, eller navnet matcher ikke præcist + +### "Not logged in to Simply-CRM" +→ OLD_VTIGER_ACCESS_KEY er forkert eller mangler + +### BMC Office subscriptions viser stadig ikke data +→ Tjek at containeren er restartet efter query fix diff --git a/main.py b/main.py index 7fca2a2..0036de0 100644 --- a/main.py +++ b/main.py @@ -7,43 +7,25 @@ import logging from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from fastapi.staticfiles import StaticFiles -from fastapi.responses import RedirectResponse, FileResponse +from fastapi.responses import RedirectResponse from contextlib import asynccontextmanager -from pathlib import Path from app.core.config import settings from app.core.database import init_db -from app.core.module_loader import module_loader -from app.services.email_scheduler import email_scheduler -# Import CORE Feature Routers (disse forbliver hardcoded) -from app.auth.backend import router as auth_api -from app.auth.backend import 
views as auth_views +# Import Feature Routers from app.customers.backend import router as customers_api from app.customers.backend import views as customers_views -from app.contacts.backend import router as contacts_api -from app.contacts.backend import views as contacts_views -from app.vendors.backend import router as vendors_api -from app.vendors.backend import views as vendors_views -from app.settings.backend import router as settings_api -from app.settings.backend import views as settings_views from app.hardware.backend import router as hardware_api from app.billing.backend import router as billing_api -from app.billing.frontend import views as billing_views from app.system.backend import router as system_api from app.dashboard.backend import views as dashboard_views -from app.dashboard.backend import router as dashboard_api -from app.devportal.backend import router as devportal_api -from app.devportal.backend import views as devportal_views -from app.timetracking.backend import router as timetracking_api -from app.timetracking.frontend import views as timetracking_views -from app.emails.backend import router as emails_api -from app.emails.frontend import views as emails_views -from app.backups.backend import router as backups_api -from app.backups.frontend import views as backups_views -from app.backups.backend.scheduler import backup_scheduler +from app.prepaid.backend import router as prepaid_api +from app.prepaid.backend import views as prepaid_views from app.ticket.backend import router as ticket_api from app.ticket.frontend import views as ticket_views +from app.vendors.backend import router as vendors_api +from app.vendors.backend import views as vendors_views # Configure logging logging.basicConfig( @@ -66,25 +48,10 @@ async def lifespan(app: FastAPI): init_db() - # Start email scheduler (background job) - email_scheduler.start() - - # Start backup scheduler (background job) - backup_scheduler.start() - - # Load dynamic modules (hvis enabled) - if 
settings.MODULES_ENABLED: - logger.info("📦 Loading dynamic modules...") - module_loader.register_modules(app) - module_status = module_loader.get_module_status() - logger.info(f"✅ Loaded {len(module_status)} modules: {list(module_status.keys())}") - logger.info("✅ System initialized successfully") yield # Shutdown logger.info("👋 Shutting down...") - email_scheduler.stop() - backup_scheduler.stop() # Create FastAPI app app = FastAPI( @@ -105,11 +72,6 @@ app = FastAPI( openapi_url="/api/openapi.json" ) -@app.get("/") -async def root(): - """Redirect root to dashboard""" - return RedirectResponse(url="/dashboard") - # CORS middleware app.add_middleware( CORSMiddleware, @@ -120,34 +82,20 @@ app.add_middleware( ) # Include routers -app.include_router(auth_api.router, prefix="/api/v1/auth", tags=["Authentication"]) app.include_router(customers_api.router, prefix="/api/v1", tags=["Customers"]) -app.include_router(contacts_api.router, prefix="/api/v1", tags=["Contacts"]) -app.include_router(vendors_api.router, prefix="/api/v1", tags=["Vendors"]) -app.include_router(settings_api.router, prefix="/api/v1", tags=["Settings"]) app.include_router(hardware_api.router, prefix="/api/v1", tags=["Hardware"]) app.include_router(billing_api.router, prefix="/api/v1", tags=["Billing"]) app.include_router(system_api.router, prefix="/api/v1", tags=["System"]) -app.include_router(dashboard_api.router, prefix="/api/v1/dashboard", tags=["Dashboard"]) -app.include_router(devportal_api.router, prefix="/api/v1/devportal", tags=["DEV Portal"]) -app.include_router(timetracking_api, prefix="/api/v1/timetracking", tags=["Time Tracking"]) -app.include_router(backups_api.router, prefix="/api/v1", tags=["Backup System"]) -app.include_router(emails_api.router, prefix="/api/v1", tags=["Email System"]) -app.include_router(ticket_api.router, prefix="/api/v1", tags=["Ticket System"]) +app.include_router(prepaid_api.router, prefix="/api/v1", tags=["Prepaid Cards"]) +app.include_router(ticket_api.router, 
prefix="/api/v1/ticket", tags=["Tickets"]) +app.include_router(vendors_api.router, prefix="/api/v1", tags=["Vendors"]) # Frontend Routers -app.include_router(auth_views.router, tags=["Frontend"]) app.include_router(dashboard_views.router, tags=["Frontend"]) app.include_router(customers_views.router, tags=["Frontend"]) -app.include_router(contacts_views.router, tags=["Frontend"]) +app.include_router(prepaid_views.router, tags=["Frontend"]) app.include_router(vendors_views.router, tags=["Frontend"]) -app.include_router(billing_views.router, tags=["Frontend"]) -app.include_router(settings_views.router, tags=["Frontend"]) -app.include_router(devportal_views.router, tags=["Frontend"]) -app.include_router(backups_views.router, tags=["Frontend"]) -app.include_router(timetracking_views.router, tags=["Frontend"]) -app.include_router(emails_views.router, tags=["Frontend"]) -app.include_router(ticket_views.router, prefix="/ticket", tags=["Frontend - Tickets"]) +app.include_router(ticket_views.router, prefix="/ticket", tags=["Frontend"]) # Serve static files (UI) app.mount("/static", StaticFiles(directory="static", html=True), name="static") @@ -161,49 +109,6 @@ async def health_check(): "version": "1.0.0" } -@app.get("/api/v1/modules") -async def list_modules(): - """List alle dynamic modules og deres status""" - return { - "modules_enabled": settings.MODULES_ENABLED, - "modules": module_loader.get_module_status() - } - -@app.post("/api/v1/modules/{module_name}/enable") -async def enable_module_endpoint(module_name: str): - """Enable et modul (kræver restart)""" - success = module_loader.enable_module(module_name) - return { - "success": success, - "message": f"Modul {module_name} enabled. 
Restart app for at loade.", - "restart_required": True - } - -@app.post("/api/v1/modules/{module_name}/disable") -async def disable_module_endpoint(module_name: str): - """Disable et modul (kræver restart)""" - success = module_loader.disable_module(module_name) - return { - "success": success, - "message": f"Modul {module_name} disabled. Restart app for at unload.", - "restart_required": True - } - -@app.get("/docs/{doc_name}") -async def serve_documentation(doc_name: str): - """Serve markdown documentation files""" - docs_dir = Path(__file__).parent / "docs" - doc_path = docs_dir / doc_name - - # Security: Ensure path is within docs directory - if not doc_path.resolve().is_relative_to(docs_dir.resolve()): - return {"error": "Invalid path"} - - if doc_path.exists() and doc_path.suffix == ".md": - return FileResponse(doc_path, media_type="text/markdown") - - return {"error": "Documentation not found"} - if __name__ == "__main__": import uvicorn import os diff --git a/migrations/026_ticket_enhancements.sql b/migrations/026_ticket_enhancements.sql new file mode 100644 index 0000000..3783a3b --- /dev/null +++ b/migrations/026_ticket_enhancements.sql @@ -0,0 +1,446 @@ +-- ============================================================================ +-- Migration 026: Ticket System Enhancements - Kravspecifikation Implementation +-- ============================================================================ +-- Implementerer: +-- 1. Ticket relations (merge, split, parent/child hierarchy) +-- 2. Calendar events og deadlines +-- 3. Templates system +-- 4. AI suggestions (metadata only - ingen automatik) +-- 5. 
Enhanced contact identification +-- ============================================================================ + +-- ============================================================================ +-- TICKET RELATIONS (flette, splitte, hierarki) +-- ============================================================================ +CREATE TABLE IF NOT EXISTS tticket_relations ( + id SERIAL PRIMARY KEY, + ticket_id INTEGER NOT NULL REFERENCES tticket_tickets(id) ON DELETE CASCADE, + related_ticket_id INTEGER NOT NULL REFERENCES tticket_tickets(id) ON DELETE CASCADE, + relation_type VARCHAR(20) NOT NULL CHECK (relation_type IN ('merged_into', 'split_from', 'parent_of', 'child_of', 'related_to')), + + -- Metadata om relationen + created_by_user_id INTEGER, -- Reference til users.user_id (read-only) + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + reason TEXT, -- Hvorfor blev relationen oprettet + + CONSTRAINT unique_relation UNIQUE (ticket_id, related_ticket_id, relation_type), + CONSTRAINT no_self_reference CHECK (ticket_id != related_ticket_id) +); + +CREATE INDEX idx_tticket_relations_ticket ON tticket_relations(ticket_id); +CREATE INDEX idx_tticket_relations_related ON tticket_relations(related_ticket_id); +CREATE INDEX idx_tticket_relations_type ON tticket_relations(relation_type); + +-- View for at finde alle relationer for en ticket (begge retninger) +CREATE OR REPLACE VIEW tticket_all_relations AS +SELECT + ticket_id, + related_ticket_id, + relation_type, + created_by_user_id, + created_at, + reason +FROM tticket_relations +UNION ALL +SELECT + related_ticket_id as ticket_id, + ticket_id as related_ticket_id, + CASE + WHEN relation_type = 'parent_of' THEN 'child_of' + WHEN relation_type = 'child_of' THEN 'parent_of' + WHEN relation_type = 'merged_into' THEN 'merged_from' + WHEN relation_type = 'split_from' THEN 'split_into' + ELSE relation_type + END as relation_type, + created_by_user_id, + created_at, + reason +FROM tticket_relations; + +-- 
============================================================================
+-- CALENDAR EVENTS (appointments, deadlines, milestones)
+-- ============================================================================
+CREATE TABLE IF NOT EXISTS tticket_calendar_events (
+    id SERIAL PRIMARY KEY,
+    ticket_id INTEGER NOT NULL REFERENCES tticket_tickets(id) ON DELETE CASCADE,
+
+    -- Event data
+    title VARCHAR(200) NOT NULL,
+    description TEXT,
+    event_type VARCHAR(20) DEFAULT 'appointment' CHECK (event_type IN ('appointment', 'deadline', 'milestone', 'reminder', 'follow_up')),
+
+    -- Scheduling
+    event_date DATE NOT NULL,
+    event_time TIME,
+    duration_minutes INTEGER,                -- Duration in minutes
+    all_day BOOLEAN DEFAULT false,
+
+    -- AI suggestion tracking
+    suggested_by_ai BOOLEAN DEFAULT false,   -- Was this event suggested by AI?
+    ai_confidence DECIMAL(3,2),              -- AI confidence score 0-1
+    ai_source_text TEXT,                     -- Text in which the AI found the date
+
+    -- Status
+    status VARCHAR(20) DEFAULT 'pending' CHECK (status IN ('pending', 'confirmed', 'completed', 'cancelled')),
+
+    -- Metadata
+    created_by_user_id INTEGER,              -- Reference to users.user_id
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP,
+    completed_at TIMESTAMP
+);
+
+-- IF NOT EXISTS on every index keeps the migration idempotent (re-runnable),
+-- matching the CREATE TABLE IF NOT EXISTS statements above.
+CREATE INDEX IF NOT EXISTS idx_tticket_calendar_ticket ON tticket_calendar_events(ticket_id);
+CREATE INDEX IF NOT EXISTS idx_tticket_calendar_date ON tticket_calendar_events(event_date);
+CREATE INDEX IF NOT EXISTS idx_tticket_calendar_type ON tticket_calendar_events(event_type);
+CREATE INDEX IF NOT EXISTS idx_tticket_calendar_status ON tticket_calendar_events(status);
+
+-- ============================================================================
+-- TEMPLATES (reply templates, guides, standard letters)
+-- ============================================================================
+CREATE TABLE IF NOT EXISTS tticket_templates (
+    id SERIAL PRIMARY KEY,
+
+    -- Template metadata
+    name VARCHAR(200) NOT NULL,
+    description TEXT,
+    category VARCHAR(100),                   -- guide, standard_letter, technical, billing, etc.
+
+    -- Template content
+    subject_template VARCHAR(500),           -- Subject line with placeholders
+    body_template TEXT NOT NULL,             -- Body with placeholders
+
+    -- Placeholder documentation
+    available_placeholders TEXT[],           -- e.g. ['{{customer_name}}', '{{ticket_number}}']
+
+    -- Attachments (optional)
+    default_attachments JSONB,               -- Array of file paths/URLs
+
+    -- Settings
+    is_active BOOLEAN DEFAULT true,
+    requires_approval BOOLEAN DEFAULT false, -- Requires approval before sending
+
+    -- Metadata
+    created_by_user_id INTEGER,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP,
+    last_used_at TIMESTAMP,
+    usage_count INTEGER DEFAULT 0
+);
+
+CREATE INDEX IF NOT EXISTS idx_tticket_templates_category ON tticket_templates(category);
+CREATE INDEX IF NOT EXISTS idx_tticket_templates_active ON tticket_templates(is_active);
+
+-- ============================================================================
+-- TEMPLATE USAGE LOG (when templates were used)
+-- ============================================================================
+CREATE TABLE IF NOT EXISTS tticket_template_usage (
+    id SERIAL PRIMARY KEY,
+    template_id INTEGER NOT NULL REFERENCES tticket_templates(id) ON DELETE CASCADE,
+    ticket_id INTEGER NOT NULL REFERENCES tticket_tickets(id) ON DELETE CASCADE,
+    user_id INTEGER,                         -- Reference to users.user_id
+    used_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    was_modified BOOLEAN DEFAULT false       -- Was the template edited before sending?
+);
+
+CREATE INDEX IF NOT EXISTS idx_tticket_template_usage_template ON tticket_template_usage(template_id);
+CREATE INDEX IF NOT EXISTS idx_tticket_template_usage_ticket ON tticket_template_usage(ticket_id);
+
+-- ============================================================================
+-- AI SUGGESTIONS (suggested actions - never applied automatically)
+-- ============================================================================
+CREATE TABLE IF NOT EXISTS tticket_ai_suggestions (
+    id SERIAL PRIMARY KEY,
+    ticket_id INTEGER NOT NULL REFERENCES tticket_tickets(id) ON DELETE CASCADE,
+
+    -- Suggestion data
+    suggestion_type VARCHAR(50) NOT NULL CHECK (suggestion_type IN (
+        'contact_update',   -- Update contact details
+        'new_contact',      -- New contact discovered
+        'category',         -- Suggest a category
+        'tag',              -- Suggest a tag
+        'priority',         -- Suggest a priority
+        'deadline',         -- Suggest a deadline
+        'calendar_event',   -- Suggest a calendar event
+        'template',         -- Suggest a template
+        'merge',            -- Suggest merging with another ticket
+        'related_ticket'    -- Suggest a relation to another ticket
+    )),
+
+    -- Suggestion content
+    suggestion_data JSONB NOT NULL,          -- Structured data describing the suggestion
+    confidence DECIMAL(3,2),                 -- AI confidence 0-1
+    reasoning TEXT,                          -- Why this was suggested
+
+    -- Source
+    source_text TEXT,                        -- Text the AI analysed
+    source_comment_id INTEGER REFERENCES tticket_comments(id) ON DELETE CASCADE,
+
+    -- Status
+    status VARCHAR(20) DEFAULT 'pending' CHECK (status IN ('pending', 'accepted', 'rejected', 'auto_expired')),
+    reviewed_by_user_id INTEGER,             -- Who handled the suggestion
+    reviewed_at TIMESTAMP,
+
+    -- Metadata
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    expires_at TIMESTAMP                     -- Suggestions expire after X days
+);
+
+CREATE INDEX IF NOT EXISTS idx_tticket_ai_suggestions_ticket ON tticket_ai_suggestions(ticket_id);
+CREATE INDEX IF NOT EXISTS idx_tticket_ai_suggestions_type ON tticket_ai_suggestions(suggestion_type);
+CREATE INDEX IF NOT EXISTS idx_tticket_ai_suggestions_status ON tticket_ai_suggestions(status);
+CREATE INDEX IF NOT EXISTS idx_tticket_ai_suggestions_created ON tticket_ai_suggestions(created_at);
+
+-- ============================================================================
+-- EMAIL METADATA (extended for contact identification)
+-- ============================================================================
+CREATE TABLE IF NOT EXISTS tticket_email_metadata (
+    id SERIAL PRIMARY KEY,
+    ticket_id INTEGER NOT NULL REFERENCES tticket_tickets(id) ON DELETE CASCADE,
+
+    -- Email headers
+    message_id VARCHAR(500) UNIQUE,          -- Email Message-ID for threading
+    in_reply_to VARCHAR(500),                -- In-Reply-To header
+    email_references TEXT,                   -- References header (renamed to avoid SQL keyword conflict)
+
+    -- Sender info (from the email)
+    from_email VARCHAR(255) NOT NULL,
+    from_name VARCHAR(255),
+    from_signature TEXT,                     -- Extracted signature
+
+    -- Matched contact (if found)
+    matched_contact_id INTEGER,              -- Reference to contacts.id
+    match_confidence DECIMAL(3,2),           -- How confident the match is
+    match_method VARCHAR(50),                -- email_exact, email_domain, name_similarity, etc.
+
+    -- Suggested contacts (when the match is ambiguous)
+    suggested_contacts JSONB,                -- Array of {contact_id, confidence, reason}
+
+    -- Extracted data (AI analysis)
+    extracted_phone VARCHAR(50),
+    extracted_address TEXT,
+    extracted_company VARCHAR(255),
+    extracted_title VARCHAR(100),
+
+    -- Metadata
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP
+);
+
+CREATE INDEX IF NOT EXISTS idx_tticket_email_ticket ON tticket_email_metadata(ticket_id);
+CREATE INDEX IF NOT EXISTS idx_tticket_email_message_id ON tticket_email_metadata(message_id);
+CREATE INDEX IF NOT EXISTS idx_tticket_email_from ON tticket_email_metadata(from_email);
+
+-- ============================================================================
+-- Add missing columns to the existing tticket_tickets table
+-- ============================================================================
+ALTER TABLE tticket_tickets
+    ADD COLUMN IF NOT EXISTS deadline TIMESTAMP,
+    ADD COLUMN IF NOT EXISTS parent_ticket_id INTEGER REFERENCES tticket_tickets(id) ON DELETE SET NULL,
+    ADD COLUMN IF NOT EXISTS is_merged BOOLEAN DEFAULT false,
+    ADD COLUMN IF NOT EXISTS merged_into_ticket_id INTEGER REFERENCES tticket_tickets(id) ON DELETE SET NULL;
+
+CREATE INDEX IF NOT EXISTS idx_tticket_tickets_deadline ON tticket_tickets(deadline);
+CREATE INDEX IF NOT EXISTS idx_tticket_tickets_parent ON tticket_tickets(parent_ticket_id);
+
+-- ============================================================================
+-- AUDIT LOG for ticket changes (traceability)
+-- ============================================================================
+CREATE TABLE IF NOT EXISTS tticket_audit_log (
+    id SERIAL PRIMARY KEY,
+    ticket_id INTEGER NOT NULL REFERENCES tticket_tickets(id) ON DELETE CASCADE,
+
+    -- What changed
+    action VARCHAR(50) NOT NULL,             -- created, updated, merged, split, status_change, etc.
+    field_name VARCHAR(100),                 -- Which field was changed
+    old_value TEXT,
+    new_value TEXT,
+
+    -- Who and when
+    user_id INTEGER,                         -- Reference to users.user_id
+    performed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+
+    -- Context
+    reason TEXT,
+    metadata JSONB                           -- Additional context
+);
+
+CREATE INDEX IF NOT EXISTS idx_tticket_audit_ticket ON tticket_audit_log(ticket_id);
+CREATE INDEX IF NOT EXISTS idx_tticket_audit_action ON tticket_audit_log(action);
+CREATE INDEX IF NOT EXISTS idx_tticket_audit_performed ON tticket_audit_log(performed_at DESC);
+
+-- ============================================================================
+-- TRIGGERS for audit logging
+-- ============================================================================
+-- Logs changes to key ticket fields on every UPDATE. All comparisons use
+-- IS DISTINCT FROM so NULL transitions (e.g. a deadline being set for the
+-- first time, or a status becoming NULL) are logged as well; plain != would
+-- evaluate to UNKNOWN and silently skip those changes.
+CREATE OR REPLACE FUNCTION tticket_log_ticket_changes()
+RETURNS TRIGGER AS $$
+BEGIN
+    IF TG_OP = 'UPDATE' THEN
+        -- Log status changes
+        IF OLD.status IS DISTINCT FROM NEW.status THEN
+            INSERT INTO tticket_audit_log (ticket_id, action, field_name, old_value, new_value)
+            VALUES (NEW.id, 'status_change', 'status', OLD.status, NEW.status);
+        END IF;
+
+        -- Log priority changes
+        IF OLD.priority IS DISTINCT FROM NEW.priority THEN
+            INSERT INTO tticket_audit_log (ticket_id, action, field_name, old_value, new_value)
+            VALUES (NEW.id, 'priority_change', 'priority', OLD.priority, NEW.priority);
+        END IF;
+
+        -- Log assignment changes
+        IF OLD.assigned_to_user_id IS DISTINCT FROM NEW.assigned_to_user_id THEN
+            INSERT INTO tticket_audit_log (ticket_id, action, field_name, old_value, new_value)
+            VALUES (NEW.id, 'assignment_change', 'assigned_to_user_id',
+                    OLD.assigned_to_user_id::TEXT, NEW.assigned_to_user_id::TEXT);
+        END IF;
+
+        -- Log deadline changes
+        IF OLD.deadline IS DISTINCT FROM NEW.deadline THEN
+            INSERT INTO tticket_audit_log (ticket_id, action, field_name, old_value, new_value)
+            VALUES (NEW.id, 'deadline_change', 'deadline',
+                    OLD.deadline::TEXT, NEW.deadline::TEXT);
+        END IF;
+    END IF;
+
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- DROP + CREATE keeps the trigger idempotent: CREATE TRIGGER has no
+-- OR REPLACE form in PostgreSQL versions before 14, so a bare CREATE
+-- would abort a re-run of this migration.
+DROP TRIGGER IF EXISTS tticket_audit_changes ON tticket_tickets;
+CREATE TRIGGER tticket_audit_changes
+    AFTER UPDATE ON tticket_tickets
+    FOR EACH ROW
+    EXECUTE FUNCTION tticket_log_ticket_changes();
+
+-- ============================================================================
+-- VIEWS for enhanced queries
+-- ============================================================================
+
+-- View for tickets with hierarchy info
+CREATE OR REPLACE VIEW tticket_tickets_with_hierarchy AS
+SELECT
+    t.*,
+    parent.ticket_number as parent_ticket_number,
+    parent.subject as parent_subject,
+    (SELECT COUNT(*) FROM tticket_tickets WHERE parent_ticket_id = t.id) as child_count,
+    (SELECT COUNT(*) FROM tticket_relations WHERE ticket_id = t.id) as relation_count
+FROM tticket_tickets t
+LEFT JOIN tticket_tickets parent ON t.parent_ticket_id = parent.id;
+
+-- View for tickets with pending AI suggestions
+CREATE OR REPLACE VIEW tticket_tickets_with_suggestions AS
+SELECT
+    t.id,
+    t.ticket_number,
+    t.subject,
+    t.status,
+    COUNT(DISTINCT s.id) FILTER (WHERE s.status = 'pending') as pending_suggestions,
+    COUNT(DISTINCT ce.id) FILTER (WHERE ce.suggested_by_ai = true AND ce.status = 'pending') as pending_calendar_suggestions
+FROM tticket_tickets t
+LEFT JOIN tticket_ai_suggestions s ON t.id = s.ticket_id
+LEFT JOIN tticket_calendar_events ce ON t.id = ce.ticket_id
+GROUP BY t.id, t.ticket_number, t.subject, t.status;
+
+-- View for overdue tickets
+CREATE OR REPLACE VIEW tticket_overdue_tickets AS
+SELECT
+    t.id,
+    t.ticket_number,
+    t.subject,
+    t.status,
+    t.priority,
+    t.deadline,
+    t.assigned_to_user_id,
+    (CURRENT_TIMESTAMP - t.deadline) as overdue_duration,
+    c.name as customer_name
+FROM tticket_tickets t
+LEFT JOIN customers c ON t.customer_id = c.id
+WHERE t.deadline < CURRENT_TIMESTAMP
+  AND t.status NOT IN ('resolved', 'closed')
+ORDER BY t.deadline ASC;
+
+-- ============================================================================
+-- Seed data: Default templates
+-- ============================================================================
+-- Each INSERT is guarded with WHERE NOT EXISTS (keyed on name) so re-running
+-- the migration does not create duplicate seed rows: tticket_templates.name
+-- carries no UNIQUE constraint, so ON CONFLICT cannot be used here.
+INSERT INTO tticket_templates
+    (name, description, category, subject_template, body_template, available_placeholders, is_active)
+SELECT
+    'Tak for henvendelse',
+    'Standard svar ved modtagelse af ticket',
+    'standard_letter',
+    'Re: {{ticket_subject}}',
+    'Hej {{contact_name}},
+
+Tak for din henvendelse. Vi har modtaget din sag og den er nu registreret som sag nr. {{ticket_number}}.
+
+Vi vender tilbage hurtigst muligt med svar.
+
+Med venlig hilsen,
+BMC Networks',
+    ARRAY['{{contact_name}}', '{{customer_name}}', '{{ticket_number}}', '{{ticket_subject}}'],
+    true
+WHERE NOT EXISTS (SELECT 1 FROM tticket_templates WHERE name = 'Tak for henvendelse');
+
+INSERT INTO tticket_templates
+    (name, description, category, subject_template, body_template, available_placeholders, is_active)
+SELECT
+    'Løsning: Genstart router',
+    'Guide til genstart af router',
+    'guide',
+    'Re: {{ticket_subject}} - Løsning: Genstart router',
+    'Hej {{contact_name}},
+
+Her er en guide til at genstarte din router:
+
+1. Træk strømkablet ud af routeren
+2. Vent 30 sekunder
+3. Sæt strømkablet i igen
+4. Vent 2-3 minutter mens routeren starter op
+5. Test forbindelsen
+
+Hvis problemet fortsætter, er du velkommen til at svare på denne mail.
+
+Med venlig hilsen,
+BMC Networks
+
+Sag: {{ticket_number}}',
+    ARRAY['{{contact_name}}', '{{customer_name}}', '{{ticket_number}}', '{{ticket_subject}}'],
+    true
+WHERE NOT EXISTS (SELECT 1 FROM tticket_templates WHERE name = 'Løsning: Genstart router');
+
+INSERT INTO tticket_templates
+    (name, description, category, subject_template, body_template, available_placeholders, is_active)
+SELECT
+    'Afslutning af sag',
+    'Besked ved lukning af sag',
+    'standard_letter',
+    'Re: {{ticket_subject}} - Sag lukket',
+    'Hej {{contact_name}},
+
+Vi betragter nu denne sag som løst og lukker den.
+
+Hvis du har yderligere spørgsmål, er du velkommen til at kontakte os.
+
+Med venlig hilsen,
+BMC Networks
+
+Sag: {{ticket_number}}',
+    ARRAY['{{contact_name}}', '{{customer_name}}', '{{ticket_number}}', '{{ticket_subject}}'],
+    true
+WHERE NOT EXISTS (SELECT 1 FROM tticket_templates WHERE name = 'Afslutning af sag');
+
+-- ============================================================================
+-- Comments
+-- ============================================================================
+COMMENT ON TABLE tticket_relations IS 'Ticket relationer: merge, split, parent/child hierarki';
+COMMENT ON TABLE tticket_calendar_events IS 'Kalender events, deadlines og milepæle på tickets';
+COMMENT ON TABLE tticket_templates IS 'Svarskabeloner med placeholders';
+COMMENT ON TABLE tticket_ai_suggestions IS 'AI forslag der kræver manuel godkendelse';
+COMMENT ON TABLE tticket_email_metadata IS 'Email metadata og contact identification data';
+COMMENT ON TABLE tticket_audit_log IS 'Audit trail for alle ticket ændringer';
+
+-- ============================================================================
+-- Migration complete
+-- ============================================================================
+-- This module adds:
+-- ✅ Ticket relations (merge, split, hierarchy)
+-- ✅ Calendar events with AI suggestions
+-- ✅ Templates system
+-- ✅ AI suggestions (suggestions only - never auto-applied)
+-- ✅ Enhanced email/contact matching
+-- ✅ Full audit trail
+-- ============================================================================
diff --git a/requirements.txt b/requirements.txt
index dfc17f3..5652bd1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,21 +6,3 @@ pydantic-settings==2.6.1
 python-dotenv==1.0.1
 python-multipart==0.0.17
 jinja2==3.1.4
-pyjwt==2.9.0
-aiohttp==3.10.10
-
-# Email & Scheduling
-APScheduler==3.10.4
-msal==1.31.1
-
-# Backup & SSH
-paramiko==3.4.0
-
-# AI & Document Processing
-httpx==0.27.2
-PyPDF2==3.0.1
-pdfplumber==0.11.4
-pytesseract==0.3.13
-Pillow==11.0.0
-invoice2data==0.4.4
-pyyaml==6.0.2
diff --git a/static/design_templates/09_horizontal_dark/index.html
b/static/design_templates/09_horizontal_dark/index.html index 61403f7..108e663 100644 --- a/static/design_templates/09_horizontal_dark/index.html +++ b/static/design_templates/09_horizontal_dark/index.html @@ -1,4 +1,4 @@ - +