"""Dashboard API endpoints: aggregated stats, global search, and activity feeds."""

import logging
from typing import Any, Dict, List

from fastapi import APIRouter, HTTPException

from app.core.database import execute_query, execute_query_single

# Module-level logger, named after this module per project convention.
logger = logging.getLogger(__name__)

# Router mounted by the application for all dashboard endpoints.
router = APIRouter()
def _pct_growth(current: float, previous: float) -> float:
    """Return percentage growth from *previous* to *current*, rounded to 1 decimal.

    Returns 100 when there is current activity but no previous baseline,
    and 0 when both values are zero.
    """
    if previous > 0:
        return round(((current - previous) / previous) * 100, 1)
    return 100 if current > 0 else 0


@router.get("/stats", response_model=Dict[str, Any])
async def get_dashboard_stats():
    """
    Get aggregated statistics for the dashboard.

    Returns:
        Dict with customer totals and month-over-month growth, open/urgent
        ticket counts, hardware total, and current-month revenue with trend.

    Raises:
        HTTPException: 500 with the underlying error message if any query fails.
    """
    try:
        logger.info("📊 Fetching dashboard stats...")

        # 1. Customer counts & trends
        logger.info("Fetching customer count...")
        customer_res = execute_query_single(
            "SELECT COUNT(*) as count FROM customers WHERE deleted_at IS NULL"
        )
        customer_count = customer_res['count'] if customer_res else 0

        # New customers this month
        new_customers_res = execute_query_single("""
            SELECT COUNT(*) as count
            FROM customers
            WHERE deleted_at IS NULL
            AND created_at >= DATE_TRUNC('month', CURRENT_DATE)
        """)
        new_customers_this_month = new_customers_res['count'] if new_customers_res else 0

        # Previous month's new customers for trend calculation
        prev_month_customers_res = execute_query_single("""
            SELECT COUNT(*) as count
            FROM customers
            WHERE deleted_at IS NULL
            AND created_at >= DATE_TRUNC('month', CURRENT_DATE - INTERVAL '1 month')
            AND created_at < DATE_TRUNC('month', CURRENT_DATE)
        """)
        prev_month_customers = prev_month_customers_res['count'] if prev_month_customers_res else 0

        customer_growth_pct = _pct_growth(new_customers_this_month, prev_month_customers)

        # 2. Ticket counts (open/in-progress, plus the high-priority subset).
        # NOTE: the previous query also computed a total_count that was never
        # read; it has been dropped.
        logger.info("Fetching ticket stats...")
        ticket_res = execute_query_single("""
            SELECT COUNT(*) FILTER (WHERE status IN ('open', 'in_progress')) as open_count,
                   COUNT(*) FILTER (WHERE priority = 'high' AND status IN ('open', 'in_progress')) as urgent_count
            FROM tticket_tickets
        """)
        ticket_count = ticket_res['open_count'] if ticket_res else 0
        urgent_ticket_count = ticket_res['urgent_count'] if ticket_res else 0

        # 3. Hardware count
        logger.info("Fetching hardware count...")
        hardware_res = execute_query_single("SELECT COUNT(*) as count FROM hardware")
        hardware_count = hardware_res['count'] if hardware_res else 0

        # 4. Revenue (from fixed price billing periods - current month)
        logger.info("Fetching revenue stats...")
        revenue_res = execute_query_single("""
            SELECT COALESCE(SUM(base_amount + COALESCE(overtime_amount, 0)), 0) as total
            FROM fixed_price_billing_periods
            WHERE period_start >= DATE_TRUNC('month', CURRENT_DATE)
            AND period_start < DATE_TRUNC('month', CURRENT_DATE + INTERVAL '1 month')
        """)
        current_revenue = float(revenue_res['total']) if revenue_res and revenue_res['total'] else 0

        # Previous month revenue for trend
        prev_revenue_res = execute_query_single("""
            SELECT COALESCE(SUM(base_amount + COALESCE(overtime_amount, 0)), 0) as total
            FROM fixed_price_billing_periods
            WHERE period_start >= DATE_TRUNC('month', CURRENT_DATE - INTERVAL '1 month')
            AND period_start < DATE_TRUNC('month', CURRENT_DATE)
        """)
        prev_revenue = float(prev_revenue_res['total']) if prev_revenue_res and prev_revenue_res['total'] else 0

        revenue_growth_pct = _pct_growth(current_revenue, prev_revenue)

        logger.info("✅ Dashboard stats fetched successfully")
        return {
            "customers": {
                "total": customer_count,
                "new_this_month": new_customers_this_month,
                "growth_pct": customer_growth_pct,
            },
            "tickets": {
                "open_count": ticket_count,
                "urgent_count": urgent_ticket_count,
            },
            "hardware": {
                "total": hardware_count,
            },
            "revenue": {
                "current_month": current_revenue,
                "growth_pct": revenue_growth_pct,
            },
        }
    except Exception as e:
        logger.error(f"❌ Error fetching dashboard stats: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/search", response_model=Dict[str, List[Any]])
async def global_search(q: str):
    """
    Global search across customers, contacts, and vendors.

    Returns up to 5 matches per entity type. Queries shorter than two
    characters, and any database error, yield empty result lists.
    """
    # Guard clause: very short queries would match nearly everything.
    if not q or len(q) < 2:
        return {"customers": [], "contacts": [], "vendors": []}

    pattern = f"%{q}%"

    try:
        # Customers (soft-deleted rows excluded)
        customer_rows = execute_query("""
            SELECT id, name, email, 'Kunde' as type
            FROM customers
            WHERE deleted_at IS NULL AND (
                name ILIKE %s OR
                email ILIKE %s OR
                cvr_number ILIKE %s OR
                phone ILIKE %s OR
                mobile_phone ILIKE %s
            )
            LIMIT 5
        """, (pattern,) * 5)

        # Contacts (matched on name, email, and phone numbers)
        contact_rows = execute_query("""
            SELECT id, first_name || ' ' || last_name as name, email, 'Kontakt' as type
            FROM contacts
            WHERE first_name ILIKE %s OR
                  last_name ILIKE %s OR
                  email ILIKE %s OR
                  phone ILIKE %s OR
                  mobile ILIKE %s
            LIMIT 5
        """, (pattern,) * 5)

        # Vendors (active only)
        vendor_rows = execute_query("""
            SELECT id, name, email, 'Leverandør' as type
            FROM vendors
            WHERE is_active = true AND (
                name ILIKE %s OR
                email ILIKE %s OR
                cvr_number ILIKE %s OR
                phone ILIKE %s
            )
            LIMIT 5
        """, (pattern,) * 4)

        return {
            "customers": customer_rows or [],
            "contacts": contact_rows or [],
            "vendors": vendor_rows or [],
        }
    except Exception as e:
        logger.error(f"❌ Error performing global search: {e}", exc_info=True)
        return {"customers": [], "contacts": [], "vendors": []}
@router.get("/search/sag", response_model=List[Dict[str, Any]])
async def search_sag(q: str):
    """
    Search for cases (sager) with customer information.

    Matches the case id, title, description, or owning customer's name,
    newest first, capped at 20 rows. Short queries (< 2 chars) and any
    database error return an empty list.
    """
    # Guard clause: skip trivially broad queries.
    if not q or len(q) < 2:
        return []

    pattern = f"%{q}%"

    try:
        # Cases joined with their customer so the UI can show the customer name.
        rows = execute_query("""
            SELECT
                s.id,
                s.titel,
                s.beskrivelse,
                s.status,
                s.created_at,
                s.customer_id,
                c.name as customer_name
            FROM sag_sager s
            LEFT JOIN customers c ON s.customer_id = c.id
            WHERE s.deleted_at IS NULL
            AND (
                CAST(s.id AS TEXT) ILIKE %s OR
                s.titel ILIKE %s OR
                s.beskrivelse ILIKE %s OR
                c.name ILIKE %s
            )
            ORDER BY s.created_at DESC
            LIMIT 20
        """, (pattern,) * 4)

        return rows or []
    except Exception as e:
        logger.error(f"❌ Error searching sager: {e}", exc_info=True)
        return []
@router.get("/live-stats", response_model=Dict[str, Any])
async def get_live_stats():
    """
    Get live statistics for the three live boxes: Sales, Support, Økonomi.

    Most values are placeholders until the backing tables exist; the live
    customer count is surfaced as demo data in the sales box. Always
    returns a complete (zeroed) payload, even on database errors.
    """
    try:
        # Sales Stats (placeholder - replace with real data when tables exist)
        sales_stats = {"active_orders": 0, "monthly_sales": 0, "open_quotes": 0}

        # Support Stats (placeholder)
        support_stats = {"open_tickets": 0, "avg_response_time": 0, "today_tickets": 0}

        # Finance Stats (placeholder)
        finance_stats = {
            "unpaid_invoices_count": 0,
            "unpaid_invoices_amount": 0,
            "overdue_invoices": 0,
            "today_payments": 0,
        }

        # Try to get real customer count as a demo.
        # BUGFIX: this previously used execute_query, which returns a list of
        # rows; calling .get() on a list raised AttributeError that the bare
        # except silently hid, so the demo value was never populated.
        # execute_query_single returns one row dict, matching the .get() call.
        try:
            customer_row = execute_query_single(
                "SELECT COUNT(*) as count FROM customers WHERE deleted_at IS NULL"
            )
            sales_stats["active_orders"] = customer_row.get('count', 0) if customer_row else 0
        except Exception:
            # Best-effort demo value only; keep the zeroed placeholder.
            logger.debug("Could not fetch demo customer count", exc_info=True)

        return {
            "sales": sales_stats,
            "support": support_stats,
            "finance": finance_stats,
        }
    except Exception as e:
        logger.error(f"❌ Error fetching live stats: {e}", exc_info=True)
        return {
            "sales": {"active_orders": 0, "monthly_sales": 0, "open_quotes": 0},
            "support": {"open_tickets": 0, "avg_response_time": 0, "today_tickets": 0},
            "finance": {"unpaid_invoices_count": 0, "unpaid_invoices_amount": 0, "overdue_invoices": 0, "today_payments": 0},
        }
@router.get("/reminders/upcoming", response_model=List[Dict[str, Any]])
async def get_upcoming_reminders():
    """
    Get upcoming reminders for the dashboard calendar widget.

    Fetches up to 10 active, non-deleted reminders whose next check date
    falls within the next 7 days, soonest first, joined with their case
    title. Returns an empty list on any database error.
    """
    try:
        # Active reminders due within a week, with the owning case's title.
        rows = execute_query("""
            SELECT
                r.id,
                r.sag_id,
                r.title,
                r.next_check_at as due_date,
                r.priority,
                s.titel as case_title
            FROM sag_reminders r
            LEFT JOIN sag_sager s ON r.sag_id = s.id
            WHERE r.is_active = true
            AND r.deleted_at IS NULL
            AND r.next_check_at IS NOT NULL
            AND r.next_check_at <= CURRENT_DATE + INTERVAL '7 days'
            ORDER BY r.next_check_at ASC
            LIMIT 10
        """)

        return rows or []
    except Exception as e:
        logger.error(f"❌ Error fetching upcoming reminders: {e}", exc_info=True)
        return []
@router.get("/recent-activity", response_model=List[Dict[str, Any]])
async def get_recent_activity():
    """
    Get recent activity across the system for the dashboard feed.

    Merges the 5 most recent customers, tickets, and cases, sorts the
    combined list by creation time (newest first), and returns at most
    15 entries. Returns an empty list on any database error.
    """
    try:
        activities = []

        # Recent customers
        recent_customers = execute_query("""
            SELECT id, name, created_at, 'customer' as activity_type, 'bi-building' as icon, 'primary' as color
            FROM customers
            WHERE deleted_at IS NULL
            ORDER BY created_at DESC
            LIMIT 5
        """)

        # Recent tickets
        recent_tickets = execute_query("""
            SELECT id, subject as name, created_at, 'ticket' as activity_type, 'bi-ticket' as icon, 'warning' as color
            FROM tticket_tickets
            ORDER BY created_at DESC
            LIMIT 5
        """)

        # Recent cases (sager)
        recent_cases = execute_query("""
            SELECT id, titel as name, created_at, 'case' as activity_type, 'bi-folder' as icon, 'info' as color
            FROM sag_sager
            WHERE deleted_at IS NULL
            ORDER BY created_at DESC
            LIMIT 5
        """)

        # Combine all activities
        for rows in (recent_customers, recent_tickets, recent_cases):
            if rows:
                activities.extend(rows)

        # Sort newest-first and limit. ROBUSTNESS: the key is a
        # (has_timestamp, timestamp) tuple so rows with a NULL created_at
        # sort last instead of raising TypeError when a datetime is compared
        # against the previous '' string fallback.
        activities.sort(
            key=lambda row: (row.get('created_at') is not None, row.get('created_at')),
            reverse=True,
        )

        return activities[:15]
    except Exception as e:
        logger.error(f"❌ Error fetching recent activity: {e}", exc_info=True)
        return []