diff --git a/.env.example b/.env.example
index 51fa1d6..1be8381 100644
--- a/.env.example
+++ b/.env.example
@@ -22,6 +22,14 @@ ENABLE_RELOAD=false # Set to true for live code reload (causes log spam in Dock
SECRET_KEY=change-this-in-production-use-random-string
CORS_ORIGINS=http://localhost:8000,http://localhost:3000
+# Shadow Admin (Emergency Access)
+SHADOW_ADMIN_ENABLED=false
+SHADOW_ADMIN_USERNAME=shadowadmin
+SHADOW_ADMIN_PASSWORD=
+SHADOW_ADMIN_TOTP_SECRET=
+SHADOW_ADMIN_EMAIL=shadowadmin@bmcnetworks.dk
+SHADOW_ADMIN_FULL_NAME=Shadow Administrator
+
# =====================================================
# LOGGING
# =====================================================
@@ -45,6 +53,16 @@ ECONOMIC_AGREEMENT_GRANT_TOKEN=your_agreement_grant_token_here
# 🚨 SAFETY SWITCHES - Beskytter mod utilsigtede ændringer
ECONOMIC_READ_ONLY=true # Set to false ONLY after testing
ECONOMIC_DRY_RUN=true # Set to false ONLY when ready for production writes
+# =====================================================
+# Nextcloud Integration (Optional)
+# =====================================================
+NEXTCLOUD_READ_ONLY=true
+NEXTCLOUD_DRY_RUN=true
+NEXTCLOUD_TIMEOUT_SECONDS=15
+NEXTCLOUD_CACHE_TTL_SECONDS=300
+# Generate a Fernet key: python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"
+NEXTCLOUD_ENCRYPTION_KEY=
+
# =====================================================
# vTiger Cloud Integration (Required for Subscriptions)
# =====================================================
diff --git a/.github/agents/Planning with subagents.agent.md b/.github/agents/Planning with subagents.agent.md
index 82ea150..ae673ed 100644
--- a/.github/agents/Planning with subagents.agent.md
+++ b/.github/agents/Planning with subagents.agent.md
@@ -1,5 +1,28 @@
---
-description: 'Describe what this custom agent does and when to use it.'
-tools: []
----
-Define what this custom agent accomplishes for the user, when to use it, and the edges it won't cross. Specify its ideal inputs/outputs, the tools it may call, and how it reports progress or asks for help.
\ No newline at end of file
+name: hub-sales-and-aggregation-agent
+
+description: "Planlægger og specificerer varekøb og salg i BMC Hub som en simpel sag-baseret funktion, inklusiv aggregering af varer og tid op gennem sagstræet."
+
+scope:
+  - Sag-modul
+  - Vare- og ydelsessalg
+  - Aggregering i sagstræ
+
+constraints:
+  - Ingen ERP-kompleksitet
+  - Ingen lagerstyring
+  - Ingen selvstændig ordre-entitet i v1
+  - Alt salg er knyttet til en Sag
+  - Aggregering er læsevisning, ikke datakopiering
+
+inputs:
+ - Eksisterende Sag-model med parent/child-relationer
+ - Eksisterende Tidsmodul
+ - Varekatalog (internt og leverandørvarer)
+
+outputs:
+ - Datamodelforslag
+ - UI-struktur for Varer-fanen
+ - Aggregeringslogik
+ - Faktureringsforberedelse
+---
\ No newline at end of file
diff --git a/NEXTCLOUD_MODULE_PLAN.md b/NEXTCLOUD_MODULE_PLAN.md
new file mode 100644
index 0000000..3bb7170
--- /dev/null
+++ b/NEXTCLOUD_MODULE_PLAN.md
@@ -0,0 +1,241 @@
+# Nextcloud-modul – BMC Hub
+
+## 1. Formål og rolle i Hubben
+Nextcloud-modulet gør det muligt at sælge, administrere og supportere kunders Nextcloud-løsninger direkte i Hubben.
+
+Hubben er styrende system. Nextcloud er et eksternt drifts- og brugersystem, som Hubben taler med direkte (ingen gateway).
+
+## 2. Aktivering af modulet
+Modulet er kontekstbaseret og aktiveres via tag:
+
+- Når Firma, Kontakt eller Sag har tagget `nextcloud`, vises en Nextcloud-fane i UI.
+- Uden tag vises ingen Nextcloud-funktioner.
+
+## 3. Kunde – Nextcloud-fane (overblik)
+Fanen indeholder:
+1. Drifts- og systeminformation (read-only)
+2. Handlinger relateret til brugere
+3. Historik (hvad Hubben har gjort mod instansen)
+
+Fanen må aldrig blokere kundevisningen, selv hvis Nextcloud er utilgængelig.
+
+## 4. Systemstatus og driftsinformation
+**Datakilde**: Nextcloud Serverinfo API
+
+- `GET /ocs/v2.php/apps/serverinfo/api/v1/info`
+- Direkte kald til Nextcloud
+- Autentificeret
+- Read-only
+- Cached i DB med global TTL = 5 min
+
+### 4.1 Overblik
+Vises øverst i fanen:
+- Instans-status (Online / Offline / Ukendt)
+- Sidst opdateret
+- Nextcloud-version
+- PHP-version
+- Database-type og -version
+
+### 4.2 Ressourceforbrug
+Vises som simple værdier/badges:
+- CPU
+- Load average (1 / 5 / 15 min)
+- Antal kerner
+- RAM (total + brug i %)
+- Disk (total + brug i % + fri plads)
+
+Ved kritiske værdier vises advarsel.
+
+### 4.3 Nextcloud-nøgletal
+Hvor API tillader det:
+- Antal brugere
+- Aktive brugere
+- Antal filer
+- Samlet datamængde
+- Status på: database, cache/Redis, cron/background jobs
+
+## 5. Handlinger i Nextcloud-fanen
+Knapper:
+- Tilføj ny bruger
+- Reset password
+- Luk bruger
+- Gensend guide
+
+Alle handlinger:
+- udføres direkte mod Nextcloud
+- logges i Hub
+- kan spores i historik
+- kan knyttes til sag
+
+## 6. Tilføj ny bruger (primær funktion)
+
+### 6.1 Start af flow
+- Ved "Tilføj ny bruger" oprettes automatisk en ny Sag
+- Sagstype: **Nextcloud – Brugeroprettelse**
+- Ingen Nextcloud-handling udføres uden en sag
+
+### 6.2 Sag – felter og logik
+**Firma**
+- Vælg eksisterende firma
+- Hub slår tilknyttet Nextcloud-instans op i DB og vælger automatisk
+- Instans kan ikke ændres manuelt
+
+**Kontaktperson**
+- Vælg eksisterende kontakt eller opret ny
+- Bruges til kommunikation, velkomstmail og ejerskab af sag
+
+**Grupper**
+- Multiselect
+- Hentes live fra Nextcloud (OCS groups API)
+- Kun gyldige grupper kan vælges
+
+**Velkomstbrev**
+- Checkbox: skal velkomstbrev sendes?
+  - Hvis ja: bruger oprettes, password genereres, guide + logininfo sendes
+  - Hvis nej: bruger oprettes uden mail, sag forbliver åben til manuel opfølgning
+
+## 7. Øvrige handlinger
+
+**Reset password**
+- Vælg eksisterende Nextcloud-bruger
+- Nyt password genereres
+- Valg: send mail til kontakt eller kun log i sag
+
+**Luk bruger**
+- Bruger deaktiveres i Nextcloud
+- Data bevares
+- Kræver eksplicit bekræftelse
+- Logges i sag og historik
+
+**Gensend guide**
+- Gensender velkomstmail og guide
+- Password ændres ikke
+- Kan udføres uden ny sag, men logges
+
+## 8. Arkitekturprincipper
+- Hub ejer: firma, kontakt, sag, historik
+- Nextcloud ejer: brugere, filer, rettigheder
+- Integration er direkte (ingen gateway)
+- Per-instans auth ligger krypteret i DB
+- Global DB-cache (5 min) for read-only statusdata
+
+## 9. Logning og sporbarhed
+For hver handling gemmes:
+- tidspunkt
+- handlingstype
+- udførende bruger
+- mål (bruger/instans)
+- teknisk resultat (success/fejl)
+
+Audit-log er **separat pr. kunde**, med **manuel retention** og **tidsbaseret partitionering**.
+
+## 10. Afgrænsninger (v1)
+Modulet indeholder ikke:
+- ændring af server-konfiguration
+- håndtering af apps
+- ændring af kvoter
+- direkte admin-login
+
+## 11. Klar til udvidelse
+Modulet er designet til senere udvidelser:
+- overvågning → automatisk sag
+- historiske grafer
+- offboarding-flows
+- kvote-styring
+- SLA-rapportering
+
+## 12. Sikkerhed og drift
+- Credentials krypteres med `settings.NEXTCLOUD_ENCRYPTION_KEY`
+- Safety switches: `NEXTCLOUD_READ_ONLY` og `NEXTCLOUD_DRY_RUN` (default true)
+- Ingen credentials i UI eller logs
+- TLS-only base URLs
+
+## 13. Backend-struktur (plan)
+Placering: `app/modules/nextcloud/`
+- `backend/router.py`
+- `backend/service.py`
+- `backend/models.py`
+
+Alle eksterne kald går via service-laget, som:
+- loader instans fra DB
+- dekrypterer credentials
+- bruger global DB-cache (5 min)
+- skriver audit-log pr. kunde
+
+## 14. Database-model (plan)
+
+### `nextcloud_instances`
+- `customer_id` FK
+- `base_url`
+- `auth_type`
+- `username`
+- `password_encrypted`
+- `is_enabled`, `disabled_at`
+- `created_at`, `updated_at`, `deleted_at`
+
+### `nextcloud_cache`
+- `cache_key` (PK)
+- `payload` (JSONB)
+- `expires_at`
+- `created_at`
+
+### `nextcloud_audit_log`
+- `customer_id`, `instance_id`
+- `event_type`
+- `request_meta`, `response_meta`
+- `actor_user_id`
+- `created_at`
+
+Partitionering: månedlig range på `created_at`. Retention er manuel via admin-UI.
+
+## 15. API-endpoints (v1)
+
+### Instanser (admin)
+- `GET /api/v1/nextcloud/instances`
+- `POST /api/v1/nextcloud/instances`
+- `PATCH /api/v1/nextcloud/instances/{id}`
+- `POST /api/v1/nextcloud/instances/{id}/disable`
+- `POST /api/v1/nextcloud/instances/{id}/enable`
+- `POST /api/v1/nextcloud/instances/{id}/rotate-credentials`
+
+### Status + grupper
+- `GET /api/v1/nextcloud/instances/{id}/status`
+- `GET /api/v1/nextcloud/instances/{id}/groups`
+
+### Brugere (handlinger)
+- `POST /api/v1/nextcloud/instances/{id}/users` (opret)
+- `POST /api/v1/nextcloud/instances/{id}/users/{uid}/reset-password`
+- `POST /api/v1/nextcloud/instances/{id}/users/{uid}/disable`
+- `POST /api/v1/nextcloud/instances/{id}/users/{uid}/resend-guide`
+
+Alle endpoints skal:
+- validere `is_enabled = true`
+- håndhæve kundeejerskab
+- skrive audit-log
+- respektere `READ_ONLY`/`DRY_RUN`
+
+## 16. UI-krav (plan)
+Nextcloud-fanen i kundevisning skal vise:
+- Systemstatus
+- Nøgletal
+- Handlinger
+- Historik
+
+Admin-UI (Settings) skal give:
+- Liste over instanser
+- Enable/disable
+- Rotation af credentials
+- Retentionstyring af audit-log pr. kunde
+
+## 17. Migrations (plan)
+1. `migrations/0XX_nextcloud_instances.sql`
+2. `migrations/0XX_nextcloud_cache.sql`
+3. `migrations/0XX_nextcloud_audit_log.sql` (partitioneret)
+
+## 18. Næste skridt
+1. Opret migrationsfiler
+2. Implementer kryptering helper
+3. Implementer service-lag
+4. Implementer routere og schemas
+5. Implementer UI-fanen + admin-UI
+6. Implementer audit-log viewer/export
\ No newline at end of file
diff --git a/app/apply_migration_084.py b/app/apply_migration_084.py
new file mode 100644
index 0000000..8251528
--- /dev/null
+++ b/app/apply_migration_084.py
@@ -0,0 +1,50 @@
+from app.core.database import get_db_connection, release_db_connection, init_db
+import logging
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+def run_migration():
+ init_db() # Initialize the pool
+ conn = get_db_connection()
+ try:
+ with conn.cursor() as cursor:
+ # Files linked to a Case
+ cursor.execute("""
+ CREATE TABLE IF NOT EXISTS sag_files (
+ id SERIAL PRIMARY KEY,
+ sag_id INTEGER NOT NULL REFERENCES sag_sager(id) ON DELETE CASCADE,
+ filename VARCHAR(255) NOT NULL,
+ content_type VARCHAR(100),
+ size_bytes INTEGER,
+ stored_name TEXT NOT NULL,
+ uploaded_by_user_id INTEGER REFERENCES users(user_id) ON DELETE SET NULL,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ );
+ """)
+
+ cursor.execute("CREATE INDEX IF NOT EXISTS idx_sag_files_sag_id ON sag_files(sag_id);")
+ cursor.execute("COMMENT ON TABLE sag_files IS 'Files uploaded directly to the Case.';")
+
+ # Emails linked to a Case (Many-to-Many)
+ cursor.execute("""
+ CREATE TABLE IF NOT EXISTS sag_emails (
+ sag_id INTEGER REFERENCES sag_sager(id) ON DELETE CASCADE,
+ email_id INTEGER REFERENCES email_messages(id) ON DELETE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT NOW(),
+ PRIMARY KEY (sag_id, email_id)
+ );
+ """)
+
+ cursor.execute("COMMENT ON TABLE sag_emails IS 'Emails linked to the Case.';")
+
+ conn.commit()
+ logger.info("Migration 084 applied successfully.")
+ except Exception as e:
+ conn.rollback()
+ logger.error(f"Migration failed: {e}")
+ finally:
+ release_db_connection(conn)
+
+if __name__ == "__main__":
+ run_migration()
diff --git a/app/apply_migration_085.py b/app/apply_migration_085.py
new file mode 100644
index 0000000..d0fe79f
--- /dev/null
+++ b/app/apply_migration_085.py
@@ -0,0 +1,69 @@
+import logging
+import os
+import sys
+
+# Ensure we can import app modules
+sys.path.append("/app")
+
+from app.core.database import execute_query, init_db
+
+# Setup logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+SQL_MIGRATION = """
+CREATE TABLE IF NOT EXISTS sag_solutions (
+ id SERIAL PRIMARY KEY,
+ sag_id INTEGER NOT NULL REFERENCES sag_sager(id) ON DELETE CASCADE,
+ title VARCHAR(255) NOT NULL,
+ description TEXT,
+ solution_type VARCHAR(50),
+ result VARCHAR(50),
+ created_by_user_id INTEGER,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT uq_sag_solutions_sag_id UNIQUE (sag_id)
+);
+
+ALTER TABLE tmodule_times ADD COLUMN IF NOT EXISTS solution_id INTEGER REFERENCES sag_solutions(id) ON DELETE SET NULL;
+
+ALTER TABLE tmodule_times ADD COLUMN IF NOT EXISTS sag_id INTEGER REFERENCES sag_sager(id) ON DELETE SET NULL;
+
+ALTER TABLE tmodule_times ALTER COLUMN vtiger_id DROP NOT NULL;
+
+ALTER TABLE tmodule_times ALTER COLUMN case_id DROP NOT NULL;
+
+CREATE INDEX IF NOT EXISTS idx_sag_solutions_sag_id ON sag_solutions(sag_id);
+
+CREATE INDEX IF NOT EXISTS idx_tmodule_times_solution_id ON tmodule_times(solution_id);
+
+CREATE INDEX IF NOT EXISTS idx_tmodule_times_sag_id ON tmodule_times(sag_id);
+"""
+
+def run_migration():
+ logger.info("Initializing DB connection...")
+ try:
+ init_db()
+ except Exception as e:
+ logger.error(f"Failed to init db: {e}")
+ return
+
+ logger.info("Applying migration 085...")
+
+ commands = [cmd.strip() for cmd in SQL_MIGRATION.split(";") if cmd.strip()]
+
+ for cmd in commands:
+ # Skip empty lines or pure comments
+ if not cmd or cmd.startswith("--"):
+ continue
+
+ logger.info(f"Executing: {cmd[:50]}...")
+ try:
+ execute_query(cmd, ())
+ except Exception as e:
+ logger.warning(f"Error executing command: {e}")
+
+    logger.info("✅ Migration applied successfully")
+
+if __name__ == "__main__":
+ run_migration()
diff --git a/app/auth/backend/admin.py b/app/auth/backend/admin.py
new file mode 100644
index 0000000..77c65af
--- /dev/null
+++ b/app/auth/backend/admin.py
@@ -0,0 +1,150 @@
+"""
+Auth Admin API - Users, Groups, Permissions management
+"""
+from fastapi import APIRouter, HTTPException, status, Depends
+from app.core.auth_dependencies import require_superadmin
+from app.core.auth_service import AuthService
+from app.core.database import execute_query, execute_query_single, execute_insert, execute_update
+from app.models.schemas import UserAdminCreate, UserGroupsUpdate, GroupCreate, GroupPermissionsUpdate
+import logging
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter()
+
+
+@router.get("/admin/users", dependencies=[Depends(require_superadmin)])
+async def list_users():
+ users = execute_query(
+ """
+ SELECT u.user_id, u.username, u.email, u.full_name,
+ u.is_active, u.is_superadmin, u.is_2fa_enabled,
+ COALESCE(array_remove(array_agg(g.name), NULL), ARRAY[]::varchar[]) AS groups
+ FROM users u
+ LEFT JOIN user_groups ug ON u.user_id = ug.user_id
+ LEFT JOIN groups g ON ug.group_id = g.id
+ GROUP BY u.user_id
+ ORDER BY u.user_id
+ """
+ )
+ return users
+
+
+@router.post("/admin/users", status_code=status.HTTP_201_CREATED, dependencies=[Depends(require_superadmin)])
+async def create_user(payload: UserAdminCreate):
+ existing = execute_query_single(
+ "SELECT user_id FROM users WHERE username = %s OR email = %s",
+ (payload.username, payload.email)
+ )
+ if existing:
+ raise HTTPException(
+ status_code=status.HTTP_409_CONFLICT,
+ detail="Username or email already exists"
+ )
+
+ password_hash = AuthService.hash_password(payload.password)
+ user_id = execute_insert(
+ """
+ INSERT INTO users (username, email, password_hash, full_name, is_superadmin, is_active)
+ VALUES (%s, %s, %s, %s, %s, %s) RETURNING user_id
+ """,
+ (payload.username, payload.email, password_hash, payload.full_name, payload.is_superadmin, payload.is_active)
+ )
+
+ if payload.group_ids:
+ for group_id in payload.group_ids:
+ execute_insert(
+ """
+ INSERT INTO user_groups (user_id, group_id)
+ VALUES (%s, %s) ON CONFLICT DO NOTHING
+ """,
+ (user_id, group_id)
+ )
+
+    logger.info("✅ User created via admin: %s (ID: %s)", payload.username, user_id)
+ return {"user_id": user_id}
+
+
+@router.put("/admin/users/{user_id}/groups", dependencies=[Depends(require_superadmin)])
+async def update_user_groups(user_id: int, payload: UserGroupsUpdate):
+ user = execute_query_single("SELECT user_id FROM users WHERE user_id = %s", (user_id,))
+ if not user:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
+
+ execute_update("DELETE FROM user_groups WHERE user_id = %s", (user_id,))
+
+ for group_id in payload.group_ids:
+ execute_insert(
+ """
+ INSERT INTO user_groups (user_id, group_id)
+ VALUES (%s, %s) ON CONFLICT DO NOTHING
+ """,
+ (user_id, group_id)
+ )
+
+ return {"message": "Groups updated"}
+
+
+@router.get("/admin/groups", dependencies=[Depends(require_superadmin)])
+async def list_groups():
+ groups = execute_query(
+ """
+ SELECT g.id, g.name, g.description,
+ COALESCE(array_remove(array_agg(p.code), NULL), ARRAY[]::varchar[]) AS permissions
+ FROM groups g
+ LEFT JOIN group_permissions gp ON g.id = gp.group_id
+ LEFT JOIN permissions p ON gp.permission_id = p.id
+ GROUP BY g.id
+ ORDER BY g.id
+ """
+ )
+ return groups
+
+
+@router.post("/admin/groups", status_code=status.HTTP_201_CREATED, dependencies=[Depends(require_superadmin)])
+async def create_group(payload: GroupCreate):
+ existing = execute_query_single("SELECT id FROM groups WHERE name = %s", (payload.name,))
+ if existing:
+ raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Group already exists")
+
+ group_id = execute_insert(
+ """
+ INSERT INTO groups (name, description)
+ VALUES (%s, %s) RETURNING id
+ """,
+ (payload.name, payload.description)
+ )
+
+ return {"group_id": group_id}
+
+
+@router.put("/admin/groups/{group_id}/permissions", dependencies=[Depends(require_superadmin)])
+async def update_group_permissions(group_id: int, payload: GroupPermissionsUpdate):
+ group = execute_query_single("SELECT id FROM groups WHERE id = %s", (group_id,))
+ if not group:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Group not found")
+
+ execute_update("DELETE FROM group_permissions WHERE group_id = %s", (group_id,))
+
+ for permission_id in payload.permission_ids:
+ execute_insert(
+ """
+ INSERT INTO group_permissions (group_id, permission_id)
+ VALUES (%s, %s) ON CONFLICT DO NOTHING
+ """,
+ (group_id, permission_id)
+ )
+
+ return {"message": "Permissions updated"}
+
+
+@router.get("/admin/permissions", dependencies=[Depends(require_superadmin)])
+async def list_permissions():
+ permissions = execute_query(
+ """
+ SELECT id, code, description, category
+ FROM permissions
+ ORDER BY category, code
+ """
+ )
+ return permissions
diff --git a/app/auth/backend/router.py b/app/auth/backend/router.py
index a8ed06d..8e8aa74 100644
--- a/app/auth/backend/router.py
+++ b/app/auth/backend/router.py
@@ -1,8 +1,9 @@
"""
Auth API Router - Login, Logout, Me endpoints
"""
-from fastapi import APIRouter, HTTPException, status, Request, Depends
+from fastapi import APIRouter, HTTPException, status, Request, Depends, Response
from pydantic import BaseModel
+from typing import Optional
from app.core.auth_service import AuthService
from app.core.auth_dependencies import get_current_user
import logging
@@ -15,6 +16,7 @@ router = APIRouter()
class LoginRequest(BaseModel):
username: str
password: str
+ otp_code: Optional[str] = None
class LoginResponse(BaseModel):
@@ -27,20 +29,32 @@ class LogoutRequest(BaseModel):
token_jti: str
+class TwoFactorCodeRequest(BaseModel):
+ otp_code: str
+
+
@router.post("/login", response_model=LoginResponse)
-async def login(request: Request, credentials: LoginRequest):
+async def login(request: Request, credentials: LoginRequest, response: Response):
"""
Authenticate user and return JWT token
"""
ip_address = request.client.host if request.client else None
# Authenticate user
- user = AuthService.authenticate_user(
+ user, error_detail = AuthService.authenticate_user(
username=credentials.username,
password=credentials.password,
- ip_address=ip_address
+ ip_address=ip_address,
+ otp_code=credentials.otp_code
)
-
+
+ if error_detail:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=error_detail,
+ headers={"WWW-Authenticate": "Bearer"},
+ )
+
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
@@ -52,9 +66,18 @@ async def login(request: Request, credentials: LoginRequest):
access_token = AuthService.create_access_token(
user_id=user['user_id'],
username=user['username'],
- is_superadmin=user['is_superadmin']
+ is_superadmin=user['is_superadmin'],
+ is_shadow_admin=user.get('is_shadow_admin', False)
)
+ response.set_cookie(
+ key="access_token",
+ value=access_token,
+ httponly=True,
+ samesite="lax",
+ secure=False
+ )
+
return LoginResponse(
access_token=access_token,
user=user
@@ -62,12 +85,22 @@ async def login(request: Request, credentials: LoginRequest):
@router.post("/logout")
-async def logout(request: LogoutRequest, current_user: dict = Depends(get_current_user)):
+async def logout(
+ request: LogoutRequest,
+ response: Response,
+ current_user: dict = Depends(get_current_user)
+):
"""
Revoke JWT token (logout)
"""
- AuthService.revoke_token(request.token_jti, current_user['id'])
+ AuthService.revoke_token(
+ request.token_jti,
+ current_user['id'],
+ current_user.get('is_shadow_admin', False)
+ )
+ response.delete_cookie("access_token")
+
return {"message": "Successfully logged out"}
@@ -82,5 +115,75 @@ async def get_me(current_user: dict = Depends(get_current_user)):
"email": current_user['email'],
"full_name": current_user['full_name'],
"is_superadmin": current_user['is_superadmin'],
+ "is_2fa_enabled": current_user.get('is_2fa_enabled', False),
"permissions": current_user['permissions']
}
+
+
+@router.post("/2fa/setup")
+async def setup_2fa(current_user: dict = Depends(get_current_user)):
+ """Generate and store TOTP secret (requires verification to enable)"""
+ if current_user.get("is_shadow_admin"):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Shadow admin cannot configure 2FA",
+ )
+
+ result = AuthService.setup_user_2fa(
+ user_id=current_user["id"],
+ username=current_user["username"]
+ )
+
+ return result
+
+
+@router.post("/2fa/enable")
+async def enable_2fa(
+ request: TwoFactorCodeRequest,
+ current_user: dict = Depends(get_current_user)
+):
+ """Enable 2FA after verifying the provided code"""
+ if current_user.get("is_shadow_admin"):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Shadow admin cannot configure 2FA",
+ )
+
+ ok = AuthService.enable_user_2fa(
+ user_id=current_user["id"],
+ otp_code=request.otp_code
+ )
+
+ if not ok:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Invalid 2FA code or missing setup",
+ )
+
+ return {"message": "2FA enabled"}
+
+
+@router.post("/2fa/disable")
+async def disable_2fa(
+ request: TwoFactorCodeRequest,
+ current_user: dict = Depends(get_current_user)
+):
+ """Disable 2FA after verifying the provided code"""
+ if current_user.get("is_shadow_admin"):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Shadow admin cannot configure 2FA",
+ )
+
+ ok = AuthService.disable_user_2fa(
+ user_id=current_user["id"],
+ otp_code=request.otp_code
+ )
+
+ if not ok:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Invalid 2FA code or missing setup",
+ )
+
+ return {"message": "2FA disabled"}
diff --git a/app/auth/frontend/login.html b/app/auth/frontend/login.html
index 226a85d..b1865f1 100644
--- a/app/auth/frontend/login.html
+++ b/app/auth/frontend/login.html
@@ -38,6 +38,18 @@
required
>
+
+
@@ -80,6 +92,7 @@ document.getElementById('loginForm').addEventListener('submit', async (e) => {
const username = document.getElementById('username').value;
const password = document.getElementById('password').value;
+ const otp_code = document.getElementById('otp_code').value;
const errorMessage = document.getElementById('errorMessage');
const errorText = document.getElementById('errorText');
const submitBtn = e.target.querySelector('button[type="submit"]');
@@ -97,7 +110,7 @@ document.getElementById('loginForm').addEventListener('submit', async (e) => {
headers: {
'Content-Type': 'application/json'
},
- body: JSON.stringify({ username, password })
+ body: JSON.stringify({ username, password, otp_code })
});
const data = await response.json();
diff --git a/app/contacts/backend/router.py b/app/contacts/backend/router.py
index 96ddb52..a6fd305 100644
--- a/app/contacts/backend/router.py
+++ b/app/contacts/backend/router.py
@@ -148,11 +148,13 @@ async def get_contact(contact_id: int):
FROM contacts
WHERE id = %s
"""
- contact = execute_query(contact_query, (contact_id,))
+ contact_result = execute_query(contact_query, (contact_id,))
- if not contact:
+ if not contact_result:
raise HTTPException(status_code=404, detail="Contact not found")
+ contact = contact_result[0]
+
# Get linked companies
companies_query = """
SELECT
@@ -163,7 +165,7 @@ async def get_contact(contact_id: int):
WHERE cc.contact_id = %s
ORDER BY cc.is_primary DESC, cu.name
"""
- companies = execute_query_single(companies_query, (contact_id,)) # Default is fetchall
+ companies = execute_query(companies_query, (contact_id,))
contact['companies'] = companies or []
return contact
diff --git a/app/core/auth_dependencies.py b/app/core/auth_dependencies.py
index 84f4bea..53f57ff 100644
--- a/app/core/auth_dependencies.py
+++ b/app/core/auth_dependencies.py
@@ -6,16 +6,18 @@ from fastapi import Depends, HTTPException, status, Request
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from typing import Optional
from app.core.auth_service import AuthService
+from app.core.config import settings
+from app.core.database import execute_query_single
import logging
logger = logging.getLogger(__name__)
-security = HTTPBearer()
+security = HTTPBearer(auto_error=False)
async def get_current_user(
request: Request,
- credentials: HTTPAuthorizationCredentials = Depends(security)
+ credentials: Optional[HTTPAuthorizationCredentials] = Depends(security)
) -> dict:
"""
Dependency to get current authenticated user from JWT token
@@ -25,7 +27,13 @@ async def get_current_user(
async def my_endpoint(current_user: dict = Depends(get_current_user)):
...
"""
- token = credentials.credentials
+ token = credentials.credentials if credentials else request.cookies.get("access_token")
+ if not token:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Not authenticated",
+ headers={"WWW-Authenticate": "Bearer"},
+ )
# Verify token
payload = AuthService.verify_token(token)
@@ -41,14 +49,27 @@ async def get_current_user(
user_id = int(payload.get("sub"))
username = payload.get("username")
is_superadmin = payload.get("is_superadmin", False)
+ is_shadow_admin = payload.get("shadow_admin", False)
# Add IP address to user info
ip_address = request.client.host if request.client else None
+ if is_shadow_admin:
+ return {
+ "id": user_id,
+ "username": username,
+ "email": settings.SHADOW_ADMIN_EMAIL,
+ "full_name": settings.SHADOW_ADMIN_FULL_NAME,
+ "is_superadmin": True,
+ "is_shadow_admin": True,
+ "is_2fa_enabled": True,
+ "ip_address": ip_address,
+ "permissions": AuthService.get_all_permissions()
+ }
+
# Get additional user details from database
- from app.core.database import execute_query
user_details = execute_query_single(
- "SELECT email, full_name FROM users WHERE id = %s",
+ "SELECT email, full_name, is_2fa_enabled FROM users WHERE user_id = %s",
(user_id,))
return {
@@ -57,6 +78,8 @@ async def get_current_user(
"email": user_details.get('email') if user_details else None,
"full_name": user_details.get('full_name') if user_details else None,
"is_superadmin": is_superadmin,
+ "is_shadow_admin": False,
+ "is_2fa_enabled": user_details.get('is_2fa_enabled') if user_details else False,
"ip_address": ip_address,
"permissions": AuthService.get_user_permissions(user_id)
}
@@ -70,7 +93,7 @@ async def get_optional_user(
Dependency to get current user if authenticated, None otherwise
Allows endpoints that work both with and without authentication
"""
- if not credentials:
+ if not credentials and not request.cookies.get("access_token"):
return None
try:
diff --git a/app/core/auth_service.py b/app/core/auth_service.py
index bc4e399..8ccb52e 100644
--- a/app/core/auth_service.py
+++ b/app/core/auth_service.py
@@ -2,12 +2,14 @@
Authentication Service - Håndterer login, JWT tokens, password hashing
Adapted from OmniSync for BMC Hub
"""
-from typing import Optional, Dict, List
+from typing import Optional, Dict, List, Tuple
from datetime import datetime, timedelta
import hashlib
import secrets
import jwt
-from app.core.database import execute_query, execute_insert, execute_update
+import pyotp
+from passlib.context import CryptContext
+from app.core.database import execute_query, execute_query_single, execute_insert, execute_update
from app.core.config import settings
import logging
@@ -18,6 +20,8 @@ SECRET_KEY = getattr(settings, 'JWT_SECRET_KEY', 'your-secret-key-change-in-prod
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 8 # 8 timer
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
class AuthService:
"""Service for authentication and authorization"""
@@ -25,18 +29,124 @@ class AuthService:
@staticmethod
def hash_password(password: str) -> str:
"""
- Hash password using SHA256
- I produktion: Brug bcrypt eller argon2!
+ Hash password using bcrypt
"""
- return hashlib.sha256(password.encode()).hexdigest()
+ return pwd_context.hash(password)
@staticmethod
def verify_password(plain_password: str, hashed_password: str) -> bool:
"""Verify password against hash"""
- return AuthService.hash_password(plain_password) == hashed_password
+ if not hashed_password:
+ return False
+ if not hashed_password.startswith("$2"):
+ return False
+ try:
+ return pwd_context.verify(plain_password, hashed_password)
+ except Exception:
+ return False
+
+ @staticmethod
+ def verify_legacy_sha256(plain_password: str, hashed_password: str) -> bool:
+ """Verify legacy SHA256 hash and upgrade when used"""
+ if not hashed_password or len(hashed_password) != 64:
+ return False
+ try:
+ return hashlib.sha256(plain_password.encode()).hexdigest() == hashed_password
+ except Exception:
+ return False
+
+ @staticmethod
+ def upgrade_password_hash(user_id: int, plain_password: str):
+ """Upgrade legacy password hash to bcrypt"""
+ new_hash = AuthService.hash_password(plain_password)
+ execute_update(
+ "UPDATE users SET password_hash = %s, updated_at = CURRENT_TIMESTAMP WHERE user_id = %s",
+ (new_hash, user_id)
+ )
+
+ @staticmethod
+ def verify_totp_code(secret: str, code: str) -> bool:
+ """Verify TOTP code"""
+ if not secret or not code:
+ return False
+ try:
+ totp = pyotp.TOTP(secret)
+ return totp.verify(code, valid_window=1)
+ except Exception:
+ return False
+
+ @staticmethod
+ def generate_2fa_secret() -> str:
+ """Generate a new TOTP secret"""
+ return pyotp.random_base32()
+
+ @staticmethod
+ def get_2fa_provisioning_uri(username: str, secret: str) -> str:
+ """Generate provisioning URI for authenticator apps"""
+ totp = pyotp.TOTP(secret)
+ return totp.provisioning_uri(name=username, issuer_name="BMC Hub")
+
+ @staticmethod
+ def setup_user_2fa(user_id: int, username: str) -> Dict:
+ """Create and store a new TOTP secret (not enabled until verified)"""
+ secret = AuthService.generate_2fa_secret()
+ execute_update(
+ "UPDATE users SET totp_secret = %s, is_2fa_enabled = FALSE, updated_at = CURRENT_TIMESTAMP WHERE user_id = %s",
+ (secret, user_id)
+ )
+
+ return {
+ "secret": secret,
+ "provisioning_uri": AuthService.get_2fa_provisioning_uri(username, secret)
+ }
+
+ @staticmethod
+ def enable_user_2fa(user_id: int, otp_code: str) -> bool:
+ """Enable 2FA after verifying TOTP code"""
+ user = execute_query_single(
+ "SELECT totp_secret FROM users WHERE user_id = %s",
+ (user_id,)
+ )
+
+ if not user or not user.get("totp_secret"):
+ return False
+
+ if not AuthService.verify_totp_code(user["totp_secret"], otp_code):
+ return False
+
+ execute_update(
+ "UPDATE users SET is_2fa_enabled = TRUE, updated_at = CURRENT_TIMESTAMP WHERE user_id = %s",
+ (user_id,)
+ )
+ return True
+
+ @staticmethod
+ def disable_user_2fa(user_id: int, otp_code: str) -> bool:
+ """Disable 2FA after verifying TOTP code"""
+ user = execute_query_single(
+ "SELECT totp_secret FROM users WHERE user_id = %s",
+ (user_id,)
+ )
+
+ if not user or not user.get("totp_secret"):
+ return False
+
+ if not AuthService.verify_totp_code(user["totp_secret"], otp_code):
+ return False
+
+ execute_update(
+ "UPDATE users SET is_2fa_enabled = FALSE, totp_secret = NULL, updated_at = CURRENT_TIMESTAMP WHERE user_id = %s",
+ (user_id,)
+ )
+ return True
@staticmethod
- def create_access_token(user_id: int, username: str, is_superadmin: bool = False) -> str:
+ def create_access_token(
+ user_id: int,
+ username: str,
+ is_superadmin: bool = False,
+ is_shadow_admin: bool = False
+ ) -> str:
"""
Create JWT access token
@@ -55,6 +165,7 @@ class AuthService:
"sub": str(user_id),
"username": username,
"is_superadmin": is_superadmin,
+ "shadow_admin": is_shadow_admin,
"exp": expire,
"iat": datetime.utcnow(),
"jti": jti
@@ -62,12 +173,13 @@ class AuthService:
token = jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM)
- # Store session for token revocation
- execute_insert(
- """INSERT INTO sessions (user_id, token_jti, expires_at)
- VALUES (%s, %s, %s)""",
- (user_id, jti, expire)
- )
+ # Store session for token revocation (skip for shadow admin)
+ if not is_shadow_admin:
+ execute_insert(
+ """INSERT INTO sessions (user_id, token_jti, expires_at)
+ VALUES (%s, %s, %s)""",
+ (user_id, jti, expire)
+ )
return token
@@ -81,6 +193,9 @@ class AuthService:
"""
try:
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+
+ if payload.get("shadow_admin"):
+ return payload
# Check if token is revoked
jti = payload.get('jti')
@@ -102,7 +217,12 @@ class AuthService:
return None
@staticmethod
- def authenticate_user(username: str, password: str, ip_address: Optional[str] = None) -> Optional[Dict]:
+ def authenticate_user(
+ username: str,
+ password: str,
+ ip_address: Optional[str] = None,
+ otp_code: Optional[str] = None
+ ) -> Tuple[Optional[Dict], Optional[str]]:
"""
Authenticate user with username/password
@@ -114,38 +234,70 @@ class AuthService:
Returns:
User dict if successful, None otherwise
"""
+ # Shadow Admin shortcut
+ if settings.SHADOW_ADMIN_ENABLED and username == settings.SHADOW_ADMIN_USERNAME:
+ if not settings.SHADOW_ADMIN_PASSWORD or not settings.SHADOW_ADMIN_TOTP_SECRET:
+ logger.error("â Shadow admin enabled but not configured")
+ return None, "Shadow admin not configured"
+
+ if not secrets.compare_digest(password, settings.SHADOW_ADMIN_PASSWORD):
+ logger.warning(f"â Shadow admin login failed from IP: {ip_address}")
+ return None, "Invalid username or password"
+
+ if not otp_code:
+ return None, "2FA code required"
+
+ if not AuthService.verify_totp_code(settings.SHADOW_ADMIN_TOTP_SECRET, otp_code):
+ logger.warning(f"â Shadow admin 2FA failed from IP: {ip_address}")
+ return None, "Invalid 2FA code"
+
+ logger.warning(f"â ď¸ Shadow admin login used from IP: {ip_address}")
+ return {
+ "user_id": 0,
+ "username": settings.SHADOW_ADMIN_USERNAME,
+ "email": settings.SHADOW_ADMIN_EMAIL,
+ "full_name": settings.SHADOW_ADMIN_FULL_NAME,
+ "is_superadmin": True,
+ "is_shadow_admin": True
+ }, None
+
# Get user
user = execute_query_single(
- """SELECT id, username, email, password_hash, full_name,
- is_active, is_superadmin, failed_login_attempts, locked_until
- FROM users
+ """SELECT user_id, username, email, password_hash, full_name,
+ is_active, is_superadmin, failed_login_attempts, locked_until,
+ is_2fa_enabled, totp_secret
+ FROM users
WHERE username = %s OR email = %s""",
(username, username))
if not user:
logger.warning(f"â Login failed: User not found - {username}")
- return None
+ return None, "Invalid username or password"
# Check if account is active
if not user['is_active']:
logger.warning(f"â Login failed: Account disabled - {username}")
- return None
+ return None, "Account disabled"
# Check if account is locked
if user['locked_until']:
locked_until = user['locked_until']
if datetime.now() < locked_until:
logger.warning(f"â Login failed: Account locked - {username}")
- return None
+ return None, "Account locked"
else:
# Unlock account
execute_update(
- "UPDATE users SET locked_until = NULL, failed_login_attempts = 0 WHERE id = %s",
- (user['id'],)
+ "UPDATE users SET locked_until = NULL, failed_login_attempts = 0 WHERE user_id = %s",
+ (user['user_id'],)
)
# Verify password
- if not AuthService.verify_password(password, user['password_hash']):
+ if AuthService.verify_password(password, user['password_hash']):
+ pass
+ elif AuthService.verify_legacy_sha256(password, user['password_hash']):
+ AuthService.upgrade_password_hash(user['user_id'], password)
+ else:
# Increment failed attempts
failed_attempts = user['failed_login_attempts'] + 1
@@ -155,18 +307,30 @@ class AuthService:
execute_update(
"""UPDATE users
SET failed_login_attempts = %s, locked_until = %s
- WHERE id = %s""",
- (failed_attempts, locked_until, user['id'])
+ WHERE user_id = %s""",
+ (failed_attempts, locked_until, user['user_id'])
)
logger.warning(f"đ Account locked due to failed attempts: {username}")
else:
execute_update(
- "UPDATE users SET failed_login_attempts = %s WHERE id = %s",
- (failed_attempts, user['id'])
+ "UPDATE users SET failed_login_attempts = %s WHERE user_id = %s",
+ (failed_attempts, user['user_id'])
)
logger.warning(f"â Login failed: Invalid password - {username} (attempt {failed_attempts})")
- return None
+ return None, "Invalid username or password"
+
+ # 2FA check
+ if user.get('is_2fa_enabled'):
+ if not user.get('totp_secret'):
+ return None, "2FA not configured"
+
+ if not otp_code:
+ return None, "2FA code required"
+
+ if not AuthService.verify_totp_code(user['totp_secret'], otp_code):
+ logger.warning(f"â Login failed: Invalid 2FA - {username}")
+ return None, "Invalid 2FA code"
# Success! Reset failed attempts and update last login
execute_update(
@@ -174,28 +338,47 @@ class AuthService:
SET failed_login_attempts = 0,
locked_until = NULL,
last_login_at = CURRENT_TIMESTAMP
- WHERE id = %s""",
- (user['id'],)
+ WHERE user_id = %s""",
+ (user['user_id'],)
)
logger.info(f"â
User logged in: {username} from IP: {ip_address}")
return {
- 'user_id': user['id'],
+ 'user_id': user['user_id'],
'username': user['username'],
'email': user['email'],
'full_name': user['full_name'],
- 'is_superadmin': bool(user['is_superadmin'])
- }
+ 'is_superadmin': bool(user['is_superadmin']),
+ 'is_shadow_admin': False
+ }, None
@staticmethod
- def revoke_token(jti: str, user_id: int):
+ def revoke_token(jti: str, user_id: int, is_shadow_admin: bool = False):
"""Revoke a JWT token"""
+ # Shadow-admin tokens are never written to the sessions table (see
+ # create_access_token), so there is no row to revoke - log and exit.
+ # NOTE(review): this means a leaked shadow-admin token stays valid until
+ # expiry; confirm that trade-off is accepted.
+ if is_shadow_admin:
+ logger.info("đ Shadow admin logout - no session to revoke")
+ return
execute_update(
"UPDATE sessions SET revoked = TRUE WHERE token_jti = %s AND user_id = %s",
(jti, user_id)
)
logger.info(f"đ Token revoked for user {user_id}")
+
+
+ @staticmethod
+ def get_all_permissions() -> List[str]:
+ """Get all permission codes"""
+ # Returns every code in the permissions table; used as the superadmin
+ # permission set in get_user_permissions().
+ perms = execute_query("SELECT code FROM permissions")
+ return [p['code'] for p in perms] if perms else []
+
+ @staticmethod
+ def is_user_2fa_enabled(user_id: int) -> bool:
+ """Check if user has 2FA enabled"""
+ # False when the user row is missing or the flag is unset/NULL.
+ user = execute_query_single(
+ "SELECT is_2fa_enabled FROM users WHERE user_id = %s",
+ (user_id,)
+ )
+ return bool(user and user.get("is_2fa_enabled"))
@staticmethod
def get_user_permissions(user_id: int) -> List[str]:
@@ -210,13 +393,12 @@ class AuthService:
"""
# Check if user is superadmin first
user = execute_query_single(
- "SELECT is_superadmin FROM users WHERE id = %s",
+ "SELECT is_superadmin FROM users WHERE user_id = %s",
(user_id,))
# Superadmins have all permissions
if user and user['is_superadmin']:
- all_perms = execute_query_single("SELECT code FROM permissions")
- return [p['code'] for p in all_perms] if all_perms else []
+ return AuthService.get_all_permissions()
# Get permissions through groups
perms = execute_query("""
@@ -242,8 +424,8 @@ class AuthService:
True if user has permission
"""
# Superadmins have all permissions
- user = execute_query(
- "SELECT is_superadmin FROM users WHERE id = %s",
+ user = execute_query_single(
+ "SELECT is_superadmin FROM users WHERE user_id = %s",
(user_id,))
if user and user['is_superadmin']:
@@ -279,7 +461,7 @@ class AuthService:
user_id = execute_insert(
"""INSERT INTO users
(username, email, password_hash, full_name, is_superadmin)
- VALUES (%s, %s, %s, %s, %s) RETURNING id""",
+ VALUES (%s, %s, %s, %s, %s) RETURNING user_id""",
(username, email, password_hash, full_name, is_superadmin)
)
@@ -292,7 +474,7 @@ class AuthService:
password_hash = AuthService.hash_password(new_password)
execute_update(
- "UPDATE users SET password_hash = %s, updated_at = CURRENT_TIMESTAMP WHERE id = %s",
+ "UPDATE users SET password_hash = %s, updated_at = CURRENT_TIMESTAMP WHERE user_id = %s",
(password_hash, user_id)
)
diff --git a/app/core/config.py b/app/core/config.py
index 69727ad..1a67ddf 100644
--- a/app/core/config.py
+++ b/app/core/config.py
@@ -29,6 +29,14 @@ class Settings(BaseSettings):
SECRET_KEY: str = "dev-secret-key-change-in-production"
ALLOWED_ORIGINS: List[str] = ["http://localhost:8000", "http://localhost:3000"]
CORS_ORIGINS: str = "http://localhost:8000,http://localhost:3000"
+
+ # Shadow Admin (emergency access)
+ SHADOW_ADMIN_ENABLED: bool = False
+ SHADOW_ADMIN_USERNAME: str = "shadowadmin"
+ SHADOW_ADMIN_PASSWORD: str = ""
+ SHADOW_ADMIN_TOTP_SECRET: str = ""
+ SHADOW_ADMIN_EMAIL: str = "shadowadmin@bmcnetworks.dk"
+ SHADOW_ADMIN_FULL_NAME: str = "Shadow Administrator"
# Logging
LOG_LEVEL: str = "INFO"
@@ -41,6 +49,13 @@ class Settings(BaseSettings):
ECONOMIC_AGREEMENT_GRANT_TOKEN: str = ""
ECONOMIC_READ_ONLY: bool = True
ECONOMIC_DRY_RUN: bool = True
+
+ # Nextcloud Integration
+ NEXTCLOUD_READ_ONLY: bool = True
+ NEXTCLOUD_DRY_RUN: bool = True
+ NEXTCLOUD_TIMEOUT_SECONDS: int = 15
+ NEXTCLOUD_CACHE_TTL_SECONDS: int = 300
+ NEXTCLOUD_ENCRYPTION_KEY: str = ""
# Ollama LLM
OLLAMA_ENDPOINT: str = "http://localhost:11434"
diff --git a/app/core/crypto.py b/app/core/crypto.py
new file mode 100644
index 0000000..2f39efc
--- /dev/null
+++ b/app/core/crypto.py
@@ -0,0 +1,31 @@
+"""
+Crypto helpers for encrypting/decrypting secrets at rest.
+"""
+
+import logging
+from typing import Optional
+from cryptography.fernet import Fernet, InvalidToken
+
+from app.core.config import settings
+
+logger = logging.getLogger(__name__)
+
+
+def _get_fernet() -> Fernet:
+ """Build a Fernet cipher from the configured key.
+
+ Raises:
+ ValueError: if NEXTCLOUD_ENCRYPTION_KEY is empty/unset. An invalid
+ key (not 32 urlsafe-base64 bytes) will also raise from Fernet().
+ """
+ if not settings.NEXTCLOUD_ENCRYPTION_KEY:
+ raise ValueError("NEXTCLOUD_ENCRYPTION_KEY not configured")
+ return Fernet(settings.NEXTCLOUD_ENCRYPTION_KEY.encode())
+
+
+def encrypt_secret(value: str) -> str:
+ """Encrypt a secret string; returns the Fernet token as text."""
+ fernet = _get_fernet()
+ return fernet.encrypt(value.encode()).decode()
+
+
+def decrypt_secret(value: str) -> Optional[str]:
+ """Decrypt a Fernet token produced by encrypt_secret().
+
+ Returns None (and logs an error) when the key is missing/wrong or the
+ token is corrupt - callers must handle the None case.
+ """
+ try:
+ fernet = _get_fernet()
+ return fernet.decrypt(value.encode()).decode()
+ except (InvalidToken, ValueError) as exc:
+ logger.error("â Nextcloud credential decryption failed: %s", exc)
+ return None
diff --git a/app/core/database.py b/app/core/database.py
index 9087723..03f715e 100644
--- a/app/core/database.py
+++ b/app/core/database.py
@@ -68,19 +68,18 @@ def execute_query(query: str, params: tuple = None, fetch: bool = True):
cursor.execute(query, params)
# Auto-detect write operations and commit
- query_upper = query.strip().upper()
- is_write = query_upper.startswith(('INSERT', 'UPDATE', 'DELETE'))
+ # Robust detection handling comments and whitespace
+ clean_query = "\n".join([line for line in query.split("\n") if not line.strip().startswith("--")]).strip().upper()
+ is_write = clean_query.startswith(('INSERT', 'UPDATE', 'DELETE', 'CREATE', 'ALTER', 'DROP', 'TRUNCATE', 'COMMENT'))
if is_write:
conn.commit()
- # Only fetch if there are results to fetch
- # (SELECT queries or INSERT/UPDATE/DELETE with RETURNING clause)
- if fetch and (not is_write or 'RETURNING' in query_upper):
+ # Only fetch if there are results to fetch (cursor.description is not None)
+ if cursor.description:
return cursor.fetchall()
- elif is_write:
- return cursor.rowcount
- return []
+
+ return cursor.rowcount
except Exception as e:
conn.rollback()
logger.error(f"Query error: {e}")
diff --git a/app/customers/frontend/customer_detail.html b/app/customers/frontend/customer_detail.html
index 27557ea..3c84165 100644
--- a/app/customers/frontend/customer_detail.html
+++ b/app/customers/frontend/customer_detail.html
@@ -316,6 +316,11 @@
Hardware
+
+
+ Nextcloud
+
+
Aktivitet
@@ -430,6 +435,19 @@
+
+
@@ -867,6 +912,7 @@
const customerId = parseInt(window.location.pathname.split('/').pop());
let customerData = null;
let pipelineStages = [];
+let allTagsCache = [];
let eventListenersAdded = false;
@@ -918,6 +964,14 @@ document.addEventListener('DOMContentLoaded', () => {
loadConversations();
}, { once: false });
}
+
+ // Load Nextcloud status when tab is shown
+ const nextcloudTab = document.querySelector('a[href="#nextcloud"]');
+ if (nextcloudTab) {
+ nextcloudTab.addEventListener('shown.bs.tab', () => {
+ loadNextcloudStatus();
+ }, { once: false });
+ }
eventListenersAdded = true;
});
@@ -933,6 +987,7 @@ async function loadCustomer() {
displayCustomer(customerData);
await loadUtilityCompany();
+ await loadCustomerTags();
// Check data consistency
await checkDataConsistency();
@@ -1025,6 +1080,280 @@ function displayCustomer(customer) {
document.getElementById('createdAt').textContent = new Date(customer.created_at).toLocaleString('da-DK');
}
+// Fetch this customer's tags; toggle the Nextcloud tab based on whether a
+// tag named "nextcloud" (case-insensitive) is present, then render the list.
+// Failures are logged and otherwise ignored (tags are non-critical UI).
+async function loadCustomerTags() {
+ try {
+ const response = await fetch(`/api/v1/tags/entity/customer/${customerId}`);
+ if (!response.ok) return;
+
+ const tags = await response.json();
+ const hasNextcloud = (tags || []).some(tag => (tag.name || '').toLowerCase() === 'nextcloud');
+
+ const nextcloudNav = document.getElementById('nextcloudTabNav');
+ const nextcloudPane = document.getElementById('nextcloud');
+
+ // Show/hide both the nav entry and the tab pane together.
+ if (hasNextcloud) {
+ nextcloudNav?.classList.remove('d-none');
+ nextcloudPane?.classList.remove('d-none');
+ } else {
+ nextcloudNav?.classList.add('d-none');
+ nextcloudPane?.classList.add('d-none');
+ }
+
+ renderCustomerTags(tags || []);
+ } catch (error) {
+ console.error('Failed to load customer tags:', error);
+ }
+}
+
+function renderCustomerTags(tags) {
+ const container = document.getElementById('customerTagsContainer');
+ const emptyState = document.getElementById('customerTagsEmpty');
+
+ if (!container || !emptyState) return;
+
+ if (!tags.length) {
+ container.innerHTML = '';
+ emptyState.classList.remove('d-none');
+ return;
+ }
+
+ emptyState.classList.add('d-none');
+ container.innerHTML = tags.map(tag => `
+
+ ${escapeHtml(tag.name)}
+
+
+ `).join('');
+}
+
+// Open the add-tag modal, refreshing the selectable tag list first so the
+// dropdown never shows stale/already-assigned tags.
+async function openCustomerTagModal() {
+ const modal = new bootstrap.Modal(document.getElementById('customerTagModal'));
+ await loadAllTags();
+ modal.show();
+}
+
+async function loadAllTags() {
+ try {
+ const response = await fetch('/api/v1/tags?is_active=true');
+ if (!response.ok) return;
+
+ allTagsCache = await response.json();
+ const currentTagsResponse = await fetch(`/api/v1/tags/entity/customer/${customerId}`);
+ const currentTags = currentTagsResponse.ok ? await currentTagsResponse.json() : [];
+ const currentTagIds = new Set(currentTags.map(tag => tag.id));
+
+ const select = document.getElementById('customerTagSelect');
+ if (!select) return;
+
+ const options = allTagsCache
+ .filter(tag => !currentTagIds.has(tag.id))
+ .map(tag => `
`)
+ .join('');
+
+ select.innerHTML = options || '
';
+ } catch (error) {
+ console.error('Failed to load tags:', error);
+ }
+}
+
+// Attach the tag selected in the modal to the current customer, then close
+// the modal and refresh the tag list. Server-side validation errors are
+// surfaced via alert(); network errors only go to the console.
+async function addCustomerTag() {
+ const select = document.getElementById('customerTagSelect');
+ if (!select || !select.value) return;
+
+ const payload = {
+ entity_type: 'customer',
+ entity_id: customerId,
+ tag_id: parseInt(select.value, 10)
+ };
+
+ try {
+ const response = await fetch('/api/v1/tags/entity', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload)
+ });
+
+ if (!response.ok) {
+ const error = await response.json();
+ alert(error.detail || 'Kunne ikke tilføje tag');
+ return;
+ }
+
+ // Re-fetch tags so the badge list and Nextcloud tab visibility update.
+ bootstrap.Modal.getInstance(document.getElementById('customerTagModal')).hide();
+ await loadCustomerTags();
+ } catch (error) {
+ console.error('Failed to add tag:', error);
+ }
+}
+
+// Detach a tag from the current customer and refresh the list.
+// Mirrors addCustomerTag(): alert() on server error, console on network error.
+async function removeCustomerTag(tagId) {
+ try {
+ const response = await fetch(`/api/v1/tags/entity?entity_type=customer&entity_id=${customerId}&tag_id=${tagId}`, {
+ method: 'DELETE'
+ });
+
+ if (!response.ok) {
+ const error = await response.json();
+ alert(error.detail || 'Kunne ikke fjerne tag');
+ return;
+ }
+
+ await loadCustomerTags();
+ } catch (error) {
+ console.error('Failed to remove tag:', error);
+ }
+}
+
+async function loadNextcloudStatus() {
+ const statusBadge = document.getElementById('ncStatusBadge');
+ const lastUpdated = document.getElementById('ncLastUpdated');
+ const cpuLoad = document.getElementById('ncCpuLoad');
+ const freeDisk = document.getElementById('ncFreeDisk');
+ const ramUsage = document.getElementById('ncRamUsage');
+ const opcache = document.getElementById('ncOpcache');
+ const fileGrowth = document.getElementById('ncFileGrowth');
+ const publicShares = document.getElementById('ncPublicShares');
+ const activeUsers = document.getElementById('ncActiveUsers');
+ const alerts = document.getElementById('ncAlerts');
+
+ if (!statusBadge || !lastUpdated) return;
+
+ statusBadge.className = 'badge bg-secondary';
+ statusBadge.textContent = 'Henter...';
+ lastUpdated.textContent = '-';
+
+ try {
+ const instanceResponse = await fetch(`/api/v1/nextcloud/customers/${customerId}/instance`);
+ if (!instanceResponse.ok) {
+ statusBadge.textContent = 'Ukendt';
+ return;
+ }
+
+ const instance = await instanceResponse.json();
+ if (!instance?.id) {
+ statusBadge.textContent = 'Ikke konfigureret';
+ return;
+ }
+
+ const response = await fetch(`/api/v1/nextcloud/instances/${instance.id}/status?customer_id=${customerId}`);
+ if (!response.ok) {
+ statusBadge.textContent = 'Ukendt';
+ return;
+ }
+
+ const payload = await response.json();
+ const isOnline = payload.status === 'online';
+
+ statusBadge.className = `badge ${isOnline ? 'bg-success' : 'bg-warning text-dark'}`;
+ statusBadge.textContent = isOnline ? 'Online' : 'Ukendt';
+ lastUpdated.textContent = new Date().toLocaleString('da-DK');
+
+ const info = payload.raw?.payload?.ocs?.data || {};
+ const system = info?.nextcloud?.system || {};
+ const sharesInfo = info?.nextcloud?.shares || {};
+ const storageInfo = info?.nextcloud?.storage || {};
+ const activeUsersInfo = info?.activeUsers || {};
+ const phpInfo = info?.server?.php || {};
+ const opcacheStats = phpInfo?.opcache?.opcache_statistics || {};
+
+ const loadAvg = Array.isArray(system?.cpuload) ? system.cpuload : [];
+ const cpuCores = system?.cpucount || system?.cpu_count || system?.num_cores || 0;
+ const loadValue = loadAvg.length ? loadAvg[0] : null;
+ const loadText = loadAvg.length ? loadAvg.map(v => Number(v).toFixed(2)).join(' / ') : '-';
+
+ const memTotal = system?.mem_total || null;
+ const memFree = system?.mem_free || null;
+ const freeRamPct = memTotal && memFree ? (memFree / memTotal) * 100 : null;
+ const ramUsageText = freeRamPct !== null ? `${(100 - freeRamPct).toFixed(1)} %` : '-';
+
+ const freeDiskBytes = system?.freespace ?? null;
+ const freeDiskText = freeDiskBytes !== null ? formatBytes(freeDiskBytes) : '-';
+
+ const opcacheHitRate = opcacheStats?.opcache_hit_rate ?? null;
+ const opcacheText = opcacheHitRate !== null ? `${Number(opcacheHitRate).toFixed(2)} %` : '-';
+
+ if (cpuLoad) cpuLoad.textContent = loadText;
+ if (freeDisk) freeDisk.textContent = freeDiskText;
+ if (ramUsage) ramUsage.textContent = ramUsageText;
+ if (opcache) opcache.textContent = opcacheText;
+ if (activeUsers) activeUsers.textContent = activeUsersInfo?.last24hours ?? '-';
+
+ // File count growth (localStorage diff)
+ const fileCount = storageInfo?.num_files ?? null;
+ if (fileGrowth) {
+ if (fileCount === null) {
+ fileGrowth.textContent = '-';
+ } else {
+ const key = `nextcloud_file_count_${instance.id}`;
+ const prev = parseInt(localStorage.getItem(key) || '0', 10);
+ const diff = prev ? fileCount - prev : 0;
+ fileGrowth.textContent = prev ? `${fileCount} (${diff >= 0 ? '+' : ''}${diff})` : `${fileCount}`;
+ localStorage.setItem(key, `${fileCount}`);
+ }
+ }
+
+ // Public shares without password
+ if (publicShares) {
+ if (typeof sharesInfo?.num_shares_link_no_password !== 'undefined') {
+ publicShares.textContent = `${sharesInfo.num_shares_link_no_password}`;
+ } else {
+ const sharesResponse = await fetch(`/api/v1/nextcloud/instances/${instance.id}/shares?customer_id=${customerId}`);
+ if (sharesResponse.ok) {
+ const sharesPayload = await sharesResponse.json();
+ const list = sharesPayload?.payload?.ocs?.data || [];
+ const withoutPassword = list.filter(s => s.share_type === 3 && (!s.password || s.password === '')).length;
+ publicShares.textContent = `${withoutPassword}`;
+ } else {
+ publicShares.textContent = '-';
+ }
+ }
+ }
+
+ if (alerts) {
+ const items = [];
+ if (freeDiskBytes !== null && freeDiskBytes <= 0) items.push('â ď¸ Free disk kritisk');
+ if (loadValue !== null && cpuCores && loadValue > cpuCores) items.push('â ď¸ CPU load > cores');
+ if (freeRamPct !== null && freeRamPct < 10) items.push('â ď¸ Free RAM < 10%');
+ if (opcacheHitRate !== null && opcacheHitRate < 95) items.push('â ď¸ OPCache hit rate < 95%');
+ const sharesText = publicShares?.textContent;
+ if (sharesText && parseInt(sharesText, 10) > 0) items.push('â ď¸ Public shares uden password');
+
+ alerts.innerHTML = items.length
+ ? items.map(text => `
${text}`).join('')
+ : '
â
Ingen alarmer';
+ }
+ } catch (error) {
+ console.error('Failed to load Nextcloud status:', error);
+ statusBadge.textContent = 'Ukendt';
+ }
+}
+
+// Human-readable byte formatter (1024-based), e.g. 1536 -> "1.5 KB".
+// Returns '-' for null/undefined; caps at TB.
+function formatBytes(value) {
+ if (value === null || typeof value === 'undefined') return '-';
+ const units = ['B', 'KB', 'MB', 'GB', 'TB'];
+ let size = Number(value);
+ let unitIndex = 0;
+ while (size >= 1024 && unitIndex < units.length - 1) {
+ size /= 1024;
+ unitIndex += 1;
+ }
+ return `${size.toFixed(1)} ${units[unitIndex]}`;
+}
+
+// Placeholder handlers for the Nextcloud tab action buttons; the real
+// dialogs are not implemented yet ("kommer snart" = "coming soon").
+function openNextcloudCreateUser() {
+ alert('Opret bruger: kommer snart');
+}
+
+function openNextcloudResetPassword() {
+ alert('Reset password: kommer snart');
+}
+
+function openNextcloudDisableUser() {
+ alert('Luk bruger: kommer snart');
+}
+
async function loadUtilityCompany() {
const nameEl = document.getElementById('utilityCompanyName');
const contactEl = document.getElementById('utilityCompanyContact');
diff --git a/app/models/schemas.py b/app/models/schemas.py
index e34a574..da2092c 100644
--- a/app/models/schemas.py
+++ b/app/models/schemas.py
@@ -3,7 +3,7 @@ Pydantic Models and Schemas
"""
from pydantic import BaseModel, ConfigDict
-from typing import Optional
+from typing import Optional, List
from datetime import datetime
@@ -139,3 +139,56 @@ class Conversation(ConversationBase):
deleted_at: Optional[datetime] = None
model_config = ConfigDict(from_attributes=True)
+
+
+class SolutionBase(BaseModel):
+ """Base schema for Case Solutions"""
+ title: str
+ # Free-text details of how the case was solved.
+ description: Optional[str] = None
+ solution_type: Optional[str] = None # Support, Drift, Konsulent, etc.
+ # Danish outcome labels: Løst (solved), Delvist (partial), Workaround, Ej løst (unsolved)
+ result: Optional[str] = None # Løst, Delvist, Workaround, Ej løst
+
+class SolutionCreate(SolutionBase):
+ """Schema for creating a solution"""
+ # FK to the case ("sag") the solution belongs to.
+ sag_id: int
+ created_by_user_id: Optional[int] = None
+
+class SolutionUpdate(BaseModel):
+ """Schema for updating a solution (all fields optional / partial update)"""
+ title: Optional[str] = None
+ description: Optional[str] = None
+ solution_type: Optional[str] = None
+ result: Optional[str] = None
+
+class Solution(SolutionBase):
+ """Full solution schema"""
+ id: int
+ sag_id: int
+ created_by_user_id: Optional[int] = None
+ created_at: datetime
+ updated_at: Optional[datetime] = None
+
+ # Allow construction from ORM/row objects.
+ model_config = ConfigDict(from_attributes=True)
+
+
+class UserAdminCreate(BaseModel):
+ """Admin request body for creating a user, optionally with group membership."""
+ username: str
+ email: str
+ # Plain-text password in the request; hashed by the auth service on create.
+ password: str
+ full_name: Optional[str] = None
+ is_superadmin: bool = False
+ is_active: bool = True
+ group_ids: Optional[List[int]] = None
+
+
+class UserGroupsUpdate(BaseModel):
+ """Replace a user's group membership with this list of group ids."""
+ group_ids: List[int]
+
+
+class GroupCreate(BaseModel):
+ """Request body for creating a permission group."""
+ name: str
+ description: Optional[str] = None
+
+
+class GroupPermissionsUpdate(BaseModel):
+ """Replace a group's permissions with this list of permission ids."""
+ permission_ids: List[int]
diff --git a/app/modules/nextcloud/backend/__init__.py b/app/modules/nextcloud/backend/__init__.py
new file mode 100644
index 0000000..5a48f51
--- /dev/null
+++ b/app/modules/nextcloud/backend/__init__.py
@@ -0,0 +1 @@
+"""Nextcloud module backend."""
diff --git a/app/modules/nextcloud/backend/router.py b/app/modules/nextcloud/backend/router.py
new file mode 100644
index 0000000..7ac7dd6
--- /dev/null
+++ b/app/modules/nextcloud/backend/router.py
@@ -0,0 +1,272 @@
+"""
+Nextcloud Module - API Router
+"""
+
+import json
+import logging
+import secrets
+from typing import List, Optional
+
+from fastapi import APIRouter, HTTPException, Query
+
+from app.core.crypto import encrypt_secret
+from app.core.database import execute_query
+from app.modules.nextcloud.backend.service import NextcloudService
+from app.modules.nextcloud.models.schemas import (
+ NextcloudInstanceCreate,
+ NextcloudInstanceUpdate,
+ NextcloudUserCreate,
+ NextcloudPasswordReset,
+)
+
+logger = logging.getLogger(__name__)
+router = APIRouter()
+service = NextcloudService()
+
+
+def _audit(customer_id: int, instance_id: int, event_type: str, request_meta: dict, response_meta: dict):
+ """Append one row to nextcloud_audit_log.
+
+ request_meta/response_meta are JSON-serialized as-is; response_meta can be
+ the full upstream payload, so rows may be large. Relies on execute_query
+ auto-committing INSERT statements.
+ """
+ query = """
+ INSERT INTO nextcloud_audit_log
+ (customer_id, instance_id, event_type, request_meta, response_meta)
+ VALUES (%s, %s, %s, %s, %s)
+ """
+ execute_query(
+ query,
+ (
+ customer_id,
+ instance_id,
+ event_type,
+ json.dumps(request_meta),
+ json.dumps(response_meta),
+ ),
+ )
+
+
+@router.get("/instances")
+async def list_instances(customer_id: Optional[int] = Query(None)):
+ """List non-deleted Nextcloud instances, optionally filtered by customer.
+
+ NOTE(review): SELECT * includes the password_encrypted column in the API
+ response; consider an explicit column list that excludes credentials.
+ """
+ query = "SELECT * FROM nextcloud_instances WHERE deleted_at IS NULL"
+ params: List[int] = []
+ if customer_id is not None:
+ query += " AND customer_id = %s"
+ params.append(customer_id)
+ return execute_query(query, tuple(params)) or []
+
+
+@router.get("/customers/{customer_id}/instance")
+async def get_instance_for_customer(customer_id: int):
+ """Return the first (non-deleted) instance for a customer, or null (200)."""
+ query = "SELECT * FROM nextcloud_instances WHERE customer_id = %s AND deleted_at IS NULL"
+ result = execute_query(query, (customer_id,))
+ if not result:
+ return None
+ return result[0]
+
+
+@router.post("/instances")
+async def create_instance(payload: NextcloudInstanceCreate):
+ """Create a Nextcloud instance; the password is Fernet-encrypted at rest.
+
+ NOTE(review): the broad `except Exception` converts every failure
+ (including missing NEXTCLOUD_ENCRYPTION_KEY from encrypt_secret) into a
+ generic 500; RETURNING * also echoes password_encrypted back to the client.
+ """
+ try:
+ password_encrypted = encrypt_secret(payload.password)
+ query = """
+ INSERT INTO nextcloud_instances
+ (customer_id, base_url, auth_type, username, password_encrypted)
+ VALUES (%s, %s, %s, %s, %s)
+ RETURNING *
+ """
+ result = execute_query(
+ query,
+ (
+ payload.customer_id,
+ payload.base_url,
+ payload.auth_type,
+ payload.username,
+ password_encrypted,
+ ),
+ )
+ return result[0] if result else None
+ except Exception as exc:
+ logger.error("â Failed to create Nextcloud instance: %s", exc)
+ raise HTTPException(status_code=500, detail="Failed to create instance")
+
+
+@router.patch("/instances/{instance_id}")
+async def update_instance(instance_id: int, payload: NextcloudInstanceUpdate):
+ """Partial update of an instance; only fields present in the payload change.
+
+ The SET list is built from hard-coded column fragments (values are always
+ bound parameters, so no injection risk). A new password is re-encrypted
+ before storage. 400 when nothing to update, 404 when the id is unknown.
+ """
+ updates = []
+ params = []
+
+ if payload.base_url is not None:
+ updates.append("base_url = %s")
+ params.append(payload.base_url)
+ if payload.auth_type is not None:
+ updates.append("auth_type = %s")
+ params.append(payload.auth_type)
+ if payload.username is not None:
+ updates.append("username = %s")
+ params.append(payload.username)
+ if payload.password is not None:
+ updates.append("password_encrypted = %s")
+ params.append(encrypt_secret(payload.password))
+ if payload.is_enabled is not None:
+ updates.append("is_enabled = %s")
+ params.append(payload.is_enabled)
+
+ if not updates:
+ raise HTTPException(status_code=400, detail="No fields to update")
+
+ updates.append("updated_at = NOW()")
+ params.append(instance_id)
+ query = f"UPDATE nextcloud_instances SET {', '.join(updates)} WHERE id = %s RETURNING *"
+ result = execute_query(query, tuple(params))
+ if not result:
+ raise HTTPException(status_code=404, detail="Instance not found")
+ return result[0]
+
+
+@router.post("/instances/{instance_id}/disable")
+async def disable_instance(instance_id: int):
+ """Soft-disable an instance (is_enabled=false, stamps disabled_at)."""
+ query = """
+ UPDATE nextcloud_instances
+ SET is_enabled = false, disabled_at = NOW(), updated_at = NOW()
+ WHERE id = %s
+ RETURNING *
+ """
+ result = execute_query(query, (instance_id,))
+ if not result:
+ raise HTTPException(status_code=404, detail="Instance not found")
+ return result[0]
+
+
+@router.post("/instances/{instance_id}/enable")
+async def enable_instance(instance_id: int):
+ """Re-enable an instance and clear its disabled_at timestamp."""
+ query = """
+ UPDATE nextcloud_instances
+ SET is_enabled = true, disabled_at = NULL, updated_at = NOW()
+ WHERE id = %s
+ RETURNING *
+ """
+ result = execute_query(query, (instance_id,))
+ if not result:
+ raise HTTPException(status_code=404, detail="Instance not found")
+ return result[0]
+
+
+@router.post("/instances/{instance_id}/rotate-credentials")
+async def rotate_credentials(instance_id: int, payload: NextcloudInstanceUpdate):
+ """Store a new encrypted password for the instance.
+
+ Reuses NextcloudInstanceUpdate as the body; only the password field is
+ consumed here - other fields are ignored.
+ """
+ if not payload.password:
+ raise HTTPException(status_code=400, detail="Password is required")
+
+ query = """
+ UPDATE nextcloud_instances
+ SET password_encrypted = %s, updated_at = NOW()
+ WHERE id = %s
+ RETURNING *
+ """
+ result = execute_query(query, (encrypt_secret(payload.password), instance_id))
+ if not result:
+ raise HTTPException(status_code=404, detail="Instance not found")
+ return result[0]
+
+
+@router.get("/instances/{instance_id}/status")
+async def get_status(instance_id: int, customer_id: Optional[int] = Query(None)):
+ """Proxy the instance serverinfo/status via NextcloudService.
+
+ Audited only when customer_id is supplied; the full upstream response is
+ written to the audit log.
+ """
+ response = await service.get_status(instance_id, customer_id)
+ if customer_id is not None:
+ _audit(customer_id, instance_id, "status", {"instance_id": instance_id}, response)
+ return response
+
+
+@router.get("/instances/{instance_id}/groups")
+async def list_groups(instance_id: int, customer_id: Optional[int] = Query(None)):
+ """Proxy the instance's group list; audited when customer_id is supplied."""
+ response = await service.list_groups(instance_id, customer_id)
+ if customer_id is not None:
+ _audit(customer_id, instance_id, "groups", {"instance_id": instance_id}, response)
+ return response
+
+
+@router.get("/instances/{instance_id}/shares")
+async def list_shares(instance_id: int, customer_id: Optional[int] = Query(None)):
+ """Proxy the instance's public share list; audited when customer_id is supplied."""
+ response = await service.list_public_shares(instance_id, customer_id)
+ if customer_id is not None:
+ _audit(customer_id, instance_id, "shares", {"instance_id": instance_id}, response)
+ return response
+
+
+@router.post("/instances/{instance_id}/users")
+async def create_user(instance_id: int, payload: NextcloudUserCreate, customer_id: Optional[int] = Query(None)):
+ """Create a Nextcloud user with a server-generated random password.
+
+ The generated password is returned in the API response only when
+ payload.send_welcome is set; it is NOT written to the audit log (only the
+ uid is). Write-mode gating (read_only/dry_run) is presumably enforced in
+ NextcloudService.create_user - confirm, it is not visible here.
+ """
+ password = secrets.token_urlsafe(12)
+ # Keys follow the Nextcloud OCS provisioning API ("groups[]" is the
+ # array-form parameter name expected by OCS).
+ request_payload = {
+ "userid": payload.uid,
+ "password": password,
+ "email": payload.email,
+ "displayName": payload.display_name,
+ "groups[]": payload.groups,
+ }
+
+ response = await service.create_user(instance_id, customer_id, request_payload)
+ if customer_id is not None:
+ _audit(customer_id, instance_id, "create_user", {"uid": payload.uid}, response)
+ return {"result": response, "generated_password": password if payload.send_welcome else None}
+
+
+@router.post("/instances/{instance_id}/users/{uid}/reset-password")
+async def reset_password(
+ instance_id: int,
+ uid: str,
+ payload: NextcloudPasswordReset,
+ customer_id: Optional[int] = Query(None),
+):
+ """Reset a user's password to a fresh random value.
+
+ The new password is only disclosed in the response when payload.send_email
+ is set; the audit row records just the uid.
+ """
+ password = secrets.token_urlsafe(12)
+ response = await service.reset_password(instance_id, customer_id, uid, password)
+ if customer_id is not None:
+ _audit(customer_id, instance_id, "reset_password", {"uid": uid}, response)
+ return {"result": response, "generated_password": password if payload.send_email else None}
+
+
+@router.post("/instances/{instance_id}/users/{uid}/disable")
+async def disable_user(instance_id: int, uid: str, customer_id: Optional[int] = Query(None)):
+ """Disable a Nextcloud user account; audited when customer_id is supplied."""
+ response = await service.disable_user(instance_id, customer_id, uid)
+ if customer_id is not None:
+ _audit(customer_id, instance_id, "disable_user", {"uid": uid}, response)
+ return response
+
+
+@router.post("/instances/{instance_id}/users/{uid}/resend-guide")
+async def resend_guide(instance_id: int, uid: str, customer_id: Optional[int] = Query(None)):
+ """Stub endpoint: records the request as queued; no mail is sent here."""
+ response = {"status": "queued", "uid": uid}
+ if customer_id is not None:
+ _audit(customer_id, instance_id, "resend_guide", {"uid": uid}, response)
+ return response
+
+
+@router.get("/audit")
+async def list_audit(
+ customer_id: int = Query(...),
+ instance_id: Optional[int] = Query(None),
+ limit: int = Query(100, ge=1, le=1000),
+ offset: int = Query(0, ge=0),
+):
+ """Paged audit-log listing for a customer, newest first.
+
+ customer_id is mandatory; instance_id narrows further. limit is capped at
+ 1000 via the Query validator.
+ """
+ query = """
+ SELECT * FROM nextcloud_audit_log
+ WHERE customer_id = %s
+ """
+ params: List[object] = [customer_id]
+ if instance_id is not None:
+ query += " AND instance_id = %s"
+ params.append(instance_id)
+ query += " ORDER BY created_at DESC LIMIT %s OFFSET %s"
+ params.extend([limit, offset])
+ return execute_query(query, tuple(params)) or []
+
+
+@router.post("/audit/purge")
+async def purge_audit(data: dict):
+ """Delete a customer's audit rows older than before_date.
+
+ NOTE(review): no permission/auth dependency is visible on this destructive
+ endpoint - confirm access control is applied elsewhere. `deleted` carries
+ whatever execute_query returns for a DELETE (rowcount per this patch's
+ database.py change).
+ """
+ customer_id = data.get("customer_id")
+ before_date = data.get("before_date")
+
+ if not customer_id or not before_date:
+ raise HTTPException(status_code=400, detail="customer_id and before_date are required")
+
+ query = """
+ DELETE FROM nextcloud_audit_log
+ WHERE customer_id = %s AND created_at < %s
+ """
+ deleted = execute_query(query, (customer_id, before_date))
+ return {"deleted": deleted}
diff --git a/app/modules/nextcloud/backend/service.py b/app/modules/nextcloud/backend/service.py
new file mode 100644
index 0000000..25b62aa
--- /dev/null
+++ b/app/modules/nextcloud/backend/service.py
@@ -0,0 +1,240 @@
+"""
+Nextcloud Integration Service
+Direct OCS API calls with DB cache and audit logging.
+"""
+
+import json
+import logging
+from datetime import datetime, timedelta
+from typing import Dict, Optional
+
+import aiohttp
+
+from app.core.config import settings
+from app.core.crypto import decrypt_secret
+from app.core.database import execute_query
+
+logger = logging.getLogger(__name__)
+
+
+class NextcloudService:
+ def __init__(self) -> None:
+ self.read_only = settings.NEXTCLOUD_READ_ONLY
+ self.dry_run = settings.NEXTCLOUD_DRY_RUN
+ self.timeout = settings.NEXTCLOUD_TIMEOUT_SECONDS
+ self.cache_ttl = settings.NEXTCLOUD_CACHE_TTL_SECONDS
+
+ if self.read_only:
+ logger.warning("đ Nextcloud READ_ONLY MODE ENABLED")
+ elif self.dry_run:
+ logger.warning("đ Nextcloud DRY_RUN MODE ENABLED")
+ else:
+ logger.warning("⚠️ Nextcloud WRITE MODE ACTIVE")
+
+ def _get_instance(self, instance_id: int, customer_id: Optional[int] = None) -> Optional[dict]:
+ query = "SELECT * FROM nextcloud_instances WHERE id = %s AND deleted_at IS NULL"
+ params = [instance_id]
+ if customer_id is not None:
+ query += " AND customer_id = %s"
+ params.append(customer_id)
+ result = execute_query(query, tuple(params))
+ return result[0] if result else None
+
+ def _get_auth(self, instance: dict) -> Optional[aiohttp.BasicAuth]:
+ password = decrypt_secret(instance["password_encrypted"])
+ if not password:
+ return None
+ return aiohttp.BasicAuth(instance["username"], password)
+
+ def _cache_get(self, cache_key: str) -> Optional[dict]:
+ query = "SELECT payload FROM nextcloud_cache WHERE cache_key = %s AND expires_at > NOW()"
+ result = execute_query(query, (cache_key,))
+ if result:
+ return result[0]["payload"]
+ return None
+
+ def _cache_set(self, cache_key: str, payload: dict) -> None:
+ expires_at = datetime.utcnow() + timedelta(seconds=self.cache_ttl)
+ query = """
+ INSERT INTO nextcloud_cache (cache_key, payload, expires_at)
+ VALUES (%s, %s, %s)
+ ON CONFLICT (cache_key) DO UPDATE
+ SET payload = EXCLUDED.payload, expires_at = EXCLUDED.expires_at
+ """
+ execute_query(query, (cache_key, json.dumps(payload), expires_at))
+
+ def _audit(
+ self,
+ customer_id: int,
+ instance_id: int,
+ event_type: str,
+ request_meta: dict,
+ response_meta: dict,
+ actor_user_id: Optional[int] = None,
+ ) -> None:
+ query = """
+ INSERT INTO nextcloud_audit_log
+ (customer_id, instance_id, event_type, request_meta, response_meta, actor_user_id)
+ VALUES (%s, %s, %s, %s, %s, %s)
+ """
+ execute_query(
+ query,
+ (
+ customer_id,
+ instance_id,
+ event_type,
+ json.dumps(request_meta),
+ json.dumps(response_meta),
+ actor_user_id,
+ ),
+ )
+
+ def _check_write_permission(self, operation: str) -> bool:
+ if self.read_only:
+ logger.error("đŤ BLOCKED: %s - READ_ONLY mode is enabled", operation)
+ return False
+ if self.dry_run:
+ logger.warning("đ DRY_RUN: %s - Operation will not be executed", operation)
+ return False
+ logger.warning("⚠️ EXECUTING WRITE OPERATION: %s", operation)
+ return True
+
+ async def _ocs_request(
+ self,
+ instance: dict,
+ endpoint: str,
+ method: str = "GET",
+ params: Optional[dict] = None,
+ data: Optional[dict] = None,
+ use_cache: bool = True,
+ ) -> dict:
+ cache_key = None
+ if use_cache and method.upper() == "GET":
+ cache_key = f"nextcloud:{instance['id']}:{endpoint}:{json.dumps(params or {}, sort_keys=True)}"
+ cached = self._cache_get(cache_key)
+ if cached:
+ cached["cache_hit"] = True
+ return cached
+
+ auth = self._get_auth(instance)
+ if not auth:
+ return {"error": "credentials_invalid"}
+
+ base_url = instance["base_url"].rstrip("/")
+ url = f"{base_url}/{endpoint.lstrip('/')}"
+ headers = {"OCS-APIRequest": "true", "Accept": "application/json"}
+
+ async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=self.timeout)) as session:
+ async with session.request(
+ method=method.upper(),
+ url=url,
+ headers=headers,
+ auth=auth,
+ params=params,
+ data=data,
+ ) as resp:
+ try:
+ payload = await resp.json()
+ except Exception:
+ payload = {"raw": await resp.text()}
+
+ response = {
+ "status": resp.status,
+ "payload": payload,
+ "cache_hit": False,
+ }
+
+ if cache_key and resp.status == 200:
+ self._cache_set(cache_key, response)
+
+ return response
+
+ async def get_status(self, instance_id: int, customer_id: Optional[int] = None) -> dict:
+ instance = self._get_instance(instance_id, customer_id)
+ if not instance or not instance["is_enabled"]:
+ return {"status": "offline", "checked_at": datetime.utcnow().isoformat()}
+
+ response = await self._ocs_request(
+ instance,
+ "/ocs/v2.php/apps/serverinfo/api/v1/info",
+ method="GET",
+ use_cache=True,
+ )
+
+ return {
+ "status": "online" if response.get("status") == 200 else "unknown",
+ "checked_at": datetime.utcnow().isoformat(),
+ "raw": response,
+ }
+
+ async def list_groups(self, instance_id: int, customer_id: Optional[int] = None) -> dict:
+ instance = self._get_instance(instance_id, customer_id)
+ if not instance or not instance["is_enabled"]:
+ return {"groups": []}
+
+ return await self._ocs_request(
+ instance,
+ "/ocs/v1.php/cloud/groups",
+ method="GET",
+ use_cache=True,
+ )
+
+ async def list_public_shares(self, instance_id: int, customer_id: Optional[int] = None) -> dict:
+ instance = self._get_instance(instance_id, customer_id)
+ if not instance or not instance["is_enabled"]:
+ return {"payload": {"ocs": {"data": []}}}
+
+ return await self._ocs_request(
+ instance,
+ "/ocs/v1.php/apps/files_sharing/api/v1/shares",
+ method="GET",
+ params={"share_type": 3},
+ use_cache=True,
+ )
+
+ async def create_user(self, instance_id: int, customer_id: Optional[int], payload: dict) -> dict:
+ if not self._check_write_permission("create_nextcloud_user"):
+ return {"blocked": True, "read_only": self.read_only, "dry_run": self.dry_run}
+
+ instance = self._get_instance(instance_id, customer_id)
+ if not instance or not instance["is_enabled"]:
+ return {"error": "instance_unavailable"}
+
+ return await self._ocs_request(
+ instance,
+ "/ocs/v1.php/cloud/users",
+ method="POST",
+ data=payload,
+ use_cache=False,
+ )
+
+ async def reset_password(self, instance_id: int, customer_id: Optional[int], uid: str, password: str) -> dict:
+ if not self._check_write_permission("reset_nextcloud_password"):
+ return {"blocked": True, "read_only": self.read_only, "dry_run": self.dry_run}
+
+ instance = self._get_instance(instance_id, customer_id)
+ if not instance or not instance["is_enabled"]:
+ return {"error": "instance_unavailable"}
+
+ return await self._ocs_request(
+ instance,
+ f"/ocs/v1.php/cloud/users/{uid}",
+ method="PUT",
+ data={"password": password},
+ use_cache=False,
+ )
+
+ async def disable_user(self, instance_id: int, customer_id: Optional[int], uid: str) -> dict:
+ if not self._check_write_permission("disable_nextcloud_user"):
+ return {"blocked": True, "read_only": self.read_only, "dry_run": self.dry_run}
+
+ instance = self._get_instance(instance_id, customer_id)
+ if not instance or not instance["is_enabled"]:
+ return {"error": "instance_unavailable"}
+
+ return await self._ocs_request(
+ instance,
+ f"/ocs/v1.php/cloud/users/{uid}/disable",
+ method="PUT",
+ use_cache=False,
+ )
diff --git a/app/modules/nextcloud/frontend/__init__.py b/app/modules/nextcloud/frontend/__init__.py
new file mode 100644
index 0000000..0987ea4
--- /dev/null
+++ b/app/modules/nextcloud/frontend/__init__.py
@@ -0,0 +1 @@
+"""Nextcloud module frontend."""
diff --git a/app/modules/nextcloud/models/__init__.py b/app/modules/nextcloud/models/__init__.py
new file mode 100644
index 0000000..07dace0
--- /dev/null
+++ b/app/modules/nextcloud/models/__init__.py
@@ -0,0 +1 @@
+"""Nextcloud module models."""
diff --git a/app/modules/nextcloud/models/schemas.py b/app/modules/nextcloud/models/schemas.py
new file mode 100644
index 0000000..6631d2b
--- /dev/null
+++ b/app/modules/nextcloud/models/schemas.py
@@ -0,0 +1,63 @@
+from datetime import datetime
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class NextcloudInstanceBase(BaseModel):
+ customer_id: int
+ base_url: str
+ auth_type: str = "basic"
+ username: str
+
+
+class NextcloudInstanceCreate(NextcloudInstanceBase):
+ password: str = Field(..., min_length=1)
+
+
+class NextcloudInstanceUpdate(BaseModel):
+ base_url: Optional[str] = None
+ auth_type: Optional[str] = None
+ username: Optional[str] = None
+ password: Optional[str] = None
+ is_enabled: Optional[bool] = None
+
+
+class NextcloudInstance(NextcloudInstanceBase):
+ id: int
+ is_enabled: bool
+ disabled_at: Optional[datetime] = None
+ created_at: datetime
+ updated_at: datetime
+
+
+class NextcloudStatus(BaseModel):
+ status: str
+ checked_at: datetime
+ version: Optional[str] = None
+ php: Optional[str] = None
+ db: Optional[str] = None
+ metrics: Dict[str, Optional[str]] = {}
+
+
+class NextcloudUserCreate(BaseModel):
+ uid: str
+ display_name: Optional[str] = None
+ email: Optional[str] = None
+ groups: List[str] = []
+ send_welcome: bool = True
+
+
+class NextcloudPasswordReset(BaseModel):
+ send_email: bool = True
+
+
+class NextcloudAuditLogEntry(BaseModel):
+ id: int
+ customer_id: int
+ instance_id: Optional[int] = None
+ event_type: str
+ request_meta: Optional[Dict] = None
+ response_meta: Optional[Dict] = None
+ actor_user_id: Optional[int] = None
+ created_at: datetime
diff --git a/app/modules/nextcloud/module.json b/app/modules/nextcloud/module.json
new file mode 100644
index 0000000..e70365c
--- /dev/null
+++ b/app/modules/nextcloud/module.json
@@ -0,0 +1,19 @@
+{
+ "name": "nextcloud",
+ "version": "1.0.0",
+ "description": "Nextcloud integration: status, users, and audit log",
+ "author": "BMC Networks",
+ "enabled": true,
+ "dependencies": [],
+ "table_prefix": "nextcloud_",
+ "api_prefix": "/api/v1/nextcloud",
+ "tags": [
+ "Nextcloud"
+ ],
+ "config": {
+ "safety_switches": {
+ "read_only": true,
+ "dry_run": true
+ }
+ }
+}
diff --git a/app/modules/nextcloud/templates/tab.html b/app/modules/nextcloud/templates/tab.html
new file mode 100644
index 0000000..1f41d46
--- /dev/null
+++ b/app/modules/nextcloud/templates/tab.html
@@ -0,0 +1,38 @@
+
+
+
+
Systemstatus
+
Ukendt
+
-
+
+
CPU load-
+
Free disk-
+
RAM usage-
+
OPCache hit rate-
+
+
+
+
+
+
+
Handlinger
+
+
+
+
+
+
+
+
Nøgletal
+
File count growth-
+
Public shares uden password-
+
Active users-
+
+
+
+
+
Historik
+
Ingen events endnu.
+
+
+
diff --git a/app/modules/sag/backend/router.py b/app/modules/sag/backend/router.py
index 904ed45..d5d2445 100644
--- a/app/modules/sag/backend/router.py
+++ b/app/modules/sag/backend/router.py
@@ -1,8 +1,23 @@
import logging
-from typing import List, Optional
-from fastapi import APIRouter, HTTPException, Query
-from app.core.database import execute_query
+import os
+import shutil
+from pathlib import Path
from datetime import datetime
+from typing import List, Optional
+from uuid import uuid4
+
+from fastapi import APIRouter, HTTPException, Query, UploadFile, File
+from fastapi.responses import FileResponse
+from app.core.database import execute_query, execute_query_single
+from app.core.config import settings
+from app.services.email_service import EmailService
+
+try:
+ import extract_msg
+except ImportError:
+ extract_msg = None
+import email
+from email.header import decode_header
logger = logging.getLogger(__name__)
router = APIRouter()
@@ -39,46 +54,98 @@ async def list_sager(
# If tag filter, filter in Python after fetch
if tag:
- case_ids = [case['id'] for case in cases]
+ case_ids = [case["id"] for case in cases]
if case_ids:
tag_query = "SELECT sag_id FROM sag_tags WHERE tag_navn = %s AND deleted_at IS NULL"
tagged_cases = execute_query(tag_query, (tag,))
- tagged_ids = set(t['sag_id'] for t in tagged_cases)
- cases = [c for c in cases if c['id'] in tagged_ids]
+ tagged_ids = set(t["sag_id"] for t in tagged_cases)
+ cases = [c for c in cases if c["id"] in tagged_ids]
return cases
except Exception as e:
logger.error("â Error listing cases: %s", e)
raise HTTPException(status_code=500, detail="Failed to list cases")
+@router.get("/sag/sale-items/all")
+async def list_all_sale_items(
+ type: Optional[str] = Query(None),
+ status: Optional[str] = Query(None),
+ q: Optional[str] = Query(None),
+ customer_id: Optional[int] = Query(None),
+ sag_id: Optional[int] = Query(None),
+ date_from: Optional[str] = Query(None),
+ date_to: Optional[str] = Query(None),
+):
+ """List all sale items across cases (orders overview)."""
+ try:
+ query = """
+ SELECT si.*, s.titel AS sag_titel, s.customer_id, c.name AS customer_name
+ FROM sag_salgsvarer si
+ JOIN sag_sager s ON s.id = si.sag_id
+ LEFT JOIN customers c ON c.id = s.customer_id
+ WHERE s.deleted_at IS NULL
+ """
+ params = []
+
+ if type:
+ query += " AND LOWER(si.type) = %s"
+ params.append(type.lower())
+ if status:
+ query += " AND LOWER(si.status) = %s"
+ params.append(status.lower())
+ if q:
+ query += " AND (LOWER(si.description) LIKE %s OR LOWER(si.external_ref) LIKE %s OR LOWER(s.titel) LIKE %s OR LOWER(c.name) LIKE %s)"
+ like = f"%{q.lower()}%"
+ params.extend([like, like, like, like])
+ if customer_id:
+ query += " AND s.customer_id = %s"
+ params.append(customer_id)
+ if sag_id:
+ query += " AND s.id = %s"
+ params.append(sag_id)
+ if date_from:
+ query += " AND si.line_date >= %s"
+ params.append(date_from)
+ if date_to:
+ query += " AND si.line_date <= %s"
+ params.append(date_to)
+
+ query += " ORDER BY si.line_date DESC NULLS LAST, si.id DESC"
+
+ return execute_query(query, tuple(params)) or []
+ except Exception as e:
+ logger.error("â Error listing sale items: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to list sale items")
+
@router.post("/sag")
async def create_sag(data: dict):
"""Create a new case."""
try:
- if not data.get('titel'):
+ if not data.get("titel"):
raise HTTPException(status_code=400, detail="titel is required")
- if not data.get('customer_id'):
+ if not data.get("customer_id"):
raise HTTPException(status_code=400, detail="customer_id is required")
query = """
INSERT INTO sag_sager
- (titel, beskrivelse, type, status, customer_id, ansvarlig_bruger_id, deadline)
- VALUES (%s, %s, %s, %s, %s, %s, %s)
+ (titel, beskrivelse, type, status, customer_id, ansvarlig_bruger_id, created_by_user_id, deadline)
+ VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
RETURNING *
"""
params = (
- data.get('titel'),
- data.get('beskrivelse', ''),
- data.get('type', 'ticket'),
- data.get('status', 'ĂĽben'),
- data.get('customer_id'),
- data.get('ansvarlig_bruger_id'),
- data.get('deadline'),
+ data.get("titel"),
+ data.get("beskrivelse", ""),
+ data.get("type", "ticket"),
+ data.get("status", "ĂĽben"),
+ data.get("customer_id"),
+ data.get("ansvarlig_bruger_id"),
+ data.get("created_by_user_id", 1),
+ data.get("deadline"),
)
result = execute_query(query, params)
if result:
- logger.info("✅ Case created: %s", result[0]['id'])
+ logger.info("✅ Case created: %s", result[0]["id"])
return result[0]
raise HTTPException(status_code=500, detail="Failed to create case")
except Exception as e:
@@ -110,7 +177,7 @@ async def update_sag(sag_id: int, updates: dict):
raise HTTPException(status_code=404, detail="Case not found")
# Build dynamic update query
- allowed_fields = ['titel', 'beskrivelse', 'type', 'status', 'ansvarlig_bruger_id', 'deadline']
+ allowed_fields = ["titel", "beskrivelse", "type", "status", "ansvarlig_bruger_id", "deadline"]
set_clauses = []
params = []
@@ -123,7 +190,7 @@ async def update_sag(sag_id: int, updates: dict):
raise HTTPException(status_code=400, detail="No valid fields to update")
params.append(sag_id)
- query = f"UPDATE sag_sager SET {', '.join(set_clauses)} WHERE id = %s RETURNING *"
+ query = f"UPDATE sag_sager SET {', '.join(set_clauses)} WHERE id = %s RETURNING *"
result = execute_query(query, tuple(params))
if result:
@@ -193,11 +260,11 @@ async def get_relationer(sag_id: int):
async def create_relation(sag_id: int, data: dict):
"""Add a relation to another case."""
try:
- if not data.get('mĂĽlsag_id') or not data.get('relationstype'):
+ if not data.get("mĂĽlsag_id") or not data.get("relationstype"):
raise HTTPException(status_code=400, detail="mĂĽlsag_id and relationstype required")
- mĂĽlsag_id = data.get('mĂĽlsag_id')
- relationstype = data.get('relationstype')
+ mĂĽlsag_id = data.get("mĂĽlsag_id")
+ relationstype = data.get("relationstype")
# Validate both cases exist
check1 = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
@@ -272,7 +339,7 @@ async def get_tags(sag_id: int):
async def add_tag(sag_id: int, data: dict):
"""Add a tag to a case."""
try:
- if not data.get('tag_navn'):
+ if not data.get("tag_navn"):
raise HTTPException(status_code=400, detail="tag_navn is required")
check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
@@ -284,10 +351,10 @@ async def add_tag(sag_id: int, data: dict):
VALUES (%s, %s)
RETURNING *
"""
- result = execute_query(query, (sag_id, data.get('tag_navn')))
+ result = execute_query(query, (sag_id, data.get("tag_navn")))
if result:
- logger.info("✅ Tag added: %s -> %s", sag_id, data.get('tag_navn'))
+ logger.info("✅ Tag added: %s -> %s", sag_id, data.get("tag_navn"))
return result[0]
raise HTTPException(status_code=500, detail="Failed to add tag")
except HTTPException:
@@ -321,27 +388,223 @@ async def delete_tag(sag_id: int, tag_id: int):
raise HTTPException(status_code=500, detail="Failed to delete tag")
+# ============================================================================
+# CUSTOMERS - Case Customers (Many-to-Many)
+# ============================================================================
+
+@router.get("/sag/{sag_id}/customers")
+async def list_case_customers(sag_id: int):
+ """List customers associated with a case."""
+ try:
+ query = """
+ SELECT sk.*, c.name as customer_name, c.email as customer_email
+ FROM sag_kunder sk
+ JOIN customers c ON sk.customer_id = c.id
+ WHERE sk.sag_id = %s AND sk.deleted_at IS NULL
+ """
+ result = execute_query(query, (sag_id,))
+ return result
+ except Exception as e:
+ logger.error("â Error listing case customers: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to list case customers")
+
+@router.post("/sag/{sag_id}/customers")
+async def add_case_customer(sag_id: int, data: dict):
+ """Add a customer to a case."""
+ try:
+ if not data.get("customer_id"):
+ raise HTTPException(status_code=400, detail="customer_id is required")
+
+ role = data.get("role", "Kunde")
+
+ # Check if already exists
+ check = execute_query(
+ "SELECT id FROM sag_kunder WHERE sag_id = %s AND customer_id = %s AND deleted_at IS NULL",
+ (sag_id, data["customer_id"])
+ )
+ if check:
+ return check[0] # Already linked
+
+ query = """
+ INSERT INTO sag_kunder (sag_id, customer_id, role)
+ VALUES (%s, %s, %s)
+ RETURNING *
+ """
+ result = execute_query(query, (sag_id, data["customer_id"], role))
+
+ if result:
+ logger.info("✅ Customer %s added to case %s", data["customer_id"], sag_id)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to add case customer")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("â Error adding case customer: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to add case customer")
+
+@router.delete("/sag/{sag_id}/customers/{customer_id}")
+async def remove_case_customer(sag_id: int, customer_id: int):
+ """Remove customer from case."""
+ try:
+ query = "UPDATE sag_kunder SET deleted_at = NOW() WHERE sag_id = %s AND customer_id = %s RETURNING id"
+ result = execute_query(query, (sag_id, customer_id))
+
+ if result:
+ logger.info("✅ Customer %s removed from case %s", customer_id, sag_id)
+ return {"status": "deleted"}
+ raise HTTPException(status_code=404, detail="Customer link not found")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("â Error removing case customer: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to remove case customer")
+
+
+# ============================================================================
+# CONTACTS - Case Contacts
+# ============================================================================
+
+@router.get("/sag/{sag_id}/contacts")
+async def list_case_contacts(sag_id: int):
+ """List contacts associated with a case."""
+ try:
+ query = """
+ SELECT sk.*, c.first_name, c.last_name, c.email, c.phone
+ FROM sag_kontakter sk
+ JOIN contacts c ON sk.contact_id = c.id
+ WHERE sk.sag_id = %s AND sk.deleted_at IS NULL
+ """
+ result = execute_query(query, (sag_id,))
+ return result
+ except Exception as e:
+ logger.error("â Error listing case contacts: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to list case contacts")
+
+@router.post("/sag/{sag_id}/contacts")
+async def add_case_contact(sag_id: int, data: dict):
+ """Add a contact to a case."""
+ try:
+ if not data.get("contact_id"):
+ raise HTTPException(status_code=400, detail="contact_id is required")
+
+ role = data.get("role", "Kontakt")
+
+ # Check if already exists
+ check = execute_query(
+ "SELECT id FROM sag_kontakter WHERE sag_id = %s AND contact_id = %s AND deleted_at IS NULL",
+ (sag_id, data["contact_id"])
+ )
+ if check:
+ return check[0] # Already linked
+
+ query = """
+ INSERT INTO sag_kontakter (sag_id, contact_id, role)
+ VALUES (%s, %s, %s)
+ RETURNING *
+ """
+ result = execute_query(query, (sag_id, data["contact_id"], role))
+
+ if result:
+ logger.info("✅ Contact added to case %s: %s", sag_id, data["contact_id"])
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to add contact")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("â Error adding case contact: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to add contact")
+
+@router.delete("/sag/{sag_id}/contacts/{contact_id}")
+async def remove_case_contact(sag_id: int, contact_id: int):
+ """Remove contact from case."""
+ try:
+ query = "UPDATE sag_kontakter SET deleted_at = NOW() WHERE sag_id = %s AND contact_id = %s RETURNING id"
+ result = execute_query(query, (sag_id, contact_id))
+
+ if result:
+ logger.info("✅ Contact %s removed from case %s", contact_id, sag_id)
+ return {"status": "deleted"}
+ raise HTTPException(status_code=404, detail="Contact link not found")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("â Error removing case contact: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to remove case contact")
+
+
# ============================================================================
# HARDWARE - Placeholder endpoints for frontend compatibility
# ============================================================================
@router.get("/sag/{sag_id}/hardware")
async def list_case_hardware(sag_id: int):
- """List hardware associated with a case. Placeholder endpoint."""
- # TODO: Implement when hardware-case relation is defined
- return []
+ """List hardware associated with a case."""
+ try:
+ query = """
+ SELECT sh.id as relation_id, h.*
+ FROM sag_hardware sh
+ JOIN hardware_assets h ON sh.hardware_id = h.id
+ WHERE sh.sag_id = %s AND sh.deleted_at IS NULL
+ ORDER BY sh.created_at DESC
+ """
+ result = execute_query(query, (sag_id,))
+ return result
+ except Exception as e:
+ logger.error("â Error listing case hardware: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to list case hardware")
@router.post("/sag/{sag_id}/hardware")
-async def add_case_hardware(sag_id: int):
- """Add hardware to case. Placeholder endpoint."""
- # TODO: Implement when hardware-case relation is defined
- return {"message": "Hardware endpoint not yet implemented"}
+async def add_case_hardware(sag_id: int, data: dict):
+ """Add hardware to case."""
+ try:
+ if not data.get("hardware_id"):
+ raise HTTPException(status_code=400, detail="hardware_id is required")
+
+ hardware_id = data.get("hardware_id")
+
+ # Check if already linked
+ check = execute_query(
+ "SELECT id FROM sag_hardware WHERE sag_id = %s AND hardware_id = %s AND deleted_at IS NULL",
+ (sag_id, hardware_id)
+ )
+ if check:
+ return check[0] # Already linked
+
+ query = """
+ INSERT INTO sag_hardware (sag_id, hardware_id, note)
+ VALUES (%s, %s, %s)
+ ON CONFLICT (sag_id, hardware_id)
+ DO UPDATE SET deleted_at = NULL, note = EXCLUDED.note
+ RETURNING *
+ """
+ result = execute_query(query, (sag_id, hardware_id, data.get("note")))
+
+ if result:
+ logger.info("✅ Hardware %s added to case %s", hardware_id, sag_id)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to add hardware")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("â Error adding case hardware: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to add hardware")
@router.delete("/sag/{sag_id}/hardware/{hardware_id}")
async def remove_case_hardware(sag_id: int, hardware_id: int):
- """Remove hardware from case. Placeholder endpoint."""
- # TODO: Implement when hardware-case relation is defined
- return {"message": "Hardware endpoint not yet implemented"}
+ """Remove hardware from case."""
+ try:
+ query = "UPDATE sag_hardware SET deleted_at = NOW() WHERE sag_id = %s AND hardware_id = %s RETURNING id"
+ result = execute_query(query, (sag_id, hardware_id))
+
+ if result:
+ logger.info("✅ Hardware %s removed from case %s", hardware_id, sag_id)
+ return {"status": "deleted"}
+ raise HTTPException(status_code=404, detail="Hardware link not found")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("â Error removing case hardware: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to remove hardware")
# ============================================================================
@@ -350,18 +613,745 @@ async def remove_case_hardware(sag_id: int, hardware_id: int):
@router.get("/sag/{sag_id}/locations")
async def list_case_locations(sag_id: int):
- """List locations associated with a case. Placeholder endpoint."""
- # TODO: Implement when location-case relation is defined
- return []
+ """List locations associated with a case."""
+ try:
+ query = """
+ SELECT sl.id as relation_id, l.*
+ FROM sag_lokationer sl
+ JOIN locations_locations l ON sl.location_id = l.id
+ WHERE sl.sag_id = %s AND sl.deleted_at IS NULL
+ ORDER BY sl.created_at DESC
+ """
+ result = execute_query(query, (sag_id,))
+ return result
+ except Exception as e:
+ logger.error("â Error listing case locations: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to list case locations")
@router.post("/sag/{sag_id}/locations")
-async def add_case_location(sag_id: int):
- """Add location to case. Placeholder endpoint."""
- # TODO: Implement when location-case relation is defined
- return {"message": "Location endpoint not yet implemented"}
+async def add_case_location(sag_id: int, data: dict):
+ """Add location to case."""
+ try:
+ if not data.get("location_id"):
+ raise HTTPException(status_code=400, detail="location_id is required")
+
+ location_id = data.get("location_id")
+
+ # Check if already linked
+ check = execute_query(
+ "SELECT id FROM sag_lokationer WHERE sag_id = %s AND location_id = %s AND deleted_at IS NULL",
+ (sag_id, location_id)
+ )
+ if check:
+ return check[0] # Already linked
+
+ query = """
+ INSERT INTO sag_lokationer (sag_id, location_id, note)
+ VALUES (%s, %s, %s)
+ ON CONFLICT (sag_id, location_id)
+ DO UPDATE SET deleted_at = NULL, note = EXCLUDED.note
+ RETURNING *
+ """
+ result = execute_query(query, (sag_id, location_id, data.get("note")))
+
+ if result:
+ logger.info("✅ Location %s added to case %s", location_id, sag_id)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to add location")
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error("â Error adding case location: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to add location")
@router.delete("/sag/{sag_id}/locations/{location_id}")
async def remove_case_location(sag_id: int, location_id: int):
- """Remove location from case. Placeholder endpoint."""
- # TODO: Implement when location-case relation is defined
- return {"message": "Location endpoint not yet implemented"}
+ """Remove location from case."""
+ try:
+ query = "UPDATE sag_lokationer SET deleted_at = NOW() WHERE sag_id = %s AND location_id = %s RETURNING id"
+ result = execute_query(query, (sag_id, location_id))
+
+ if result:
+            logger.info("✅ Location %s removed from case %s", location_id, sag_id)
+ return {"status": "deleted"}
+ raise HTTPException(status_code=404, detail="Location link not found")
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error removing case location: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to remove location")
+
+# ============================================================================
+# VAREKØB & SALG - Aggregation (Case Tree)
+# ============================================================================
+
+@router.get("/sag/{sag_id}/varekob-salg")
+async def get_varekob_salg(sag_id: int, include_subcases: bool = True):
+ """Aggregate time entries (and future sales items) for a case tree."""
+ try:
+ check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
+ if not check:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ if include_subcases:
+ case_tree_query = """
+ WITH RECURSIVE normalized_relations AS (
+ SELECT
+ CASE
+                    CASE
+                        WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN målsag_id
+                        WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN kilde_sag_id
+                        ELSE kilde_sag_id
+                    END AS parent_id,
+                    CASE
+                        WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN kilde_sag_id
+                        WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN målsag_id
+                        ELSE målsag_id
+                    END AS child_id
+ FROM sag_relationer
+ WHERE deleted_at IS NULL
+ ),
+ case_tree AS (
+ SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL
+ UNION
+ SELECT nr.child_id
+ FROM normalized_relations nr
+ JOIN case_tree ct ON nr.parent_id = ct.id
+ )
+ SELECT s.id, s.titel
+ FROM sag_sager s
+ JOIN case_tree ct ON s.id = ct.id
+ WHERE s.deleted_at IS NULL
+ ORDER BY s.id
+ """
+ case_tree = execute_query(case_tree_query, (sag_id,))
+
+ time_query = """
+ WITH RECURSIVE normalized_relations AS (
+ SELECT
+ CASE
+                    CASE
+                        WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN målsag_id
+                        WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN kilde_sag_id
+                        ELSE kilde_sag_id
+                    END AS parent_id,
+                    CASE
+                        WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN kilde_sag_id
+                        WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN målsag_id
+                        ELSE målsag_id
+                    END AS child_id
+ FROM sag_relationer
+ WHERE deleted_at IS NULL
+ ),
+ case_tree AS (
+ SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL
+ UNION
+ SELECT nr.child_id
+ FROM normalized_relations nr
+ JOIN case_tree ct ON nr.parent_id = ct.id
+ )
+ SELECT t.*, s.titel AS source_sag_titel
+ FROM tmodule_times t
+ JOIN case_tree ct ON t.sag_id = ct.id
+ LEFT JOIN sag_sager s ON s.id = t.sag_id
+ ORDER BY t.worked_date DESC NULLS LAST, t.id DESC
+ """
+ time_entries = execute_query(time_query, (sag_id,))
+
+ sale_items_query = """
+ WITH RECURSIVE normalized_relations AS (
+ SELECT
+ CASE
+                    CASE
+                        WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN målsag_id
+                        WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN kilde_sag_id
+                        ELSE kilde_sag_id
+                    END AS parent_id,
+                    CASE
+                        WHEN LOWER(relationstype) IN ('afledt af', 'afledt_af') THEN kilde_sag_id
+                        WHEN LOWER(relationstype) IN ('årsag til', 'årsag_til') THEN målsag_id
+                        ELSE målsag_id
+                    END AS child_id
+ FROM sag_relationer
+ WHERE deleted_at IS NULL
+ ),
+ case_tree AS (
+ SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL
+ UNION
+ SELECT nr.child_id
+ FROM normalized_relations nr
+ JOIN case_tree ct ON nr.parent_id = ct.id
+ )
+ SELECT si.*, s.titel AS source_sag_titel
+ FROM sag_salgsvarer si
+ JOIN case_tree ct ON si.sag_id = ct.id
+ LEFT JOIN sag_sager s ON s.id = si.sag_id
+ ORDER BY si.line_date DESC NULLS LAST, si.id DESC
+ """
+ sale_items = execute_query(sale_items_query, (sag_id,))
+ else:
+ case_tree = execute_query(
+ "SELECT id, titel FROM sag_sager WHERE id = %s AND deleted_at IS NULL",
+ (sag_id,)
+ )
+ time_query = """
+ SELECT t.*, s.titel AS source_sag_titel
+ FROM tmodule_times t
+ LEFT JOIN sag_sager s ON s.id = t.sag_id
+ WHERE t.sag_id = %s
+ ORDER BY t.worked_date DESC NULLS LAST, t.id DESC
+ """
+ time_entries = execute_query(time_query, (sag_id,))
+
+ sale_items_query = """
+ SELECT si.*, s.titel AS source_sag_titel
+ FROM sag_salgsvarer si
+ LEFT JOIN sag_sager s ON s.id = si.sag_id
+ WHERE si.sag_id = %s
+ ORDER BY si.line_date DESC NULLS LAST, si.id DESC
+ """
+ sale_items = execute_query(sale_items_query, (sag_id,))
+
+ total_entries = len(time_entries or [])
+ total_hours = 0
+ billable_hours = 0
+
+ sale_total = 0
+ purchase_total = 0
+
+ for entry in time_entries or []:
+ hours = entry.get("approved_hours") or entry.get("original_hours") or 0
+ total_hours += float(hours)
+ if entry.get("billable"):
+ billable_hours += float(hours)
+
+ for item in sale_items or []:
+ amount = item.get("amount") or 0
+ if (item.get("type") or "").lower() == "purchase":
+ purchase_total += float(amount)
+ else:
+ sale_total += float(amount)
+
+ return {
+ "sag_id": sag_id,
+ "include_subcases": include_subcases,
+ "case_tree": case_tree or [],
+ "time_entries": time_entries or [],
+ "sale_items": sale_items or [],
+ "totals": {
+ "total_entries": total_entries,
+ "total_hours": round(total_hours, 2),
+ "billable_hours": round(billable_hours, 2),
+ "sale_total": round(sale_total, 2),
+ "purchase_total": round(purchase_total, 2),
+ "net_total": round(sale_total - purchase_total, 2)
+ }
+ }
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error aggregating case data: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to aggregate case data")
+
+# ============================================================================
+# VAREKØB & SALG - CRUD (Case-linked sale items)
+# ============================================================================
+
+@router.get("/sag/{sag_id}/sale-items")
+async def list_sale_items(sag_id: int):
+ """List sale items for a case."""
+ try:
+ check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
+ if not check:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ query = """
+ SELECT si.*, s.titel AS source_sag_titel
+ FROM sag_salgsvarer si
+ LEFT JOIN sag_sager s ON s.id = si.sag_id
+ WHERE si.sag_id = %s
+ ORDER BY si.line_date DESC NULLS LAST, si.id DESC
+ """
+ return execute_query(query, (sag_id,)) or []
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error listing sale items: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to list sale items")
+
+
+@router.post("/sag/{sag_id}/sale-items")
+async def create_sale_item(sag_id: int, data: dict):
+ """Create a sale item for a case."""
+ try:
+ check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
+ if not check:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ description = data.get("description")
+ amount = data.get("amount")
+ if not description:
+ raise HTTPException(status_code=400, detail="description is required")
+ if amount is None:
+ raise HTTPException(status_code=400, detail="amount is required")
+
+ item_type = (data.get("type") or "sale").lower()
+ if item_type not in ("sale", "purchase"):
+ raise HTTPException(status_code=400, detail="type must be sale or purchase")
+
+ status = (data.get("status") or "draft").lower()
+ if status not in ("draft", "confirmed", "cancelled"):
+ raise HTTPException(status_code=400, detail="status must be draft, confirmed, or cancelled")
+
+ query = """
+ INSERT INTO sag_salgsvarer
+ (sag_id, type, description, quantity, unit, unit_price, amount, currency, status, line_date, external_ref)
+ VALUES
+ (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+ RETURNING *
+ """
+ params = (
+ sag_id,
+ item_type,
+ description,
+ data.get("quantity"),
+ data.get("unit"),
+ data.get("unit_price"),
+ amount,
+ data.get("currency", "DKK"),
+ status,
+ data.get("line_date"),
+ data.get("external_ref"),
+ )
+ result = execute_query(query, params)
+ if result:
+            logger.info("✅ Sale item created for case %s", sag_id)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to create sale item")
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error creating sale item: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to create sale item")
+
+
+@router.get("/sag/{sag_id}/sale-items/{item_id}")
+async def get_sale_item(sag_id: int, item_id: int):
+ """Get a single sale item for a case."""
+ try:
+ query = """
+ SELECT si.*, s.titel AS source_sag_titel
+ FROM sag_salgsvarer si
+ LEFT JOIN sag_sager s ON s.id = si.sag_id
+ WHERE si.sag_id = %s AND si.id = %s
+ """
+ result = execute_query(query, (sag_id, item_id))
+ if not result:
+ raise HTTPException(status_code=404, detail="Sale item not found")
+ return result[0]
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error getting sale item: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to get sale item")
+
+
+@router.patch("/sag/{sag_id}/sale-items/{item_id}")
+async def update_sale_item(sag_id: int, item_id: int, updates: dict):
+ """Update a sale item for a case."""
+ try:
+ check = execute_query(
+ "SELECT id FROM sag_salgsvarer WHERE id = %s AND sag_id = %s",
+ (item_id, sag_id)
+ )
+ if not check:
+ raise HTTPException(status_code=404, detail="Sale item not found")
+
+ allowed_fields = [
+ "type",
+ "description",
+ "quantity",
+ "unit",
+ "unit_price",
+ "amount",
+ "currency",
+ "status",
+ "line_date",
+ "external_ref",
+ ]
+
+ set_clauses = []
+ params = []
+
+ for field in allowed_fields:
+ if field in updates:
+ if field == "type":
+ value = (updates[field] or "").lower()
+ if value not in ("sale", "purchase"):
+ raise HTTPException(status_code=400, detail="type must be sale or purchase")
+ set_clauses.append("type = %s")
+ params.append(value)
+ elif field == "status":
+ value = (updates[field] or "").lower()
+ if value not in ("draft", "confirmed", "cancelled"):
+ raise HTTPException(status_code=400, detail="status must be draft, confirmed, or cancelled")
+ set_clauses.append("status = %s")
+ params.append(value)
+ else:
+ set_clauses.append(f"{field} = %s")
+ params.append(updates[field])
+
+ if not set_clauses:
+ raise HTTPException(status_code=400, detail="No valid fields to update")
+
+ params.extend([item_id, sag_id])
+ query = f"UPDATE sag_salgsvarer SET {', '.join(set_clauses)} WHERE id = %s AND sag_id = %s RETURNING *"
+ result = execute_query(query, tuple(params))
+ if result:
+            logger.info("✅ Sale item updated: %s", item_id)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to update sale item")
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error updating sale item: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to update sale item")
+
+
+@router.delete("/sag/{sag_id}/sale-items/{item_id}")
+async def delete_sale_item(sag_id: int, item_id: int):
+ """Delete a sale item for a case."""
+ try:
+ query = "DELETE FROM sag_salgsvarer WHERE id = %s AND sag_id = %s RETURNING id"
+ result = execute_query(query, (item_id, sag_id))
+ if result:
+            logger.info("✅ Sale item deleted: %s", item_id)
+ return {"status": "deleted", "id": item_id}
+ raise HTTPException(status_code=404, detail="Sale item not found")
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error deleting sale item: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to delete sale item")
+
+# ============================================================================
+# KOMMENTARER - Case Comments
+# ============================================================================
+
+@router.get("/sag/{sag_id}/kommentarer")
+async def get_kommentarer(sag_id: int):
+ """Get all comments for a case."""
+ try:
+ check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
+ if not check:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ query = "SELECT * FROM sag_kommentarer WHERE sag_id = %s AND deleted_at IS NULL ORDER BY created_at ASC"
+ result = execute_query(query, (sag_id,))
+ return result
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error getting comments: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to get comments")
+
+@router.post("/sag/{sag_id}/kommentarer")
+async def add_kommentar(sag_id: int, data: dict):
+ """Add a comment to a case."""
+ try:
+ if not data.get("indhold"):
+ raise HTTPException(status_code=400, detail="indhold is required")
+
+ check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
+ if not check:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ # Default author to current user or provided in body (if system)
+ # simplistic auth for now
+ forfatter = data.get("forfatter", "Bruger")
+ er_system_besked = data.get("er_system_besked", False)
+
+ query = """
+ INSERT INTO sag_kommentarer (sag_id, forfatter, indhold, er_system_besked)
+ VALUES (%s, %s, %s, %s)
+ RETURNING *
+ """
+ result = execute_query(query, (sag_id, forfatter, data.get("indhold"), er_system_besked))
+
+ if result:
+            logger.info("✅ Comment added to case %s by %s", sag_id, forfatter)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to add comment")
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error adding comment: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to add comment")
+
+
+# ============================================================================
+# FILES - Case Files
+# ============================================================================
+
+UPLOAD_BASE_PATH = Path(settings.UPLOAD_DIR).resolve()
+SAG_FILE_SUBDIR = "sag_files"
+(UPLOAD_BASE_PATH / SAG_FILE_SUBDIR).mkdir(parents=True, exist_ok=True)
+ALLOWED_EXTENSIONS = {ext.lower() for ext in settings.ALLOWED_EXTENSIONS}
+MAX_ATTACHMENT_SIZE = settings.EMAIL_MAX_UPLOAD_SIZE_MB * 1024 * 1024
+
+def _generate_stored_name(filename: str, subdir: str) -> str:
+ cleaned = Path(filename).name
+ unique = f"{uuid4().hex}_{cleaned}"
+ return f"{subdir}/{unique}"
+
+def _resolve_attachment_path(stored_name: str) -> Path:
+ return UPLOAD_BASE_PATH / stored_name
+
+def _store_upload_file(upload_file: UploadFile, subdir: str):
+ if not upload_file.filename:
+ raise HTTPException(400, detail="Filename missing")
+
+ ext = Path(upload_file.filename).suffix.lower().lstrip(".")
+ # Basic check - allow more types for generic files?
+ # if ext not in ALLOWED_EXTENSIONS: ...
+
+ upload_file.file.seek(0, os.SEEK_END)
+ size = upload_file.file.tell()
+ upload_file.file.seek(0)
+
+ if size > MAX_ATTACHMENT_SIZE:
+ raise HTTPException(400, detail=f"File too large (> {settings.EMAIL_MAX_UPLOAD_SIZE_MB}MB)")
+
+ stored_name = _generate_stored_name(upload_file.filename, subdir)
+ destination = _resolve_attachment_path(stored_name)
+ destination.parent.mkdir(parents=True, exist_ok=True)
+
+ try:
+ with destination.open("wb") as buffer:
+ shutil.copyfileobj(upload_file.file, buffer)
+ except Exception as e:
+ logger.error(f"Upload failed: {e}")
+ raise HTTPException(500, detail="Server upload failed")
+
+ return stored_name, size
+
+@router.get("/sag/{sag_id}/files")
+async def list_sag_files(sag_id: int):
+ """List files attached to a case."""
+ try:
+ query = """
+ SELECT * FROM sag_files
+ WHERE sag_id = %s
+ ORDER BY created_at DESC
+ """
+ files = execute_query(query, (sag_id,))
+ # Add download URL
+ if files:
+ for f in files:
+ f["download_url"] = f"/api/v1/sag/{sag_id}/files/{f['id']}"
+ return files or []
+ except Exception as e:
+        logger.error("❌ Error listing files: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to list files")
+
+@router.post("/sag/{sag_id}/files")
+async def upload_sag_files(sag_id: int, files: List[UploadFile] = File(...)):
+ """Upload files to a case."""
+ check = execute_query("SELECT id FROM sag_sager WHERE id = %s AND deleted_at IS NULL", (sag_id,))
+ if not check:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ saved_files = []
+
+ for file in files:
+ try:
+ stored_name, size = _store_upload_file(file, SAG_FILE_SUBDIR)
+
+ query = """
+ INSERT INTO sag_files (sag_id, filename, content_type, size_bytes, stored_name)
+ VALUES (%s, %s, %s, %s, %s)
+ RETURNING id, filename, created_at
+ """
+ result = execute_query(query, (sag_id, file.filename, file.content_type, size, stored_name))
+ if result:
+ saved = result[0]
+ saved["download_url"] = f"/api/v1/sag/{sag_id}/files/{saved['id']}"
+ saved_files.append(saved)
+ except HTTPException:
+ continue # Skip invalid
+ except Exception as e:
+ logger.error(f"Error saving file {file.filename}: {e}")
+ continue
+
+ return saved_files
+
+@router.get("/sag/{sag_id}/files/{file_id}")
+async def download_sag_file(sag_id: int, file_id: int):
+ """Download a specific file."""
+ query = "SELECT * FROM sag_files WHERE id = %s AND sag_id = %s"
+ result = execute_query(query, (file_id, sag_id))
+
+ if not result:
+ raise HTTPException(status_code=404, detail="File not found")
+
+ file_data = result[0]
+ path = _resolve_attachment_path(file_data["stored_name"])
+
+ if not path.exists():
+ raise HTTPException(status_code=404, detail="File lost on server")
+
+ return FileResponse(
+ path=path,
+ filename=file_data["filename"],
+ media_type=file_data.get("content_type", "application/octet-stream")
+ )
+
+@router.delete("/sag/{sag_id}/files/{file_id}")
+async def delete_sag_file(sag_id: int, file_id: int):
+ """Delete a file."""
+ query = "DELETE FROM sag_files WHERE id = %s AND sag_id = %s RETURNING stored_name"
+ result = execute_query(query, (file_id, sag_id))
+
+ if result:
+ # Clean up disk
+ path = _resolve_attachment_path(result[0]["stored_name"])
+ if path.exists():
+ try:
+ os.remove(path)
+ except:
+ pass
+ return {"status": "deleted"}
+ raise HTTPException(status_code=404, detail="File not found")
+
+# ============================================================================
+# EMAILS - Case Emails (Linked)
+# ============================================================================
+
+@router.post("/sag/{sag_id}/email-links")
+async def add_sag_email_link(sag_id: int, payload: dict):
+ """Link an existing email to a case."""
+ email_id = payload.get("email_id")
+ if not email_id:
+ raise HTTPException(status_code=400, detail="email_id required")
+
+ query = """
+ INSERT INTO sag_emails (sag_id, email_id)
+ VALUES (%s, %s)
+ ON CONFLICT DO NOTHING
+ RETURNING *
+ """
+ execute_query(query, (sag_id, email_id))
+ return {"status": "linked"}
+
+@router.get("/sag/{sag_id}/email-links")
+async def get_sag_emails(sag_id: int):
+ """Get emails linked to a case."""
+ query = """
+ SELECT e.*
+ FROM email_messages e
+ JOIN sag_emails se ON e.id = se.email_id
+ WHERE se.sag_id = %s
+ ORDER BY e.received_date DESC
+ """
+ return execute_query(query, (sag_id,)) or []
+
+@router.delete("/sag/{sag_id}/email-links/{email_id}")
+async def remove_sag_email_link(sag_id: int, email_id: int):
+ """Unlink an email."""
+ query = "DELETE FROM sag_emails WHERE sag_id = %s AND email_id = %s"
+ execute_query(query, (sag_id, email_id))
+ return {"status": "unlinked"}
+
+def _decode_header_str(header_val):
+ if not header_val: return ""
+ try:
+ decoded_list = decode_header(header_val)
+ result = ""
+ for content, encoding in decoded_list:
+ if isinstance(content, bytes):
+ if encoding:
+ try:
+ result += content.decode(encoding)
+ except:
+ result += content.decode('utf-8', errors='ignore')
+ else:
+ result += content.decode('utf-8', errors='ignore')
+ else:
+ result += str(content)
+ return result
+ except:
+ return str(header_val)
+
+@router.post("/sag/{sag_id}/upload-email")
+async def upload_sag_email(sag_id: int, file: UploadFile = File(...)):
+ """Upload .eml/.msg, parse it, save to email_messages, and link to case."""
+ content = await file.read()
+ filename = file.filename.lower()
+ email_data = {}
+ temp_id = uuid4().hex
+
+ # 1. Parse File
+ if filename.endswith('.msg'):
+ if not extract_msg: raise HTTPException(500, "extract-msg missing")
+ import io
+ msg = extract_msg.Message(io.BytesIO(content))
+ email_data = {
+ 'message_id': msg.messageId or f"msg-{temp_id}",
+ 'subject': msg.subject or "No Subject",
+ 'sender_email': msg.sender or "",
+ 'sender_name': msg.sender or "",
+ 'recipient_email': msg.to or "",
+ 'cc': msg.cc or "",
+ 'body_text': msg.body,
+ 'body_html': msg.htmlBody,
+ 'received_date': msg.date or datetime.now(),
+ 'folder': 'Imported',
+ 'attachments': [],
+ 'has_attachments': False,
+ 'attachment_count': 0
+ }
+ elif filename.endswith('.eml'):
+ msg = email.message_from_bytes(content)
+ body_text = ""
+ if msg.is_multipart():
+ for part in msg.walk():
+ if part.get_content_type() == "text/plain":
+ body_text = part.get_payload(decode=True).decode('utf-8', errors='ignore')
+ break
+ else:
+ body_text = msg.get_payload(decode=True).decode('utf-8', errors='ignore')
+
+ email_data = {
+ 'message_id': msg.get('Message-ID', f"eml-{temp_id}"),
+ 'subject': _decode_header_str(msg.get('Subject', 'No Subject')),
+ 'sender_email': _decode_header_str(msg.get('From', '')),
+ 'sender_name': _decode_header_str(msg.get('From', '')),
+ 'recipient_email': _decode_header_str(msg.get('To', '')),
+ 'cc': _decode_header_str(msg.get('Cc', '')),
+ 'received_date': datetime.now(),
+ 'body_text': body_text,
+ 'body_html': "",
+ 'folder': 'Imported',
+ 'has_attachments': False,
+ 'attachment_count': 0,
+ 'attachments': []
+ }
+ else:
+ raise HTTPException(400, "Only .msg or .eml files allowed")
+
+ # 2. Save Email via Service (deduplicate)
+ svc = EmailService()
+ existing = execute_query_single("SELECT id FROM email_messages WHERE message_id = %s", (email_data['message_id'],))
+
+ if existing:
+ email_id = existing['id']
+ else:
+ # We try to strict save, keeping it simple
+ email_id = await svc.save_email(email_data)
+
+ # 3. Link
+ await add_sag_email_link(sag_id, {"email_id": email_id})
+ return {"status": "imported", "email_id": email_id}
+
+# ============================================================================
+# SOLUTIONS
+# ============================================================================
+from . import solutions
+router.include_router(solutions.router)
diff --git a/app/modules/sag/backend/solutions.py b/app/modules/sag/backend/solutions.py
new file mode 100644
index 0000000..76d752a
--- /dev/null
+++ b/app/modules/sag/backend/solutions.py
@@ -0,0 +1,108 @@
+import logging
+from fastapi import APIRouter, HTTPException, Depends
+from typing import Optional
+
+from app.core.database import execute_query
+from app.models.schemas import Solution, SolutionCreate, SolutionUpdate
+
+logger = logging.getLogger(__name__)
+router = APIRouter()
+
+@router.get("/sag/{sag_id}/solution", response_model=Optional[Solution])
+async def get_solution(sag_id: int):
+ """Get the solution associated with a case."""
+ try:
+ query = "SELECT * FROM sag_solutions WHERE sag_id = %s"
+ result = execute_query(query, (sag_id,))
+ if not result:
+ return None
+ return result[0]
+ except Exception as e:
+        logger.error("❌ Error getting solution for case %s: %s", sag_id, e)
+ raise HTTPException(status_code=500, detail="Failed to get solution")
+
+@router.post("/sag/{sag_id}/solution", response_model=Solution)
+async def create_solution(sag_id: int, solution: SolutionCreate):
+ """Create a solution for a case."""
+ try:
+ # Check if case exists
+ case_check = execute_query("SELECT id FROM sag_sager WHERE id = %s", (sag_id,))
+ if not case_check:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ # Check if solution already exists
+ check = execute_query("SELECT id FROM sag_solutions WHERE sag_id = %s", (sag_id,))
+ if check:
+ raise HTTPException(status_code=400, detail="Solution already exists for this case")
+
+ query = """
+ INSERT INTO sag_solutions
+ (sag_id, title, description, solution_type, result, created_by_user_id)
+ VALUES (%s, %s, %s, %s, %s, %s)
+ RETURNING *
+ """
+ params = (
+ sag_id,
+ solution.title,
+ solution.description,
+ solution.solution_type,
+ solution.result,
+ solution.created_by_user_id
+ )
+
+ result = execute_query(query, params)
+ if result:
+            logger.info("✅ Solution created for case: %s", sag_id)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to create solution")
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error creating solution: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to create solution")
+
+@router.patch("/sag/{sag_id}/solution", response_model=Solution)
+async def update_solution(sag_id: int, updates: SolutionUpdate):
+ """Update a solution."""
+ try:
+ # Check if solution exists
+ check = execute_query("SELECT id FROM sag_solutions WHERE sag_id = %s", (sag_id,))
+ if not check:
+ raise HTTPException(status_code=404, detail="Solution not found")
+
+ # Build dynamic update query
+ set_clauses = []
+ params = []
+
+ # Helper to check and add params
+ if updates.title is not None:
+ set_clauses.append("title = %s")
+ params.append(updates.title)
+ if updates.description is not None:
+ set_clauses.append("description = %s")
+ params.append(updates.description)
+ if updates.solution_type is not None:
+ set_clauses.append("solution_type = %s")
+ params.append(updates.solution_type)
+ if updates.result is not None:
+ set_clauses.append("result = %s")
+ params.append(updates.result)
+
+ if not set_clauses:
+ raise HTTPException(status_code=400, detail="No fields to update")
+
+ set_clauses.append("updated_at = NOW()")
+
+ params.append(sag_id)
+ query = f"UPDATE sag_solutions SET {', '.join(set_clauses)} WHERE sag_id = %s RETURNING *"
+
+ result = execute_query(query, tuple(params))
+ if result:
+            logger.info("✅ Solution updated for case: %s", sag_id)
+ return result[0]
+ raise HTTPException(status_code=500, detail="Failed to update solution")
+ except HTTPException:
+ raise
+ except Exception as e:
+        logger.error("❌ Error updating solution: %s", e)
+ raise HTTPException(status_code=500, detail="Failed to update solution")
diff --git a/app/modules/sag/frontend/views.py b/app/modules/sag/frontend/views.py
index 5cef803..ff5f810 100644
--- a/app/modules/sag/frontend/views.py
+++ b/app/modules/sag/frontend/views.py
@@ -109,6 +109,18 @@ async def sager_liste(
logger.error("â Error displaying case list: %s", e)
raise HTTPException(status_code=500, detail="Failed to load case list")
+@router.get("/sag/new", response_class=HTMLResponse)
+async def opret_sag_side(request: Request):
+ """Show create case form."""
+ return templates.TemplateResponse("modules/sag/templates/create.html", {"request": request})
+
+@router.get("/sag/varekob-salg", response_class=HTMLResponse)
+async def sag_varekob_salg(request: Request):
+ """Display orders overview for all purchases and sales."""
+ return templates.TemplateResponse("modules/sag/templates/varekob_salg.html", {
+ "request": request,
+ })
+
@router.get("/sag/{sag_id}", response_class=HTMLResponse)
async def sag_detaljer(request: Request, sag_id: int):
"""Display case details."""
@@ -122,9 +134,20 @@ async def sag_detaljer(request: Request, sag_id: int):
sag = sag_result[0]
- # Fetch tags
- tags_query = "SELECT * FROM sag_tags WHERE sag_id = %s AND deleted_at IS NULL ORDER BY created_at DESC"
+ # Fetch tags (Support both Legacy sag_tags and New entity_tags)
+ # First try the new system (entity_tags) which the valid frontend uses
+ tags_query = """
+ SELECT t.name as tag_navn
+ FROM tags t
+ JOIN entity_tags et ON t.id = et.tag_id
+ WHERE et.entity_type = 'case' AND et.entity_id = %s
+ """
tags = execute_query(tags_query, (sag_id,))
+
+ # If empty, try legacy table fallback
+ if not tags:
+ tags_query_legacy = "SELECT * FROM sag_tags WHERE sag_id = %s AND deleted_at IS NULL ORDER BY created_at DESC"
+ tags = execute_query(tags_query_legacy, (sag_id,))
# Fetch relations
relationer_query = """
@@ -140,6 +163,92 @@ async def sag_detaljer(request: Request, sag_id: int):
"""
relationer = execute_query(relationer_query, (sag_id, sag_id))
+ # --- Relation Tree Construction ---
+ relation_tree = []
+ try:
+ # 1. Get all connected case IDs (Recursive CTE)
+ tree_ids_query = """
+ WITH RECURSIVE CaseTree AS (
+ SELECT id FROM sag_sager WHERE id = %s
+ UNION
+                SELECT CASE WHEN sr.kilde_sag_id = ct.id THEN sr.målsag_id ELSE sr.kilde_sag_id END
+                FROM sag_relationer sr
+                JOIN CaseTree ct ON sr.kilde_sag_id = ct.id OR sr.målsag_id = ct.id
+ WHERE sr.deleted_at IS NULL
+ )
+ SELECT id FROM CaseTree LIMIT 50;
+ """
+ tree_ids_rows = execute_query(tree_ids_query, (sag_id,))
+ tree_ids = [r['id'] for r in tree_ids_rows]
+
+ if tree_ids:
+ # 2. Fetch details
+ placeholders = ','.join(['%s'] * len(tree_ids))
+ tree_cases_query = f"SELECT id, titel, status FROM sag_sager WHERE id IN ({placeholders})"
+ tree_cases = {c['id']: c for c in execute_query(tree_cases_query, tuple(tree_ids))}
+
+ # 3. Fetch edges
+ tree_edges_query = f"""
+                SELECT id, kilde_sag_id, målsag_id, relationstype
+                FROM sag_relationer
+                WHERE deleted_at IS NULL
+                  AND kilde_sag_id IN ({placeholders})
+                  AND målsag_id IN ({placeholders})
+ """
+ tree_edges = execute_query(tree_edges_query, tuple(tree_ids) * 2)
+
+ # 4. Build Graph
+ children_map = {cid: [] for cid in tree_ids}
+ parents_map = {cid: [] for cid in tree_ids}
+
+ for edge in tree_edges:
+                k, m, rtype = edge['kilde_sag_id'], edge['målsag_id'], edge['relationstype'].lower()
+                parent, child = k, m  # Default (e.g. Relateret til)
+
+                if rtype == 'afledt af':  # m is parent of k
+                    parent, child = m, k
+                elif rtype == 'årsag til':  # k is parent of m
+ parent, child = k, m
+
+ if parent in children_map:
+ children_map[parent].append({
+ 'id': child,
+ 'type': edge['relationstype'],
+ 'rel_id': edge['id']
+ })
+ if child in parents_map:
+ parents_map[child].append(parent)
+
+ # 5. Identify Roots and Build
+ roots = [cid for cid in tree_ids if not parents_map[cid]]
+ if not roots and tree_ids: roots = [min(tree_ids)] # Fallback
+
+ def build_tree_node(cid, visited):
+ if cid in visited: return None
+ visited.add(cid)
+ node_case = tree_cases.get(cid)
+ if not node_case: return None
+
+ children_nodes = []
+ for child_info in children_map.get(cid, []):
+ c_node = build_tree_node(child_info['id'], visited.copy())
+ if c_node:
+ c_node['relation_type'] = child_info['type']
+ c_node['relation_id'] = child_info['rel_id']
+ children_nodes.append(c_node)
+
+ return {
+ 'case': node_case,
+ 'children': children_nodes,
+ 'is_current': cid == sag_id
+ }
+
+ relation_tree = [build_tree_node(r, set()) for r in roots]
+ relation_tree = [n for n in relation_tree if n]
+ except Exception as e:
+ logger.error(f"Error building relation tree: {e}")
+ relation_tree = []
+
# Fetch customer info if customer_id exists
customer = None
hovedkontakt = None
@@ -161,17 +270,111 @@ async def sag_detaljer(request: Request, sag_id: int):
kontakt_result = execute_query(kontakt_query, (sag['customer_id'],))
if kontakt_result:
hovedkontakt = kontakt_result[0]
+
+ # Fetch prepaid cards for customer
+ # Cast remaining_hours to float to avoid Jinja formatting issues with Decimal
+ # DEBUG: Logging customer ID
+ cid = sag.get('customer_id')
+ logger.info(f"đ Looking up prepaid cards for Sag {sag_id}, Customer ID: {cid} (Type: {type(cid)})")
+
+ pc_query = """
+ SELECT id, card_number, CAST(remaining_hours AS FLOAT) as remaining_hours
+ FROM tticket_prepaid_cards
+ WHERE customer_id = %s
+ AND status = 'active'
+ AND remaining_hours > 0
+ ORDER BY created_at DESC
+ """
+ prepaid_cards = execute_query(pc_query, (cid,))
+ logger.info(f"đł Found {len(prepaid_cards)} prepaid cards for customer {cid}")
+ else:
+ prepaid_cards = []
+
+ # Fetch Nextcloud Instance for this customer
+ nextcloud_instance = None
+ if customer:
+ nc_query = "SELECT * FROM nextcloud_instances WHERE customer_id = %s AND deleted_at IS NULL"
+ nc_result = execute_query(nc_query, (customer['id'],))
+ if nc_result:
+ nextcloud_instance = nc_result[0]
+
+ # Fetch linked contacts
+ contacts_query = """
+ SELECT sk.*, c.first_name || ' ' || c.last_name as contact_name, c.email as contact_email
+ FROM sag_kontakter sk
+ JOIN contacts c ON sk.contact_id = c.id
+ WHERE sk.sag_id = %s AND sk.deleted_at IS NULL
+ """
+ contacts = execute_query(contacts_query, (sag_id,))
+
+ # Fetch linked customers
+ customers_query = """
+ SELECT sk.*, c.name as customer_name, c.email as customer_email
+ FROM sag_kunder sk
+ JOIN customers c ON sk.customer_id = c.id
+ WHERE sk.sag_id = %s AND sk.deleted_at IS NULL
+ """
+ customers = execute_query(customers_query, (sag_id,))
+ # Fetch comments
+ comments_query = "SELECT * FROM sag_kommentarer WHERE sag_id = %s AND deleted_at IS NULL ORDER BY created_at ASC"
+ comments = execute_query(comments_query, (sag_id,))
+
+ # Fetch Solution
+ solution_query = "SELECT * FROM sag_solutions WHERE sag_id = %s"
+ solution_res = execute_query(solution_query, (sag_id,))
+ solution = solution_res[0] if solution_res else None
+
+ # Fetch Time Entries
+ time_query = "SELECT * FROM tmodule_times WHERE sag_id = %s ORDER BY worked_date DESC"
+ time_entries = execute_query(time_query, (sag_id,))
+
+ # Check for nextcloud integration (case-insensitive, insensitive to whitespace)
+ logger.info(f"Checking tags for Nextcloud on case {sag_id}: {tags}")
+ is_nextcloud = any(t['tag_navn'] and t['tag_navn'].strip().lower() == 'nextcloud' for t in tags)
+ logger.info(f"is_nextcloud result: {is_nextcloud}")
+
return templates.TemplateResponse("modules/sag/templates/detail.html", {
"request": request,
"case": sag,
"customer": customer,
"hovedkontakt": hovedkontakt,
+ "contacts": contacts,
+ "customers": customers,
+ "prepaid_cards": prepaid_cards,
"tags": tags,
+
"relationer": relationer,
+ "relation_tree": relation_tree,
+ "comments": comments,
+ "solution": solution,
+ "time_entries": time_entries,
+ "is_nextcloud": is_nextcloud,
+ "nextcloud_instance": nextcloud_instance,
})
except HTTPException:
raise
except Exception as e:
logger.error("â Error displaying case details: %s", e)
raise HTTPException(status_code=500, detail="Failed to load case details")
+
+
+@router.get("/sag/{sag_id}/edit", response_class=HTMLResponse)
+async def sag_rediger(request: Request, sag_id: int):
+ """Render the edit-case form; raises 404 if the case is missing or soft-deleted, 500 on any other failure."""
+ try:
+ sag_query = "SELECT * FROM sag_sager WHERE id = %s AND deleted_at IS NULL"  # soft-deleted cases are treated as not found
+ sag_result = execute_query(sag_query, (sag_id,))
+
+ if not sag_result:
+ raise HTTPException(status_code=404, detail="Case not found")
+
+ return templates.TemplateResponse("modules/sag/templates/edit.html", {
+ "request": request,
+ "case": sag_result[0],  # exactly one row expected: id is the primary key
+ })
+ except HTTPException:
+ raise  # re-raise HTTP errors (e.g. the 404 above) so they are not masked as 500s below
+ except Exception as e:
+ logger.error("â Error loading edit case page: %s", e)  # NOTE(review): leading "â" looks like a mojibake'd emoji (cf. intact markers elsewhere) — confirm file encoding
+ raise HTTPException(status_code=500, detail="Failed to load edit case page")
diff --git a/app/modules/sag/migrations/002_varekob_salg.sql b/app/modules/sag/migrations/002_varekob_salg.sql
new file mode 100644
index 0000000..10280c1
--- /dev/null
+++ b/app/modules/sag/migrations/002_varekob_salg.sql
@@ -0,0 +1,36 @@
+-- Sag Module: Varekøb & Salg — one row per sale/purchase line item attached to a case
+
+CREATE TABLE IF NOT EXISTS sag_salgsvarer ( -- NOTE(review): no deleted_at column, unlike sibling sag_* tables queried with "deleted_at IS NULL" — confirm hard delete is intended
+ id SERIAL PRIMARY KEY,
+ sag_id INTEGER NOT NULL REFERENCES sag_sager(id) ON DELETE CASCADE, -- deleting a case removes its line items too
+ type VARCHAR(20) NOT NULL DEFAULT 'sale', -- allowed values: 'sale' | 'purchase' (not enforced by a CHECK)
+ description TEXT NOT NULL,
+ quantity NUMERIC(12, 2), -- nullable: a line may be a flat amount with no quantity
+ unit VARCHAR(50), -- free-text unit label (e.g. hours, pcs) — presumably; confirm against UI
+ unit_price NUMERIC(12, 2), -- NUMERIC keeps money exact (never FLOAT)
+ amount NUMERIC(12, 2) NOT NULL, -- line total; not derived from quantity * unit_price by the DB
+ currency VARCHAR(10) NOT NULL DEFAULT 'DKK', -- currency code per line
+ status VARCHAR(20) NOT NULL DEFAULT 'draft', -- allowed values: 'draft' | 'confirmed' | 'cancelled' (not enforced by a CHECK)
+ line_date DATE, -- calendar date only, no time-of-day
+ external_ref VARCHAR(100), -- reference into an external system, presumably e-conomic; no uniqueness enforced
+ created_at TIMESTAMP NOT NULL DEFAULT NOW(), -- NOTE(review): naive TIMESTAMP — consider TIMESTAMPTZ for instants
+ updated_at TIMESTAMP NOT NULL DEFAULT NOW() -- maintained by the trigger below
+);
+
+CREATE INDEX IF NOT EXISTS idx_sag_salgsvarer_sag_id ON sag_salgsvarer(sag_id); -- main access path: all lines for one case
+CREATE INDEX IF NOT EXISTS idx_sag_salgsvarer_type ON sag_salgsvarer(type);
+CREATE INDEX IF NOT EXISTS idx_sag_salgsvarer_status ON sag_salgsvarer(status);
+
+CREATE OR REPLACE FUNCTION update_sag_salgsvarer_updated_at() -- touches updated_at on every row UPDATE (wired up below)
+RETURNS TRIGGER AS $$
+BEGIN
+ NEW.updated_at = NOW();
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+DROP TRIGGER IF EXISTS trigger_sag_salgsvarer_updated_at ON sag_salgsvarer; -- drop-then-create keeps the migration re-runnable
+CREATE TRIGGER trigger_sag_salgsvarer_updated_at
+BEFORE UPDATE ON sag_salgsvarer
+FOR EACH ROW
+EXECUTE FUNCTION update_sag_salgsvarer_updated_at();
diff --git a/app/modules/sag/templates/create.html b/app/modules/sag/templates/create.html
index 21fe16a..8140df6 100644
--- a/app/modules/sag/templates/create.html
+++ b/app/modules/sag/templates/create.html
@@ -4,106 +4,37 @@
{% block extra_css %}
{% endblock %}
{% block content %}
-