2026-01-31 23:16:24 +01:00
|
|
|
|
"""
|
|
|
|
|
|
Location Module - Backend API Router
|
|
|
|
|
|
Provides REST endpoints for location management
|
|
|
|
|
|
|
|
|
|
|
|
Phase 2 Implementation: Core CRUD Operations (8 endpoints) + Phase 2.5 (5 bulk/advanced endpoints)
|
|
|
|
|
|
|
|
|
|
|
|
Phase 2 Endpoints:
|
|
|
|
|
|
1. GET /api/v1/locations - List all locations (with filters, pagination)
|
|
|
|
|
|
2. POST /api/v1/locations - Create new location
|
|
|
|
|
|
3. GET /api/v1/locations/{id} - Get single location details
|
|
|
|
|
|
4. PATCH /api/v1/locations/{id} - Update location
|
|
|
|
|
|
5. DELETE /api/v1/locations/{id} - Soft-delete location
|
|
|
|
|
|
6. POST /api/v1/locations/{id}/restore - Restore deleted location
|
|
|
|
|
|
7. GET /api/v1/locations/{id}/audit - Get audit trail for location
|
|
|
|
|
|
8. GET /api/v1/locations/search - Search locations by name/address
|
|
|
|
|
|
|
|
|
|
|
|
Phase 2.5 Endpoints (Bulk Operations & Advanced Queries):
|
|
|
|
|
|
9. POST /api/v1/locations/bulk-update - Bulk update multiple locations
|
|
|
|
|
|
10. POST /api/v1/locations/bulk-delete - Bulk soft-delete multiple locations
|
|
|
|
|
|
11. GET /api/v1/locations/by-type/{location_type} - Filter locations by type
|
|
|
|
|
|
12. GET /api/v1/locations/near-me - Proximity search by coordinates
|
|
|
|
|
|
13. GET /api/v1/locations/stats - Statistics about all locations

Additional endpoints implemented in this module:
- POST /api/v1/locations/bulk-create - Create a root location with floors/rooms (wizard)
- GET /api/v1/locations/by-customer/{customer_id} - Locations linked to a customer
- GET /api/v1/locations/by-contact/{contact_id} - Locations via a contact's companies
- GET /api/v1/locations/by-ids - Fetch locations by comma-separated IDs
"""
|
|
|
|
|
|
|
|
|
|
|
|
from fastapi import APIRouter, HTTPException, Query, Request
|
|
|
|
|
|
from fastapi.responses import RedirectResponse
|
|
|
|
|
|
from typing import Optional, List, Any
|
|
|
|
|
|
from datetime import datetime, time, date
|
|
|
|
|
|
import logging
|
|
|
|
|
|
import json
|
|
|
|
|
|
from pydantic import ValidationError
|
|
|
|
|
|
|
|
|
|
|
|
from app.core.database import execute_query
|
2026-02-11 23:51:21 +01:00
|
|
|
|
from app.core.contact_utils import get_contact_customer_ids
|
2026-01-31 23:16:24 +01:00
|
|
|
|
from app.modules.locations.models.schemas import (
|
|
|
|
|
|
Location, LocationCreate, LocationUpdate, LocationDetail,
|
|
|
|
|
|
AuditLogEntry, LocationSearchResponse,
|
|
|
|
|
|
Contact, ContactCreate, ContactUpdate,
|
|
|
|
|
|
OperatingHours, OperatingHoursCreate, OperatingHoursUpdate,
|
|
|
|
|
|
Service, ServiceCreate, ServiceUpdate,
|
|
|
|
|
|
Capacity, CapacityCreate, CapacityUpdate,
|
2026-02-09 15:30:07 +01:00
|
|
|
|
BulkUpdateRequest, BulkDeleteRequest, LocationStats,
|
|
|
|
|
|
LocationWizardCreateRequest, LocationWizardCreateResponse
|
2026-01-31 23:16:24 +01:00
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
# Router instance; the application mounts it under the /api/v1 prefix,
# so the bare "/locations" paths below become /api/v1/locations.
router = APIRouter()
# Module-level logger following the standard getLogger(__name__) convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _normalize_form_data(form_data: Any) -> dict:
|
|
|
|
|
|
"""Normalize form data into LocationCreate-compatible payload."""
|
|
|
|
|
|
data = dict(form_data)
|
|
|
|
|
|
|
|
|
|
|
|
def _to_int(value: Any) -> Optional[int]:
|
|
|
|
|
|
return int(value) if value not in (None, "") else None
|
|
|
|
|
|
|
|
|
|
|
|
def _to_float(value: Any) -> Optional[float]:
|
|
|
|
|
|
return float(value) if value not in (None, "") else None
|
|
|
|
|
|
|
|
|
|
|
|
is_active_value = data.get("is_active")
|
|
|
|
|
|
data["is_active"] = is_active_value in ("on", "true", "1", "yes", True)
|
|
|
|
|
|
data["parent_location_id"] = _to_int(data.get("parent_location_id"))
|
|
|
|
|
|
data["customer_id"] = _to_int(data.get("customer_id"))
|
|
|
|
|
|
data["latitude"] = _to_float(data.get("latitude"))
|
|
|
|
|
|
data["longitude"] = _to_float(data.get("longitude"))
|
|
|
|
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 1. GET /api/v1/locations - List all locations with filters and pagination
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations", response_model=List[Location])
async def list_locations(
    location_type: Optional[str] = Query(None, description="Filter by location type (kompleks, bygning, etage, customer_site, rum, kantine, moedelokale, vehicle)"),
    is_active: Optional[bool] = Query(None, description="Filter by active status"),
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=1000)
):
    """
    List all locations with optional filters and pagination.

    Query Parameters:
    - location_type: Filter by type (kompleks, bygning, etage, customer_site, rum, kantine, moedelokale, vehicle)
    - is_active: Filter by active status (true/false)
    - skip: Pagination offset (default 0)
    - limit: Results per page (default 50, max 1000)

    Returns: List of Location objects ordered by name
    """
    try:
        # Assemble WHERE conditions; soft-deleted rows are always excluded.
        conditions = ["l.deleted_at IS NULL"]
        query_args = []

        for clause, value in (
            ("l.location_type = %s", location_type),
            ("l.is_active = %s", is_active),
        ):
            if value is not None:
                conditions.append(clause)
                query_args.append(value)

        # Pagination placeholders come last, matching LIMIT/OFFSET order.
        query_args.extend((limit, skip))

        sql = f"""
            SELECT l.*, p.name AS parent_location_name, c.name AS customer_name
            FROM locations_locations l
            LEFT JOIN locations_locations p ON l.parent_location_id = p.id
            LEFT JOIN customers c ON l.customer_id = c.id
            WHERE {" AND ".join(conditions)}
            ORDER BY l.name ASC
            LIMIT %s OFFSET %s
        """

        rows = execute_query(sql, tuple(query_args))
        logger.info(f"📍 Listed {len(rows)} locations (skip={skip}, limit={limit})")

        return [Location(**row) for row in rows]

    except Exception as e:
        logger.error(f"❌ Error listing locations: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to list locations"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 2. POST /api/v1/locations - Create new location
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations", response_model=Location)
async def create_location(request: Request):
    """
    Create a new location.

    Accepts either a JSON body (API clients) or an HTML form post,
    detected via the Content-Type header. Form posts may carry a
    ``redirect_to`` control field: when present, the response is a 303
    redirect (any ``{id}`` placeholder in it is replaced with the new
    location's id) instead of the JSON body.

    Request body: LocationCreate model
    Returns: Created Location object with ID

    Raises:
    - 400: Duplicate name, unknown customer_id or parent_location_id
    - 422: Payload fails LocationCreate validation
    - 500: Database error
    """
    try:
        # Branch on content type: JSON clients never get redirected,
        # form posts are normalized (string -> typed fields) first.
        content_type = request.headers.get("content-type", "")
        if "application/json" in content_type:
            payload = await request.json()
            redirect_to = None
        else:
            form = await request.form()
            payload = _normalize_form_data(form)
            # redirect_to is a form-only control field, not a model field,
            # so it must be removed before model validation.
            redirect_to = payload.pop("redirect_to", None)

        # Validate explicitly so schema errors surface as 422 rather than
        # falling through to the generic 500 handler at the bottom.
        try:
            data = LocationCreate(**payload)
        except ValidationError as e:
            logger.warning("⚠️ Invalid location payload")
            raise HTTPException(status_code=422, detail=e.errors())

        # Check for duplicate name among non-deleted locations.
        # NOTE(review): check-then-insert is racy under concurrent requests;
        # a partial unique index on name WHERE deleted_at IS NULL would
        # close the window — confirm schema.
        check_query = "SELECT id FROM locations_locations WHERE name = %s AND deleted_at IS NULL"
        existing = execute_query(check_query, (data.name,))

        if existing:
            logger.warning(f"⚠️ Duplicate location name: {data.name}")
            raise HTTPException(
                status_code=400,
                detail=f"Location with name '{data.name}' already exists"
            )

        # Referential check: customer must exist and not be soft-deleted.
        if data.customer_id is not None:
            customer_query = "SELECT id FROM customers WHERE id = %s AND deleted_at IS NULL"
            customer = execute_query(customer_query, (data.customer_id,))
            if not customer:
                logger.warning(f"⚠️ Invalid customer_id: {data.customer_id}")
                raise HTTPException(
                    status_code=400,
                    detail="customer_id does not exist"
                )

        # Validate parent_location_id if provided
        if data.parent_location_id is not None:
            parent_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
            parent = execute_query(parent_query, (data.parent_location_id,))
            if not parent:
                logger.warning(f"⚠️ Invalid parent_location_id: {data.parent_location_id}")
                raise HTTPException(
                    status_code=400,
                    detail="parent_location_id does not exist"
                )

        # INSERT into locations_locations table; RETURNING * hands back the
        # full row so no follow-up SELECT is needed.
        insert_query = """
            INSERT INTO locations_locations (
                name, location_type, parent_location_id, customer_id, address_street, address_city,
                address_postal_code, address_country, latitude, longitude,
                phone, email, notes, is_active, created_at, updated_at
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())
            RETURNING *
        """

        params = (
            data.name,
            data.location_type,
            data.parent_location_id,
            data.customer_id,
            data.address_street,
            data.address_city,
            data.address_postal_code,
            data.address_country,
            data.latitude,
            data.longitude,
            data.phone,
            data.email,
            data.notes,
            data.is_active
        )

        result = execute_query(insert_query, params)

        if not result:
            logger.error("❌ Failed to create location")
            raise HTTPException(status_code=500, detail="Failed to create location")

        location = Location(**result[0])

        # Log audit entry: 'created' event with the full payload snapshot;
        # user_id is recorded as NULL (no auth context here).
        audit_query = """
            INSERT INTO locations_audit_log (location_id, event_type, user_id, changes, created_at)
            VALUES (%s, %s, %s, %s, NOW())
        """
        changes = {"after": data.model_dump()}
        execute_query(audit_query, (location.id, 'created', None, json.dumps(changes)))

        logger.info(f"✅ Location created: {data.name} (ID: {location.id})")
        # Form flow: 303 See Other so the browser re-requests the target
        # with GET (standard POST-redirect-GET pattern).
        if redirect_to:
            if "{id}" in redirect_to:
                redirect_to = redirect_to.replace("{id}", str(location.id))
            return RedirectResponse(url=redirect_to, status_code=303)
        return location

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error creating location: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to create location"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
2026-02-09 15:30:07 +01:00
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 11b. GET /api/v1/locations/by-customer/{customer_id} - Filter by customer
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/by-customer/{customer_id}", response_model=List[Location])
async def get_locations_by_customer(customer_id: int):
    """
    Get all locations linked to a customer.

    Path parameter: customer_id
    Returns: List of Location objects ordered by name
    """
    sql = """
        SELECT l.*, p.name AS parent_location_name, c.name AS customer_name
        FROM locations_locations l
        LEFT JOIN locations_locations p ON l.parent_location_id = p.id
        LEFT JOIN customers c ON l.customer_id = c.id
        WHERE l.customer_id = %s AND l.deleted_at IS NULL
        ORDER BY l.name ASC
    """
    try:
        rows = execute_query(sql, (customer_id,))
        locations = []
        for row in rows:
            locations.append(Location(**row))
        return locations

    except Exception as exc:
        logger.error(f"❌ Error getting customer locations: {str(exc)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to get locations by customer"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
2026-02-11 23:51:21 +01:00
|
|
|
|
@router.get("/locations/by-contact/{contact_id}", response_model=List[Location])
async def get_locations_by_contact(contact_id: int):
    """
    Get all locations linked to a contact via the contact's companies.

    Path parameter: contact_id
    Returns: List of Location objects ordered by name
    """
    try:
        linked_customers = get_contact_customer_ids(contact_id)
        # A contact with no linked companies has no locations by definition.
        if not linked_customers:
            return []

        in_clause = ",".join("%s" for _ in linked_customers)
        sql = f"""
            SELECT l.*, p.name AS parent_location_name, c.name AS customer_name
            FROM locations_locations l
            LEFT JOIN locations_locations p ON l.parent_location_id = p.id
            LEFT JOIN customers c ON l.customer_id = c.id
            WHERE l.customer_id IN ({in_clause}) AND l.deleted_at IS NULL
            ORDER BY l.name ASC
        """
        rows = execute_query(sql, tuple(linked_customers))
        return [Location(**row) for row in rows]

    except Exception as exc:
        logger.error(f"❌ Error getting contact locations: {str(exc)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to get locations by contact"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
2026-02-09 15:30:07 +01:00
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 11c. GET /api/v1/locations/by-ids - Fetch by IDs
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/by-ids", response_model=List[Location])
async def get_locations_by_ids(ids: str = Query(..., description="Comma-separated location IDs")):
    """
    Get locations by a comma-separated list of IDs.
    """
    try:
        # Keep only tokens that are pure digits; int() tolerates surrounding
        # whitespace, so " 3" parses fine after the isdigit check.
        parsed_ids = []
        for token in ids.split(','):
            if token.strip().isdigit():
                parsed_ids.append(int(token))

        if not parsed_ids:
            return []

        in_clause = ",".join("%s" for _ in parsed_ids)
        sql = f"""
            SELECT l.*, p.name AS parent_location_name, c.name AS customer_name
            FROM locations_locations l
            LEFT JOIN locations_locations p ON l.parent_location_id = p.id
            LEFT JOIN customers c ON l.customer_id = c.id
            WHERE l.id IN ({in_clause}) AND l.deleted_at IS NULL
            ORDER BY l.name ASC
        """
        rows = execute_query(sql, tuple(parsed_ids))
        return [Location(**row) for row in rows]

    except Exception as exc:
        logger.error(f"❌ Error getting locations by ids: {str(exc)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to get locations by ids"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# =========================================================================
|
|
|
|
|
|
# 2b. POST /api/v1/locations/bulk-create - Create location with floors/rooms
|
|
|
|
|
|
# =========================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/bulk-create", response_model=LocationWizardCreateResponse)
async def bulk_create_location_hierarchy(data: LocationWizardCreateRequest):
    """
    Create a root location with floors and rooms in a single request.

    When ``auto_suffix`` is false, all names (root, floors, rooms) must be
    unique within the payload and not already exist; otherwise colliding
    names get a " (2)", " (3)", ... suffix appended automatically.
    Floors inherit the root's address/contact fields; rooms inherit them
    too and are parented under their floor.

    NOTE(review): the inserts are not wrapped in a single transaction, so
    a mid-sequence failure leaves a partially created hierarchy — confirm
    whether execute_query autocommits per statement.

    Request body: LocationWizardCreateRequest
    Returns: IDs of created locations
    """
    try:
        root = data.root
        auto_suffix = data.auto_suffix

        # Collect every name in the payload (root, then floors, then rooms)
        # for the up-front duplicate checks below.
        payload_names = [root.name]
        for floor in data.floors:
            payload_names.append(floor.name)
            for room in floor.rooms:
                payload_names.append(room.name)

        # Case-insensitive intra-payload duplicate detection (only enforced
        # when auto_suffix is off — otherwise collisions get suffixed).
        normalized_names = [name.strip().lower() for name in payload_names if name]
        if not auto_suffix and len(normalized_names) != len(set(normalized_names)):
            raise HTTPException(
                status_code=400,
                detail="Duplicate names found in wizard payload"
            )

        # Without auto-suffixing, also reject names already present in the DB.
        if not auto_suffix:
            placeholders = ",".join(["%s"] * len(payload_names))
            existing_query = f"""
                SELECT name FROM locations_locations
                WHERE name IN ({placeholders}) AND deleted_at IS NULL
            """
            existing = execute_query(existing_query, tuple(payload_names))
            if existing:
                existing_names = ", ".join(sorted({row.get("name") for row in existing if row.get("name")}))
                raise HTTPException(
                    status_code=400,
                    detail=f"Locations already exist with names: {existing_names}"
                )

        # Referential checks on the root's FK targets.
        if root.customer_id is not None:
            customer_query = "SELECT id FROM customers WHERE id = %s AND deleted_at IS NULL"
            customer = execute_query(customer_query, (root.customer_id,))
            if not customer:
                raise HTTPException(status_code=400, detail="customer_id does not exist")

        if root.parent_location_id is not None:
            parent_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
            parent = execute_query(parent_query, (root.parent_location_id,))
            if not parent:
                raise HTTPException(status_code=400, detail="parent_location_id does not exist")

        # Shared INSERT used for root, floors and rooms alike.
        insert_query = """
            INSERT INTO locations_locations (
                name, location_type, parent_location_id, customer_id, address_street, address_city,
                address_postal_code, address_country, latitude, longitude,
                phone, email, notes, is_active, created_at, updated_at
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())
            RETURNING *
        """

        # Names reserved during this request (lowercased), so suffix
        # resolution also sees names we have inserted moments ago.
        reserved_names = set()

        def _normalize_name(value: str) -> str:
            # Case-insensitive comparison key; None-safe.
            return (value or "").strip().lower()

        def _name_exists(value: str) -> bool:
            # A name is taken if reserved in this request or live in the DB.
            normalized = _normalize_name(value)
            if normalized in reserved_names:
                return True
            check_query = "SELECT 1 FROM locations_locations WHERE name = %s AND deleted_at IS NULL"
            existing = execute_query(check_query, (value,))
            return bool(existing)

        def _reserve_name(value: str) -> None:
            normalized = _normalize_name(value)
            if normalized:
                reserved_names.add(normalized)

        def _resolve_unique_name(base_name: str) -> str:
            # With auto_suffix off, names were already validated up front,
            # so just reserve and return as-is.
            if not auto_suffix:
                _reserve_name(base_name)
                return base_name
            base_name = base_name.strip()
            if not _name_exists(base_name):
                _reserve_name(base_name)
                return base_name
            # Append " (2)", " (3)", ... until an unused candidate is found.
            suffix = 2
            while True:
                candidate = f"{base_name} ({suffix})"
                if not _name_exists(candidate):
                    _reserve_name(candidate)
                    return candidate
                suffix += 1

        def insert_location_record(
            name: str,
            location_type: str,
            parent_location_id: Optional[int],
            customer_id: Optional[int],
            address_street: Optional[str],
            address_city: Optional[str],
            address_postal_code: Optional[str],
            address_country: Optional[str],
            latitude: Optional[float],
            longitude: Optional[float],
            phone: Optional[str],
            email: Optional[str],
            notes: Optional[str],
            is_active: bool
        ) -> Location:
            # Insert one row and return it as a Location model.
            params = (
                name,
                location_type,
                parent_location_id,
                customer_id,
                address_street,
                address_city,
                address_postal_code,
                address_country,
                latitude,
                longitude,
                phone,
                email,
                notes,
                is_active
            )
            result = execute_query(insert_query, params)
            if not result:
                raise HTTPException(status_code=500, detail="Failed to create location")
            return Location(**result[0])

        # 1) Create the root location.
        resolved_root_name = _resolve_unique_name(root.name)
        root_location = insert_location_record(
            name=resolved_root_name,
            location_type=root.location_type,
            parent_location_id=root.parent_location_id,
            customer_id=root.customer_id,
            address_street=root.address_street,
            address_city=root.address_city,
            address_postal_code=root.address_postal_code,
            address_country=root.address_country,
            latitude=root.latitude,
            longitude=root.longitude,
            phone=root.phone,
            email=root.email,
            notes=root.notes,
            is_active=root.is_active
        )

        # Audit each created node individually (user_id is NULL here).
        audit_query = """
            INSERT INTO locations_audit_log (location_id, event_type, user_id, changes, created_at)
            VALUES (%s, %s, %s, %s, NOW())
        """
        root_changes = root.model_dump()
        # Record the possibly-suffixed name actually written to the DB.
        root_changes["name"] = resolved_root_name
        execute_query(audit_query, (root_location.id, 'created', None, json.dumps({"after": root_changes})))

        floor_ids: List[int] = []
        room_ids: List[int] = []

        # 2) Create floors under the root, and rooms under each floor.
        #    Floors/rooms inherit the root's address and contact fields.
        for floor in data.floors:
            resolved_floor_name = _resolve_unique_name(floor.name)
            floor_location = insert_location_record(
                name=resolved_floor_name,
                location_type=floor.location_type,
                parent_location_id=root_location.id,
                customer_id=root.customer_id,
                address_street=root.address_street,
                address_city=root.address_city,
                address_postal_code=root.address_postal_code,
                address_country=root.address_country,
                latitude=root.latitude,
                longitude=root.longitude,
                phone=root.phone,
                email=root.email,
                notes=None,
                is_active=floor.is_active
            )
            floor_ids.append(floor_location.id)
            execute_query(audit_query, (
                floor_location.id,
                'created',
                None,
                json.dumps({"after": {"name": resolved_floor_name, "location_type": floor.location_type, "parent_location_id": root_location.id}})
            ))

            for room in floor.rooms:
                resolved_room_name = _resolve_unique_name(room.name)
                room_location = insert_location_record(
                    name=resolved_room_name,
                    location_type=room.location_type,
                    parent_location_id=floor_location.id,
                    customer_id=root.customer_id,
                    address_street=root.address_street,
                    address_city=root.address_city,
                    address_postal_code=root.address_postal_code,
                    address_country=root.address_country,
                    latitude=root.latitude,
                    longitude=root.longitude,
                    phone=root.phone,
                    email=root.email,
                    notes=None,
                    is_active=room.is_active
                )
                room_ids.append(room_location.id)
                execute_query(audit_query, (
                    room_location.id,
                    'created',
                    None,
                    json.dumps({"after": {"name": resolved_room_name, "location_type": room.location_type, "parent_location_id": floor_location.id}})
                ))

        created_total = 1 + len(floor_ids) + len(room_ids)
        logger.info("✅ Wizard created %s locations (root=%s)", created_total, root_location.id)

        return LocationWizardCreateResponse(
            root_id=root_location.id,
            floor_ids=floor_ids,
            room_ids=room_ids,
            created_total=created_total
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error creating location hierarchy: {str(e)}")
        raise HTTPException(status_code=500, detail="Failed to create location hierarchy")
|
|
|
|
|
|
|
|
|
|
|
|
|
2026-01-31 23:16:24 +01:00
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 3. GET /api/v1/locations/{id} - Get single location with all relationships
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{id}", response_model=LocationDetail)
async def get_location(id: int):
    """
    Get detailed information about a single location including all relationships.

    Assembles the LocationDetail from several follow-up queries: contacts,
    operating hours, services, capacity, the ancestor breadcrumb (via a
    recursive CTE), direct children, and hardware assigned to the location.

    Path parameter: id (location ID)
    Returns: Full LocationDetail object with contacts, hours, services, capacity

    Raises:
    - 404: Location not found or deleted
    - 500: Database error
    """
    try:
        # Query location by id (exclude soft-deleted)
        query = """
            SELECT l.*, p.name AS parent_location_name, c.name AS customer_name
            FROM locations_locations l
            LEFT JOIN locations_locations p ON l.parent_location_id = p.id
            LEFT JOIN customers c ON l.customer_id = c.id
            WHERE l.id = %s AND l.deleted_at IS NULL
        """
        result = execute_query(query, (id,))

        if not result:
            logger.error(f"❌ Location not found: {id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {id} not found"
            )

        location = Location(**result[0])

        # Query related contacts (primary contact first, then by name).
        contacts_query = "SELECT * FROM locations_contacts WHERE location_id = %s ORDER BY is_primary DESC, contact_name ASC"
        contacts_result = execute_query(contacts_query, (id,))
        contacts = [dict(row) for row in contacts_result] if contacts_result else []

        # Query related operating hours
        hours_query = "SELECT * FROM locations_hours WHERE location_id = %s ORDER BY day_of_week ASC"
        hours_result = execute_query(hours_query, (id,))
        hours = [dict(row) for row in hours_result] if hours_result else []

        # Query related services
        services_query = "SELECT * FROM locations_services WHERE location_id = %s ORDER BY service_name ASC"
        services_result = execute_query(services_query, (id,))
        services = [dict(row) for row in services_result] if services_result else []

        # Query related capacity
        capacity_query = "SELECT * FROM locations_capacity WHERE location_id = %s ORDER BY capacity_type ASC"
        capacity_result = execute_query(capacity_query, (id,))
        capacity = [dict(row) for row in capacity_result] if capacity_result else []

        # Build hierarchy breadcrumb (ancestors from root to parent).
        # depth > 0 excludes the location itself; ORDER BY depth DESC puts
        # the root first so the breadcrumb reads root -> ... -> parent.
        hierarchy_query = """
            WITH RECURSIVE ancestors AS (
                SELECT id, name, location_type, parent_location_id, 0 AS depth
                FROM locations_locations
                WHERE id = %s
                UNION ALL
                SELECT l.id, l.name, l.location_type, l.parent_location_id, a.depth + 1
                FROM locations_locations l
                JOIN ancestors a ON l.id = a.parent_location_id
            )
            SELECT id, name, location_type, depth
            FROM ancestors
            WHERE depth > 0
            ORDER BY depth DESC;
        """
        hierarchy_result = execute_query(hierarchy_query, (id,))
        hierarchy = [dict(row) for row in hierarchy_result] if hierarchy_result else []

        # Fetch direct children for relationship tab
        children_query = """
            SELECT id, name, location_type
            FROM locations_locations
            WHERE parent_location_id = %s AND deleted_at IS NULL
            ORDER BY name ASC
        """
        children_result = execute_query(children_query, (id,))
        children = [dict(row) for row in children_result] if children_result else []

        # Fetch hardware assigned to this location
        hardware_query = """
            SELECT id, asset_type, brand, model, serial_number, status
            FROM hardware_assets
            WHERE current_location_id = %s AND deleted_at IS NULL
            ORDER BY brand ASC, model ASC, serial_number ASC
        """
        hardware_result = execute_query(hardware_query, (id,))
        hardware = [dict(row) for row in hardware_result] if hardware_result else []

        # Build LocationDetail response from the base row plus relationships.
        location_detail = LocationDetail(
            **location.model_dump(),
            hierarchy=hierarchy,
            children=children,
            hardware=hardware,
            contacts=contacts,
            hours=hours,
            services=services,
            capacity=capacity
        )

        logger.info(f"📍 Location retrieved: {location.name} (ID: {id})")
        return location_detail

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error retrieving location {id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to retrieve location"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 4. PATCH /api/v1/locations/{id} - Update location (partial)
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.patch("/locations/{id}", response_model=Location)
async def update_location(id: int, data: LocationUpdate):
    """
    Update a location (partial update).

    Only fields explicitly present in the request body are written.  An
    explicitly provided null now clears the corresponding column
    (previously null payload values were silently ignored, so a PATCH
    could never clear a nullable field such as notes or parent_location_id).

    Path parameter: id (location ID)
    Request body: LocationUpdate model (all fields optional)
    Returns: Updated Location object

    Raises:
    - 404: Location not found
    - 400: Duplicate name or validation error
    """
    try:
        # Check location exists (soft-deleted rows are not updatable)
        check_query = "SELECT * FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
        existing = execute_query(check_query, (id,))

        if not existing:
            logger.error(f"❌ Location not found for update: {id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {id} not found"
            )

        old_location = Location(**existing[0])

        # Check for duplicate name if name is being updated
        if data.name is not None and data.name != old_location.name:
            dup_query = "SELECT id FROM locations_locations WHERE name = %s AND id != %s AND deleted_at IS NULL"
            dup_check = execute_query(dup_query, (data.name, id))
            if dup_check:
                logger.warning(f"⚠️ Duplicate location name: {data.name}")
                raise HTTPException(
                    status_code=400,
                    detail=f"Location with name '{data.name}' already exists"
                )

        # Updatable fields (model field -> db column).  Kept as an explicit
        # allow-list so only known columns ever reach the SQL below.
        field_mapping = {
            'name': 'name',
            'location_type': 'location_type',
            'parent_location_id': 'parent_location_id',
            'customer_id': 'customer_id',
            'address_street': 'address_street',
            'address_city': 'address_city',
            'address_postal_code': 'address_postal_code',
            'address_country': 'address_country',
            'latitude': 'latitude',
            'longitude': 'longitude',
            'phone': 'phone',
            'email': 'email',
            'notes': 'notes',
            'is_active': 'is_active'
        }

        # exclude_unset distinguishes "field omitted" from "field explicitly
        # set to null", so explicit nulls are honoured as column clears.
        provided = data.model_dump(exclude_unset=True)

        update_parts = ["updated_at = NOW()"]
        params = []
        update_data = {}

        for key, db_column in field_mapping.items():
            if key not in provided:
                continue
            value = provided[key]

            # Referential / enum validation only applies to non-null values;
            # a null simply clears the column.
            if value is not None:
                if key == 'parent_location_id':
                    # Guard against direct self-reference.
                    # NOTE(review): deeper cycles (A -> B -> A) are not
                    # detected here — confirm whether that matters.
                    if value == id:
                        logger.warning("⚠️ parent_location_id cannot reference itself")
                        raise HTTPException(
                            status_code=400,
                            detail="parent_location_id cannot reference itself"
                        )
                    parent_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
                    parent = execute_query(parent_query, (value,))
                    if not parent:
                        logger.warning(f"⚠️ Invalid parent_location_id: {value}")
                        raise HTTPException(
                            status_code=400,
                            detail="parent_location_id does not exist"
                        )
                elif key == 'customer_id':
                    customer_query = "SELECT id FROM customers WHERE id = %s AND deleted_at IS NULL"
                    customer = execute_query(customer_query, (value,))
                    if not customer:
                        logger.warning(f"⚠️ Invalid customer_id: {value}")
                        raise HTTPException(
                            status_code=400,
                            detail="customer_id does not exist"
                        )
                elif key == 'location_type':
                    allowed_types = ['kompleks', 'bygning', 'etage', 'customer_site', 'rum', 'kantine', 'moedelokale', 'vehicle']
                    if value not in allowed_types:
                        logger.warning(f"⚠️ Invalid location_type: {value}")
                        raise HTTPException(
                            status_code=400,
                            detail=f"location_type must be one of: {', '.join(allowed_types)}"
                        )

            update_parts.append(f"{db_column} = %s")
            params.append(value)
            update_data[key] = value

        params.append(id)

        # Execute UPDATE.  Column names come from the allow-list above, so
        # the f-string cannot inject user-controlled SQL; values are bound.
        update_query = f"""
            UPDATE locations_locations
            SET {', '.join(update_parts)}
            WHERE id = %s
            RETURNING *
        """

        result = execute_query(update_query, tuple(params))

        if not result:
            logger.error(f"❌ Failed to update location {id}")
            raise HTTPException(status_code=500, detail="Failed to update location")

        updated_location = Location(**result[0])

        # Audit the change with before/after snapshots of the touched fields.
        audit_query = """
            INSERT INTO locations_audit_log (location_id, event_type, user_id, changes, created_at)
            VALUES (%s, %s, %s, %s, NOW())
        """
        changes = {
            "before": {k: v for k, v in old_location.model_dump().items() if k in update_data},
            "after": update_data
        }
        execute_query(audit_query, (id, 'updated', None, json.dumps(changes)))

        logger.info(f"🔄 Location updated: {updated_location.name} (ID: {id})")
        return updated_location

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error updating location {id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to update location"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 5. DELETE /api/v1/locations/{id} - Soft-delete location
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.delete("/locations/{id}", response_model=dict)
async def delete_location(id: int):
    """
    Soft-delete a location by stamping its deleted_at column.

    The row is kept for auditing and can later be brought back via the
    restore endpoint.

    Path parameter: id (location ID)
    Returns: Confirmation message

    Raises:
    - 404: Location not found or already deleted
    """
    try:
        # Fetch the live row; an already-deleted location counts as missing.
        row_set = execute_query(
            "SELECT * FROM locations_locations WHERE id = %s AND deleted_at IS NULL",
            (id,),
        )
        if not row_set:
            logger.error(f"❌ Location not found or already deleted: {id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {id} not found or already deleted"
            )

        target = Location(**row_set[0])

        # Stamp deleted_at instead of physically removing the row.
        soft_delete_sql = """
            UPDATE locations_locations
            SET deleted_at = NOW(), updated_at = NOW()
            WHERE id = %s
        """
        execute_query(soft_delete_sql, (id,))

        # Record the deletion in the audit trail.
        audit_sql = """
            INSERT INTO locations_audit_log (location_id, event_type, user_id, changes, created_at)
            VALUES (%s, %s, %s, %s, NOW())
        """
        execute_query(audit_sql, (id, 'deleted', None, json.dumps({"reason": "soft-delete"})))

        logger.info(f"🗑️ Location soft-deleted: {target.name} (ID: {id})")

        return {
            "status": "deleted",
            "id": id,
            "message": f"Location '{target.name}' has been deleted"
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error deleting location {id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to delete location"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 6. POST /api/v1/locations/{id}/restore - Restore soft-deleted location
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/{id}/restore", response_model=Location)
async def restore_location(id: int):
    """
    Restore a soft-deleted location by clearing deleted_at.

    Path parameter: id (location ID)
    Returns: Restored Location object

    Raises:
    - 404: Location not found or not deleted
    """
    try:
        # Only rows that are currently soft-deleted are restorable.
        deleted_rows = execute_query(
            "SELECT * FROM locations_locations WHERE id = %s AND deleted_at IS NOT NULL",
            (id,),
        )
        if not deleted_rows:
            logger.error(f"❌ Location not found or not deleted: {id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {id} not found or not deleted"
            )

        # Clear the tombstone and fetch the refreshed row in one statement.
        restore_sql = """
            UPDATE locations_locations
            SET deleted_at = NULL, updated_at = NOW()
            WHERE id = %s
            RETURNING *
        """
        restored_rows = execute_query(restore_sql, (id,))

        if not restored_rows:
            logger.error(f"❌ Failed to restore location {id}")
            raise HTTPException(status_code=500, detail="Failed to restore location")

        revived = Location(**restored_rows[0])

        # Audit-trail entry for the restore event.
        audit_sql = """
            INSERT INTO locations_audit_log (location_id, event_type, user_id, changes, created_at)
            VALUES (%s, %s, %s, %s, NOW())
        """
        execute_query(audit_sql, (id, 'restored', None, json.dumps({"reason": "restore"})))

        logger.info(f"♻️ Location restored: {revived.name} (ID: {id})")
        return revived

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error restoring location {id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to restore location"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 7. GET /api/v1/locations/{id}/audit - Get audit trail for location
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{id}/audit", response_model=List[AuditLogEntry])
async def get_location_audit(
    id: int,
    limit: int = Query(50, ge=1, le=1000)
):
    """
    Get audit trail (change history) for a location.

    Path parameters:
    - id: Location ID

    Query parameters:
    - limit: Max results (default 50, max 1000)

    Returns: List of audit log entries, newest first

    Raises:
    - 404: Location not found
    """
    try:
        # Soft-deleted locations keep their history, so this existence
        # check deliberately does NOT filter on deleted_at.
        check_query = "SELECT id FROM locations_locations WHERE id = %s"
        existing = execute_query(check_query, (id,))

        if not existing:
            logger.error(f"❌ Location not found: {id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {id} not found"
            )

        # Query audit log for this location, newest entries first.
        audit_query = """
            SELECT * FROM locations_audit_log
            WHERE location_id = %s
            ORDER BY created_at DESC
            LIMIT %s
        """

        # Guard against a None result (the pattern used elsewhere in this
        # file); otherwise len()/iteration below would raise and turn an
        # empty history into a 500.
        results = execute_query(audit_query, (id, limit)) or []
        logger.info(f"📝 Audit trail retrieved: {len(results)} entries for location {id}")

        return [AuditLogEntry(**row) for row in results]

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error retrieving audit trail for location {id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to retrieve audit trail"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 8. GET /api/v1/locations/search - Search locations by name/address
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/search", response_model=LocationSearchResponse)
async def search_locations(
    q: str = Query(..., min_length=1, max_length=255, description="Search query (name or address)"),
    limit: int = Query(10, ge=1, le=100)
):
    """
    Search for locations by name or address.

    NOTE(review): this route is registered after GET /locations/{id};
    FastAPI matches routes in declaration order, so requests to
    /locations/search may be captured by the {id} route and rejected as a
    non-integer id — verify route registration order earlier in the file.

    Query parameters:
    - q: Search term (required, 1-255 chars) - matches name, street, city
    - limit: Max results (default 10, max 100)

    Returns: LocationSearchResponse with results and total count
    """
    try:
        # Case-insensitive substring pattern for ILIKE.
        search_term = f"%{q}%"

        # Search locations_locations (case-insensitive match on name, street, city)
        search_query = """
            SELECT * FROM locations_locations
            WHERE deleted_at IS NULL
            AND (
                name ILIKE %s
                OR address_street ILIKE %s
                OR address_city ILIKE %s
            )
            ORDER BY name ASC
            LIMIT %s
        """

        # Guard against a None result so an empty match set cannot make
        # len()/iteration below raise (pattern used elsewhere in this file).
        results = execute_query(search_query, (search_term, search_term, search_term, limit)) or []

        # Total matches regardless of limit, for pagination display.
        count_query = """
            SELECT COUNT(*) as total FROM locations_locations
            WHERE deleted_at IS NULL
            AND (
                name ILIKE %s
                OR address_street ILIKE %s
                OR address_city ILIKE %s
            )
        """
        count_result = execute_query(count_query, (search_term, search_term, search_term))
        total_count = count_result[0]['total'] if count_result else 0

        locations = [Location(**row) for row in results]

        logger.info(f"🔍 Location search: '{q}' found {len(results)} results (total: {total_count})")

        return LocationSearchResponse(
            results=locations,
            total=total_count,
            query=q
        )

    except Exception as e:
        logger.error(f"❌ Error searching locations for '{q}': {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to search locations"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# PHASE 2, TASK 2.2: CONTACT MANAGEMENT ENDPOINTS (6 endpoints)
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 1. GET /api/v1/locations/{location_id}/contacts - List location contacts
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{location_id}/contacts", response_model=List[Contact])
async def list_location_contacts(location_id: int):
    """
    List all contacts for a specific location.

    Returns contacts sorted by is_primary (primary first), then by name.
    Excludes soft-deleted contacts.

    Path parameter: location_id (location ID)
    Returns: List of Contact objects

    Raises:
    - 404: Location not found
    """
    try:
        # Check location exists (live rows only)
        location_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
        location_check = execute_query(location_query, (location_id,))

        if not location_check:
            logger.error(f"❌ Location not found for contacts list: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # Query contacts for location (exclude soft-deleted)
        query = """
            SELECT * FROM locations_contacts
            WHERE location_id = %s AND deleted_at IS NULL
            ORDER BY is_primary DESC, contact_name ASC
        """

        # Guard against a None result so a location with no contacts cannot
        # make len()/iteration below raise and turn an empty list into a 500.
        results = execute_query(query, (location_id,)) or []
        logger.info(f"📋 Listed {len(results)} contacts for location {location_id}")

        return [Contact(**row) for row in results]

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error listing contacts for location {location_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to list contacts"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 2. POST /api/v1/locations/{location_id}/contacts - Add contact
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/{location_id}/contacts", response_model=Contact, status_code=201)
async def create_contact(location_id: int, data: ContactCreate):
    """
    Add a new contact person to a location.

    At most one contact per location is primary: when the new contact is
    flagged primary, the flag is first cleared on all other live contacts.

    Path parameter: location_id (location ID)
    Request body: ContactCreate model
    Returns: Created Contact object

    Raises:
    - 404: Location not found
    - 500: Database error
    """
    try:
        # Resolve the location (also gives us its name for logging).
        loc_rows = execute_query(
            "SELECT name FROM locations_locations WHERE id = %s AND deleted_at IS NULL",
            (location_id,),
        )
        if not loc_rows:
            logger.error(f"❌ Location not found for contact creation: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        location_name = loc_rows[0]['name']

        # Demote any existing primary contact before inserting the new one.
        # NOTE(review): the demote + insert pair is not wrapped in a single
        # transaction; a failure in between could leave no primary contact.
        if data.is_primary:
            demote_sql = """
                UPDATE locations_contacts
                SET is_primary = false
                WHERE location_id = %s AND deleted_at IS NULL
            """
            execute_query(demote_sql, (location_id,))

        # Insert the contact and fetch the stored row in one round trip.
        insert_sql = """
            INSERT INTO locations_contacts (
                location_id, contact_name, contact_email, contact_phone,
                role, is_primary, created_at, updated_at
            )
            VALUES (%s, %s, %s, %s, %s, %s, NOW(), NOW())
            RETURNING *
        """
        inserted = execute_query(
            insert_sql,
            (
                location_id,
                data.contact_name,
                data.contact_email,
                data.contact_phone,
                data.role,
                data.is_primary,
            ),
        )

        if not inserted:
            logger.error(f"❌ Failed to create contact for location {location_id}")
            raise HTTPException(status_code=500, detail="Failed to create contact")

        new_contact = Contact(**inserted[0])

        logger.info(f"✅ Contact added: {data.contact_name} at {location_name} (Location ID: {location_id})")
        return new_contact

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error creating contact for location {location_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to create contact"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 3. PATCH /api/v1/locations/{location_id}/contacts/{contact_id} - Update contact
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.patch("/locations/{location_id}/contacts/{contact_id}", response_model=Contact)
async def update_contact(
    location_id: int,
    contact_id: int,
    data: ContactUpdate
):
    """
    Update a contact's information.

    If setting is_primary=true, the primary flag is first cleared on the
    location's other contacts so at most one contact stays primary.

    Path parameters:
    - location_id: Location ID
    - contact_id: Contact ID

    Request body: ContactUpdate model (all fields optional)
    Returns: Updated Contact object

    Raises:
    - 404: Location or contact not found
    - 500: Database error
    """
    try:
        # Check location exists (live rows only)
        location_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
        location_check = execute_query(location_query, (location_id,))

        if not location_check:
            logger.error(f"❌ Location not found for contact update: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # Check contact exists and belongs to this location
        contact_query = """
            SELECT * FROM locations_contacts
            WHERE id = %s AND location_id = %s AND deleted_at IS NULL
        """
        contact_check = execute_query(contact_query, (contact_id, location_id))

        if not contact_check:
            logger.error(f"❌ Contact not found: {contact_id} for location {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Contact with id {contact_id} not found for location {location_id}"
            )

        # If is_primary is being set to true, unset it on the other contacts
        if data.is_primary:
            unset_primary_query = """
                UPDATE locations_contacts
                SET is_primary = false
                WHERE location_id = %s AND id != %s AND deleted_at IS NULL
            """
            execute_query(unset_primary_query, (location_id, contact_id))

        # Build UPDATE query with only the provided (non-null) fields.
        # NOTE(review): None values are skipped, so a PATCH cannot clear a
        # field to NULL — confirm this matches the intended PATCH semantics.
        update_parts = ["updated_at = NOW()"]
        params = []

        # Updatable columns (model field -> db column allow-list).
        field_mapping = {
            'contact_name': 'contact_name',
            'contact_email': 'contact_email',
            'contact_phone': 'contact_phone',
            'role': 'role',
            'is_primary': 'is_primary'
        }

        for key, db_column in field_mapping.items():
            value = getattr(data, key, None)
            if value is not None:
                update_parts.append(f"{db_column} = %s")
                params.append(value)

        params.append(contact_id)

        # Execute UPDATE.  Column names come from the allow-list above, so
        # the f-string cannot inject user-controlled SQL; values are bound.
        update_query = f"""
            UPDATE locations_contacts
            SET {', '.join(update_parts)}
            WHERE id = %s
            RETURNING *
        """

        result = execute_query(update_query, tuple(params))

        if not result:
            logger.error(f"❌ Failed to update contact {contact_id}")
            raise HTTPException(status_code=500, detail="Failed to update contact")

        updated_contact = Contact(**result[0])

        logger.info(f"🔄 Contact updated: {updated_contact.contact_name} (ID: {contact_id})")
        return updated_contact

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error updating contact {contact_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to update contact"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 4. DELETE /api/v1/locations/{location_id}/contacts/{contact_id} - Delete contact
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.delete("/locations/{location_id}/contacts/{contact_id}", response_model=dict)
async def delete_contact(location_id: int, contact_id: int):
    """
    Soft-delete a contact (set deleted_at).

    If the deleted contact was primary, the oldest surviving contact of
    the location is promoted to primary.

    Path parameters:
    - location_id: Location ID
    - contact_id: Contact ID

    Returns: Confirmation message

    Raises:
    - 404: Location or contact not found
    - 500: Database error
    """
    try:
        # The location must exist and be live.
        loc_rows = execute_query(
            "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL",
            (location_id,),
        )
        if not loc_rows:
            logger.error(f"❌ Location not found for contact deletion: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # The contact must exist, be live, and belong to this location.
        contact_rows = execute_query(
            """
            SELECT * FROM locations_contacts
            WHERE id = %s AND location_id = %s AND deleted_at IS NULL
            """,
            (contact_id, location_id),
        )
        if not contact_rows:
            logger.error(f"❌ Contact not found: {contact_id} for location {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Contact with id {contact_id} not found for location {location_id}"
            )

        doomed = Contact(**contact_rows[0])
        contact_name = doomed.contact_name

        # Promote the oldest surviving contact if the primary is going away.
        if doomed.is_primary:
            successor = execute_query(
                """
                SELECT id FROM locations_contacts
                WHERE location_id = %s AND id != %s AND deleted_at IS NULL
                ORDER BY created_at ASC
                LIMIT 1
                """,
                (location_id, contact_id),
            )
            if successor:
                execute_query(
                    """
                    UPDATE locations_contacts
                    SET is_primary = true
                    WHERE id = %s
                    """,
                    (successor[0]['id'],),
                )
                logger.info(f"⭐ Reassigned primary contact after deletion")

        # Soft-delete: stamp deleted_at rather than removing the row.
        execute_query(
            """
            UPDATE locations_contacts
            SET deleted_at = NOW(), updated_at = NOW()
            WHERE id = %s
            """,
            (contact_id,),
        )

        logger.info(f"🗑️ Contact soft-deleted: {contact_name} (ID: {contact_id})")

        return {
            "status": "deleted",
            "id": contact_id,
            "message": f"Contact '{contact_name}' has been deleted"
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error deleting contact {contact_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to delete contact"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 5. PATCH /api/v1/locations/{location_id}/contacts/{contact_id}/set-primary
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.patch("/locations/{location_id}/contacts/{contact_id}/set-primary", response_model=Contact)
async def set_primary_contact(location_id: int, contact_id: int):
    """
    Set a contact as the primary contact for the location.

    The primary flag on the location's other live contacts is cleared
    first so at most one contact remains primary.

    Path parameters:
    - location_id: Location ID
    - contact_id: Contact ID to set as primary

    Returns: Updated Contact object

    Raises:
    - 404: Location or contact not found
    - 500: Database error
    """
    try:
        # The location must exist and be live.
        loc_rows = execute_query(
            "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL",
            (location_id,),
        )
        if not loc_rows:
            logger.error(f"❌ Location not found: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # The contact must exist, be live, and belong to this location.
        contact_rows = execute_query(
            """
            SELECT * FROM locations_contacts
            WHERE id = %s AND location_id = %s AND deleted_at IS NULL
            """,
            (contact_id, location_id),
        )
        if not contact_rows:
            logger.error(f"❌ Contact not found: {contact_id} for location {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Contact with id {contact_id} not found for location {location_id}"
            )

        # Demote every other live contact of this location...
        execute_query(
            """
            UPDATE locations_contacts
            SET is_primary = false
            WHERE location_id = %s AND id != %s AND deleted_at IS NULL
            """,
            (location_id, contact_id),
        )

        # ...then promote the requested one and fetch the fresh row.
        promoted = execute_query(
            """
            UPDATE locations_contacts
            SET is_primary = true, updated_at = NOW()
            WHERE id = %s
            RETURNING *
            """,
            (contact_id,),
        )

        if not promoted:
            logger.error(f"❌ Failed to set primary contact {contact_id}")
            raise HTTPException(status_code=500, detail="Failed to set primary contact")

        updated_contact = Contact(**promoted[0])

        logger.info(f"⭐ Primary contact set: {updated_contact.contact_name}")
        return updated_contact

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error setting primary contact {contact_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to set primary contact"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 6. GET /api/v1/locations/{location_id}/contact-primary - Get primary contact
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{location_id}/contact-primary", response_model=Optional[Contact])
|
|
|
|
|
|
async def get_primary_contact(location_id: int):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Get the primary contact for a location.
|
|
|
|
|
|
|
|
|
|
|
|
Returns None if no primary contact is set.
|
|
|
|
|
|
|
|
|
|
|
|
Path parameter: location_id (location ID)
|
|
|
|
|
|
Returns: Contact object or None
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location not found
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
# Query primary contact
|
|
|
|
|
|
query = """
|
|
|
|
|
|
SELECT * FROM locations_contacts
|
|
|
|
|
|
WHERE location_id = %s AND is_primary = true AND deleted_at IS NULL
|
|
|
|
|
|
LIMIT 1
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
result = execute_query(query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if result:
|
|
|
|
|
|
primary_contact = Contact(**result[0])
|
|
|
|
|
|
logger.info(f"📋 Primary contact retrieved: {primary_contact.contact_name}")
|
|
|
|
|
|
return primary_contact
|
|
|
|
|
|
|
|
|
|
|
|
logger.info(f"📋 No primary contact found for location {location_id}")
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error retrieving primary contact for location {location_id}: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to retrieve primary contact"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# PHASE 2, TASK 2.3: OPERATING HOURS MANAGEMENT (5 endpoints)
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 1. GET /api/v1/locations/{location_id}/hours - Get operating hours
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{location_id}/hours", response_model=List[OperatingHours])
|
|
|
|
|
|
async def get_operating_hours(location_id: int):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Get operating hours for all days of the week.
|
|
|
|
|
|
|
|
|
|
|
|
Returns all 7 days (0=Monday through 6=Sunday).
|
|
|
|
|
|
If no entry for a day, creates default (closed).
|
|
|
|
|
|
Ordered by day_of_week (0-6).
|
|
|
|
|
|
|
|
|
|
|
|
Path parameter: location_id (location ID)
|
|
|
|
|
|
Returns: List of OperatingHours objects (7 entries, one per day)
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location not found
|
|
|
|
|
|
- 500: Database error
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT id, name FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
location_name = location_check[0]['name']
|
|
|
|
|
|
|
|
|
|
|
|
# Query existing hours
|
|
|
|
|
|
query = """
|
|
|
|
|
|
SELECT * FROM locations_hours
|
|
|
|
|
|
WHERE location_id = %s
|
|
|
|
|
|
ORDER BY day_of_week ASC
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
results = execute_query(query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
# Convert results to OperatingHours objects
|
|
|
|
|
|
existing_days = {row['day_of_week']: OperatingHours(**row) for row in results}
|
|
|
|
|
|
|
|
|
|
|
|
# Ensure all 7 days exist; create missing ones with is_open=false
|
|
|
|
|
|
for day in range(7):
|
|
|
|
|
|
if day not in existing_days:
|
|
|
|
|
|
# Insert default closed entry for missing day
|
|
|
|
|
|
insert_query = """
|
|
|
|
|
|
INSERT INTO locations_hours (
|
|
|
|
|
|
location_id, day_of_week, is_open, open_time, close_time
|
|
|
|
|
|
)
|
|
|
|
|
|
VALUES (%s, %s, false, NULL, NULL)
|
|
|
|
|
|
RETURNING *
|
|
|
|
|
|
"""
|
|
|
|
|
|
insert_result = execute_query(insert_query, (location_id, day))
|
|
|
|
|
|
if insert_result:
|
|
|
|
|
|
existing_days[day] = OperatingHours(**insert_result[0])
|
|
|
|
|
|
|
|
|
|
|
|
# Sort by day_of_week
|
|
|
|
|
|
sorted_hours = [existing_days[day] for day in range(7)]
|
|
|
|
|
|
|
|
|
|
|
|
logger.info(f"📋 Retrieved operating hours for location {location_name} (ID: {location_id})")
|
|
|
|
|
|
return sorted_hours
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error retrieving operating hours for location {location_id}: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to retrieve operating hours"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 2. POST /api/v1/locations/{location_id}/hours - Create/update hours for day
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/{location_id}/hours", response_model=OperatingHours, status_code=201)
|
|
|
|
|
|
async def create_hours(location_id: int, data: OperatingHoursCreate):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Set operating hours for a specific day.
|
|
|
|
|
|
|
|
|
|
|
|
Creates new entry or updates if already exists for that day.
|
|
|
|
|
|
|
|
|
|
|
|
Query validation:
|
|
|
|
|
|
- Requires: day_of_week, open_time, close_time (if is_open=true)
|
|
|
|
|
|
- close_time must be > open_time
|
|
|
|
|
|
|
|
|
|
|
|
Path parameter: location_id (location ID)
|
|
|
|
|
|
Request body: OperatingHoursCreate model
|
|
|
|
|
|
|
|
|
|
|
|
Returns: Created/updated OperatingHours object
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location not found
|
|
|
|
|
|
- 400: Validation error (invalid times, etc.)
|
|
|
|
|
|
- 500: Database error
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT id, name FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
location_name = location_check[0]['name']
|
|
|
|
|
|
|
|
|
|
|
|
# Validate day_of_week
|
|
|
|
|
|
if not (0 <= data.day_of_week <= 6):
|
|
|
|
|
|
logger.warning(f"⚠️ Invalid day_of_week: {data.day_of_week}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=400,
|
|
|
|
|
|
detail="day_of_week must be between 0 (Monday) and 6 (Sunday)"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
# Validate times if location is open
|
|
|
|
|
|
if data.is_open:
|
|
|
|
|
|
if data.open_time is None or data.close_time is None:
|
|
|
|
|
|
logger.warning(f"⚠️ Missing times for open location")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=400,
|
|
|
|
|
|
detail="open_time and close_time required when is_open=true"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
if data.close_time <= data.open_time:
|
|
|
|
|
|
logger.warning(f"⚠️ Invalid times: close_time must be after open_time")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=400,
|
|
|
|
|
|
detail="close_time must be greater than open_time"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
# Check for existing entry
|
|
|
|
|
|
check_query = """
|
|
|
|
|
|
SELECT id FROM locations_hours
|
|
|
|
|
|
WHERE location_id = %s AND day_of_week = %s
|
|
|
|
|
|
"""
|
|
|
|
|
|
existing = execute_query(check_query, (location_id, data.day_of_week))
|
|
|
|
|
|
|
|
|
|
|
|
day_name = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'][data.day_of_week]
|
|
|
|
|
|
|
|
|
|
|
|
if existing:
|
|
|
|
|
|
# Update existing entry
|
|
|
|
|
|
update_query = """
|
|
|
|
|
|
UPDATE locations_hours
|
|
|
|
|
|
SET open_time = %s, close_time = %s, is_open = %s, notes = %s
|
|
|
|
|
|
WHERE location_id = %s AND day_of_week = %s
|
|
|
|
|
|
RETURNING *
|
|
|
|
|
|
"""
|
|
|
|
|
|
result = execute_query(
|
|
|
|
|
|
update_query,
|
|
|
|
|
|
(data.open_time, data.close_time, data.is_open, data.notes, location_id, data.day_of_week)
|
|
|
|
|
|
)
|
|
|
|
|
|
else:
|
|
|
|
|
|
# Insert new entry
|
|
|
|
|
|
insert_query = """
|
|
|
|
|
|
INSERT INTO locations_hours (
|
|
|
|
|
|
location_id, day_of_week, open_time, close_time, is_open, notes
|
|
|
|
|
|
)
|
|
|
|
|
|
VALUES (%s, %s, %s, %s, %s, %s)
|
|
|
|
|
|
RETURNING *
|
|
|
|
|
|
"""
|
|
|
|
|
|
result = execute_query(
|
|
|
|
|
|
insert_query,
|
|
|
|
|
|
(location_id, data.day_of_week, data.open_time, data.close_time, data.is_open, data.notes)
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
if not result:
|
|
|
|
|
|
logger.error(f"❌ Failed to set hours for {location_name} {day_name}")
|
|
|
|
|
|
raise HTTPException(status_code=500, detail="Failed to set operating hours")
|
|
|
|
|
|
|
|
|
|
|
|
operating_hours = OperatingHours(**result[0])
|
|
|
|
|
|
|
|
|
|
|
|
if data.is_open and data.open_time and data.close_time:
|
|
|
|
|
|
logger.info(f"✅ Hours set: {location_name} {day_name} {data.open_time}-{data.close_time}")
|
|
|
|
|
|
else:
|
|
|
|
|
|
logger.info(f"✅ Hours cleared: {location_name} {day_name} (Closed)")
|
|
|
|
|
|
|
|
|
|
|
|
return operating_hours
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error creating/updating hours for location {location_id}: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to set operating hours"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 3. PATCH /api/v1/locations/{location_id}/hours/{day_id} - Update hours
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.patch("/locations/{location_id}/hours/{day_id}", response_model=OperatingHours)
|
|
|
|
|
|
async def update_hours(location_id: int, day_id: int, data: OperatingHoursUpdate):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Update operating hours for a specific day.
|
|
|
|
|
|
|
|
|
|
|
|
All fields optional (partial update).
|
|
|
|
|
|
If is_open changes to true, open_time and close_time become required.
|
|
|
|
|
|
|
|
|
|
|
|
Path parameters:
|
|
|
|
|
|
- location_id: Location ID
|
|
|
|
|
|
- day_id: Day of week (0-6)
|
|
|
|
|
|
|
|
|
|
|
|
Request body: OperatingHoursUpdate model (all fields optional)
|
|
|
|
|
|
Returns: Updated OperatingHours object
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location or hours entry not found
|
|
|
|
|
|
- 400: Validation error
|
|
|
|
|
|
- 500: Database error
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT id, name FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
location_name = location_check[0]['name']
|
|
|
|
|
|
|
|
|
|
|
|
# Check hours entry exists
|
|
|
|
|
|
hours_query = """
|
|
|
|
|
|
SELECT * FROM locations_hours
|
|
|
|
|
|
WHERE location_id = %s AND day_of_week = %s
|
|
|
|
|
|
"""
|
|
|
|
|
|
hours_check = execute_query(hours_query, (location_id, day_id))
|
|
|
|
|
|
|
|
|
|
|
|
if not hours_check:
|
|
|
|
|
|
logger.error(f"❌ Hours not found for location {location_id} day {day_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Hours not found for day {day_id}"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
current_hours = hours_check[0]
|
|
|
|
|
|
day_name = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'][day_id]
|
|
|
|
|
|
|
|
|
|
|
|
# Determine if location will be open after update
|
|
|
|
|
|
will_be_open = data.is_open if data.is_open is not None else current_hours['is_open']
|
|
|
|
|
|
|
|
|
|
|
|
# Get times to validate
|
|
|
|
|
|
new_open_time = data.open_time if data.open_time is not None else current_hours['open_time']
|
|
|
|
|
|
new_close_time = data.close_time if data.close_time is not None else current_hours['close_time']
|
|
|
|
|
|
|
|
|
|
|
|
# Validate: if is_open=true, both times required
|
|
|
|
|
|
if will_be_open:
|
|
|
|
|
|
if new_open_time is None or new_close_time is None:
|
|
|
|
|
|
logger.warning(f"⚠️ Missing times for open location")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=400,
|
|
|
|
|
|
detail="open_time and close_time required when is_open=true"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
if new_close_time <= new_open_time:
|
|
|
|
|
|
logger.warning(f"⚠️ Invalid times: close_time must be after open_time")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=400,
|
|
|
|
|
|
detail="close_time must be greater than open_time"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
# Build UPDATE query with only provided fields
|
|
|
|
|
|
update_parts = []
|
|
|
|
|
|
params = []
|
|
|
|
|
|
|
|
|
|
|
|
if data.open_time is not None:
|
|
|
|
|
|
update_parts.append("open_time = %s")
|
|
|
|
|
|
params.append(data.open_time)
|
|
|
|
|
|
|
|
|
|
|
|
if data.close_time is not None:
|
|
|
|
|
|
update_parts.append("close_time = %s")
|
|
|
|
|
|
params.append(data.close_time)
|
|
|
|
|
|
|
|
|
|
|
|
if data.is_open is not None:
|
|
|
|
|
|
update_parts.append("is_open = %s")
|
|
|
|
|
|
params.append(data.is_open)
|
|
|
|
|
|
|
|
|
|
|
|
if data.notes is not None:
|
|
|
|
|
|
update_parts.append("notes = %s")
|
|
|
|
|
|
params.append(data.notes)
|
|
|
|
|
|
|
|
|
|
|
|
if not update_parts:
|
|
|
|
|
|
# No fields to update, return current
|
|
|
|
|
|
logger.info(f"📋 No updates provided for {location_name} {day_name}")
|
|
|
|
|
|
return OperatingHours(**current_hours)
|
|
|
|
|
|
|
|
|
|
|
|
# Add WHERE clause parameters
|
|
|
|
|
|
params.append(location_id)
|
|
|
|
|
|
params.append(day_id)
|
|
|
|
|
|
|
|
|
|
|
|
update_query = f"""
|
|
|
|
|
|
UPDATE locations_hours
|
|
|
|
|
|
SET {', '.join(update_parts)}
|
|
|
|
|
|
WHERE location_id = %s AND day_of_week = %s
|
|
|
|
|
|
RETURNING *
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
result = execute_query(update_query, tuple(params))
|
|
|
|
|
|
|
|
|
|
|
|
if not result:
|
|
|
|
|
|
logger.error(f"❌ Failed to update hours for {location_name} {day_name}")
|
|
|
|
|
|
raise HTTPException(status_code=500, detail="Failed to update operating hours")
|
|
|
|
|
|
|
|
|
|
|
|
updated_hours = OperatingHours(**result[0])
|
|
|
|
|
|
logger.info(f"✏️ Updated hours: {location_name} {day_name}")
|
|
|
|
|
|
|
|
|
|
|
|
return updated_hours
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error updating hours for location {location_id} day {day_id}: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to update operating hours"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 4. DELETE /api/v1/locations/{location_id}/hours/{day_id} - Clear hours for day
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.delete("/locations/{location_id}/hours/{day_id}", response_model=dict)
|
|
|
|
|
|
async def delete_hours(location_id: int, day_id: int):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Clear operating hours for a day (mark location as closed).
|
|
|
|
|
|
|
|
|
|
|
|
Sets is_open=false and clears times.
|
|
|
|
|
|
|
|
|
|
|
|
Path parameters:
|
|
|
|
|
|
- location_id: Location ID
|
|
|
|
|
|
- day_id: Day of week (0-6)
|
|
|
|
|
|
|
|
|
|
|
|
Returns: Status message
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location or hours entry not found
|
|
|
|
|
|
- 500: Database error
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT id, name FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
location_name = location_check[0]['name']
|
|
|
|
|
|
|
|
|
|
|
|
# Check hours entry exists
|
|
|
|
|
|
hours_query = """
|
|
|
|
|
|
SELECT * FROM locations_hours
|
|
|
|
|
|
WHERE location_id = %s AND day_of_week = %s
|
|
|
|
|
|
"""
|
|
|
|
|
|
hours_check = execute_query(hours_query, (location_id, day_id))
|
|
|
|
|
|
|
|
|
|
|
|
if not hours_check:
|
|
|
|
|
|
logger.error(f"❌ Hours not found for location {location_id} day {day_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Hours not found for day {day_id}"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
day_name = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'][day_id]
|
|
|
|
|
|
|
|
|
|
|
|
# Clear hours (set is_open=false, times to NULL)
|
|
|
|
|
|
delete_query = """
|
|
|
|
|
|
UPDATE locations_hours
|
|
|
|
|
|
SET is_open = false, open_time = NULL, close_time = NULL
|
|
|
|
|
|
WHERE location_id = %s AND day_of_week = %s
|
|
|
|
|
|
"""
|
|
|
|
|
|
execute_query(delete_query, (location_id, day_id))
|
|
|
|
|
|
|
|
|
|
|
|
logger.info(f"🗑️ Hours cleared: {location_name} {day_name}")
|
|
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
|
"status": "cleared",
|
|
|
|
|
|
"location_id": location_id,
|
|
|
|
|
|
"day_of_week": day_id,
|
|
|
|
|
|
"message": f"Operating hours cleared for {day_name}"
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error clearing hours for location {location_id} day {day_id}: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to clear operating hours"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 5. GET /api/v1/locations/{location_id}/is-open-now - Check if open now
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{location_id}/is-open-now", response_model=dict)
|
|
|
|
|
|
async def is_location_open_now(location_id: int):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Check if location is currently open.
|
|
|
|
|
|
|
|
|
|
|
|
Handles:
|
|
|
|
|
|
- Current day of week
|
|
|
|
|
|
- Current time comparison
|
|
|
|
|
|
- Edge case: time spans midnight (e.g., 22:00-06:00)
|
|
|
|
|
|
- Timezone: uses server timezone
|
|
|
|
|
|
|
|
|
|
|
|
Path parameter: location_id (location ID)
|
|
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
|
{
|
|
|
|
|
|
"is_open": boolean,
|
|
|
|
|
|
"current_time": "HH:MM",
|
|
|
|
|
|
"location_name": string,
|
|
|
|
|
|
"today_hours": {"day": int, "open_time": "HH:MM" or null, "close_time": "HH:MM" or null},
|
|
|
|
|
|
"message": string
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location not found
|
|
|
|
|
|
- 500: Database error
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT id, name FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
location_name = location_check[0]['name']
|
|
|
|
|
|
|
|
|
|
|
|
# Get current datetime and day of week (0=Monday, 6=Sunday)
|
|
|
|
|
|
now = datetime.now()
|
|
|
|
|
|
current_day_of_week = now.weekday() # Python's weekday: 0=Monday, 6=Sunday
|
|
|
|
|
|
current_time = now.time()
|
|
|
|
|
|
|
|
|
|
|
|
# Query today's hours
|
|
|
|
|
|
hours_query = """
|
|
|
|
|
|
SELECT * FROM locations_hours
|
|
|
|
|
|
WHERE location_id = %s AND day_of_week = %s
|
|
|
|
|
|
"""
|
|
|
|
|
|
hours_result = execute_query(hours_query, (location_id, current_day_of_week))
|
|
|
|
|
|
|
|
|
|
|
|
if not hours_result:
|
|
|
|
|
|
# No hours entry for today - create default (closed)
|
|
|
|
|
|
insert_query = """
|
|
|
|
|
|
INSERT INTO locations_hours (
|
|
|
|
|
|
location_id, day_of_week, is_open, open_time, close_time
|
|
|
|
|
|
)
|
|
|
|
|
|
VALUES (%s, %s, false, NULL, NULL)
|
|
|
|
|
|
RETURNING *
|
|
|
|
|
|
"""
|
|
|
|
|
|
hours_result = execute_query(insert_query, (location_id, current_day_of_week))
|
|
|
|
|
|
|
|
|
|
|
|
if not hours_result:
|
|
|
|
|
|
logger.error(f"❌ Failed to retrieve/create hours for location {location_id}")
|
|
|
|
|
|
raise HTTPException(status_code=500, detail="Failed to check operating hours")
|
|
|
|
|
|
|
|
|
|
|
|
hours = hours_result[0]
|
|
|
|
|
|
|
|
|
|
|
|
# Determine if open now
|
|
|
|
|
|
is_open = False
|
|
|
|
|
|
message = ""
|
|
|
|
|
|
|
|
|
|
|
|
if not hours['is_open']:
|
|
|
|
|
|
# Location is marked as closed today
|
|
|
|
|
|
is_open = False
|
|
|
|
|
|
message = "Closed today"
|
|
|
|
|
|
elif hours['open_time'] is None or hours['close_time'] is None:
|
|
|
|
|
|
# Open flag set but times missing
|
|
|
|
|
|
is_open = False
|
|
|
|
|
|
message = "Operating hours not set"
|
|
|
|
|
|
else:
|
|
|
|
|
|
# Compare times
|
|
|
|
|
|
open_time = hours['open_time']
|
|
|
|
|
|
close_time = hours['close_time']
|
|
|
|
|
|
|
|
|
|
|
|
# Handle midnight edge case: if open_time > close_time, location is open across midnight
|
|
|
|
|
|
if open_time > close_time:
|
|
|
|
|
|
# Open across midnight (e.g., 22:00-06:00)
|
|
|
|
|
|
is_open = (current_time >= open_time) or (current_time < close_time)
|
|
|
|
|
|
else:
|
|
|
|
|
|
# Normal case: open_time < close_time (same day)
|
|
|
|
|
|
is_open = (current_time >= open_time) and (current_time < close_time)
|
|
|
|
|
|
|
|
|
|
|
|
# Generate message
|
|
|
|
|
|
if is_open:
|
|
|
|
|
|
# Format close time for message
|
|
|
|
|
|
close_time_str = close_time.strftime('%H:%M') if close_time else 'Unknown'
|
|
|
|
|
|
message = f"Open until {close_time_str}"
|
|
|
|
|
|
else:
|
|
|
|
|
|
# Location is closed now, find next opening
|
|
|
|
|
|
if current_time < open_time:
|
|
|
|
|
|
# Will open later today
|
|
|
|
|
|
open_time_str = open_time.strftime('%H:%M') if open_time else 'Unknown'
|
|
|
|
|
|
message = f"Closed, opens today at {open_time_str}"
|
|
|
|
|
|
else:
|
|
|
|
|
|
# Opening tomorrow
|
|
|
|
|
|
message = "Closed, opens tomorrow"
|
|
|
|
|
|
|
|
|
|
|
|
# Format times for response
|
|
|
|
|
|
open_time_str = hours['open_time'].strftime('%H:%M') if hours['open_time'] else None
|
|
|
|
|
|
close_time_str = hours['close_time'].strftime('%H:%M') if hours['close_time'] else None
|
|
|
|
|
|
current_time_str = current_time.strftime('%H:%M')
|
|
|
|
|
|
|
|
|
|
|
|
day_name = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'][current_day_of_week]
|
|
|
|
|
|
|
|
|
|
|
|
logger.info(f"📍 Status check: {location_name} is {'OPEN' if is_open else 'CLOSED'} now")
|
|
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
|
"is_open": is_open,
|
|
|
|
|
|
"current_time": current_time_str,
|
|
|
|
|
|
"current_day": day_name,
|
|
|
|
|
|
"location_name": location_name,
|
|
|
|
|
|
"location_id": location_id,
|
|
|
|
|
|
"today_hours": {
|
|
|
|
|
|
"day": current_day_of_week,
|
|
|
|
|
|
"day_name": day_name,
|
|
|
|
|
|
"open_time": open_time_str,
|
|
|
|
|
|
"close_time": close_time_str
|
|
|
|
|
|
},
|
|
|
|
|
|
"message": message
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error checking if location {location_id} is open now: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to check location status"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# PHASE 2, TASK 2.4: SERVICE & CAPACITY ENDPOINTS
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# PART A: SERVICE ENDPOINTS (4 TOTAL)
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 1. GET /api/v1/locations/{location_id}/services - List services
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{location_id}/services", response_model=List[Service])
|
|
|
|
|
|
async def list_services(location_id: int):
|
|
|
|
|
|
"""
|
|
|
|
|
|
List all services offered at a location.
|
|
|
|
|
|
|
|
|
|
|
|
Excludes soft-deleted services.
|
|
|
|
|
|
Ordered by is_available DESC, then service_name ASC.
|
|
|
|
|
|
|
|
|
|
|
|
Path parameter: location_id (location ID)
|
|
|
|
|
|
|
|
|
|
|
|
Returns: List of Service objects
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location not found
|
|
|
|
|
|
- 500: Database error
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
# Query services, ordered by availability (available first) then name
|
|
|
|
|
|
query = """
|
|
|
|
|
|
SELECT * FROM locations_services
|
|
|
|
|
|
WHERE location_id = %s AND deleted_at IS NULL
|
|
|
|
|
|
ORDER BY is_available DESC, service_name ASC
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
results = execute_query(query, (location_id,))
|
|
|
|
|
|
logger.info(f"📋 Listed {len(results)} services for location {location_id}")
|
|
|
|
|
|
|
|
|
|
|
|
return [Service(**row) for row in results]
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error listing services for location {location_id}: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to list services"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 2. POST /api/v1/locations/{location_id}/services - Add service
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/{location_id}/services", response_model=Service, status_code=201)
|
|
|
|
|
|
async def create_service(location_id: int, data: ServiceCreate):
|
|
|
|
|
|
"""
|
|
|
|
|
|
Add a new service to a location.
|
|
|
|
|
|
|
|
|
|
|
|
Request: ServiceCreate with service_name (required), is_available (default true)
|
|
|
|
|
|
Returns: Created Service object with ID
|
|
|
|
|
|
|
|
|
|
|
|
Validation:
|
|
|
|
|
|
- service_name cannot be empty
|
|
|
|
|
|
- location must exist
|
|
|
|
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
|
|
- 404: Location not found
|
|
|
|
|
|
- 400: Invalid input (empty service_name)
|
|
|
|
|
|
- 500: Database error
|
|
|
|
|
|
"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
# Validate service_name not empty
|
|
|
|
|
|
if not data.service_name or not data.service_name.strip():
|
|
|
|
|
|
logger.warning("❌ Service creation failed: empty service_name")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=400,
|
|
|
|
|
|
detail="service_name cannot be empty"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
# Check location exists
|
|
|
|
|
|
location_query = "SELECT name FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
|
|
|
|
|
|
location_check = execute_query(location_query, (location_id,))
|
|
|
|
|
|
|
|
|
|
|
|
if not location_check:
|
|
|
|
|
|
logger.error(f"❌ Location not found: {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=404,
|
|
|
|
|
|
detail=f"Location with id {location_id} not found"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
location_name = location_check[0]['name']
|
|
|
|
|
|
|
|
|
|
|
|
# INSERT new service
|
|
|
|
|
|
insert_query = """
|
|
|
|
|
|
INSERT INTO locations_services (
|
|
|
|
|
|
location_id, service_name, is_available, created_at
|
|
|
|
|
|
)
|
|
|
|
|
|
VALUES (%s, %s, %s, NOW())
|
|
|
|
|
|
RETURNING *
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
result = execute_query(
|
|
|
|
|
|
insert_query,
|
|
|
|
|
|
(location_id, data.service_name.strip(), data.is_available)
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
if not result:
|
|
|
|
|
|
logger.error(f"❌ Failed to create service for location {location_id}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to create service"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
logger.info(f"✅ Service added: {data.service_name} at {location_name}")
|
|
|
|
|
|
|
|
|
|
|
|
return Service(**result[0])
|
|
|
|
|
|
|
|
|
|
|
|
except HTTPException:
|
|
|
|
|
|
raise
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.error(f"❌ Error creating service for location {location_id}: {str(e)}")
|
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
|
status_code=500,
|
|
|
|
|
|
detail="Failed to create service"
|
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 3. PATCH /api/v1/locations/{location_id}/services/{service_id} - Update service
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.patch("/locations/{location_id}/services/{service_id}", response_model=Service)
async def update_service(
    location_id: int,
    service_id: int,
    data: ServiceUpdate
):
    """
    Update a service (name or availability).

    All fields optional; only provided fields are written.

    Path parameters: location_id, service_id
    Request: ServiceUpdate with optional service_name and/or is_available

    Returns: Updated Service object

    Raises:
    - 404: Location or service not found
    - 400: No fields provided for update
    - 500: Database error
    """
    try:
        # Check location exists (soft-deleted locations are treated as absent)
        location_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
        location_check = execute_query(location_query, (location_id,))

        if not location_check:
            logger.error(f"❌ Location not found: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # Check service exists and belongs to this location
        service_query = """
            SELECT * FROM locations_services
            WHERE id = %s AND location_id = %s AND deleted_at IS NULL
        """
        service_check = execute_query(service_query, (service_id, location_id))

        if not service_check:
            logger.error(f"❌ Service not found: {service_id} for location {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Service with id {service_id} not found for this location"
            )

        current_service = service_check[0]

        # Build UPDATE query with only the provided fields
        update_parts = []
        params = []

        if data.service_name is not None:
            update_parts.append("service_name = %s")
            params.append(data.service_name.strip())

        if data.is_available is not None:
            update_parts.append("is_available = %s")
            params.append(data.is_available)

        if not update_parts:
            logger.warning(f"⚠️ No fields provided for update: service {service_id}")
            raise HTTPException(
                status_code=400,
                detail="No fields provided for update"
            )

        # FIX: also constrain the UPDATE by location_id (consistent with the
        # capacity endpoints) so a concurrent re-parenting of the service row
        # between the check above and this UPDATE cannot cause a
        # cross-location modification.
        params.extend([service_id, location_id])

        update_query = f"""
            UPDATE locations_services
            SET {', '.join(update_parts)}
            WHERE id = %s AND location_id = %s AND deleted_at IS NULL
            RETURNING *
        """

        result = execute_query(update_query, tuple(params))

        if not result:
            logger.error(f"❌ Failed to update service {service_id}")
            raise HTTPException(
                status_code=500,
                detail="Failed to update service"
            )

        logger.info(f"🔄 Service updated: {current_service['service_name']}")

        return Service(**result[0])

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error updating service {service_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to update service"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 4. DELETE /api/v1/locations/{location_id}/services/{service_id} - Delete service
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.delete("/locations/{location_id}/services/{service_id}", response_model=dict)
async def delete_service(location_id: int, service_id: int):
    """
    Soft-delete a service.

    Sets deleted_at timestamp, preserving audit trail.

    Path parameters: location_id, service_id

    Returns: Confirmation dict

    Raises:
    - 404: Location or service not found
    - 500: Database error
    """
    try:
        # Check location exists (soft-deleted locations are treated as absent)
        location_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
        location_check = execute_query(location_query, (location_id,))

        if not location_check:
            logger.error(f"❌ Location not found: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # Check service exists and belongs to this location
        service_query = """
            SELECT service_name FROM locations_services
            WHERE id = %s AND location_id = %s AND deleted_at IS NULL
        """
        service_check = execute_query(service_query, (service_id, location_id))

        if not service_check:
            logger.error(f"❌ Service not found: {service_id} for location {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Service with id {service_id} not found for this location"
            )

        service_name = service_check[0]['service_name']

        # Soft-delete: set deleted_at.
        # FIX: also constrain by location_id (consistent with delete_capacity)
        # so the row checked above is the only row that can be touched.
        delete_query = """
            UPDATE locations_services
            SET deleted_at = NOW()
            WHERE id = %s AND location_id = %s AND deleted_at IS NULL
        """

        execute_query(delete_query, (service_id, location_id))

        logger.info(f"🗑️ Service deleted: {service_name}")

        return {
            "success": True,
            "message": f"Service '{service_name}' deleted successfully",
            "service_id": service_id
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error deleting service {service_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to delete service"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# PART B: CAPACITY ENDPOINTS (4 TOTAL)
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 5. GET /api/v1/locations/{location_id}/capacity - List capacity entries
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/{location_id}/capacity", response_model=List[Capacity])
async def list_capacity(location_id: int):
    """
    List all capacity tracking entries for a location.

    Entries are ordered alphabetically by capacity_type; derived fields
    (usage percentage, available capacity) come from the Capacity model.

    Path parameter: location_id (location ID)

    Returns: List of Capacity objects with usage_percentage and available_capacity

    Raises:
    - 404: Location not found
    - 500: Database error
    """
    try:
        # A soft-deleted location is treated as nonexistent.
        loc_rows = execute_query(
            "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL",
            (location_id,),
        )
        if not loc_rows:
            logger.error(f"❌ Location not found: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # Fetch every capacity row for this location, ordered by type.
        cap_rows = execute_query(
            """
            SELECT * FROM locations_capacity
            WHERE location_id = %s
            ORDER BY capacity_type ASC
            """,
            (location_id,),
        )

        logger.info(f"📊 Listed {len(cap_rows)} capacity entries for location {location_id}")

        return [Capacity(**entry) for entry in cap_rows]

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error listing capacity for location {location_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to list capacity"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 6. POST /api/v1/locations/{location_id}/capacity - Add capacity entry
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/{location_id}/capacity", response_model=Capacity, status_code=201)
async def create_capacity(location_id: int, data: CapacityCreate):
    """
    Add a new capacity tracking entry for a location.

    Example types: rack_units, square_meters, storage_boxes, parking_spaces

    Validation (checked in order):
    - total_capacity > 0
    - used_capacity >= 0
    - used_capacity <= total_capacity
    - location must exist

    Path parameter: location_id (location ID)
    Request: CapacityCreate with capacity_type, total_capacity, used_capacity

    Returns: Created Capacity object with ID

    Raises:
    - 404: Location not found
    - 400: Invalid input (capacity constraints violated)
    - 500: Database error
    """
    try:
        # Reject unknown / soft-deleted locations up front.
        loc_rows = execute_query(
            "SELECT name FROM locations_locations WHERE id = %s AND deleted_at IS NULL",
            (location_id,),
        )
        if not loc_rows:
            logger.error(f"❌ Location not found: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        location_name = loc_rows[0]['name']

        # Capacity invariants, checked one by one so the caller gets a
        # precise error message for the first violated constraint.
        if data.total_capacity <= 0:
            logger.warning(f"❌ Capacity creation failed: total_capacity must be > 0")
            raise HTTPException(
                status_code=400,
                detail="total_capacity must be greater than 0"
            )

        if data.used_capacity < 0:
            logger.warning(f"❌ Capacity creation failed: used_capacity must be >= 0")
            raise HTTPException(
                status_code=400,
                detail="used_capacity must be >= 0"
            )

        if data.used_capacity > data.total_capacity:
            logger.warning(
                f"❌ Capacity creation failed: used_capacity ({data.used_capacity}) "
                f"> total_capacity ({data.total_capacity})"
            )
            raise HTTPException(
                status_code=400,
                detail="used_capacity cannot exceed total_capacity"
            )

        # Insert and return the freshly created row in one round trip.
        new_rows = execute_query(
            """
            INSERT INTO locations_capacity (
                location_id, capacity_type, total_capacity, used_capacity, last_updated
            )
            VALUES (%s, %s, %s, %s, NOW())
            RETURNING *
            """,
            (location_id, data.capacity_type, data.total_capacity, data.used_capacity),
        )

        if not new_rows:
            logger.error(f"❌ Failed to create capacity for location {location_id}")
            raise HTTPException(
                status_code=500,
                detail="Failed to create capacity"
            )

        logger.info(
            f"✅ Capacity added: {data.capacity_type} ({data.used_capacity}/{data.total_capacity}) "
            f"at {location_name}"
        )

        return Capacity(**new_rows[0])

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error creating capacity for location {location_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to create capacity"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 7. PATCH /api/v1/locations/{location_id}/capacity/{capacity_id} - Update capacity
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.patch("/locations/{location_id}/capacity/{capacity_id}", response_model=Capacity)
async def update_capacity(
    location_id: int,
    capacity_id: int,
    data: CapacityUpdate
):
    """
    Update capacity (total or used).

    All fields optional. The effective post-update values (provided value, or
    the current DB value when omitted) are validated together, so a partial
    update can never leave used_capacity > total_capacity.

    Path parameters: location_id, capacity_id
    Request: CapacityUpdate with optional total_capacity and/or used_capacity

    Returns: Updated Capacity object

    Validation:
    - total_capacity must be > 0 if provided
    - used_capacity must be >= 0 if provided
    - used_capacity cannot exceed total_capacity (after updates)

    Raises:
    - 404: Location or capacity not found
    - 400: Invalid input or validation failed
    - 500: Database error
    """
    try:
        # Check location exists (soft-deleted locations are treated as absent)
        location_query = "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL"
        location_check = execute_query(location_query, (location_id,))

        if not location_check:
            logger.error(f"❌ Location not found: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # Check capacity exists and belongs to this location
        capacity_query = """
            SELECT * FROM locations_capacity
            WHERE id = %s AND location_id = %s
        """
        capacity_check = execute_query(capacity_query, (capacity_id, location_id))

        if not capacity_check:
            logger.error(f"❌ Capacity not found: {capacity_id} for location {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Capacity with id {capacity_id} not found for this location"
            )

        current_capacity = capacity_check[0]

        # Determine post-update values (fall back to existing when omitted)
        new_total = data.total_capacity if data.total_capacity is not None else current_capacity['total_capacity']
        new_used = data.used_capacity if data.used_capacity is not None else current_capacity['used_capacity']

        if new_total <= 0:
            logger.warning(f"❌ Capacity update failed: total_capacity must be > 0")
            raise HTTPException(
                status_code=400,
                detail="total_capacity must be greater than 0"
            )

        if new_used < 0:
            logger.warning(f"❌ Capacity update failed: used_capacity must be >= 0")
            raise HTTPException(
                status_code=400,
                detail="used_capacity must be >= 0"
            )

        if new_used > new_total:
            logger.warning(
                f"❌ Capacity update failed: used_capacity ({new_used}) > total_capacity ({new_total})"
            )
            raise HTTPException(
                status_code=400,
                detail="used_capacity cannot exceed total_capacity"
            )

        # Build UPDATE query with only the provided fields
        update_parts = []
        params = []

        if data.total_capacity is not None:
            update_parts.append("total_capacity = %s")
            params.append(data.total_capacity)

        if data.used_capacity is not None:
            update_parts.append("used_capacity = %s")
            params.append(data.used_capacity)

        if not update_parts:
            logger.warning(f"⚠️ No fields provided for update: capacity {capacity_id}")
            raise HTTPException(
                status_code=400,
                detail="No fields provided for update"
            )

        # Always refresh last_updated on a successful write
        update_parts.append("last_updated = NOW()")

        # FIX: constrain the UPDATE by location_id too (consistent with
        # delete_capacity) so a row can never be modified through the wrong
        # location path, even under concurrent changes.
        params.extend([capacity_id, location_id])

        update_query = f"""
            UPDATE locations_capacity
            SET {', '.join(update_parts)}
            WHERE id = %s AND location_id = %s
            RETURNING *
        """

        result = execute_query(update_query, tuple(params))

        if not result:
            logger.error(f"❌ Failed to update capacity {capacity_id}")
            raise HTTPException(
                status_code=500,
                detail="Failed to update capacity"
            )

        updated_capacity = result[0]
        # Safe: new_total (and therefore the stored total) was validated > 0 above
        usage_percentage = float((updated_capacity['used_capacity'] / updated_capacity['total_capacity']) * 100)

        logger.info(
            f"🔄 Capacity updated: {updated_capacity['capacity_type']} "
            f"utilization now {usage_percentage:.1f}%"
        )

        return Capacity(**result[0])

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error updating capacity {capacity_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to update capacity"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 8. DELETE /api/v1/locations/{location_id}/capacity/{capacity_id} - Delete capacity
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.delete("/locations/{location_id}/capacity/{capacity_id}", response_model=dict)
async def delete_capacity(location_id: int, capacity_id: int):
    """
    Delete a capacity entry.

    Physical deletion (not soft-delete) - capacity is not critical data.

    Path parameters: location_id, capacity_id

    Returns: Confirmation dict

    Raises:
    - 404: Location or capacity not found
    - 500: Database error
    """
    try:
        # A soft-deleted location is treated as nonexistent.
        loc_rows = execute_query(
            "SELECT id FROM locations_locations WHERE id = %s AND deleted_at IS NULL",
            (location_id,),
        )
        if not loc_rows:
            logger.error(f"❌ Location not found: {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Location with id {location_id} not found"
            )

        # The capacity row must belong to this location.
        cap_rows = execute_query(
            """
            SELECT capacity_type FROM locations_capacity
            WHERE id = %s AND location_id = %s
            """,
            (capacity_id, location_id),
        )
        if not cap_rows:
            logger.error(f"❌ Capacity not found: {capacity_id} for location {location_id}")
            raise HTTPException(
                status_code=404,
                detail=f"Capacity with id {capacity_id} not found for this location"
            )

        capacity_type = cap_rows[0]['capacity_type']

        # Hard delete, scoped to both ids.
        execute_query(
            """
            DELETE FROM locations_capacity
            WHERE id = %s AND location_id = %s
            """,
            (capacity_id, location_id),
        )

        logger.info(f"🗑️ Capacity deleted: {capacity_type}")

        return {
            "success": True,
            "message": f"Capacity '{capacity_type}' deleted successfully",
            "capacity_id": capacity_id
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error deleting capacity {capacity_id}: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to delete capacity"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# PHASE 2.5: BULK OPERATIONS & ADVANCED QUERIES
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 9. POST /api/v1/locations/bulk-update - Bulk update locations
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/bulk-update", response_model=dict)
async def bulk_update_locations(data: BulkUpdateRequest):
    """
    Update multiple locations at once with transaction atomicity.

    Request: BulkUpdateRequest with:
    - ids: List[int] - Location IDs to update (min 1, max 1000)
    - updates: dict - Field names and values to update

    Supported fields for bulk update:
    - is_active (bool)
    - location_type (str)
    - address_country (str)

    Limited to avoid accidental mass overwrites of critical fields.

    Returns: {
        "updated": count,
        "failed": count,
        "errors": list of errors
    }

    Raises:
    - 400: Invalid location IDs or update fields
    - 404: No valid locations found
    - 500: Database transaction error
    """
    try:
        # Validate ids list size
        if len(data.ids) < 1 or len(data.ids) > 1000:
            logger.warning(f"⚠️ Invalid bulk update: {len(data.ids)} IDs (must be 1-1000)")
            raise HTTPException(
                status_code=400,
                detail="Must provide between 1 and 1000 location IDs"
            )

        # Only a small allowlist of fields may be bulk-updated; field names
        # are validated against this set before being interpolated into SQL,
        # so the f-string below cannot inject arbitrary identifiers.
        allowed_fields = {'is_active', 'location_type', 'address_country'}
        provided_fields = set(data.updates.keys())

        if not provided_fields.issubset(allowed_fields):
            invalid_fields = provided_fields - allowed_fields
            logger.warning(f"⚠️ Invalid update fields: {invalid_fields}")
            raise HTTPException(
                status_code=400,
                detail=f"Invalid fields: {', '.join(invalid_fields)}. Allowed: {', '.join(allowed_fields)}"
            )

        if not data.updates:
            logger.warning("⚠️ No updates provided")
            raise HTTPException(
                status_code=400,
                detail="At least one field must be provided for update"
            )

        # Validate location_type against the closed set of known types
        if 'location_type' in data.updates:
            allowed_types = ['kompleks', 'bygning', 'etage', 'customer_site', 'rum', 'kantine', 'moedelokale', 'vehicle']
            if data.updates['location_type'] not in allowed_types:
                logger.warning(f"⚠️ Invalid location_type: {data.updates['location_type']}")
                raise HTTPException(
                    status_code=400,
                    detail=f"location_type must be one of: {', '.join(allowed_types)}"
                )

        # Determine which of the requested locations actually exist (and are
        # not soft-deleted); the rest are reported back as failures.
        ids_placeholder = ','.join(['%s'] * len(data.ids))
        check_query = f"""
            SELECT id FROM locations_locations
            WHERE id IN ({ids_placeholder}) AND deleted_at IS NULL
        """
        existing = execute_query(check_query, tuple(data.ids))
        existing_ids = set(row['id'] for row in existing)
        failed_ids = set(data.ids) - existing_ids

        if not existing_ids:
            logger.warning(f"❌ No valid locations found for bulk update")
            raise HTTPException(
                status_code=404,
                detail="No valid locations found with provided IDs"
            )

        # Build SET clause from the validated fields
        update_parts = []
        update_values = []

        for field, value in data.updates.items():
            update_parts.append(f"{field} = %s")
            update_values.append(value)

        update_values.append(datetime.utcnow())
        update_clause = ", ".join(update_parts) + ", updated_at = %s"

        update_ids = list(existing_ids)
        update_ids_placeholder = ','.join(['%s'] * len(update_ids))
        update_values.extend(update_ids)

        # Execute transaction
        try:
            execute_query("BEGIN")

            # FIX: re-assert deleted_at IS NULL in the UPDATE itself.  The
            # existence check above is a separate statement, so without this
            # guard a location soft-deleted in between would still be
            # modified (TOCTOU).
            update_query = f"""
                UPDATE locations_locations
                SET {update_clause}
                WHERE id IN ({update_ids_placeholder}) AND deleted_at IS NULL
            """

            execute_query(update_query, tuple(update_values))

            # One audit-log row per updated location, inside the same
            # transaction so the audit trail matches the data change.
            audit_query = """
                INSERT INTO locations_audit_log (location_id, event_type, changes, created_at)
                VALUES (%s, %s, %s, %s)
            """
            changes_json = json.dumps(data.updates)
            for location_id in update_ids:
                execute_query(
                    audit_query,
                    (location_id, 'bulk_update', changes_json, datetime.utcnow())
                )

            execute_query("COMMIT")

            logger.info(f"✅ Bulk updated {len(existing_ids)} locations")

            return {
                "updated": len(existing_ids),
                "failed": len(failed_ids),
                "errors": [{"id": lid, "reason": "Location not found or already deleted"} for lid in failed_ids]
            }

        except Exception as tx_error:
            # Roll back on any transaction error; best-effort, since the
            # connection may already be unusable.
            try:
                execute_query("ROLLBACK")
            except Exception:
                pass
            logger.error(f"❌ Transaction error during bulk update: {str(tx_error)}")
            raise

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error during bulk update: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to bulk update locations"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 10. POST /api/v1/locations/bulk-delete - Bulk soft-delete locations
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/locations/bulk-delete", response_model=dict)
async def bulk_delete_locations(data: BulkDeleteRequest):
    """
    Soft-delete multiple locations at once with transaction atomicity.

    Request: BulkDeleteRequest with:
    - ids: List[int] - Location IDs to delete (min 1, max 1000)

    Returns: {
        "deleted": count,
        "failed": count,
        "message": "..."
    }

    Raises:
    - 400: Invalid location IDs or count
    - 404: No valid locations found
    - 500: Database transaction error
    """
    try:
        # Validate ids list size
        if len(data.ids) < 1 or len(data.ids) > 1000:
            logger.warning(f"⚠️ Invalid bulk delete: {len(data.ids)} IDs (must be 1-1000)")
            raise HTTPException(
                status_code=400,
                detail="Must provide between 1 and 1000 location IDs"
            )

        # Determine which locations exist and are not already deleted;
        # everything else counts as a failure in the response.
        ids_placeholder = ','.join(['%s'] * len(data.ids))
        check_query = f"""
            SELECT id FROM locations_locations
            WHERE id IN ({ids_placeholder}) AND deleted_at IS NULL
        """
        existing = execute_query(check_query, tuple(data.ids))
        existing_ids = set(row['id'] for row in existing)
        failed_ids = set(data.ids) - existing_ids

        if not existing_ids:
            logger.warning(f"❌ No valid locations found for bulk delete")
            raise HTTPException(
                status_code=404,
                detail="No valid locations found with provided IDs"
            )

        # Execute transaction
        try:
            execute_query("BEGIN")

            delete_ids = list(existing_ids)
            delete_ids_placeholder = ','.join(['%s'] * len(delete_ids))

            # FIX: re-assert deleted_at IS NULL in the UPDATE.  Without it a
            # location soft-deleted concurrently (between the check above and
            # this statement) would have its original deleted_at timestamp
            # silently overwritten (TOCTOU).
            delete_query = f"""
                UPDATE locations_locations
                SET deleted_at = %s, updated_at = %s
                WHERE id IN ({delete_ids_placeholder}) AND deleted_at IS NULL
            """

            execute_query(
                delete_query,
                (datetime.utcnow(), datetime.utcnow(), *delete_ids)
            )

            # One audit-log row per deleted location, inside the same
            # transaction so the audit trail matches the data change.
            audit_query = """
                INSERT INTO locations_audit_log (location_id, event_type, changes, created_at)
                VALUES (%s, %s, %s, %s)
            """
            for location_id in delete_ids:
                execute_query(
                    audit_query,
                    (location_id, 'bulk_delete', '{}', datetime.utcnow())
                )

            execute_query("COMMIT")

            logger.info(f"🗑️ Bulk deleted {len(existing_ids)} locations")

            return {
                "deleted": len(existing_ids),
                "failed": len(failed_ids),
                "message": f"Successfully soft-deleted {len(existing_ids)} locations"
            }

        except Exception as tx_error:
            # Roll back on any transaction error; best-effort, since the
            # connection may already be unusable.
            try:
                execute_query("ROLLBACK")
            except Exception:
                pass
            logger.error(f"❌ Transaction error during bulk delete: {str(tx_error)}")
            raise

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error during bulk delete: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to bulk delete locations"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 11. GET /api/v1/locations/by-type/{location_type} - Filter by type
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/by-type/{location_type}", response_model=List[Location])
async def get_locations_by_type(
    location_type: str,
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=1000)
):
    """
    Get all locations of a specific type with pagination.

    Path parameter:
        location_type: one of (kompleks, bygning, etage, customer_site,
            rum, kantine, moedelokale, vehicle)

    Query parameters:
        skip: number of rows to skip (pagination offset, default 0)
        limit: maximum rows to return (default 50, max 1000)

    Returns: Paginated list of Location objects ordered by name ASC.
        An empty list is a valid result — no 404 is raised when nothing
        matches (see the explicit comment below).

    Raises:
        - 400: Invalid location_type
        - 500: Database error
    """
    try:
        # Whitelist the type before querying so a typo gets a clear 400
        # instead of silently returning an empty result set.
        allowed_types = ['kompleks', 'bygning', 'etage', 'customer_site', 'rum', 'kantine', 'moedelokale', 'vehicle']
        if location_type not in allowed_types:
            logger.warning(f"⚠️ Invalid location_type: {location_type}")
            raise HTTPException(
                status_code=400,
                detail=f"Invalid location_type. Must be one of: {', '.join(allowed_types)}"
            )

        # Soft-deleted rows (deleted_at set) are always excluded.
        query = """
            SELECT * FROM locations_locations
            WHERE location_type = %s AND deleted_at IS NULL
            ORDER BY name ASC
            LIMIT %s OFFSET %s
        """

        results = execute_query(query, (location_type, limit, skip))

        if not results:
            logger.info(f"ℹ️ No locations found for type: {location_type}")
            # Don't raise 404 - empty list is valid
        else:
            logger.info(f"📍 Found {len(results)} {location_type} locations")

        return [Location(**row) for row in results]

    except HTTPException:
        # Re-raise our own 400 untouched so it isn't swallowed below.
        raise
    except Exception as e:
        logger.error(f"❌ Error filtering locations by type: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to filter locations by type"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 12. GET /api/v1/locations/near-me - Proximity search
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/near-me", response_model=List[Location])
async def get_nearby_locations(
    latitude: float = Query(..., ge=-90, le=90, description="Reference latitude"),
    longitude: float = Query(..., ge=-180, le=180, description="Reference longitude"),
    distance_km: float = Query(50, ge=1, le=1000, description="Search radius in kilometers"),
    limit: int = Query(10, ge=1, le=100, description="Max results to return")
):
    """
    Find locations within a distance radius using latitude/longitude coordinates.

    Uses the spherical law of cosines for great-circle distance
    (distance = R * acos(sin φ1 sin φ2 + cos φ1 cos φ2 cos Δλ), R = 6371 km).

    Query parameters:
        - latitude: Reference latitude (required, -90 to 90)
        - longitude: Reference longitude (required, -180 to 180)
        - distance_km: Search radius in kilometers (default 50, max 1000)
        - limit: Maximum results to return (default 10, max 100)

    Returns: List of nearby Location objects ordered by distance ASC.

    Note: Returns only locations with geocoordinates populated.
    Returns empty list if no locations found within radius.

    NOTE(review): this path is literal — confirm it is registered before
    GET /locations/{id}, otherwise FastAPI will match "near-me" as an id.

    Raises:
        - 400: Invalid coordinates
        - 500: Database error
    """
    try:
        # Defensive re-validation; FastAPI's Query(ge=/le=) constraints
        # should already reject these, so these branches are a backstop.
        if not (-90 <= latitude <= 90):
            logger.warning(f"⚠️ Invalid latitude: {latitude}")
            raise HTTPException(status_code=400, detail="Latitude must be between -90 and 90")

        if not (-180 <= longitude <= 180):
            logger.warning(f"⚠️ Invalid longitude: {longitude}")
            raise HTTPException(status_code=400, detail="Longitude must be between -180 and 180")

        # Spherical law of cosines, Earth radius 6371 km.
        # The cosine sum is clamped to [-1, 1] with LEAST/GREATEST: for
        # near-coincident points floating-point rounding can push it just
        # above 1, which would make acos() return NULL/NaN and silently
        # drop (or mis-order) rows.
        query = """
            SELECT *,
                6371 * acos(LEAST(1, GREATEST(-1,
                    sin(radians(%s)) * sin(radians(latitude)) +
                    cos(radians(%s)) * cos(radians(latitude)) *
                    cos(radians(%s - longitude))
                ))) AS distance_km
            FROM locations_locations
            WHERE latitude IS NOT NULL
                AND longitude IS NOT NULL
                AND deleted_at IS NULL
                AND 6371 * acos(LEAST(1, GREATEST(-1,
                    sin(radians(%s)) * sin(radians(latitude)) +
                    cos(radians(%s)) * cos(radians(latitude)) *
                    cos(radians(%s - longitude))
                ))) <= %s
            ORDER BY distance_km ASC
            LIMIT %s
        """

        # Placeholder order: SELECT (lat, lat, lon), WHERE (lat, lat, lon),
        # then radius and LIMIT.
        results = execute_query(
            query,
            (latitude, latitude, longitude, latitude, latitude, longitude, distance_km, limit)
        )

        logger.info(f"📍 Found {len(results)} locations within {distance_km}km of ({latitude}, {longitude})")

        # NOTE(review): each row carries the extra computed distance_km key;
        # assumes the Location model tolerates/ignores extra fields — confirm.
        return [Location(**row) for row in results]

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error searching nearby locations: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to search nearby locations"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
# 13. GET /api/v1/locations/stats - Statistics
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/locations/stats", response_model=LocationStats)
async def get_locations_statistics():
    """
    Get comprehensive statistics about all locations.

    Returns: LocationStats object with:
        - total_locations: Total count of non-deleted locations
        - active_locations: Count where is_active = true
        - by_type: Dict with count per location_type
        - total_contacts: Sum of all contacts across locations
        - total_services: Sum of all services across locations
        - average_capacity_utilization: Average usage percentage across all
          capacity entries, clamped to the 0-100 range

    Example response:
        {
            "total_locations": 15,
            "active_locations": 12,
            "by_type": {"kompleks": 2, "bygning": 6, "etage": 3,
                        "customer_site": 2, "rum": 2, "vehicle": 1},
            "total_contacts": 32,
            "total_services": 18,
            "average_capacity_utilization": 68.5
        }

    Raises:
        - 500: Database error
    """
    try:
        def _count(sql):
            # Run a single-row COUNT(*) query and unwrap it (0 when empty).
            rows = execute_query(sql)
            return rows[0]['count'] if rows else 0

        # Soft-deleted rows are excluded from every aggregate below.
        total_locations = _count(
            """
            SELECT COUNT(*) as count FROM locations_locations
            WHERE deleted_at IS NULL
            """
        )

        active_locations = _count(
            """
            SELECT COUNT(*) as count FROM locations_locations
            WHERE deleted_at IS NULL AND is_active = true
            """
        )

        # Per-type breakdown, keyed by location_type.
        by_type = {
            row['location_type']: row['count']
            for row in execute_query(
                """
                SELECT location_type, COUNT(*) as count
                FROM locations_locations
                WHERE deleted_at IS NULL
                GROUP BY location_type
                """
            )
        }

        total_contacts = _count(
            """
            SELECT COUNT(*) as count FROM locations_contacts
            WHERE deleted_at IS NULL
            """
        )

        total_services = _count(
            """
            SELECT COUNT(*) as count FROM locations_services
            WHERE deleted_at IS NULL
            """
        )

        # Average utilization percentage; NULLIF guards the division and the
        # WHERE clause already skips zero-capacity rows.
        capacity_rows = execute_query(
            """
            SELECT AVG((used_capacity / NULLIF(total_capacity, 0)) * 100) as avg_utilization
            FROM locations_capacity
            WHERE total_capacity > 0
            """
        )
        raw_average = float(capacity_rows[0]['avg_utilization'] or 0) if capacity_rows else 0

        # Clamp into [0, 100] — used_capacity above total would otherwise
        # report more than 100%.
        average_capacity_utilization = min(100, max(0, raw_average))

        logger.info(f"📊 Statistics retrieved: {total_locations} locations, {total_contacts} contacts, {total_services} services")

        return LocationStats(
            total_locations=total_locations,
            active_locations=active_locations,
            by_type=by_type,
            total_contacts=total_contacts,
            total_services=total_services,
            average_capacity_utilization=average_capacity_utilization
        )

    except Exception as e:
        logger.error(f"❌ Error retrieving statistics: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Failed to retrieve location statistics"
        )
|