"""
Wiki.js Integration Service (read-only)
"""

import logging
from typing import Any, Dict, List, Optional

import aiohttp

from app.core.config import settings

logger = logging.getLogger(__name__)

class WikiService:
    """Read-only client for a Wiki.js instance via its GraphQL API.

    Lists and searches pages under the customer documentation tree
    (``en/Kunder``). No mutations are ever issued; ``read_only`` only
    controls a startup warning.
    """

    # Root of the customer documentation tree in Wiki.js.
    _CUSTOMER_BASE = "en/Kunder"

    def __init__(self) -> None:
        # Trailing slash stripped so URL joins below stay unambiguous.
        self.base_url = settings.WIKI_BASE_URL.rstrip("/")
        # Either setting name is accepted; WIKI_API_KEY wins when both are set.
        self.api_token = settings.WIKI_API_KEY or settings.WIKI_API_TOKEN
        self.timeout = settings.WIKI_TIMEOUT_SECONDS
        self.read_only = settings.WIKI_READ_ONLY

        if self.read_only:
            # NOTE: original source held a raw surrogate-pair escape
            # ("\ud83d\udd12") which breaks UTF-8 log handlers; use the
            # intended emoji instead.
            logger.warning("🔒 Wiki.js READ_ONLY MODE ENABLED")

    def _normalize_slug(self, slug: str) -> str:
        """Return *slug* trimmed of whitespace/slashes and the customer prefix.

        Accepts ``None`` (returns ``""``) and slugs that already contain the
        full ``en/Kunder/`` prefix in any casing.
        """
        normalized = (slug or "").strip().strip("/")
        prefix = f"{self._CUSTOMER_BASE}/"
        if normalized.lower().startswith(prefix.lower()):
            normalized = normalized[len(prefix):]
        return normalized

    def build_customer_path(self, slug: str) -> str:
        """Return the Wiki.js path for *slug*, rooted at ``en/Kunder``.

        An empty/None slug yields the tree root itself.
        """
        normalized = self._normalize_slug(slug)
        return f"{self._CUSTOMER_BASE}/{normalized}" if normalized else self._CUSTOMER_BASE

    def _headers(self) -> Dict[str, str]:
        """HTTP headers for the GraphQL endpoint (Bearer auth when configured)."""
        if not self.api_token:
            return {"Content-Type": "application/json"}
        return {
            "Authorization": f"Bearer {self.api_token}",
            "Content-Type": "application/json",
        }

    @staticmethod
    def _path_matches(raw_path: Optional[str], prefix_lowers: List[str]) -> bool:
        """True when *raw_path* equals, or lies under, any lowercase prefix.

        Segment-aware: ``en/kunder/abc`` matches ``abc`` and ``abc/x`` but
        NOT ``abcdef`` (the original bare ``startswith`` did).
        """
        candidate = (raw_path or "").lstrip("/").lower()
        return any(
            candidate == prefix or candidate.startswith(prefix + "/")
            for prefix in prefix_lowers
        )

    async def _graphql_request(self, query: str, variables: Optional[dict] = None) -> Dict[str, Any]:
        """POST a GraphQL *query* and return the decoded JSON body.

        Never raises: transport failures, timeouts and HTTP >= 400 are all
        reported as ``{"errors": [...]}`` so callers handle one shape.
        """
        if not self.api_token:
            return {"errors": ["missing_token"]}

        url = f"{self.base_url}/graphql"
        payload = {"query": query, "variables": variables or {}}

        try:
            async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=self.timeout)) as session:
                async with session.post(url, json=payload, headers=self._headers()) as resp:
                    # content_type=None: decode JSON even when a proxy or
                    # error page mislabels the Content-Type; a truly
                    # non-JSON body still lands in the except below.
                    data = await resp.json(content_type=None)
                    if resp.status >= 400:
                        return {"errors": [data], "status": resp.status}
                    return data
        except Exception as exc:  # network / timeout / JSON decode — report, don't raise
            logger.error("\u274c Wiki.js request failed: %s", exc)
            return {"errors": [str(exc)]}

    async def list_customer_pages(
        self,
        slug: str,
        tag: Optional[str] = "guide",
        query: Optional[str] = None,
        limit: int = 20,
    ) -> Dict[str, Any]:
        """List or full-text-search wiki pages under a customer's path.

        Args:
            slug: Customer slug (with or without the ``en/Kunder/`` prefix).
            tag: Optional tag filter for the list query (default ``"guide"``).
            query: When given, a search query is issued instead of a listing.
            limit: Max pages returned; clamped to 1..100 and now applied to
                the search branch too (the GraphQL search API has no limit
                argument, so search results were previously unbounded).

        Returns:
            Dict with ``pages`` (list of normalized page dicts), ``path``,
            ``tag``, ``query``, ``base_url`` and, on failure, ``errors``.
        """
        path = self.build_customer_path(slug)
        path_prefix = path.lstrip("/")
        tag_list = [tag] if tag else None
        query_value = query.strip() if query else None
        clamped_limit = max(1, min(limit, 100))

        gql_list = """
        query WikiCustomerList($tags: [String!], $limit: Int) {
          pages {
            list(
              limit: $limit,
              orderBy: TITLE,
              orderByDirection: ASC,
              tags: $tags
            ) {
              id
              title
              path
              description
              updatedAt
            }
          }
        }
        """

        gql_search = """
        query WikiCustomerSearch($query: String!, $path: String) {
          pages {
            search(query: $query, path: $path) {
              results {
                id
                title
                path
                description
              }
            }
          }
        }
        """

        if query_value:
            response = await self._graphql_request(gql_search, {"query": query_value})
            pages_raw = (((response.get("data") or {}).get("pages") or {}).get("search") or {}).get("results") or []
        else:
            response = await self._graphql_request(
                gql_list,
                {"tags": tag_list, "limit": clamped_limit},
            )
            pages_raw = (((response.get("data") or {}).get("pages") or {}).get("list")) or []

        errors = response.get("errors")
        if errors:
            logger.error("\u274c Wiki.js query failed: %s", errors)
            return {
                "pages": [],
                # Leading slash for consistency with the success path below.
                "path": f"/{path}",
                "tag": tag,
                "query": query_value,
                "base_url": self.base_url,
                "errors": errors,
            }

        if not pages_raw and query_value:
            # Retry the search scoped to the customer path — presumably some
            # Wiki.js versions need an explicit path filter; TODO confirm.
            response = await self._graphql_request(
                gql_search,
                {
                    "query": query_value,
                    "path": f"/{path}" if not path.startswith("/") else path,
                },
            )
            if not response.get("errors"):
                pages_raw = (((response.get("data") or {}).get("pages") or {}).get("search") or {}).get("results") or []

        if path_prefix:
            # Search results may come from the whole wiki; keep only pages
            # under the customer path. Wiki.js may report paths without the
            # locale prefix, so also try with "en/" removed.
            prefixes = [path_prefix.lower()]
            if path_prefix.lower().startswith("en/"):
                prefixes.append(path_prefix[3:].lower())
            pages_raw = [
                item for item in pages_raw
                if isinstance(item, dict) and self._path_matches(item.get("path"), prefixes)
            ]

        pages: List[Dict[str, Any]] = []
        for item in pages_raw:
            raw_path = item.get("path") if isinstance(item, dict) else None
            if not raw_path:
                continue
            url = f"{self.base_url}{raw_path if raw_path.startswith('/') else '/' + raw_path}"
            pages.append({
                "id": item.get("id"),
                "title": item.get("title") or raw_path,
                "path": raw_path,
                "description": item.get("description"),
                "updated_at": item.get("updatedAt"),
                "url": url,
            })

        # Honor `limit` uniformly (search has no server-side limit).
        pages = pages[:clamped_limit]

        return {
            "pages": pages,
            "path": f"/{path}",
            "tag": tag,
            "query": query_value,
            "base_url": self.base_url,
        }
|