diff --git a/Patriot API.postman_collection.json b/Patriot API.postman_collection.json
index d5cf0a2..0589006 100644
--- a/Patriot API.postman_collection.json
+++ b/Patriot API.postman_collection.json
@@ -1,10 +1,11 @@
{
"info": {
- "_postman_id": "d549670f-756d-49fa-925b-dd82e8e9cc0c",
+ "_postman_id": "f7926ba4-6fae-4e5e-973b-402456b66ceb",
"name": "Patriot API",
"description": "Common Status Codes\n\n- \\- 200 OK: Request successful (e.g.,UPSERT update, reads)\n \n- \\- 201 Created: Resource created (e.g., UPSERT create, add user, upsert zone)\n \n- \\- 204 No Content: Deleted successfully\n \n- \\- 401 Unauthorized: Missing/invalid/disabled/expired API key\n \n- \\- 404 Not Found: Client or resource not found\n \n- \\- 409 Conflict: Duplicate user number\n \n- \\- 422 Unprocessable Entity: Validation error (e.g., zone_id out of range)",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
- "_exporter_id": "18764749"
+ "_exporter_id": "50473838",
+ "_collection_link": "https://mrnutsen-7f68d4fc-3205107.postman.co/workspace/A24_Patriot_API~882aa50c-5471-4e78-aa12-1811e10b3979/collection/50473838-f7926ba4-6fae-4e5e-973b-402456b66ceb?action=share&source=collection_link&creator=50473838"
},
"item": [
{
@@ -56,12 +57,11 @@
}
],
"url": {
- "raw": "{{baseUrl}}/clients/zones",
+ "raw": "{{baseUrl}}/zones",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
"zones"
]
},
@@ -86,12 +86,11 @@
}
],
"url": {
- "raw": "{{baseUrl}}/clients/users",
+ "raw": "{{baseUrl}}/users",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
"users"
]
},
@@ -121,13 +120,12 @@
}
],
"url": {
- "raw": "{{baseUrl}}/clients/users",
+ "raw": "{{baseUrl}}/user",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
- "users"
+ "user"
]
},
"description": "Get a specific user\n\nClient ID must be set in the \"X-Client-Id\" header.\n\nUser ID must be set in the \"X-User-Id\" header."
@@ -153,7 +151,7 @@
],
"body": {
"mode": "raw",
- "raw": "{\n \"client_id\": 123456789,\n \"info\": {\n \"Name\": \"Anders Knutsen\",\n \"Alias\": \"000FD267\",\n \"Location\": \"Lislebyveien 58\",\n \"area_code\": \"1604\",\n \"area\": \"Fredrikstad\",\n \"BusPhone\": \"69310000\",\n \"Email\": \"post@ostsik.no\",\n \"OKPassword\": \"franzjager\",\n \"SpecRequest\": \"Dette skal gjøres ved alarm på denne kunden.\",\n \"NoSigsMon\": \"ActiveAny\",\n \"SinceDays\": 1,\n \"SinceHrs\": 0,\n \"SinceMins\": 30,\n \"ResetNosigsIgnored\": true,\n \"ResetNosigsDays\": 7,\n \"ResetNosigsHrs\": 0,\n \"ResetNosigsMins\": 0,\n \"InstallDateTime\": \"2023-02-20\",\n \"PanelName\": \"Ajax\",\n \"PanelSite\": \"Stue\",\n \"KeypadLocation\": \"Inngang\",\n \"SPPage\": \"Ekstra informasjon som kan være relevant.\"\n }\n}",
+ "raw": "{\n \"client_id\": 4848,\n \"info\": {\n \"Name\": \"Anders Knutsen\",\n \"Alias\": \"000FD267\",\n \"Location\": \"Lislebyveien 58\",\n \"area_code\": \"1604\",\n \"area\": \"Fredrikstad\",\n \"BusPhone\": \"69310000\",\n \"Email\": \"post@ostsik.no\",\n \"OKPassword\": \"franzjager\",\n \"SpecRequest\": \"Dette skal gjøres ved alarm på denne kunden.\",\n \"NoSigsMon\": \"1\",\n \"SinceDays\": 1,\n \"SinceHrs\": 0,\n \"SinceMins\": 30,\n \"ResetNosigsIgnored\": true,\n \"ResetNosigsDays\": 7,\n \"ResetNosigsHrs\": 0,\n \"ResetNosigsMins\": 0,\n \"InstallDateTime\": \"2023-02-20\",\n \"PanelName\": \"Ajax\",\n \"PanelSite\": \"Stue\",\n \"KeypadLocation\": \"Inngang\",\n \"SPPage\": \"Ekstra informasjon som kan være relevant.\"\n }\n}",
"options": {
"raw": {
"language": "json"
@@ -186,7 +184,7 @@
],
"body": {
"mode": "raw",
- "raw": "{\n \"client_id\": 123456789,\n \"info\": {\n \"Name\": \"optional\"\n }\n}",
+ "raw": "{\n \"client_id\": {{clientId}},\n \"info\": {\n \"Name\": \"Anders Knutsen\",\n \"Alias\": \"000FD267\",\n \"Location\": \"Bergbyveien\",\n \"area_code\": \"1730\",\n \"area\": \"Ise\",\n \"BusPhone\": \"69310000\",\n \"Email\": \"post@ostsik.no\",\n \"OKPassword\": \"franzjager\",\n \"SpecRequest\": \"Dette skal gjøres ved alarm på denne kunden.\",\n \"NoSigsMon\": \"Disabled\",\n \"SinceDays\": 1,\n \"SinceHrs\": 0,\n \"SinceMins\": 30,\n \"ResetNosigsIgnored\": true,\n \"ResetNosigsDays\": 14,\n \"ResetNosigsHrs\": 0,\n \"ResetNosigsMins\": 0,\n \"InstallDateTime\": \"2025-12-01\",\n \"PanelName\": \"Ajax\",\n \"PanelSite\": \"Ved tv\",\n \"KeypadLocation\": \"Inngang\",\n \"SPPage\": \"Ekstra informasjon som kan være relevant.\"\n }\n}",
"options": {
"raw": {
"language": "json"
@@ -207,9 +205,9 @@
"response": []
},
{
- "name": "Create/Update zone",
+ "name": "Create zone",
"request": {
- "method": "PUT",
+ "method": "POST",
"header": [
{
"key": "Authorization",
@@ -219,7 +217,7 @@
],
"body": {
"mode": "raw",
- "raw": "{\n \"client_id\": 123456789,\n \"zone_id\": 3,\n \"Zone_area\": \"RD Stue\",\n \"ModuleNo\": 0\n}\n",
+ "raw": "{\n \"client_id\": 123456789,\n \"zone\": {\n \"ZoneNo\": 62,\n \"ZoneText\": \"Tastatur\"\n }\n}\n",
"options": {
"raw": {
"language": "json"
@@ -227,12 +225,44 @@
}
},
"url": {
- "raw": "{{baseUrl}}/clients/zones",
+ "raw": "{{baseUrl}}/zones",
+ "host": [
+ "{{baseUrl}}"
+ ],
+ "path": [
+ "zones"
+ ]
+ },
+ "description": "Creates a zone"
+ },
+ "response": []
+ },
+ {
+ "name": "Update zone",
+ "request": {
+ "method": "PATCH",
+ "header": [
+ {
+ "key": "Authorization",
+ "value": "Bearer {{apiKey}}",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"client_id\": 123456789,\n \"zone\": {\n \"ZoneNo\": 62,\n \"ZoneText\": \"Tastaturrrrrrr\"\n }\n}",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{baseUrl}}/zones",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
"zones"
]
},
@@ -253,7 +283,7 @@
],
"body": {
"mode": "raw",
- "raw": "{\n \"client_id\": {{clientId}},\n \"user\": {\n \"User_Name\": \"Anders Knutsen\",\n \"MobileNo\": \"+4740642018\",\n \"MobileNoOrder\": 1,\n \"Email\": \"anders@ostsik.no\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"\",\n \"CallOrder\": 1\n }\n}",
+ "raw": "{\n \"client_id\": 5555,\n \"user\": {\n \"User_Name\": \"Anders Knutsen\",\n \"MobileNo\": \"+4740642018\",\n \"MobileNoOrder\": 1,\n \"Email\": \"anders@ostsik.no\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"\",\n \"CallOrder\": 1\n }\n}\n",
"options": {
"raw": {
"language": "json"
@@ -261,12 +291,11 @@
}
},
"url": {
- "raw": "{{baseUrl}}/clients/users",
+ "raw": "{{baseUrl}}/users",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
"users"
]
},
@@ -277,7 +306,7 @@
{
"name": "Update user",
"request": {
- "method": "PUT",
+ "method": "PATCH",
"header": [
{
"key": "Authorization",
@@ -287,7 +316,7 @@
],
"body": {
"mode": "raw",
- "raw": "{\n \"client_id\": {{clientId}},\n \"user\": {\n \"User_Name\": \"Changed Name\",\n \"MobileNo\": \"+4798765432\",\n \"MobileNoOrder\": 1,\n \"Email\": \"new@email.com\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"New instructions\",\n \"CallOrder\": 0\n }\n}",
+ "raw": "{\n \"client_id\": {{clientId}},\n \"user\": {\n \"User_Name\": \"Anders Knutsen\",\n \"MobileNo\": \"40642018\",\n \"MobileNoOrder\": 1,\n \"Email\": \"anders@ostsik.no\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"Do this 3\",\n \"CallOrder\": 1\n }\n}",
"options": {
"raw": {
"language": "json"
@@ -295,12 +324,11 @@
}
},
"url": {
- "raw": "{{baseUrl}}/clients/users",
+ "raw": "{{baseUrl}}/users",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
"users"
]
},
@@ -326,7 +354,7 @@
],
"body": {
"mode": "raw",
- "raw": "{\n \"client_id\": {{clientId}},\n \"zone_id\": 1\n}",
+ "raw": "{\n \"client_id\": {{clientId}},\n \"zone_no\": 62\n}",
"options": {
"raw": {
"language": "json"
@@ -334,12 +362,11 @@
}
},
"url": {
- "raw": "{{baseUrl}}/clients/zones",
+ "raw": "{{baseUrl}}/zones",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
"zones"
]
},
@@ -368,12 +395,11 @@
}
},
"url": {
- "raw": "{{baseUrl}}/clients/users",
+ "raw": "{{baseUrl}}/users",
"host": [
"{{baseUrl}}"
],
"path": [
- "clients",
"users"
]
},
@@ -394,7 +420,7 @@
],
"body": {
"mode": "raw",
- "raw": "{\n \"client_id\": {{clientId}}\n}",
+ "raw": "{\n \"client_id\": 1234\n}",
"options": {
"raw": {
"language": "json"
diff --git a/keys.json b/keys.json
index 9fa3c0a..d19463f 100644
--- a/keys.json
+++ b/keys.json
@@ -8,7 +8,7 @@
"port": "BASE11",
"installer_name": "Østfold Sikkerhetsservice AS",
- "installer_email": "service@ostsik.no",
+ "installer_email": "post@ostsik.no",
"use_glob_callouts": true,
"show_on_callouts": false,
@@ -19,9 +19,9 @@
"glob_callouts2": "TLF02BASE01",
"alt_lookup": true,
- "alt_alarm_no": "CID4BASE01",
+ "alt_alarm_no": "SIA1000101",
"convert_type": "None",
- "siginterpret": "None",
+ "siginterpret": "SIADecimal",
"client_groupings": [
{
@@ -73,4 +73,4 @@
}
]
}
-]
\ No newline at end of file
+]
diff --git a/main.py b/main.py
index bb0bb8a..404dba3 100644
--- a/main.py
+++ b/main.py
@@ -1,11 +1,19 @@
from __future__ import annotations
-import json, os, threading, re, logging
+import json
+import os
+import threading
+import re
+import logging
from datetime import datetime, timezone
from typing import Dict, Optional, List
from time import time
+from pathlib import Path
from fastapi import FastAPI, HTTPException, Depends, Header, Request, Response, status
from pydantic import BaseModel, Field, EmailStr, field_validator
+import pyodbc
+
+# ----------------- Helpers for filenames -----------------
SAFE_NAME = re.compile(r"[^A-Za-z0-9._-]")
@@ -15,12 +23,35 @@ def _safe_folder(name: str) -> str:
return cleaned or "unknown"
-# ------------ Config ------------
-DATA_FILE = os.getenv("DATA_FILE", "data.json")
+# ----------------- Basic JSON helpers (still used for keys.json) -----------------
+
+
+def _load_json(path: str, default):
+ try:
+ with open(path, "r", encoding="utf-8") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return default
+ except Exception as e:
+ logging.error("Failed to load %s: %s", path, e)
+ return default
+
+
+# ----------------- Config -----------------
+
KEY_FILE = os.getenv("KEY_FILE", "keys.json")
XML_DIR = os.getenv("XML_DIR", "out/clients")
-# ------------ Models used in keys.json ------------
+# MSSQL / Patriot DB config
+SQL_SERVER = os.getenv("SQL_SERVER", "10.181.149.83") # or PATRIOTDB2\\SQLEXPRESS
+SQL_PORT = os.getenv("SQL_PORT", "1433") # empty string if using named instance only
+SQL_DATABASE = os.getenv("SQL_DATABASE", "Patriot")
+SQL_USERNAME = os.getenv("SQL_USERNAME", "sa")
+SQL_PASSWORD = os.getenv("SQL_PASSWORD", "sa")
+ODBC_DRIVER = os.getenv("ODBC_DRIVER", "ODBC Driver 18 for SQL Server")
+
+
+# ----------------- Models used in keys.json -----------------
class ClientGroupingConfig(BaseModel):
@@ -29,7 +60,7 @@ class ClientGroupingConfig(BaseModel):
grouping_allow_multiple: bool = True
-# ------------ Auth (hot-reloaded key store) ------------
+# ----------------- Auth (hot-reloaded key store) -----------------
class KeyRecord(BaseModel):
@@ -38,75 +69,80 @@ class KeyRecord(BaseModel):
enabled: bool = True
valid_to: str # ISO-8601
- # Used for __id in XML: <__id>{client_id}{port}
+ # Used for __id in XML and DB Client_No: {client_id}{port}
port: str = ""
# Per-key forced XML fields
- installer_name: str = "" # , user 199 name, grouping #2
- installer_email: str = "" # user 199 email
-
- use_glob_callouts: bool = True #
+ installer_name: str = "" # ,
+ installer_email: str = ""
+ use_glob_callouts: bool = False #
show_on_callouts: bool = False #
glob_callouts: str = "" #
- use_glob_callouts2: bool = True #
- show_on_callouts2: bool = False #
- glob_callouts2: str = "" #
+ use_glob_callouts2: bool = False
+ show_on_callouts2: bool = False
+ glob_callouts2: str = ""
- alt_lookup: bool = True #
- alt_alarm_no: str = "CID4BASE01" #
+ alt_lookup: bool = False #
+ alt_alarm_no: str = "" #
convert_type: str = "None" #
siginterpret: str = "SIADecimal" #
- # NEW: per-key client groupings (rendered to )
client_groupings: List[ClientGroupingConfig] = Field(default_factory=list)
class KeyStore:
+ """
+ Loads keys.json on-demand and supports hot reloading.
+ """
+
def __init__(self, path: str):
self.path = path
+ self._lock = threading.Lock()
self._mtime = 0.0
self._keys: Dict[str, KeyRecord] = {}
- self._lock = threading.Lock()
+ self._reload()
+
+ def _reload(self):
+ mtime = os.path.getmtime(self.path) if os.path.exists(self.path) else 0
+ if mtime <= self._mtime:
+ return
+ logging.info("Reloading key store from %s", self.path)
+
+ raw = _load_json(self.path, {"keys": []})
+
+ # Support both formats:
+ # 1) { "keys": [ {...}, {...} ] }
+ # 2) [ {...}, {...} ]
+ if isinstance(raw, list):
+ keys_raw = raw
+ elif isinstance(raw, dict):
+ keys_raw = raw.get("keys", [])
+ else:
+ logging.error("Unexpected format in %s (must be list or dict)", self.path)
+ keys_raw = []
+
+ keys: Dict[str, KeyRecord] = {}
+ for rec in keys_raw:
+ try:
+ k = KeyRecord(**rec)
+ keys[k.key] = k
+ except Exception as e:
+ logging.error("Invalid key record in %s: %s", self.path, e)
+
+ self._keys = keys
+ self._mtime = mtime
def _parse_time(self, s: str) -> datetime:
- try:
- if s.endswith("Z"):
- s = s[:-1] + "+00:00"
- dt = datetime.fromisoformat(s)
- if dt.tzinfo is None:
- dt = dt.replace(tzinfo=timezone.utc)
- return dt.astimezone(timezone.utc)
- except Exception:
- return datetime.min.replace(tzinfo=timezone.utc)
+ return datetime.fromisoformat(s.replace("Z", "+00:00"))
- def _load_if_changed(self):
- try:
- mtime = os.path.getmtime(self.path)
- except FileNotFoundError:
- mtime = 0.0
- if mtime != self._mtime:
- with self._lock:
- try:
- mtime2 = os.path.getmtime(self.path)
- except FileNotFoundError:
- self._keys = {}
- self._mtime = 0.0
- return
- if mtime2 == self._mtime:
- return
- with open(self.path, "r", encoding="utf-8") as f:
- raw = json.load(f)
- new_map: Dict[str, KeyRecord] = {}
- for item in raw:
- rec = KeyRecord(**item)
- new_map[rec.key] = rec
- self._keys = new_map
- self._mtime = mtime2
+ def get(self, token: str) -> Optional[KeyRecord]:
+ with self._lock:
+ self._reload()
+ return self._keys.get(token)
- def validate_bearer(self, bearer: str) -> KeyRecord:
- self._load_if_changed()
- rec = self._keys.get(bearer)
+ def must_get(self, token: str) -> KeyRecord:
+ rec = self.get(token)
if not rec:
raise HTTPException(status_code=401, detail="Invalid token")
if not rec.enabled:
@@ -121,14 +157,144 @@ class KeyStore:
_key_store = KeyStore(KEY_FILE)
-def require_api_key(authorization: Optional[str] = Header(None)) -> KeyRecord:
- if not authorization or not authorization.startswith("Bearer "):
- raise HTTPException(status_code=401, detail="Missing bearer token")
- token = authorization.split(" ", 1)[1].strip()
- return _key_store.validate_bearer(token)
+# ----------------- MSSQL helpers -----------------
-# ------------ Models (client payloads) ------------
+def _client_combined_id(client_id: int, keyrec: KeyRecord) -> str:
+ """
+ Combined ID used in DB Memalarm.Client_No and XML __id: f"{client_id}{port}".
+ If port is empty, just client_id as string.
+ """
+ return f"{client_id}{keyrec.port}" if keyrec.port else str(client_id)
+
+
+def _get_db_connection():
+ """
+ Open a new MSSQL connection to Patriot. Autocommit is enabled.
+ """
+ if SQL_PORT:
+ server_part = f"{SQL_SERVER},{SQL_PORT}"
+ else:
+ server_part = SQL_SERVER
+
+ conn_str = (
+ f"DRIVER={{{{ {ODBC_DRIVER} }}}};"
+ f"SERVER={server_part};"
+ f"DATABASE={SQL_DATABASE};"
+ f"UID={SQL_USERNAME};"
+ f"PWD={SQL_PASSWORD};"
+ "Encrypt=no;"
+ "TrustServerCertificate=yes;"
+ )
+ # Note: curly braces around driver are doubled above to survive f-string formatting.
+ conn_str = conn_str.replace("{{ ", "{").replace(" }}", "}")
+ return pyodbc.connect(conn_str, autocommit=True)
+
+
+def _split_loc2(loc2: str) -> tuple[str, str]:
+ loc2 = (loc2 or "").strip()
+ if not loc2:
+ return "", ""
+ parts = loc2.split(" ", 1)
+ if len(parts) == 1:
+ return parts[0], ""
+ return parts[0], parts[1]
+
+
+# ------------------ TEMP STORAGE (pending_import state) -------------
+
+CLIENT_STATE_FILE = Path(os.getenv("CLIENT_STATE_FILE", "/opt/patriot_api/client_state.json"))
+
+
+def _load_client_state() -> dict:
+ if CLIENT_STATE_FILE.is_file():
+ try:
+ return json.loads(CLIENT_STATE_FILE.read_text(encoding="utf-8"))
+ except Exception as e:
+ logging.error("Failed to read client state file %s: %s", CLIENT_STATE_FILE, e)
+ return {}
+ return {}
+
+
+def _save_client_state(state: dict):
+ try:
+ tmp = CLIENT_STATE_FILE.with_suffix(".tmp")
+ tmp.write_text(json.dumps(state, indent=2, ensure_ascii=False), encoding="utf-8")
+ os.replace(tmp, CLIENT_STATE_FILE)
+ except Exception as e:
+ logging.error("Failed to write client state file %s: %s", CLIENT_STATE_FILE, e)
+
+
+def _clear_pending_import_by_combined_id(combined_id: str):
+ """
+ Remove a client from pending_import state if present.
+ Called automatically whenever we detect the client in DB.
+ """
+ state = _load_client_state()
+ if combined_id in state:
+ try:
+ del state[combined_id]
+ _save_client_state(state)
+ logging.info(
+ "Cleared pending_import state for Client_No=%s (detected in DB).",
+ combined_id,
+ )
+ except Exception as e:
+ logging.error(
+ "Failed to clear pending_import state for Client_No=%s: %s",
+ combined_id,
+ e,
+ )
+
+
+def client_is_pending_import(client_id: int, keyrec: KeyRecord) -> bool:
+ """
+ Return True if this client_id+port (__id / Client_No) is marked as pending_import.
+ Used to lock non-DB clients while their XML has been sent but not imported yet.
+ """
+ combined_id = _client_combined_id(client_id, keyrec)
+ state = _load_client_state()
+ entry = state.get(combined_id)
+ return isinstance(entry, dict) and entry.get("status") == "pending_import"
+
+
+def db_client_exists(client_id: int, keyrec: KeyRecord) -> bool:
+ """
+ Check if a client exists in dbo.Memalarm.
+ Returns True if row exists, False otherwise (or on error).
+
+ If the client exists, we also clear any 'pending_import' state for this
+ client in the shared state file.
+ """
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ except Exception as e:
+ logging.error("DB connect failed in db_client_exists for %s: %s", combined_id, e)
+ return False
+
+ try:
+ cur = conn.cursor()
+ cur.execute("SELECT 1 FROM dbo.Memalarm WHERE Client_No = ?", (combined_id,))
+ row = cur.fetchone()
+ exists = row is not None
+
+ # If the client is now in DB, clear any pending_import state
+ if exists:
+ _clear_pending_import_by_combined_id(combined_id)
+
+ return exists
+ except Exception as e:
+ logging.error("DB query failed in db_client_exists for Client_No=%s: %s", combined_id, e)
+ return False
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+
+# ----------------- API models -----------------
class ClientInfo(BaseModel):
@@ -147,7 +313,7 @@ class ClientInfo(BaseModel):
SpecRequest: Optional[str] = Field(None, max_length=1000)
# No-signal monitoring
- NoSigsMon: str = "ActiveAny"
+ NoSigsMon: str = Field("ActiveAny", max_length=50)
SinceDays: int = Field(1, ge=0)
SinceHrs: int = Field(0, ge=0)
SinceMins: int = Field(30, ge=0)
@@ -164,8 +330,13 @@ class ClientInfo(BaseModel):
PanelName: str = "Panel Type"
PanelSite: str = "Panel location"
KeypadLocation: Optional[str] = Field(None, max_length=200)
+
SPPage: Optional[str] = Field(None, max_length=2000)
+ NonMonitored: bool = False
+ DecommissionDate: Optional[str] = None
+ DecommissionReason: Optional[str] = None
+
class User(BaseModel):
User_Name: str = Field(..., min_length=1, max_length=120)
@@ -180,8 +351,13 @@ class User(BaseModel):
@field_validator("MobileNo")
def phone_has_digit(cls, v: str):
if not any(ch.isdigit() for ch in v):
- raise ValueError("MobileNo must contain digits")
- return v.strip()
+ raise ValueError("MobileNo must contain at least one digit")
+ return v
+
+
+class Zone(BaseModel):
+ ZoneNo: int = Field(..., ge=1, le=999)
+ ZoneText: str = Field(..., min_length=1, max_length=100)
class ClientCreate(BaseModel):
@@ -189,6 +365,10 @@ class ClientCreate(BaseModel):
info: ClientInfo
+class ClientID(BaseModel):
+ client_id: int = Field(..., ge=1)
+
+
class ClientInfoPatch(BaseModel):
# all optional, for PATCH
Name: Optional[str] = Field(None, min_length=1, max_length=200)
@@ -203,7 +383,7 @@ class ClientInfoPatch(BaseModel):
OKPassword: Optional[str] = Field(None, max_length=100)
SpecRequest: Optional[str] = Field(None, max_length=1000)
- NoSigsMon: Optional[str] = None
+ NoSigsMon: Optional[str] = Field(None, max_length=50)
SinceDays: Optional[int] = Field(None, ge=0)
SinceHrs: Optional[int] = Field(None, ge=0)
SinceMins: Optional[int] = Field(None, ge=0)
@@ -213,10 +393,12 @@ class ClientInfoPatch(BaseModel):
ResetNosigsHrs: Optional[int] = Field(None, ge=0)
ResetNosigsMins: Optional[int] = Field(None, ge=0)
- InstallDateTime: Optional[str] = None
+ InstallDateTime: Optional[str] = Field(
+ None, description="Installation date/time, e.g. '2023-02-20'"
+ )
- PanelName: Optional[str] = Field(None, min_length=1, max_length=200)
- PanelSite: Optional[str] = Field(None, min_length=1, max_length=200)
+ PanelName: Optional[str] = None
+ PanelSite: Optional[str] = None
KeypadLocation: Optional[str] = Field(None, max_length=200)
SPPage: Optional[str] = Field(None, max_length=2000)
@@ -226,69 +408,591 @@ class ClientPatch(BaseModel):
info: Optional[ClientInfoPatch] = None
-class ClientID(BaseModel):
+class ZoneCreate(BaseModel):
client_id: int = Field(..., ge=1)
+ zone: Zone
-class ZoneUpdate(BaseModel):
+class ZonePatch(BaseModel):
client_id: int = Field(..., ge=1)
- zone_id: int = Field(..., ge=1, le=999)
- Zone_area: str = Field(..., min_length=1, max_length=200)
- ModuleNo: int = Field(0, ge=0)
+ zone: Zone
class ZoneDelete(BaseModel):
client_id: int = Field(..., ge=1)
- zone_id: int = Field(..., ge=1, le=999)
+ zone_no: int = Field(..., ge=1, le=999)
-class UserWithClient(BaseModel):
+class UserCreate(BaseModel):
client_id: int = Field(..., ge=1)
user: User
-class UserKey(BaseModel):
+class UserPatch(BaseModel):
+ client_id: int = Field(..., ge=1)
+ user: User
+
+
+class UserDelete(BaseModel):
client_id: int = Field(..., ge=1)
user_no: int = Field(..., ge=1, le=999)
-# ------------ In-memory store + persistence ------------
-# Store is namespaced per API key: "key_name:client_id"
+# ----------------- Auth dependency -----------------
+def require_api_key(authorization: str = Header(..., alias="Authorization")) -> KeyRecord:
+ """
+ Expect header:
+ Authorization: Bearer <token>
+ """
+ parts = authorization.split()
+ if len(parts) != 2 or parts[0].lower() != "bearer":
+ raise HTTPException(status_code=401, detail="Invalid authorization header")
+ token = parts[1]
+ return _key_store.must_get(token)
+
+
+# ----------------- In-memory data store for non-DB clients -----------------
+
+# Only used for clients that are NOT yet in Patriot DB.
_store_lock = threading.Lock()
-_store: Dict = {"clients": {}}
-
-
-def _load_store():
- global _store
- if os.path.exists(DATA_FILE):
- with open(DATA_FILE, "r", encoding="utf-8") as f:
- _store = json.load(f)
- else:
- _flush_store()
-
-
-def _flush_store():
- tmp = DATA_FILE + ".tmp"
- with open(tmp, "w", encoding="utf-8") as f:
- json.dump(_store, f, ensure_ascii=False, indent=2)
- os.replace(tmp, DATA_FILE)
+_store = {
+ "clients": {} # key -> {"info": {...}, "zones": {}, "users": {}}
+}
def _store_key(client_id: int, keyrec: KeyRecord) -> str:
+ # per-key namespace, so same client_id can be reused between keys
return f"{keyrec.key_name}:{client_id}"
def _require_client(client_id: int, keyrec: KeyRecord) -> Dict:
+ """
+ Return client from in-memory 'pending' store (for clients not in DB).
+ Does NOT touch DB. DB-aware logic happens at the endpoint level.
+ """
skey = _store_key(client_id, keyrec)
cli = _store["clients"].get(skey)
if not cli:
- raise HTTPException(status_code=404, detail="Client not found")
+ raise HTTPException(
+ status_code=404,
+ detail="Client not found (may be a pending client, not in DB. Please try again later.)",
+ )
return cli
-# ------------ XML writer ------------
+# ----------------- DB-level client helpers -----------------
+
+
+def db_client_is_decommissioned(client_id: int, keyrec: KeyRecord) -> bool:
+ """
+ Return True if the client exists in DB and is decommissioned / non-monitored.
+ We consider a client decommissioned if:
+ - NonMonitored = 1 OR
+ - DecommissionDate is not NULL
+ If the row is missing or DB error -> return False (treat as not decommissioned).
+ """
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ except Exception as e:
+ logging.error("DB connect failed in db_client_is_decommissioned for %s: %s", combined_id, e)
+ return False
+
+ try:
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT NonMonitored, DecommissionDate
+ FROM dbo.Memalarm
+ WHERE Client_No = ?
+ """,
+ (combined_id,),
+ )
+ row = cur.fetchone()
+ if not row:
+ return False
+
+ non_monitored = row.NonMonitored if hasattr(row, "NonMonitored") else 0
+ decomm_date = row.DecommissionDate if hasattr(row, "DecommissionDate") else None
+
+ if non_monitored == 1:
+ return True
+ if decomm_date is not None:
+ return True
+ return False
+
+ except Exception as e:
+ logging.error(
+ "DB query failed in db_client_is_decommissioned for Client_No=%s: %s",
+ combined_id,
+ e,
+ )
+ return False
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+
+def _update_client_in_db_from_data(client_id: int, keyrec: KeyRecord, data: dict) -> bool:
+ """
+ Try to update an existing client in dbo.Memalarm from API data.
+ Behaviour:
+ * If Client_No does not exist in DB -> return False (caller can create XML instead).
+ * If exists:
+ - For each field present in 'data':
+ - If value is None or empty string -> we DO NOT touch the DB column.
+ - If value is non-empty -> we update the mapped DB column.
+ - Return True (even if no columns actually changed).
+ * On connection / SQL error -> log and return False (so caller can fall back to XML).
+ """
+ combined_id = _client_combined_id(client_id, keyrec)
+
+ try:
+ conn = _get_db_connection()
+ except Exception as e:
+ logging.error("DB connect failed for client %s: %s", combined_id, e)
+ return False
+
+ try:
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT
+ Client_No,
+ Alias,
+ Name,
+ Location,
+ loc2,
+ Bus_Phone,
+ Email,
+ OKPassword,
+ SpecRequest,
+ NOSIGS_MON,
+ Since_Days,
+ Since_Hrs,
+ Since_Mins,
+ Reset_Nosigs_Days,
+ Reset_Nosigs_Hrs,
+ Reset_Nosigs_Mins,
+ Reset_Nosigs_Ignored,
+ InstallDateTime,
+ Panel_Site,
+ KeypadLocation,
+ SP_Page,
+ PanelTypeId
+ FROM dbo.Memalarm
+ WHERE Client_No = ?
+ """,
+ (combined_id,),
+ )
+ row = cur.fetchone()
+ if not row:
+ logging.info(
+ 'DB: Client_No "%s" not found in Memalarm (from client_id=%s); will use XML instead.',
+ combined_id,
+ client_id,
+ )
+ return False
+
+ updates: dict[str, object] = {}
+
+ def set_text(field_name: str, column: str):
+ if field_name not in data:
+ return
+ val = data[field_name]
+ if val is None:
+ return
+ sval = str(val).strip()
+ if not sval:
+ return
+ updates[column] = sval
+
+ def set_int(field_name: str, column: str):
+ if field_name not in data:
+ return
+ val = data[field_name]
+ if val is None:
+ return
+ try:
+ ival = int(val)
+ except (ValueError, TypeError):
+ logging.warning("DB: field %s has non-int value %r, skipping", field_name, val)
+ return
+ updates[column] = ival
+
+ def set_bit(field_name: str, column: str):
+ if field_name not in data:
+ return
+ val = data[field_name]
+ if val is None:
+ return
+ b = 1 if bool(val) else 0
+ updates[column] = b
+
+ # Simple text fields
+ set_text("Name", "Name")
+ set_text("Alias", "Alias")
+ set_text("Location", "Location")
+ set_text("BusPhone", "Bus_Phone")
+ set_text("Email", "Email")
+ set_text("OKPassword", "OKPassword")
+ set_text("SpecRequest", "SpecRequest")
+
+ # NoSigsMon -> NOSIGS_MON as "1" or "0" (string)
+ if "NoSigsMon" in data:
+ raw = data["NoSigsMon"]
+ if raw is not None:
+ s = str(raw).strip().lower()
+ # Treat these as "on"
+ if s in ("1", "true", "yes", "y", "activeany", "active", "on"):
+ updates["NOSIGS_MON"] = "1"
+ else:
+ updates["NOSIGS_MON"] = "0"
+
+ # PanelName -> PanelTypeId (Ajax=58, Future=59)
+ if "PanelName" in data:
+ pname_raw = data["PanelName"]
+ if pname_raw is not None:
+ pname = str(pname_raw).strip().lower()
+ panel_map = {
+ "ajax": 58,
+ "futurehome": 59,
+ }
+ if pname in panel_map:
+ updates["PanelTypeId"] = panel_map[pname]
+ else:
+ logging.warning(
+ "Unknown PanelName %r for client_id=%s; not updating PanelTypeId",
+ pname_raw,
+ client_id,
+ )
+
+ set_text("PanelSite", "Panel_Site")
+ set_text("KeypadLocation", "KeypadLocation")
+ set_text("SPPage", "SP_Page")
+
+ # loc2 from area_code + area (merge with existing DB loc2 if only one is sent)
+ if "area_code" in data or "area" in data:
+ old_loc2 = getattr(row, "loc2", "") # may be None
+ old_ac, old_area = _split_loc2(old_loc2 or "")
+ new_ac = data.get("area_code", None)
+ new_area = data.get("area", None)
+
+ final_ac = str(new_ac).strip() if new_ac is not None else old_ac
+ final_area = str(new_area).strip() if new_area is not None else old_area
+
+ loc2_val = f"{final_ac} {final_area}".strip()
+ if loc2_val:
+ updates["loc2"] = loc2_val
+
+ # Integers
+ set_int("SinceDays", "Since_Days")
+ set_int("SinceHrs", "Since_Hrs")
+ set_int("SinceMins", "Since_Mins")
+ set_int("ResetNosigsDays", "Reset_Nosigs_Days")
+ set_int("ResetNosigsHrs", "Reset_Nosigs_Hrs")
+ set_int("ResetNosigsMins", "Reset_Nosigs_Mins")
+
+ # Boolean bit
+ set_bit("ResetNosigsIgnored", "Reset_Nosigs_Ignored")
+
+ # InstallDateTime (string, e.g. "2023-02-20")
+ if "InstallDateTime" in data:
+ val = data["InstallDateTime"]
+ if val:
+ updates["InstallDateTime"] = str(val).strip()
+
+ if not updates:
+ logging.info(
+ "DB: client %s (Client_No=%s) exists but no DB columns to update from API.",
+ client_id,
+ combined_id,
+ )
+ return True # exists, nothing to change
+
+ set_clause = ", ".join(f"{col} = ?" for col in updates.keys())
+ params = list(updates.values()) + [combined_id]
+ sql = f"UPDATE dbo.Memalarm SET {set_clause} WHERE Client_No = ?"
+ cur.execute(sql, params)
+ logging.info(
+ "DB: updated client %s (Client_No=%s) columns: %s",
+ client_id,
+ combined_id,
+ ", ".join(updates.keys()),
+ )
+ return True
+
+ except Exception as e:
+ logging.error(
+ "DB update failed for client_id=%s (Client_No=%s): %s",
+ client_id,
+ combined_id,
+ e,
+ )
+ return False
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+
+def update_client_in_db_from_full(client_id: int, keyrec: KeyRecord, info: ClientInfo) -> bool:
+ """
+ Wrapper for PUT /clients: full payload from ClientInfo, but still only
+ non-empty fields are written to DB.
+ Returns True if the DB client exists (even if no columns changed).
+ """
+ data = info.model_dump()
+ return _update_client_in_db_from_data(client_id, keyrec, data)
+
+
+def update_client_in_db_from_patch(
+ client_id: int,
+ keyrec: KeyRecord,
+ info_patch: ClientInfoPatch,
+) -> bool:
+ """
+ Wrapper for PATCH /clients: only fields provided in the patch payload
+ are considered for DB update.
+ Returns True if the DB client exists (even if no columns changed).
+ """
+ data = info_patch.model_dump(exclude_unset=True)
+ if not data:
+ return False
+ return _update_client_in_db_from_data(client_id, keyrec, data)
+
+
+def _get_client_info_from_db(client_id: int, keyrec: KeyRecord) -> Optional[ClientInfo]:
+ """
+ Try to build a ClientInfo object from dbo.Memalarm for this client.
+ Returns:
+ - ClientInfo if DB row exists
+ - None if no such Client_No or DB error
+ """
+ combined_id = _client_combined_id(client_id, keyrec)
+
+ try:
+ conn = _get_db_connection()
+ except Exception as e:
+ logging.error("DB connect failed in _get_client_info_from_db for %s: %s", combined_id, e)
+ return None
+
+ try:
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT
+ Client_No,
+ Alias,
+ Name,
+ Location,
+ loc2,
+ Bus_Phone,
+ Email,
+ OKPassword,
+ SpecRequest,
+ NOSIGS_MON,
+ Since_Days,
+ Since_Hrs,
+ Since_Mins,
+ Reset_Nosigs_Days,
+ Reset_Nosigs_Hrs,
+ Reset_Nosigs_Mins,
+ Reset_Nosigs_Ignored,
+ InstallDateTime,
+ Panel_Site,
+ KeypadLocation,
+ SP_Page,
+ PanelTypeId,
+ NonMonitored,
+ DecommissionDate,
+ DecommissionReason
+ FROM dbo.Memalarm
+ WHERE Client_No = ?
+ """,
+ (combined_id,),
+ )
+ row = cur.fetchone()
+ if not row:
+ logging.info("DB: _get_client_info_from_db: Client_No %r not found.", combined_id)
+ return None
+
+ # helper to safely make strings
+ def s(val) -> str:
+ if val is None:
+ return ""
+ return str(val)
+
+ # Basic fields (safe for any type)
+ name = s(getattr(row, "Name", None)).strip() or "Unnamed client"
+ alias_val = getattr(row, "Alias", None)
+ alias = s(alias_val).strip() or None
+
+ location = s(getattr(row, "Location", None)).strip() or "Unknown location"
+
+ loc2_val = getattr(row, "loc2", None)
+ area_code, area = _split_loc2(s(loc2_val))
+
+ bus_phone_val = getattr(row, "Bus_Phone", None)
+ bus_phone = s(bus_phone_val).strip() or None
+
+ email_db = s(getattr(row, "Email", None)).strip()
+ email: Optional[str] = email_db if email_db else None
+
+ okpassword_val = getattr(row, "OKPassword", None)
+ okpassword = s(okpassword_val).strip() or None
+
+ specrequest_val = getattr(row, "SpecRequest", None)
+ specrequest = s(specrequest_val).strip() or None
+
+ # NOSIGS_MON mapping: treat as string "1" / "0" regardless of underlying type
+ nosigs_val = getattr(row, "NOSIGS_MON", None)
+ raw_nosigs = s(nosigs_val).strip()
+ if raw_nosigs == "1":
+ nosigsmon = "ActiveAny"
+ else:
+ nosigsmon = "Disabled"
+
+ # Integers with defaults
+ since_days = getattr(row, "Since_Days", None)
+ since_days = int(since_days) if since_days is not None else 1
+
+ since_hrs = getattr(row, "Since_Hrs", None)
+ since_hrs = int(since_hrs) if since_hrs is not None else 0
+
+ since_mins = getattr(row, "Since_Mins", None)
+ since_mins = int(since_mins) if since_mins is not None else 30
+
+ reset_days = getattr(row, "Reset_Nosigs_Days", None)
+ reset_days = int(reset_days) if reset_days is not None else 7
+
+ reset_hrs = getattr(row, "Reset_Nosigs_Hrs", None)
+ reset_hrs = int(reset_hrs) if reset_hrs is not None else 0
+
+ reset_mins = getattr(row, "Reset_Nosigs_Mins", None)
+ reset_mins = int(reset_mins) if reset_mins is not None else 0
+
+ reset_ignored_val = getattr(row, "Reset_Nosigs_Ignored", None)
+ if reset_ignored_val is None:
+ reset_ignored = True
+ else:
+ reset_ignored = bool(reset_ignored_val)
+
+ # InstallDateTime
+ install_raw = getattr(row, "InstallDateTime", None)
+ if install_raw is None:
+ install_str: Optional[str] = None
+ else:
+ # try datetime/date -> iso string, else just str()
+ try:
+ install_str = install_raw.isoformat()
+ except AttributeError:
+ install_str = str(install_raw)
+
+ panel_site = s(getattr(row, "Panel_Site", None)).strip() or "Panel location"
+ keypad_location_val = getattr(row, "KeypadLocation", None)
+ keypad_location = s(keypad_location_val).strip() or None
+
+ sp_page_val = getattr(row, "SP_Page", None)
+ sp_page = s(sp_page_val).strip() or None
+
+ # PanelTypeId -> PanelName
+ ptype = getattr(row, "PanelTypeId", None)
+ try:
+ ptype_int = int(ptype) if ptype is not None else None
+ except (ValueError, TypeError):
+ ptype_int = None
+
+ if ptype_int == 58:
+ panel_name = "Ajax"
+ elif ptype_int == 59:
+ panel_name = "Futurehome"
+ else:
+ panel_name = "Panel Type"
+
+ # Decommission info
+ non_monitored_val = getattr(row, "NonMonitored", 0)
+ try:
+ non_monitored = bool(int(non_monitored_val))
+ except (ValueError, TypeError):
+ non_monitored = False
+
+ raw_decomm_date = getattr(row, "DecommissionDate", None)
+ if raw_decomm_date is None:
+ decomm_date_str: Optional[str] = None
+ else:
+ try:
+ decomm_date_str = raw_decomm_date.isoformat()
+ except AttributeError:
+ decomm_date_str = str(raw_decomm_date)
+
+ raw_reason = getattr(row, "DecommissionReason", None)
+ if raw_reason is None:
+ decomm_reason: Optional[str] = None
+ else:
+ decomm_reason = s(raw_reason).strip() or None
+
+ info_data = {
+ "Name": name,
+ "Alias": alias,
+ "Location": location,
+ "area_code": area_code or "0000",
+ "area": area or "Unknown",
+ "BusPhone": bus_phone,
+ "Email": email,
+ "OKPassword": okpassword,
+ "SpecRequest": specrequest,
+ "NoSigsMon": nosigsmon,
+ "SinceDays": since_days,
+ "SinceHrs": since_hrs,
+ "SinceMins": since_mins,
+ "ResetNosigsIgnored": reset_ignored,
+ "ResetNosigsDays": reset_days,
+ "ResetNosigsHrs": reset_hrs,
+ "ResetNosigsMins": reset_mins,
+ "InstallDateTime": install_str,
+ "PanelName": panel_name,
+ "PanelSite": panel_site,
+ "KeypadLocation": keypad_location,
+ "SPPage": sp_page,
+ "NonMonitored": non_monitored,
+ "DecommissionDate": decomm_date_str,
+ "DecommissionReason": decomm_reason,
+ }
+
+ try:
+ info = ClientInfo(**info_data)
+ except Exception as e:
+ logging.error(
+ "DB row for Client_No=%s could not be mapped to ClientInfo: %s; data=%r",
+ combined_id,
+ e,
+ info_data,
+ )
+ return None
+
+ return info
+
+ except Exception as e:
+ logging.error(
+ "DB query failed in _get_client_info_from_db for Client_No=%s: %s",
+ combined_id,
+ e,
+ )
+ return None
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+
+# ----------------- XML writer (for non-DB clients only) -----------------
def _ensure_dir(path: str):
@@ -306,6 +1010,10 @@ def _bool_text(val: bool) -> str:
def write_client_xml(client_id: int, keyrec: KeyRecord):
+ """
+ Serialize client + zones + users to XML for downstream import.
+ Only used for clients that do NOT yet exist in Patriot DB.
+ """
from xml.etree.ElementTree import Element, SubElement, ElementTree
skey = _store_key(client_id, keyrec)
@@ -318,11 +1026,11 @@ def write_client_xml(client_id: int, keyrec: KeyRecord):
root = Element("Clients")
row = SubElement(root, "Row")
- # __id = client_id + port, e.g. "9998BASE11"
- combined_id = f"{client_id}{keyrec.port}" if keyrec.port else str(client_id)
+ # __id = client_id + port, e.g. "123456789BASE11"
+ combined_id = _client_combined_id(client_id, keyrec)
SubElement(row, "__id").text = combined_id
- # Client fields from API
+ # ---- Client fields ----
SubElement(row, "Name").text = info.Name
SubElement(row, "Alias").text = info.Alias or ""
SubElement(row, "Location").text = info.Location
@@ -350,7 +1058,7 @@ def write_client_xml(client_id: int, keyrec: KeyRecord):
SubElement(row, "KeypadLocation").text = info.KeypadLocation or ""
SubElement(row, "SPPage").text = info.SPPage or ""
- # Per-key forced fields (from keys.json)
+ # Per-key fixed fields
SubElement(row, "Installer").text = keyrec.installer_name or ""
SubElement(row, "UseGlobCallOuts").text = _bool_text(keyrec.use_glob_callouts)
@@ -366,7 +1074,7 @@ def write_client_xml(client_id: int, keyrec: KeyRecord):
SubElement(row, "ConvertType").text = keyrec.convert_type or "None"
SubElement(row, "SIGINTERPRET").text = keyrec.siginterpret or "SIADecimal"
- # ClientGroupings from keys.json
+ # ---- ClientGroupings ----
cgs_el = SubElement(row, "ClientGroupings")
if keyrec.client_groupings:
for cg in keyrec.client_groupings:
@@ -377,118 +1085,105 @@ def write_client_xml(client_id: int, keyrec: KeyRecord):
cg.grouping_allow_multiple
)
else:
- # Fallback default if nothing configured (optional)
cg_el = SubElement(cgs_el, "ClientGrouping")
- SubElement(cg_el, "Description").text = "Alarm24"
- SubElement(cg_el, "GroupingTypeDescription").text = "Alarm24 Tilgang"
- SubElement(cg_el, "GroupingAllowMultiple").text = "True"
+ SubElement(cg_el, "Description").text = ""
+ SubElement(cg_el, "GroupingTypeDescription").text = ""
+ SubElement(cg_el, "GroupingAllowMultiple").text = _bool_text(True)
- # Zones
- zones_el = SubElement(row, "Zones")
- zones = cli.get("zones") or {}
- for zid_str in sorted(zones.keys(), key=lambda x: int(x)):
- z = zones[zid_str] or {}
- z_el = SubElement(zones_el, "Zone")
- SubElement(z_el, "Zone_area").text = z.get("Zone_area", "")
- SubElement(z_el, "Zone_No").text = zid_str
- SubElement(z_el, "ModuleNo").text = str(z.get("ModuleNo", 0))
+ # ---------- ZONES ----------
+ zones_dict = cli.get("zones", {})
+ if zones_dict:
+ zones_el = SubElement(row, "Zones")
+ # zones_dict keys are "ZoneNo" as string, values like {"ZoneNo": 1, "ZoneText": "..."}
+ for key_zone_no, zdata in sorted(zones_dict.items(), key=lambda kv: int(kv[0])):
+ zrow = SubElement(zones_el, "Zone")
+ zone_no = zdata.get("ZoneNo", int(key_zone_no))
+ SubElement(zrow, "Zone_No").text = str(zone_no)
+ SubElement(zrow, "Zone_area").text = zdata.get("ZoneText", "")
- # Users (from API)
- users_el = SubElement(row, "Users")
- users = cli.get("users") or {}
+ # ---------- USERS (including default 199) ----------
+ raw_users = dict(cli.get("users", {})) # key: "UserNo" -> dict
- def _sort_user(u: dict):
- return (u.get("CallOrder", 0), u.get("MobileNoOrder", 0), u.get("UserNo", 0))
+ # Inject default 199 from installer if not already present
+ if "199" not in raw_users:
+ u199 = {
+ "UserNo": 199,
+ "User_Name": keyrec.installer_name or "",
+ "MobileNo": "",
+ "MobileNoOrder": 1,
+ "Email": keyrec.installer_email or "",
+ "Type": "N",
+ "Instructions": "",
+ "CallOrder": 0,
+ }
+ raw_users["199"] = u199
- for _, u in sorted(users.items(), key=lambda kv: _sort_user(kv[1])):
- u_el = SubElement(users_el, "User")
- SubElement(u_el, "User_Name").text = u.get("User_Name", "")
- SubElement(u_el, "MobileNo").text = u.get("MobileNo", "")
- SubElement(u_el, "MobileNoOrder").text = str(u.get("MobileNoOrder", 0))
- SubElement(u_el, "Email").text = u.get("Email", "")
- SubElement(u_el, "Type").text = u.get("Type", "U")
- SubElement(u_el, "UserNo").text = str(u.get("UserNo", ""))
- SubElement(u_el, "Instructions").text = u.get("Instructions", "") or ""
- SubElement(u_el, "CallOrder").text = str(u.get("CallOrder", 0))
+ if raw_users:
+ users_el = SubElement(row, "Users")
+ for key_user_no, udata in sorted(raw_users.items(), key=lambda kv: int(kv[0])):
+ urow = SubElement(users_el, "User")
+ user_no = udata.get("UserNo", int(key_user_no))
+ SubElement(urow, "UserNo").text = str(user_no)
+ SubElement(urow, "User_Name").text = udata.get("User_Name", "")
+ SubElement(urow, "MobileNo").text = udata.get("MobileNo", "")
+ SubElement(urow, "MobileNoOrder").text = str(udata.get("MobileNoOrder", 1))
+ SubElement(urow, "Email").text = udata.get("Email", "")
+ SubElement(urow, "Type").text = udata.get("Type", "U")
+ SubElement(urow, "Instructions").text = udata.get("Instructions", "")
+ SubElement(urow, "CallOrder").text = str(udata.get("CallOrder", 0))
- # Extra forced installer user (type "N", UserNo 199)
- inst_user_el = SubElement(users_el, "User")
- SubElement(inst_user_el, "User_Name").text = keyrec.installer_name or ""
- SubElement(inst_user_el, "Email").text = keyrec.installer_email or ""
- SubElement(inst_user_el, "UserNo").text = "199"
- SubElement(inst_user_el, "CallOrder").text = "0"
- SubElement(inst_user_el, "Type").text = "N"
+ # ---- write file ----
+ xml_path = _xml_path_for(client_id, keyrec)
+ _ensure_dir(os.path.dirname(xml_path))
+ ElementTree(root).write(xml_path, encoding="utf-8", xml_declaration=True)
+ logging.info("Wrote XML for client_id=%s key=%s to %s", client_id, keyrec.key_name, xml_path)
- path = _xml_path_for(client_id, keyrec)
+
+def _purge_cached_client(client_id: int, keyrec: KeyRecord):
+ """
+ Remove stale non-DB client data from memory and delete its XML file.
+ After Patriot imports a client, all operations must go through DB.
+ """
+ skey = _store_key(client_id, keyrec)
+
+ with _store_lock:
+ if skey in _store["clients"]:
+ logging.info("Purging cached XML-only client %s for key %s", client_id, keyrec.key_name)
+ del _store["clients"][skey]
+
+ # Remove XML file if present
try:
- import xml.dom.minidom as minidom
-
- tmp_path = path + ".tmp"
- ElementTree(root).write(tmp_path, encoding="utf-8", xml_declaration=True)
- with open(tmp_path, "rb") as rf:
- dom = minidom.parseString(rf.read())
- pretty = dom.toprettyxml(indent=" ", encoding="utf-8")
- with open(tmp_path, "wb") as wf:
- wf.write(pretty)
- os.replace(tmp_path, path)
- except Exception:
- ElementTree(root).write(path, encoding="utf-8", xml_declaration=True)
+ xml_path = _xml_path_for(client_id, keyrec)
+ if os.path.exists(xml_path):
+ os.remove(xml_path)
+ logging.info("Deleted stale XML file for client %s at %s", client_id, xml_path)
+ except Exception as e:
+ logging.error("Failed to remove stale XML file for client %s: %s", client_id, e)
-# ------------ App & logging middleware ------------
-_load_store()
-app = FastAPI(title="Client Registry API", version="3.3.0")
-
-logging.basicConfig(
- filename="api_requests.log",
- level=logging.INFO,
- format="%(asctime)s - %(levelname)s - %(message)s",
-)
-
-REDACT_KEYS = {"authorization", "password", "secret", "key"}
-MAX_BODY_CHARS = 4000
+# ----------------- FastAPI app -----------------
-def _redact(obj):
- try:
- if isinstance(obj, dict):
- return {k: ("***" if k.lower() in REDACT_KEYS else _redact(v)) for k, v in obj.items()}
- if isinstance(obj, list):
- return [_redact(v) for v in obj]
- except Exception:
- pass
- return obj
+app = FastAPI(title="Patriot client API", version="1.0.0")
@app.middleware("http")
async def log_requests(request: Request, call_next):
start = time()
- method = request.method.upper()
+ body_bytes = await request.body()
body_text = ""
-
- if method in {"POST", "PUT", "PATCH", "DELETE"}:
+ if body_bytes:
try:
- raw = await request.body()
-
- async def receive():
- return {"type": "http.request", "body": raw, "more_body": False}
-
- request._receive = receive
-
- try:
- parsed = json.loads(raw.decode("utf-8") if raw else "{}")
- body_text = json.dumps(_redact(parsed))[:MAX_BODY_CHARS]
- except Exception:
- body_text = (raw.decode("utf-8", "replace") if raw else "")[:MAX_BODY_CHARS]
+ body_text = body_bytes.decode("utf-8")
except Exception as e:
- body_text = f"<body read error: {e}>"
+ body_text = f"<body read error: {e}>"
response = await call_next(request)
duration = time() - start
logging.info(
"%s %s - %s - %.3fs - body=%s",
- method,
+ request.method,
request.url.path,
response.status_code,
duration,
@@ -497,15 +1192,24 @@ async def log_requests(request: Request, call_next):
return response
-# --------- Routes (unchanged logic, but keyrec now has client_groupings) ---------
+# ----------------- Client endpoints -----------------
+
-# UPSERT CLIENT (create if missing, update if exists)
@app.put("/clients", status_code=200)
def upsert_client(
payload: ClientCreate,
keyrec: KeyRecord = Depends(require_api_key),
response: Response = None,
):
+ """
+ Create or replace full client info.
+
+ Behaviour:
+ - If client exists in Patriot DB (Memalarm.Client_No = client_id+port):
+ * Update DB fields from API only (no XML, no caching).
+ - If client does NOT exist in DB (or DB unreachable):
+ * Keep info in in-memory store and write XML so Patriot can import it.
+ """
client_id = payload.client_id
logging.info(
@@ -515,12 +1219,38 @@ def upsert_client(
payload.model_dump(),
)
+ # First: DB client?
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and is read-only",
+ )
+
+ try:
+ update_client_in_db_from_full(client_id, keyrec, payload.info)
+ except Exception as e:
+ logging.error("DB sync error on upsert_client client_id=%s: %s", client_id, e)
+ raise HTTPException(status_code=500, detail="Failed to update client in DB")
+
+ if response is not None:
+ response.status_code = status.HTTP_200_OK
+ return {"client_id": client_id}
+
+ # Non-DB client but state says pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Not in DB & not pending: keep in memory (RAM only) and write XML for Patriot import
info = payload.info.model_dump()
skey = _store_key(client_id, keyrec)
with _store_lock:
existed = skey in _store["clients"]
-
if existed:
cli = _store["clients"][skey]
cli["info"] = info
@@ -531,18 +1261,26 @@ def upsert_client(
if response is not None:
response.status_code = status.HTTP_200_OK if existed else status.HTTP_201_CREATED
- _flush_store()
write_client_xml(client_id, keyrec)
return {"client_id": client_id}
-# PATCH CLIENT INFO (partial replace of info)
@app.patch("/clients", status_code=200)
def patch_client(
payload: ClientPatch,
keyrec: KeyRecord = Depends(require_api_key),
):
+ """
+ Partial update of client info.
+
+ Behaviour:
+ - If client exists in DB:
+ * Only update DB columns for fields present in the patch.
+ * No XML, no in-memory cache.
+ - If client does NOT exist in DB:
+ * Merge into pending in-memory client and rewrite XML.
+ """
client_id = payload.client_id
logging.info(
"AUDIT patch_client key=%s client_id=%s payload=%s",
@@ -551,313 +1289,975 @@ def patch_client(
payload.model_dump(),
)
+ # DB client -> only DB
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and is read-only",
+ )
+
+ if payload.info is not None:
+ try:
+ update_client_in_db_from_patch(client_id, keyrec, payload.info)
+ except Exception as e:
+ logging.error("DB sync error on patch_client client_id=%s: %s", client_id, e)
+ raise HTTPException(status_code=500, detail="Failed to update client in DB")
+ return {"client_id": client_id}
+
+ # Non-DB client but pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Non-DB client -> pending store + XML
+ if payload.info is None:
+ raise HTTPException(status_code=400, detail="No info to patch for non-DB client")
+
with _store_lock:
cli = _require_client(client_id, keyrec)
- if payload.info is not None:
- current_info = cli.get("info", {})
- base = ClientInfo(**current_info).model_dump()
- updates = payload.info.model_dump(exclude_unset=True)
- merged = {**base, **updates}
+ current_info = cli.get("info", {})
+ base = ClientInfo(**current_info).model_dump()
+ updates = payload.info.model_dump(exclude_unset=True)
+ merged = {**base, **updates}
- merged_valid = ClientInfo(**merged).model_dump()
- cli["info"] = merged_valid
+ merged_valid = ClientInfo(**merged).model_dump()
+ cli["info"] = merged_valid
- _flush_store()
write_client_xml(client_id, keyrec)
return {"client_id": client_id, **cli}
-# GET CLIENT (with header X-Client-Id, auto-regens XML if missing)
@app.get("/clients")
def get_client(
x_client_id: int = Header(..., alias="X-Client-Id"),
keyrec: KeyRecord = Depends(require_api_key),
):
+ """
+ Get client info.
+
+ Behaviour:
+ - If client exists in DB, read from DB and return.
+ - Else, read from in-memory pending store (non-DB client).
+ """
client_id = x_client_id
logging.info("AUDIT get_client key=%s client_id=%s", keyrec.key_name, client_id)
- cli = _require_client(client_id, keyrec)
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ info = _get_client_info_from_db(client_id, keyrec)
+ if info is None:
+ raise HTTPException(status_code=500, detail="Failed to load client from DB")
+ return {"client_id": client_id, "info": info.model_dump()}
- xml_path = _xml_path_for(client_id, keyrec)
- if not os.path.exists(xml_path):
- logging.info(
- "XML missing for client_id=%s (key=%s). Regenerating...",
- client_id,
- keyrec.key_name,
- )
- write_client_xml(client_id, keyrec)
-
- return {"client_id": client_id, **cli}
+ with _store_lock:
+ cli = _require_client(client_id, keyrec)
+ # Optionally expose pending status here if you want:
+ body = {"client_id": client_id, **cli}
+ if client_is_pending_import(client_id, keyrec):
+ body["import_status"] = "pending_import"
+ return body
@app.post("/clients/get", status_code=200)
def get_client_body(payload: ClientID, keyrec: KeyRecord = Depends(require_api_key)):
client_id = payload.client_id
logging.info("AUDIT get_client key=%s client_id=%s", keyrec.key_name, client_id)
- cli = _require_client(client_id, keyrec)
- xml_path = _xml_path_for(client_id, keyrec)
- if not os.path.exists(xml_path):
- logging.info(
- "XML missing for client_id=%s (key=%s). Regenerating...",
- client_id,
- keyrec.key_name,
- )
- write_client_xml(client_id, keyrec)
-
- return {"client_id": client_id, **cli}
-
-
-# REGEN XML explicitly
-@app.post("/clients/regen-xml", status_code=200)
-def regen_xml(payload: ClientID, keyrec: KeyRecord = Depends(require_api_key)):
- client_id = payload.client_id
- logging.info("AUDIT regen_xml key=%s client_id=%s", keyrec.key_name, client_id)
- _require_client(client_id, keyrec)
- write_client_xml(client_id, keyrec)
- return {"client_id": client_id, "xml": "regenerated"}
-
-
-# DELETE ENTIRE CLIENT (for this key only)
-@app.delete("/clients", status_code=204)
-def delete_client(payload: ClientID, keyrec: KeyRecord = Depends(require_api_key)):
- client_id = payload.client_id
- logging.info("AUDIT delete_client key=%s client_id=%s", keyrec.key_name, client_id)
- skey = _store_key(client_id, keyrec)
+ if db_client_exists(client_id, keyrec):
+ info = _get_client_info_from_db(client_id, keyrec)
+ if info is None:
+ raise HTTPException(status_code=500, detail="Failed to load client from DB")
+ return {"client_id": client_id, "info": info.model_dump(), "zones": {}, "users": {}}
with _store_lock:
- if skey not in _store["clients"]:
- raise HTTPException(status_code=404, detail="Client not found")
- del _store["clients"][skey]
- _flush_store()
+ cli = _require_client(client_id, keyrec)
+ body = {"client_id": client_id, **cli}
+ if client_is_pending_import(client_id, keyrec):
+ body["import_status"] = "pending_import"
+ return body
+
+@app.delete("/clients", status_code=204)
+def delete_client(
+ payload: ClientID,
+ keyrec: KeyRecord = Depends(require_api_key),
+):
+ """
+ Soft-delete a client.
+
+ DB clients:
+ - Do NOT delete row.
+ - Set:
+ NonMonitored = 1
+ DecommissionDate = current date/time (GETDATE())
+ DecommissionReason = "Decommissioned by API using key <key name>"
+
+ Non-DB clients:
+ - Remove from in-memory store
+ - Delete XML file (if any)
+ """
+ client_id = payload.client_id
+ logging.info(
+ "AUDIT delete_client key=%s client_id=%s",
+ keyrec.key_name,
+ client_id,
+ )
+
+ combined_id = _client_combined_id(client_id, keyrec)
+
+ # If client exists in DB → soft-delete via flags
+ if db_client_exists(client_id, keyrec):
+ reason = f"Decommissioned by API using key {keyrec.key_name}"
+
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ UPDATE dbo.Memalarm
+ SET NonMonitored = 1,
+ DecommissionDate = GETDATE(),
+ DecommissionReason = ?
+ WHERE Client_No = ?
+ """,
+ (reason, combined_id),
+ )
+
+ logging.info(
+ "DB: soft-decommissioned client Client_No=%s (client_id=%s) reason=%r",
+ combined_id,
+ client_id,
+ reason,
+ )
+ except Exception as e:
+ logging.error(
+ "DB: failed to soft-delete client_id=%s (Client_No=%s): %s",
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to decommission client in DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ return
+
+ # Non-DB client but pending_import -> lock deletion
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and cannot be deleted yet",
+ )
+
+ # Non-DB client: remove pending client + XML
+ skey = _store_key(client_id, keyrec)
+ with _store_lock:
+ if skey not in _store["clients"]:
+ # Nothing in DB, nothing in pending store
+ raise HTTPException(status_code=404, detail="Client not found")
+
+ del _store["clients"][skey]
+
+ # Try to remove XML file if it exists
try:
xml_path = _xml_path_for(client_id, keyrec)
if os.path.exists(xml_path):
os.remove(xml_path)
+ logging.info(
+ "Deleted XML for non-DB client client_id=%s key=%s at %s",
+ client_id,
+ keyrec.key_name,
+ xml_path,
+ )
except Exception as e:
logging.warning(
- "Failed to remove XML file for client %s (key=%s): %s",
+ "Failed to delete XML for non-DB client client_id=%s: %s",
client_id,
- keyrec.key_name,
e,
)
return
-# ---- Zones ----
-@app.get("/clients/zones")
-def list_zones(
- x_client_id: int = Header(..., alias="X-Client-Id"),
+# ----------------- Zones -----------------
+
+
+@app.post("/zones", status_code=201)
+def create_zone(
+ payload: ZoneCreate,
keyrec: KeyRecord = Depends(require_api_key),
):
- client_id = x_client_id
- logging.info("AUDIT list_zones key=%s client_id=%s", keyrec.key_name, client_id)
-
- cli = _require_client(client_id, keyrec)
- zones_obj = cli.get("zones", {})
-
- zones = [
- {"Zone_No": int(zid), "Zone_area": data.get("Zone_area"), "ModuleNo": data.get("ModuleNo", 0)}
- for zid, data in zones_obj.items()
- ]
- zones.sort(key=lambda z: z["Zone_No"])
- return {"client_id": client_id, "zones": zones}
-
-
-@app.post("/clients/zones/list", status_code=200)
-def list_zones_body(payload: ClientID, keyrec: KeyRecord = Depends(require_api_key)):
client_id = payload.client_id
- logging.info("AUDIT list_zones key=%s client_id=%s", keyrec.key_name, client_id)
+ zone = payload.zone
- cli = _require_client(client_id, keyrec)
- zones_obj = cli.get("zones", {})
- zones = [
- {"Zone_No": int(zid), "Zone_area": data.get("Zone_area"), "ModuleNo": data.get("ModuleNo", 0)}
- for zid, data in zones_obj.items()
- ]
- zones.sort(key=lambda z: z["Zone_No"])
- return {"client_id": client_id, "zones": zones}
-
-
-@app.put("/clients/zones", status_code=201)
-def upsert_zone(payload: ZoneUpdate, keyrec: KeyRecord = Depends(require_api_key)):
- client_id = payload.client_id
- zone_id = payload.zone_id
logging.info(
- "AUDIT upsert_zone key=%s client_id=%s zone_id=%s body=%s",
+ "AUDIT create_zone key=%s client_id=%s zone_no=%s",
keyrec.key_name,
client_id,
- zone_id,
- payload.model_dump(),
+ zone.ZoneNo,
)
- if not (1 <= zone_id <= 999):
- raise HTTPException(status_code=422, detail="zone_id must be 1..999")
+ # DB client: create directly in dbo.MZone, never XML
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and zones are read-only",
+ )
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+ # Check if zone already exists
+ cur.execute(
+ "SELECT 1 FROM dbo.MZone WHERE Client_No = ? AND Zone_No = ?",
+ (combined_id, zone.ZoneNo),
+ )
+ if cur.fetchone():
+ raise HTTPException(status_code=409, detail="Zone already exists")
+
+ # Insert new zone; ModuleNo set to 0 by default, Zone_area from ZoneText
+ cur.execute(
+ """
+ INSERT INTO dbo.Mzone (Client_No, ModuleNo, Zone_No, Zone_area)
+ VALUES (?, ?, ?, ?)
+ """,
+ (combined_id, 0, zone.ZoneNo, zone.ZoneText),
+ )
+ logging.info(
+ "DB: created zone Zone_No=%s for Client_No=%s",
+ zone.ZoneNo,
+ combined_id,
+ )
+ except HTTPException:
+ raise
+ except Exception as e:
+ logging.error(
+ "DB: failed to create zone Zone_No=%s for client_id=%s (Client_No=%s): %s",
+ zone.ZoneNo,
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to create zone in DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ return {"client_id": client_id, "zone": zone}
+
+ # Non-DB client but pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Non-DB client: store in JSON + XML
with _store_lock:
cli = _require_client(client_id, keyrec)
- cli.setdefault("zones", {})
- cli["zones"][str(zone_id)] = {
- "Zone_area": payload.Zone_area,
- "ModuleNo": payload.ModuleNo,
- }
- _flush_store()
+ zones = cli.setdefault("zones", {})
+ if str(zone.ZoneNo) in zones:
+ raise HTTPException(status_code=409, detail="Zone already exists")
+
+ zones[str(zone.ZoneNo)] = zone.model_dump()
write_client_xml(client_id, keyrec)
- return {
- "client_id": client_id,
- "zone": {"Zone_No": zone_id, "Zone_area": payload.Zone_area, "ModuleNo": payload.ModuleNo},
- }
+ return {"client_id": client_id, "zone": zone}
-@app.delete("/clients/zones", status_code=204)
-def delete_zone(payload: ZoneDelete, keyrec: KeyRecord = Depends(require_api_key)):
+@app.patch("/zones", status_code=200)
+def patch_zone(
+ payload: ZonePatch,
+ keyrec: KeyRecord = Depends(require_api_key),
+):
client_id = payload.client_id
- zone_id = payload.zone_id
+ zone = payload.zone
+ zone_no = zone.ZoneNo
+
logging.info(
- "AUDIT delete_zone key=%s client_id=%s zone_id=%s",
+ "AUDIT patch_zone key=%s client_id=%s zone_no=%s",
keyrec.key_name,
client_id,
- zone_id,
+ zone_no,
)
+ # DB client: update dbo.MZone
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and zones are read-only",
+ )
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+ # Ensure zone exists
+ cur.execute(
+ "SELECT 1 FROM dbo.MZone WHERE Client_No = ? AND Zone_No = ?",
+ (combined_id, zone_no),
+ )
+ if not cur.fetchone():
+ raise HTTPException(status_code=404, detail="Zone not found")
+
+ # Update Zone_area text
+ cur.execute(
+ "UPDATE dbo.Mzone SET Zone_area = ? WHERE Client_No = ? AND Zone_No = ?",
+ (zone.ZoneText, combined_id, zone_no),
+ )
+ logging.info(
+ "DB: updated zone Zone_No=%s for Client_No=%s",
+ zone_no,
+ combined_id,
+ )
+ except HTTPException:
+ raise
+ except Exception as e:
+ logging.error(
+ "DB: failed to update zone Zone_No=%s for client_id=%s (Client_No=%s): %s",
+ zone_no,
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to update zone in DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ return {"client_id": client_id, "zone_no": zone_no, "zone": zone}
+
+ # Non-DB client but pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Non-DB client: operate on in-memory + XML
with _store_lock:
cli = _require_client(client_id, keyrec)
- zones = cli.get("zones", {})
- if str(zone_id) not in zones:
+ zones = cli.setdefault("zones", {})
+ if str(zone_no) not in zones:
raise HTTPException(status_code=404, detail="Zone not found")
- del zones[str(zone_id)]
- _flush_store()
+
+ zones[str(zone_no)] = zone.model_dump()
write_client_xml(client_id, keyrec)
+
+ return {"client_id": client_id, "zone_no": zone_no, "zone": zone}
+
+
+@app.delete("/zones", status_code=204)
+def delete_zone(
+ payload: ZoneDelete,
+ keyrec: KeyRecord = Depends(require_api_key),
+):
+ client_id = payload.client_id
+ zone_no = payload.zone_no
+
+ logging.info(
+ "AUDIT delete_zone key=%s client_id=%s zone_no=%s",
+ keyrec.key_name,
+ client_id,
+ zone_no,
+ )
+
+ # DB client: delete from dbo.MZone
+ if db_client_exists(client_id, keyrec):
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and zones are read-only",
+ )
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+ cur.execute(
+ "SELECT 1 FROM dbo.MZone WHERE Client_No = ? AND Zone_No = ?",
+ (combined_id, zone_no),
+ )
+ if not cur.fetchone():
+ raise HTTPException(status_code=404, detail="Zone not found")
+
+ cur.execute(
+ "DELETE FROM dbo.MZone WHERE Client_No = ? AND Zone_No = ?",
+ (combined_id, zone_no),
+ )
+ logging.info(
+ "DB: deleted zone Zone_No=%s for Client_No=%s",
+ zone_no,
+ combined_id,
+ )
+ except HTTPException:
+ raise
+ except Exception as e:
+ logging.error(
+ "DB: failed to delete zone Zone_No=%s for client_id=%s (Client_No=%s): %s",
+ zone_no,
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to delete zone in DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ return
+
+ # Non-DB client but pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Non-DB client: operate on in-memory + XML
+ with _store_lock:
+ cli = _require_client(client_id, keyrec)
+ zones = cli.setdefault("zones", {})
+ if str(zone_no) not in zones:
+ raise HTTPException(status_code=404, detail="Zone not found")
+
+ del zones[str(zone_no)]
+ write_client_xml(client_id, keyrec)
+
return
-# ---- Users ----
-@app.get("/clients/users")
-def list_users(
+@app.get("/zones", status_code=200)
+def get_zones(
x_client_id: int = Header(..., alias="X-Client-Id"),
keyrec: KeyRecord = Depends(require_api_key),
):
client_id = x_client_id
- logging.info("AUDIT list_users key=%s client_id=%s", keyrec.key_name, client_id)
+ logging.info("AUDIT get_zones key=%s client_id=%s", keyrec.key_name, client_id)
- cli = _require_client(client_id, keyrec)
- users = list(cli.get("users", {}).values())
- users.sort(key=lambda u: (u.get("CallOrder", 0), u.get("MobileNoOrder", 0), u.get("UserNo", 0)))
+ # DB client → return zones from dbo.Mzone
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ combined_id = _client_combined_id(client_id, keyrec)
- return {"client_id": client_id, "users": users}
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+
+ cur.execute(
+ """
+ SELECT Zone_No, Zone_area
+ FROM dbo.Mzone
+ WHERE Client_No = ?
+ ORDER BY Zone_No
+ """,
+ (combined_id,),
+ )
+
+ rows = cur.fetchall()
+ zones = [
+ {"ZoneNo": r.Zone_No, "ZoneText": r.Zone_area or ""}
+ for r in rows
+ ]
+
+ return {"client_id": client_id, "zones": zones}
+
+ except Exception as e:
+ logging.error(
+ "DB: failed to fetch zones for client_id=%s (Client_No=%s): %s",
+ client_id, combined_id, e
+ )
+ raise HTTPException(status_code=500, detail="Failed to load zones from DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ # Non-DB client → return from in-memory store
+ with _store_lock:
+ cli = _require_client(client_id, keyrec)
+ zones = cli.get("zones", {})
+ zone_list = [
+ {"ZoneNo": int(zno), "ZoneText": zdata.get("ZoneText", "")}
+ for zno, zdata in zones.items()
+ ]
+ return {"client_id": client_id, "zones": zone_list}
-@app.get("/clients/users/single")
-def get_user_header(
- x_client_id: int = Header(..., alias="X-Client-Id"),
- x_user_no: int = Header(..., alias="X-User-No"),
+# ----------------- Users -----------------
+
+# User types that must NEVER be modified or deleted
+RESTRICTED_USER_TYPES = {"N"}
+
+
+@app.post("/users", status_code=201)
+def create_user(
+ payload: UserCreate,
keyrec: KeyRecord = Depends(require_api_key),
):
- client_id = x_client_id
- user_no = x_user_no
-
- logging.info(
- "AUDIT get_user key=%s client_id=%s user_no=%s",
- keyrec.key_name,
- client_id,
- user_no,
- )
-
- cli = _require_client(client_id, keyrec)
- u = cli.get("users", {}).get(str(user_no))
-
- if not u:
- raise HTTPException(status_code=404, detail="User not found")
-
- return {"client_id": client_id, "user": u}
-
-
-@app.post("/clients/users/list", status_code=200)
-def list_users_body(payload: ClientID, keyrec: KeyRecord = Depends(require_api_key)):
- client_id = payload.client_id
- logging.info("AUDIT list_users key=%s client_id=%s", keyrec.key_name, client_id)
-
- cli = _require_client(client_id, keyrec)
- users = list(cli.get("users", {}).values())
- users.sort(key=lambda u: (u.get("CallOrder", 0), u.get("MobileNoOrder", 0), u.get("UserNo", 0)))
- return {"client_id": client_id, "users": users}
-
-
-@app.post("/clients/users", status_code=201)
-def add_user(payload: UserWithClient, keyrec: KeyRecord = Depends(require_api_key)):
client_id = payload.client_id
user = payload.user
+
logging.info(
- "AUDIT add_user key=%s client_id=%s UserNo=%s user=%s",
+ "AUDIT create_user key=%s client_id=%s user_no=%s",
keyrec.key_name,
client_id,
user.UserNo,
- user.model_dump(),
)
+ # DB client: create user in dbo.Muser + dbo.UserToClient
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and users are read-only",
+ )
+
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+
+ # Check if this user number is already linked to this client
+ cur.execute(
+ "SELECT 1 FROM dbo.UserToClient WHERE ClientNo = ? AND UserNo = ?",
+ (combined_id, user.UserNo),
+ )
+ if cur.fetchone():
+ raise HTTPException(status_code=409, detail="User already exists")
+
+ # Insert into Muser
+ mobile = user.MobileNo
+            phone_type = 1  # tinyint phone-type code; 1 assumed to mean "mobile" — TODO confirm against Patriot schema
+ notes = user.Instructions or ""
+
+ # Use OUTPUT INSERTED.UserId to get the new UserId in one step
+ cur.execute(
+ """
+ INSERT INTO dbo.Muser (User_Name, PhoneNo3, PhoneNo3Type, Email, Type, Notes)
+ OUTPUT INSERTED.UserId
+ VALUES (?, ?, ?, ?, ?, ?)
+ """,
+ (user.User_Name, mobile, phone_type, user.Email, user.Type, notes),
+ )
+ row = cur.fetchone()
+ if not row or row[0] is None:
+ raise Exception("Failed to retrieve inserted UserId")
+ user_id = int(row[0])
+
+ # Link user to client
+ cur.execute(
+ """
+ INSERT INTO dbo.UserToClient (UserId, ClientNo, UserNo, CallOrder, Instructions)
+ VALUES (?, ?, ?, ?, ?)
+ """,
+ (user_id, combined_id, user.UserNo, user.CallOrder, notes),
+ )
+ logging.info(
+ "DB: created user UserId=%s UserNo=%s for Client_No=%s",
+ user_id,
+ user.UserNo,
+ combined_id,
+ )
+ except HTTPException:
+ raise
+ except Exception as e:
+ logging.error(
+ "DB: failed to create user UserNo=%s for client_id=%s (Client_No=%s): %s",
+ user.UserNo,
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to create user in DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ return {"client_id": client_id, "user": user}
+
+ # Non-DB client but pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Non-DB client: store in in-memory + XML
with _store_lock:
cli = _require_client(client_id, keyrec)
- cli.setdefault("users", {})
- if str(user.UserNo) in cli["users"]:
- raise HTTPException(status_code=409, detail="UserNo already exists")
- cli["users"][str(user.UserNo)] = user.model_dump()
- _flush_store()
+ users = cli.setdefault("users", {})
+ if str(user.UserNo) in users:
+ raise HTTPException(status_code=409, detail="User already exists")
+
+ users[str(user.UserNo)] = user.model_dump()
write_client_xml(client_id, keyrec)
return {"client_id": client_id, "user": user}
-@app.post("/clients/users/get", status_code=200)
-def get_user_body(payload: UserKey, keyrec: KeyRecord = Depends(require_api_key)):
- client_id = payload.client_id
- user_no = payload.user_no
- logging.info(
- "AUDIT get_user key=%s client_id=%s user_no=%s",
- keyrec.key_name,
- client_id,
- user_no,
- )
-
- cli = _require_client(client_id, keyrec)
- u = cli.get("users", {}).get(str(user_no))
- if not u:
- raise HTTPException(status_code=404, detail="User not found")
- return {"client_id": client_id, "user": u}
-
-
-@app.put("/clients/users", status_code=200)
-def replace_user(payload: UserWithClient, keyrec: KeyRecord = Depends(require_api_key)):
+@app.patch("/users", status_code=200)
+def patch_user(
+ payload: UserPatch,
+ keyrec: KeyRecord = Depends(require_api_key),
+):
client_id = payload.client_id
user = payload.user
user_no = user.UserNo
logging.info(
- "AUDIT replace_user key=%s client_id=%s user_no=%s user=%s",
+ "AUDIT patch_user key=%s client_id=%s user_no=%s",
keyrec.key_name,
client_id,
user_no,
- user.model_dump(),
)
+ # DB client: update dbo.Muser + dbo.UserToClient
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and users are read-only",
+ )
+
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+
+ # Look up existing relation + current Type
+ cur.execute(
+ """
+ SELECT u.UserId, ut.IDUserToClient, u.Type
+ FROM dbo.UserToClient ut
+ JOIN dbo.Muser u ON ut.UserId = u.UserId
+ WHERE ut.ClientNo = ? AND ut.UserNo = ?
+ """,
+ (combined_id, user_no),
+ )
+ row = cur.fetchone()
+ if not row:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ user_id = int(row[0])
+ link_id = int(row[1])
+ current_type = (row[2] or "").strip()
+
+ # If existing user type is restricted, do not allow PATCH
+ if current_type in RESTRICTED_USER_TYPES:
+ raise HTTPException(
+ status_code=403,
+ detail=f"User type '{current_type}' is restricted and cannot be modified",
+ )
+
+ # Also do not allow changing Type TO a restricted value
+ new_type = (user.Type or "").strip()
+ if new_type in RESTRICTED_USER_TYPES:
+ raise HTTPException(
+ status_code=403,
+ detail=f"Cannot change user to restricted type '{new_type}'",
+ )
+
+ mobile = user.MobileNo
+            phone_type = 1  # tinyint phone-type code; 1 assumed to mean "mobile" — TODO confirm against Patriot schema
+ notes = user.Instructions or ""
+
+ # Update core user record
+ cur.execute(
+ """
+ UPDATE dbo.Muser
+ SET User_Name = ?, PhoneNo3 = ?, PhoneNo3Type = ?, Email = ?, Type = ?
+ WHERE UserId = ?
+ """,
+ (user.User_Name, mobile, phone_type, user.Email, new_type, user_id),
+ )
+
+ # Update link info
+ cur.execute(
+ """
+ UPDATE dbo.UserToClient
+ SET CallOrder = ?, Instructions = ?
+ WHERE IDUserToClient = ?
+ """,
+ (user.CallOrder, notes, link_id),
+ )
+
+ logging.info(
+ "DB: updated user UserId=%s UserNo=%s for Client_No=%s",
+ user_id,
+ user_no,
+ combined_id,
+ )
+ except HTTPException:
+ raise
+ except Exception as e:
+ logging.error(
+ "DB: failed to update user UserNo=%s for client_id=%s (Client_No=%s): %s",
+ user_no,
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to update user in DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ return {"client_id": client_id, "user_no": user_no, "user": user}
+
+ # Non-DB client but pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Non-DB client: operate on in-memory + XML
with _store_lock:
cli = _require_client(client_id, keyrec)
- cli.setdefault("users", {})
- cli["users"][str(user_no)] = user.model_dump()
- _flush_store()
+ users = cli.setdefault("users", {})
+ existing = users.get(str(user_no))
+ if not existing:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ current_type = (existing.get("Type") or "").strip()
+ if current_type in RESTRICTED_USER_TYPES:
+ raise HTTPException(
+ status_code=403,
+ detail=f"User type '{current_type}' is restricted and cannot be modified",
+ )
+
+ new_type = (user.Type or "").strip()
+ if new_type in RESTRICTED_USER_TYPES:
+ raise HTTPException(
+ status_code=403,
+ detail=f"Cannot change user to restricted type '{new_type}'",
+ )
+
+ users[str(user_no)] = user.model_dump()
write_client_xml(client_id, keyrec)
- return {"client_id": client_id, "user": user}
+ return {"client_id": client_id, "user_no": user_no, "user": user}
-@app.delete("/clients/users", status_code=204)
-def delete_user(payload: UserKey, keyrec: KeyRecord = Depends(require_api_key)):
+@app.get("/users", status_code=200)
+def get_users(
+ x_client_id: int = Header(..., alias="X-Client-Id"),
+ keyrec: KeyRecord = Depends(require_api_key),
+):
+ """
+ Get all users for a client.
+
+ - If client exists in DB:
+ * Read users via dbo.UserToClient + dbo.Muser
+ - If client does NOT exist in DB:
+ * Read users from in-memory pending store
+ """
+ client_id = x_client_id
+ logging.info("AUDIT get_users key=%s client_id=%s", keyrec.key_name, client_id)
+
+ # DB client → list from UserToClient + Muser
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ combined_id = _client_combined_id(client_id, keyrec)
+
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT
+ u.UserId,
+ u.User_Name AS User_Name,
+ u.PhoneNo3 AS PhoneNo3,
+ u.Email AS Email,
+ u.Type AS Type,
+ u.Notes AS Notes,
+ ut.UserNo AS UserNo,
+ ut.CallOrder AS CallOrder,
+ ut.Instructions AS Instructions
+ FROM dbo.UserToClient ut
+ JOIN dbo.Muser u ON ut.UserId = u.UserId
+ WHERE ut.ClientNo = ?
+ ORDER BY ut.CallOrder, ut.UserNo
+ """,
+ (combined_id,),
+ )
+ rows = cur.fetchall()
+
+ users = []
+ for r in rows:
+ instructions = (r.Instructions or r.Notes or "").strip()
+ call_order = r.CallOrder if r.CallOrder is not None else 0
+
+ users.append(
+ {
+ "User_Name": r.User_Name,
+ "MobileNo": (r.PhoneNo3 or "").strip(),
+ # We don't have a separate MobileNoOrder column in DB,
+ # so we mirror CallOrder here for ordering.
+ "MobileNoOrder": call_order if call_order > 0 else 1,
+ "Email": (r.Email or "").strip(),
+ "Type": (r.Type or "U").strip() or "U",
+ "UserNo": r.UserNo,
+ "Instructions": instructions,
+ "CallOrder": call_order,
+ }
+ )
+
+ return {"client_id": client_id, "users": users}
+
+ except Exception as e:
+ logging.error(
+ "DB: failed to fetch users for client_id=%s (Client_No=%s): %s",
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to load users from DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ # Non-DB client → pending in-memory users
+ with _store_lock:
+ cli = _require_client(client_id, keyrec)
+ users_dict = cli.get("users", {})
+ users = list(users_dict.values())
+ return {"client_id": client_id, "users": users}
+
+
+@app.get("/user", status_code=200)
+def get_user(
+ x_client_id: int = Header(..., alias="X-Client-Id"),
+ x_user_no: int = Header(..., alias="X-User-No"),
+ keyrec: KeyRecord = Depends(require_api_key),
+):
+ """
+ Get a specific user (by UserNo) for a client.
+
+ - If client exists in DB:
+ * Read from UserToClient + Muser
+ - If client does NOT exist in DB:
+ * Read from in-memory pending store
+ """
+ client_id = x_client_id
+ user_no = x_user_no
+ logging.info(
+ "AUDIT get_user key=%s client_id=%s user_no=%s",
+ keyrec.key_name,
+ client_id,
+ user_no,
+ )
+
+ # DB client → single user
+ if db_client_exists(client_id, keyrec):
+ _purge_cached_client(client_id, keyrec)
+ combined_id = _client_combined_id(client_id, keyrec)
+
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT
+ u.UserId,
+ u.User_Name AS User_Name,
+ u.PhoneNo3 AS PhoneNo3,
+ u.Email AS Email,
+ u.Type AS Type,
+ u.Notes AS Notes,
+ ut.UserNo AS UserNo,
+ ut.CallOrder AS CallOrder,
+ ut.Instructions AS Instructions
+ FROM dbo.UserToClient ut
+ JOIN dbo.Muser u ON ut.UserId = u.UserId
+ WHERE ut.ClientNo = ? AND ut.UserNo = ?
+ """,
+ (combined_id, user_no),
+ )
+ r = cur.fetchone()
+ if not r:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ instructions = (r.Instructions or r.Notes or "").strip()
+ call_order = r.CallOrder if r.CallOrder is not None else 0
+
+ user = {
+ "User_Name": r.User_Name,
+ "MobileNo": (r.PhoneNo3 or "").strip(),
+ "MobileNoOrder": call_order if call_order > 0 else 1,
+ "Email": (r.Email or "").strip(),
+ "Type": (r.Type or "U").strip() or "U",
+ "UserNo": r.UserNo,
+ "Instructions": instructions,
+ "CallOrder": call_order,
+ }
+
+ return {"client_id": client_id, "user": user}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logging.error(
+ "DB: failed to fetch user UserNo=%s for client_id=%s (Client_No=%s): %s",
+ user_no,
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to load user from DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ # Non-DB client → pending in-memory users
+ with _store_lock:
+ cli = _require_client(client_id, keyrec)
+ users_dict = cli.get("users", {})
+ user = users_dict.get(str(user_no))
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+ return {"client_id": client_id, "user": user}
+
+
+@app.delete("/users", status_code=204)
+def delete_user(
+ payload: UserDelete,
+ keyrec: KeyRecord = Depends(require_api_key),
+):
client_id = payload.client_id
user_no = payload.user_no
+
logging.info(
"AUDIT delete_user key=%s client_id=%s user_no=%s",
keyrec.key_name,
@@ -865,12 +2265,114 @@ def delete_user(payload: UserKey, keyrec: KeyRecord = Depends(require_api_key)):
user_no,
)
+ # DB client: delete from UserToClient AND Muser
+ if db_client_exists(client_id, keyrec):
+ if db_client_is_decommissioned(client_id, keyrec):
+ raise HTTPException(
+ status_code=403,
+ detail="Client is decommissioned and users are read-only",
+ )
+
+ combined_id = _client_combined_id(client_id, keyrec)
+ try:
+ conn = _get_db_connection()
+ cur = conn.cursor()
+
+ # Look up link + current Type + UserId
+ cur.execute(
+ """
+ SELECT ut.IDUserToClient, u.UserId, u.Type
+ FROM dbo.UserToClient ut
+ JOIN dbo.Muser u ON ut.UserId = u.UserId
+ WHERE ut.ClientNo = ? AND ut.UserNo = ?
+ """,
+ (combined_id, user_no),
+ )
+ row = cur.fetchone()
+ if not row:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ link_id = int(row[0])
+ user_id = int(row[1])
+ current_type = (row[2] or "").strip()
+
+ if current_type in RESTRICTED_USER_TYPES:
+ raise HTTPException(
+ status_code=403,
+ detail=f"User type '{current_type}' is restricted and cannot be deleted",
+ )
+
+ # First delete link
+ cur.execute(
+ "DELETE FROM dbo.UserToClient WHERE IDUserToClient = ?",
+ (link_id,),
+ )
+
+ # Then delete the user row itself
+ cur.execute(
+ "DELETE FROM dbo.Muser WHERE UserId = ?",
+ (user_id,),
+ )
+
+ logging.info(
+ "DB: deleted user UserId=%s UserNo=%s (IDUserToClient=%s) for Client_No=%s",
+ user_id,
+ user_no,
+ link_id,
+ combined_id,
+ )
+ except HTTPException:
+ raise
+ except Exception as e:
+ logging.error(
+ "DB: failed to delete user UserNo=%s for client_id=%s (Client_No=%s): %s",
+ user_no,
+ client_id,
+ combined_id,
+ e,
+ )
+ raise HTTPException(status_code=500, detail="Failed to delete user in DB")
+ finally:
+ try:
+ conn.close()
+ except Exception:
+ pass
+
+ return
+
+ # Non-DB client but pending_import -> lock
+ if client_is_pending_import(client_id, keyrec):
+ raise HTTPException(
+ status_code=409,
+ detail="Client is pending import into Patriot and is currently locked",
+ )
+
+ # Non-DB client: operate on in-memory + XML
with _store_lock:
cli = _require_client(client_id, keyrec)
users = cli.get("users", {})
- if str(user_no) not in users:
+ existing = users.get(str(user_no))
+ if not existing:
raise HTTPException(status_code=404, detail="User not found")
+
+ current_type = (existing.get("Type") or "").strip()
+ if current_type in RESTRICTED_USER_TYPES:
+ raise HTTPException(
+ status_code=403,
+ detail=f"User type '{current_type}' is restricted and cannot be deleted",
+ )
+
del users[str(user_no)]
- _flush_store()
write_client_xml(client_id, keyrec)
+
return
+
+
+# ----------------- Main (for standalone run) -----------------
+
+
+if __name__ == "__main__":
+ logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] %(message)s",
+ )
diff --git a/run_server.py b/run_server.py
index 2b01111..3bbea33 100644
--- a/run_server.py
+++ b/run_server.py
@@ -18,7 +18,7 @@ os.environ.setdefault("XML_DIR", str(bd / "out" / "clients"))
os.makedirs(os.environ["XML_DIR"], exist_ok=True)
try:
- from main import app # must succeed
+ from main import app # import FastAPI app from main.py
print("[launcher] imported main.app OK")
except Exception as e:
print(f"[launcher] FAILED to import main.app: {e}")
@@ -31,6 +31,6 @@ except Exception as e:
sys.exit(1)
if __name__ == "__main__":
- print("[launcher] running uvicorn on 0.0.0.0:7071")
- uvicorn.run(app, host="0.0.0.0", port=8081, log_level="info")
+ print("[launcher] running uvicorn on 0.0.0.0:8082")
+ uvicorn.run(app, host="0.0.0.0", port=8082, log_level="info")
print("[launcher] uvicorn.run returned (server stopped)") # should only print on shutdown
diff --git a/test_change.py b/test_change.py
new file mode 100644
index 0000000..ef1d959
--- /dev/null
+++ b/test_change.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python3
+import random
+import string
+import time
+from datetime import datetime, timedelta
+
+import requests
+
+# ================== CONFIG ==================
+
+BASE_URL = "http://10.181.149.220:8081" # through nginx
+API_KEY = "YOUR_API_KEY_HERE"  # placeholder — load from an env var; never commit a real key
+
+# Auth style: choose one of these:
+USE_X_API_KEY = False # send X-API-Key header
+USE_BEARER = True # send Authorization: Bearer
+
+# <<< IMPORTANT >>>
+# List of existing 4-digit client IDs you want to mutate.
+# Fill this with IDs you know exist (created by your previous test/import).
+TARGET_CLIENT_IDS = [
+ # Example: 1234, 5678, 9012
+]
+
+# If TARGET_CLIENT_IDS is empty, we fallback to random 4-digit IDs (you'll get some 404s)
+FALLBACK_RANDOM_RANGE = (1235, 1244)
+
+# How aggressive should this be?
+SLEEP_BETWEEN_OPS = 10 # seconds between operations
+
+# Relative probabilities of operations
+OP_WEIGHTS = {
+ "update_client": 0.4,
+ "create_or_update_user": 0.3,
+ "create_or_update_zone": 0.3,
+}
+
+# ============================================
+
+
+def auth_headers():
+ headers = {
+ "Content-Type": "application/json",
+ }
+ if USE_X_API_KEY:
+ headers["X-API-Key"] = API_KEY
+ if USE_BEARER:
+ headers["Authorization"] = f"Bearer {API_KEY}"
+ return headers
+
+
+def pick_client_id() -> int:
+ if TARGET_CLIENT_IDS:
+ return random.choice(TARGET_CLIENT_IDS)
+ return random.randint(*FALLBACK_RANDOM_RANGE)
+
+
+def random_phone():
+ return "+47" + "".join(random.choice("0123456789") for _ in range(8))
+
+
+def random_email(name_stub: str):
+ domains = ["example.com", "test.com", "mailinator.com", "demo.net"]
+ clean = "".join(c for c in name_stub.lower() if c.isalnum())
+ return f"{clean or 'user'}@{random.choice(domains)}"
+
+
+def random_string(prefix: str, length: int = 8):
+ tail = "".join(random.choice(string.ascii_letters + string.digits) for _ in range(length))
+ return f"{prefix}{tail}"
+
+
+def random_client_info():
+ # Random-ish basic client fields
+ name = random_string("Client_", 5)
+ alias = random_string("AL", 4)
+ street_no = random.randint(1, 200)
+ street_name = random.choice(["Gate", "Veien", "Stien", "Allé"])
+ location = f"{random.choice(['Test', 'Demo', 'Fake'])} {street_name} {street_no}"
+
+ area_code = str(random.randint(1000, 9999))
+ area = random.choice(["Oslo", "Sarpsborg", "Bergen", "Trondheim", "Stavanger"])
+
+ bus_phone = random_phone()
+ email = random_email(name)
+
+ ok_password = random_string("pwd_", 6)
+ spec_request = random.choice(
+ [
+ "Script-endret testkunde.",
+ "Oppdatert spesialinstruks fra load-test.",
+ "Ingen reelle tiltak, kun test.",
+ ]
+ )
+
+ no_sigs_mon = random.choice(["ActiveAny", "Disabled"])
+ since_days = random.randint(0, 7)
+ since_hrs = random.randint(0, 23)
+ since_mins = random.randint(0, 59)
+
+ reset_ignored = random.choice([True, False])
+ reset_days = random.randint(0, 14)
+ reset_hrs = random.randint(0, 23)
+ reset_mins = random.randint(0, 59)
+
+ install_date = (datetime.now() - timedelta(days=random.randint(0, 365))).date().isoformat()
+
+ panel_name = random.choice(["Ajax", "Future"])
+ panel_site = random.choice(["Stue", "Gang", "Kontor", "Lager"])
+ keypad_location = random.choice(["Inngang", "Bakdør", "Garasje", "2. etg"])
+ sp_page = random.choice(
+ [
+ "Ekstra info endret av load-test.",
+ "Test entry, kan ignoreres.",
+ "API performance-test.",
+ ]
+ )
+
+ return {
+ "Name": name,
+ "Alias": alias,
+ "Location": location,
+ "area_code": area_code,
+ "area": area,
+ "BusPhone": bus_phone,
+ "Email": email,
+ "OKPassword": ok_password,
+ "SpecRequest": spec_request,
+ "NoSigsMon": no_sigs_mon,
+ "SinceDays": since_days,
+ "SinceHrs": since_hrs,
+ "SinceMins": since_mins,
+ "ResetNosigsIgnored": reset_ignored,
+ "ResetNosigsDays": reset_days,
+ "ResetNosigsHrs": reset_hrs,
+ "ResetNosigsMins": reset_mins,
+ "InstallDateTime": install_date,
+ "PanelName": panel_name,
+ "PanelSite": panel_site,
+ "KeypadLocation": keypad_location,
+ "SPPage": sp_page,
+ }
+
+
+def random_user_payload(existing_user_no: int | None = None):
+ first_names = ["Anders", "Per", "Lise", "Kari", "Ole", "Nina"]
+ last_names = ["Knutsen", "Olsen", "Hansen", "Johansen", "Pedersen"]
+ name = f"{random.choice(first_names)} {random.choice(last_names)}"
+
+ mobile = random_phone()
+ email = random_email(name.replace(" ", "."))
+
+ user_type = "U"
+ instructions = random.choice(
+ [
+ "Oppdatert instrukser.",
+ "Ring ved alarm.",
+ "Kun SMS.",
+ "Kontakt vaktmester først.",
+ ]
+ )
+
+ call_order = random.randint(0, 3)
+ mobile_order = random.randint(1, 3)
+
+ if existing_user_no is None:
+ user_no = random.randint(1, 5)
+ else:
+ user_no = existing_user_no
+
+ return {
+ "User_Name": name,
+ "MobileNo": mobile,
+ "MobileNoOrder": mobile_order,
+ "Email": email,
+ "Type": user_type,
+ "UserNo": user_no,
+ "Instructions": instructions,
+ "CallOrder": call_order,
+ }
+
+
+def random_zone_payload(existing_zone_no: int | None = None):
+ zone_names = [
+ "Stue",
+ "Kjøkken",
+ "Gang",
+ "Soverom",
+ "Garasje",
+ "Kontor",
+ "Lager",
+ "Uteområde",
+ ]
+ if existing_zone_no is None:
+ zone_no = random.randint(1, 20)
+ else:
+ zone_no = existing_zone_no
+
+ zone_text = random.choice(zone_names) + " " + random.choice(["1", "2", "A", "B"])
+ return {"ZoneNo": zone_no, "ZoneText": zone_text}
+
+
+def do_update_client(client_id: int):
+ url = f"{BASE_URL}/clients"
+ payload = {
+ "client_id": client_id,
+ "info": random_client_info(),
+ }
+ r = requests.put(url, json=payload, headers=auth_headers(), timeout=10)
+ print(f"[update_client] client_id={client_id} -> {r.status_code}")
+ if r.status_code >= 400:
+ print(f" body: {r.text[:500]}")
+ return r.status_code
+
+
+def do_create_or_update_user(client_id: int):
+ url = f"{BASE_URL}/users"
+ # Randomly choose new or existing user no
+ if random.random() < 0.5:
+ user_no = random.randint(1, 3) # likely already created by your first script
+ else:
+ user_no = random.randint(4, 10) # maybe new user
+
+ user_payload = random_user_payload(existing_user_no=user_no)
+ payload = {
+ "client_id": client_id,
+ "user": user_payload,
+ }
+ r = requests.post(url, json=payload, headers=auth_headers(), timeout=10)
+ print(
+ f"[user] client_id={client_id} UserNo={user_no} -> {r.status_code}"
+ )
+ if r.status_code >= 400:
+ print(f" body: {r.text[:500]}")
+ return r.status_code
+
+
+def do_create_or_update_zone(client_id: int):
+ url = f"{BASE_URL}/zones"
+ # Same trick: sometimes hit likely existing zones, sometimes new
+ if random.random() < 0.5:
+ zone_no = random.randint(1, 10)
+ else:
+ zone_no = random.randint(11, 30)
+
+ zone_payload = random_zone_payload(existing_zone_no=zone_no)
+ payload = {
+ "client_id": client_id,
+ "zone": zone_payload,
+ }
+ r = requests.post(url, json=payload, headers=auth_headers(), timeout=10)
+ print(
+ f"[zone] client_id={client_id} ZoneNo={zone_no} -> {r.status_code}"
+ )
+ if r.status_code >= 400:
+ print(f" body: {r.text[:500]}")
+ return r.status_code
+
+
+def pick_operation():
+ ops = list(OP_WEIGHTS.keys())
+ weights = [OP_WEIGHTS[o] for o in ops]
+ return random.choices(ops, weights=weights, k=1)[0]
+
+
+def main():
+ if not TARGET_CLIENT_IDS:
+ print(
+ "WARNING: TARGET_CLIENT_IDS is empty.\n"
+ " Script will use random 4-digit client IDs and you may see many 404s."
+ )
+ else:
+ print(f"Targeting these client IDs: {TARGET_CLIENT_IDS}")
+
+ print("Starting random modification loop. Press Ctrl+C to stop.\n")
+
+ op_count = 0
+ try:
+ while True:
+ client_id = pick_client_id()
+ op = pick_operation()
+
+ print(f"\n=== op #{op_count} ===")
+ print(f"Client: {client_id}, operation: {op}")
+
+ try:
+ if op == "update_client":
+ do_update_client(client_id)
+ elif op == "create_or_update_user":
+ do_create_or_update_user(client_id)
+ elif op == "create_or_update_zone":
+ do_create_or_update_zone(client_id)
+ else:
+ print(f"Unknown operation {op}, skipping.")
+ except requests.RequestException as e:
+ print(f"HTTP error: {e}")
+
+ op_count += 1
+ time.sleep(SLEEP_BETWEEN_OPS)
+
+ except KeyboardInterrupt:
+ print("\nStopping on user request (Ctrl+C).")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/xml_combine_for_import.py b/xml_combine_for_import.py
new file mode 100755
index 0000000..13a7031
--- /dev/null
+++ b/xml_combine_for_import.py
@@ -0,0 +1,434 @@
+#!/usr/bin/env python3
+import xml.etree.ElementTree as ET
+from pathlib import Path
+import logging
+import os
+import copy
+import time
+import shutil
+import socket
+from datetime import datetime, timedelta
+from io import BytesIO
+from smb.SMBConnection import SMBConnection # pip install pysmb
+import json
+
+# Persistent JSON file tracking per-client import status; shared with the
+# other patriot_api scripts, so keep the path and shape stable.
+CLIENT_STATE_FILE = Path("/opt/patriot_api/client_state.json")
+# Ensure the parent directory exists before any read/write below.
+os.makedirs(CLIENT_STATE_FILE.parent, exist_ok=True)
+
+def load_client_state() -> dict:
+    """Return the persisted client-state mapping, or {} when the file is
+    missing or unreadable (read errors are logged, never raised)."""
+    if not CLIENT_STATE_FILE.is_file():
+        return {}
+    try:
+        raw = CLIENT_STATE_FILE.read_text(encoding="utf-8")
+        return json.loads(raw)
+    except Exception as e:
+        logging.error("Failed to read client state file %s: %s", CLIENT_STATE_FILE, e)
+        return {}
+
+def save_client_state(state: dict):
+    """Persist *state* as pretty-printed JSON, atomically: write to a .tmp
+    sibling, then os.replace() over the real file. Errors are logged only."""
+    try:
+        tmp_path = CLIENT_STATE_FILE.with_suffix(".tmp")
+        serialized = json.dumps(state, indent=2, ensure_ascii=False)
+        tmp_path.write_text(serialized, encoding="utf-8")
+        os.replace(tmp_path, CLIENT_STATE_FILE)
+    except Exception as e:
+        logging.error("Failed to write client state file %s: %s", CLIENT_STATE_FILE, e)
+
+def mark_clients_pending_import(client_ids: list[str]):
+    """Flag every id in *client_ids* as 'pending_import' in the shared state
+    file, stamping each entry with the current batch time. No-op for []."""
+    if not client_ids:
+        return
+    stamp = datetime.now().isoformat(timespec="seconds")
+    state = load_client_state()
+    state.update(
+        (cid, {"status": "pending_import", "last_batch": stamp})
+        for cid in client_ids
+    )
+    save_client_state(state)
+
+
+# --------- CONFIG ---------
+
+XML_ROOT_PATH = Path("/opt/patriot_api/out/clients")  # per-client XMLs (from main_v2)
+READY_DIR = Path("/opt/patriot_api/ready_for_import")  # combined XML output
+COMBINED_FILENAME = "clients.xml"
+LOG_FILE = "/opt/patriot_api/xml_combine.log"
+ERROR_LOG_FILE = "/opt/patriot_api/import_errors.log"
+
+# NOTE(review): sleep_until_next_run() below actually schedules at hh:59
+# (1 minute before the hour), not hh:55 as an earlier comment here claimed —
+# confirm which cadence is intended.
+RUN_INTERVAL = 3600  # still used as a safety fallback if needed
+MAX_CLIENTS_PER_RUN = 300  # cap clients merged per run; the rest wait for later runs
+
+# SMB / Windows share config (no kernel mount needed)
+SMB_ENABLED = True
+
+SMB_SERVER_IP = "10.181.149.83"  # Windows server IP
+SMB_SERVER_NAME = "PATRIOT"  # NetBIOS/hostname (can be anything if IP is used, but fill something)
+SMB_SHARE_NAME = "api_import"  # Share name (from //IP/share)
+
+# SECURITY NOTE(review): real credentials are hard-coded in source, and the
+# old "empty = guest" comments no longer matched the values. Move username
+# and password to environment variables or a secrets store, and rotate this
+# password — it is now exposed in version control.
+SMB_USERNAME = "administrator"
+SMB_PASSWORD = "wprs100qq!"
+SMB_DOMAIN = "WORKGROUP"
+
+# Remote path inside the share where clients.xml will be stored
+# e.g. "clients/clients.xml" or just "clients.xml"
+SMB_REMOTE_PATH = "clients.xml"
+SMB_RESULTS_FILENAME = "clients_Import_Results.txt"
+
+# --------------------------
+
+# Create output/log directories up front so later writes cannot fail on a
+# missing parent directory.
+os.makedirs(READY_DIR, exist_ok=True)
+os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)
+os.makedirs(os.path.dirname(ERROR_LOG_FILE), exist_ok=True)
+
+# Log to both the log file and the console.
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s [%(levelname)s] %(message)s",
+    handlers=[
+        logging.FileHandler(LOG_FILE, encoding="utf-8"),
+        logging.StreamHandler(),
+    ],
+)
+
+def log_missing_ids_to_error_log(missing_ids: list[str]):
+    """Append one timestamped line per unconfirmed client id to
+    ERROR_LOG_FILE. Write failures are logged, not raised; [] is a no-op."""
+    if not missing_ids:
+        return
+    try:
+        ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        lines = [
+            f"{ts} Missing or failed import for client __id={cid}\n"
+            for cid in missing_ids
+        ]
+        with open(ERROR_LOG_FILE, "a", encoding="utf-8") as fh:
+            fh.writelines(lines)
+    except Exception as e:
+        logging.error("Failed to write to error log %s: %s", ERROR_LOG_FILE, e)
+
+
+def load_combined_root(path: Path) -> ET.Element:
+    """Load the combined clients XML at *path* and return its root element.
+
+    A fresh, empty <Clients> element is returned when the file does not
+    exist, cannot be parsed, or has an unexpected root tag — so callers
+    always get a usable <Clients> root and never see an exception.
+    """
+    if not path.is_file():
+        logging.info("Combined XML %s does not exist yet, creating new.", path)
+        return ET.Element("Clients")
+
+    try:
+        tree = ET.parse(path)
+        root = tree.getroot()
+        if root.tag != "Clients":
+            # BUGFIX: the message previously read "instead of ; recreating."
+            # — the expected root name was missing from the log line.
+            logging.warning(
+                "Combined XML %s has root <%s> instead of <Clients>; recreating.",
+                path,
+                root.tag,
+            )
+            return ET.Element("Clients")
+        return root
+    except Exception as e:
+        logging.error("Failed to parse existing combined XML %s: %s; recreating.", path, e)
+        return ET.Element("Clients")
+
+
+def rows_by_id(root: ET.Element) -> dict[str, ET.Element]:
+    """Map each direct <Row> child's stripped __id text to its element.
+
+    Rows lacking an <__id> child, or whose __id text is empty/whitespace,
+    are skipped. Later rows with a duplicate id overwrite earlier ones.
+    """
+    out: dict[str, ET.Element] = {}
+    for row in root.findall("Row"):
+        id_el = row.find("__id")
+        if id_el is None:
+            continue
+        key = (id_el.text or "").strip()
+        if key:
+            out[key] = row
+    return out
+
+
+def upload_to_smb(local_path: Path) -> bool:
+    """
+    Upload the combined XML to the Windows share using SMB.
+
+    Before uploading:
+    - If an existing clients.xml and clients_Import_Results.txt are present
+      on the SMB share, verify that each __id in that clients.xml has a line
+      containing that id and the phrase "Completed processing client" in the
+      results file.
+    - Any IDs not matching are logged to ERROR_LOG_FILE.
+
+    An existing remote clients.xml is renamed to a timestamped backup before
+    the new file is stored.
+
+    Returns:
+        True -> upload considered successful (or SMB disabled)
+        False -> upload failed, caller should NOT delete source XMLs
+    """
+    if not SMB_ENABLED:
+        logging.info("SMB upload disabled in config; skipping upload but treating as success.")
+        return True
+
+    if not local_path.is_file():
+        logging.error("SMB upload: local file %s does not exist", local_path)
+        return False
+
+    conn = None
+    try:
+        my_name = socket.gethostname() or "patriot-api"
+        conn = SMBConnection(
+            SMB_USERNAME,
+            SMB_PASSWORD,
+            my_name,
+            SMB_SERVER_NAME,
+            domain=SMB_DOMAIN,
+            use_ntlm_v2=False,  # set True if you move to proper user+password auth
+            is_direct_tcp=True,  # connect directly to port 445
+        )
+
+        logging.info("SMB: connecting to %s (%s)...", SMB_SERVER_NAME, SMB_SERVER_IP)
+        if not conn.connect(SMB_SERVER_IP, 445, timeout=10):
+            logging.error("SMB: failed to connect to %s", SMB_SERVER_IP)
+            return False
+
+        # Split directory and filename in remote path
+        remote_dir, remote_name = os.path.split(SMB_REMOTE_PATH)
+        if not remote_dir:
+            remote_dir = "/"
+
+        # Build full paths for existing clients.xml and the results file
+        if remote_dir in ("", "/"):
+            remote_clients_path = remote_name
+            remote_results_path = SMB_RESULTS_FILENAME
+        else:
+            rd = remote_dir.rstrip("/")
+            remote_clients_path = f"{rd}/{remote_name}"
+            remote_results_path = f"{rd}/{SMB_RESULTS_FILENAME}"
+
+        # -------- PRE-UPLOAD CHECK vs clients_Import_Results.txt --------
+        try:
+            xml_buf = BytesIO()
+            res_buf = BytesIO()
+
+            # Try to retrieve both files; if either is missing, skip the check
+            try:
+                conn.retrieveFile(SMB_SHARE_NAME, remote_clients_path, xml_buf)
+                conn.retrieveFile(SMB_SHARE_NAME, remote_results_path, res_buf)
+                xml_buf.seek(0)
+                res_buf.seek(0)
+            except Exception as e:
+                logging.info(
+                    "SMB pre-upload check: could not retrieve existing clients.xml or "
+                    "results file (may not exist yet): %s", e
+                )
+            else:
+                # Parse existing clients.xml to gather IDs
+                try:
+                    tree_remote = ET.parse(xml_buf)
+                    root_remote = tree_remote.getroot()
+
+                    remote_ids = set()
+                    for row in root_remote.findall(".//Row"):
+                        id_el = row.find("__id")
+                        if id_el is not None and id_el.text and id_el.text.strip():
+                            remote_ids.add(id_el.text.strip())
+                except Exception as e:
+                    logging.error("SMB pre-upload check: failed to parse remote clients.xml: %s", e)
+                    remote_ids = set()
+
+                # Read results txt lines
+                results_lines = res_buf.getvalue().decode("utf-8", errors="ignore").splitlines()
+
+                missing_ids = []
+                if remote_ids:
+                    for cid in remote_ids:
+                        found_success = False
+                        for line in results_lines:
+                            # Very generic check: line contains id AND phrase "Completed processing client"
+                            if "Completed processing client" in line and cid in line:
+                                found_success = True
+                                break
+                        if not found_success:
+                            missing_ids.append(cid)
+
+                if missing_ids:
+                    log_missing_ids_to_error_log(missing_ids)
+                    logging.warning(
+                        "SMB pre-upload check: %d client(s) from existing clients.xml "
+                        "do not have 'Completed processing client' result lines. Logged to %s.",
+                        len(missing_ids),
+                        ERROR_LOG_FILE,
+                    )
+                else:
+                    if remote_ids:
+                        logging.info(
+                            "SMB pre-upload check: all %d client IDs in existing clients.xml "
+                            "appear as 'Completed processing client' in results file.",
+                            len(remote_ids),
+                        )
+        except Exception as e:
+            logging.error("SMB: unexpected error during pre-upload check: %s", e)
+
+        # -------- HANDLE EXISTING clients.xml (rotate/rename) --------
+        try:
+            files = conn.listPath(SMB_SHARE_NAME, remote_dir, pattern=remote_name)
+            exists = any(f.filename == remote_name for f in files)
+        except Exception:
+            exists = False
+
+        if exists:
+            ts = datetime.now().strftime("%Y%m%d_%H%M%S")
+            base, ext = os.path.splitext(remote_name)
+            backup_name = f"{base}_{ts}{ext}"
+
+            if remote_dir in ("", "/"):
+                old_path = remote_name
+                new_path = backup_name
+            else:
+                old_path = f"{remote_dir.rstrip('/')}/{remote_name}"
+                new_path = f"{remote_dir.rstrip('/')}/{backup_name}"
+
+            try:
+                conn.rename(SMB_SHARE_NAME, old_path, new_path)
+                logging.info("SMB: existing %s renamed to %s", old_path, new_path)
+            except Exception as e:
+                logging.error("SMB: failed to rename existing %s: %s", old_path, e)
+
+        # -------- UPLOAD NEW clients.xml --------
+        with open(local_path, "rb") as f:
+            logging.info(
+                "SMB: uploading %s to //%s/%s/%s",
+                local_path,
+                SMB_SERVER_IP,
+                SMB_SHARE_NAME,
+                SMB_REMOTE_PATH,
+            )
+            conn.storeFile(SMB_SHARE_NAME, SMB_REMOTE_PATH, f)
+
+        logging.info("SMB: upload completed successfully.")
+        return True
+
+    except Exception as e:
+        logging.error("SMB: error during upload of %s: %s", local_path, e)
+        return False
+    finally:
+        # BUGFIX: close on every path. Previously the connection was only
+        # closed on full success, leaking the socket whenever an exception
+        # occurred after connect() (retrieveFile/rename/storeFile failures).
+        if conn is not None:
+            conn.close()
+
+
+
+
+def combine_xml_once():
+    """Merge per-client XMLs into one clients.xml, upload it over SMB, then
+    mark the batch pending_import and delete the merged source files.
+
+    On upload failure the per-client files are kept so the same batch is
+    retried on the next run. When no new XMLs exist, an empty <Clients>
+    document is still written and uploaded.
+    """
+    combined_path = READY_DIR / COMBINED_FILENAME
+
+    # Scan all per-client XMLs
+    xml_files = sorted(XML_ROOT_PATH.rglob("*.xml"))  # sorted for deterministic order
+
+    # Track files to delete after upload
+    processed_files = []
+
+    # Rows keyed by client __id; later files win on duplicate ids.
+    combined_rows: dict[str, ET.Element] = {}
+
+    if xml_files:
+        logging.info("Found %d new per-client XML file(s) to merge.", len(xml_files))
+
+        # Limit how many XMLs (clients) we handle per run
+        limited_files = xml_files[:MAX_CLIENTS_PER_RUN]
+        if len(xml_files) > MAX_CLIENTS_PER_RUN:
+            logging.info(
+                "Limiting this run to %d clients; %d will be processed in later runs.",
+                MAX_CLIENTS_PER_RUN,
+                len(xml_files) - MAX_CLIENTS_PER_RUN,
+            )
+
+        for path in limited_files:
+            logging.info(" Reading %s", path)
+            try:
+                tree = ET.parse(path)
+                root = tree.getroot()
+            except Exception as e:
+                logging.error(" Failed to parse %s: %s", path, e)
+                # Keep previous behavior: delete bad file
+                processed_files.append(path)
+                continue
+
+            processed_files.append(path)
+
+            # Extract rows — accept <Clients> wrappers, bare <Row> roots, or
+            # any document with nested <Row> elements.
+            if root.tag == "Clients":
+                rows = root.findall("Row")
+            elif root.tag == "Row":
+                rows = [root]
+            else:
+                rows = root.findall(".//Row")
+
+            for row in rows:
+                id_el = row.find("__id")
+                if id_el is None or not (id_el.text or "").strip():
+                    logging.warning(" Skipping row without __id in %s", path)
+                    continue
+                cid = id_el.text.strip()
+                logging.info(" Including client __id=%s", cid)
+                # deepcopy detaches the row from its source tree before merge.
+                combined_rows[cid] = copy.deepcopy(row)
+
+    else:
+        logging.info("No NEW client XMLs found locally — will upload an empty Clients file.")
+
+
+    # Build XML root (may be empty)
+    new_root = ET.Element("Clients")
+    for cid, row in combined_rows.items():
+        new_root.append(row)
+
+    # Always write combined XML (new or empty) — atomically via .tmp + replace.
+    tmp_path = combined_path.with_suffix(".tmp")
+    try:
+        ET.ElementTree(new_root).write(tmp_path, encoding="utf-8", xml_declaration=True)
+        os.replace(tmp_path, combined_path)
+        logging.info("Wrote combined (or empty) clients.xml to %s", combined_path)
+    except Exception as e:
+        logging.error("Failed writing combined XML: %s", e)
+        if os.path.exists(tmp_path):
+            os.remove(tmp_path)
+        return
+
+    # Rotate & upload to SMB
+    upload_ok = upload_to_smb(combined_path)
+
+    if not upload_ok:
+        logging.warning(
+            "SMB upload failed; keeping per-client XMLs so this batch can be retried next run."
+        )
+        return
+
+    # Mark all combined client IDs as pending_import
+    try:
+        client_ids_in_batch = list(combined_rows.keys())  # these are the __id values
+        mark_clients_pending_import(client_ids_in_batch)
+        logging.info(
+            "Marked %d client(s) as pending_import in %s",
+            len(client_ids_in_batch),
+            CLIENT_STATE_FILE,
+        )
+    except Exception as e:
+        logging.error("Failed to update client pending_import state: %s", e)
+
+    # Delete ONLY the processed new XMLs if upload succeeded
+    for src in processed_files:
+        try:
+            src.unlink()
+            logging.info("Deleted processed source XML: %s", src)
+        except Exception as e:
+            logging.error("Failed to delete %s: %s", src, e)
+
+
+def sleep_until_next_run():
+    """Block until hh:59 — one minute before the next full hour."""
+    now = datetime.now()
+
+    # Truncate to the current hour, advance one hour, back off one minute.
+    upcoming_hour = now.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
+    run_at = upcoming_hour - timedelta(minutes=1)
+
+    # Started inside the final minute (e.g. 10:59:30)? Target the next hour's slot.
+    if run_at <= now:
+        run_at += timedelta(hours=1)
+
+    wait_seconds = (run_at - now).total_seconds()
+    logging.info("Next combine run scheduled at %s (in %.0f seconds)", run_at, wait_seconds)
+    time.sleep(wait_seconds)
+
+
+def main():
+    """Run combine/upload cycles forever, one per scheduled hh:59 slot."""
+    while True:
+        sleep_until_next_run()
+        logging.info("==== XML combine run ====")
+        combine_xml_once()
+        # Safety fallback: avoid a tight loop if the time calculation misbehaves.
+        time.sleep(1)
+
+
+if __name__ == "__main__":
+    main()