v.0.9 Beta

This commit is contained in:
Anders Knutsen 2025-12-03 09:12:19 +01:00
parent 9f67d87e9d
commit c9a3292bdd
6 changed files with 2689 additions and 421 deletions

View File

@ -1,10 +1,11 @@
{ {
"info": { "info": {
"_postman_id": "d549670f-756d-49fa-925b-dd82e8e9cc0c", "_postman_id": "f7926ba4-6fae-4e5e-973b-402456b66ceb",
"name": "Patriot API", "name": "Patriot API",
"description": "Common Status Codes\n\n- \\- 200 OK: Request successful (e.g.,UPSERT update, reads)\n \n- \\- 201 Created: Resource created (e.g., UPSERT create, add user, upsert zone)\n \n- \\- 204 No Content: Deleted successfully\n \n- \\- 401 Unauthorized: Missing/invalid/disabled/expired API key\n \n- \\- 404 Not Found: Client or resource not found\n \n- \\- 409 Conflict: Duplicate user number\n \n- \\- 422 Unprocessable Entity: Validation error (e.g., zone_id out of range)", "description": "Common Status Codes\n\n- \\- 200 OK: Request successful (e.g.,UPSERT update, reads)\n \n- \\- 201 Created: Resource created (e.g., UPSERT create, add user, upsert zone)\n \n- \\- 204 No Content: Deleted successfully\n \n- \\- 401 Unauthorized: Missing/invalid/disabled/expired API key\n \n- \\- 404 Not Found: Client or resource not found\n \n- \\- 409 Conflict: Duplicate user number\n \n- \\- 422 Unprocessable Entity: Validation error (e.g., zone_id out of range)",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
"_exporter_id": "18764749" "_exporter_id": "50473838",
"_collection_link": "https://mrnutsen-7f68d4fc-3205107.postman.co/workspace/A24_Patriot_API~882aa50c-5471-4e78-aa12-1811e10b3979/collection/50473838-f7926ba4-6fae-4e5e-973b-402456b66ceb?action=share&source=collection_link&creator=50473838"
}, },
"item": [ "item": [
{ {
@ -56,12 +57,11 @@
} }
], ],
"url": { "url": {
"raw": "{{baseUrl}}/clients/zones", "raw": "{{baseUrl}}/zones",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients",
"zones" "zones"
] ]
}, },
@ -86,12 +86,11 @@
} }
], ],
"url": { "url": {
"raw": "{{baseUrl}}/clients/users", "raw": "{{baseUrl}}/users",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients",
"users" "users"
] ]
}, },
@ -121,13 +120,12 @@
} }
], ],
"url": { "url": {
"raw": "{{baseUrl}}/clients/users", "raw": "{{baseUrl}}/user",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients", "user"
"users"
] ]
}, },
"description": "Get a specific user\n\nClient ID must be set in the \"X-Client-Id\" header.\n\nUser ID must be set in the \"X-User-Id\" header." "description": "Get a specific user\n\nClient ID must be set in the \"X-Client-Id\" header.\n\nUser ID must be set in the \"X-User-Id\" header."
@ -153,7 +151,7 @@
], ],
"body": { "body": {
"mode": "raw", "mode": "raw",
"raw": "{\n \"client_id\": 123456789,\n \"info\": {\n \"Name\": \"Anders Knutsen\",\n \"Alias\": \"000FD267\",\n \"Location\": \"Lislebyveien 58\",\n \"area_code\": \"1604\",\n \"area\": \"Fredrikstad\",\n \"BusPhone\": \"69310000\",\n \"Email\": \"post@ostsik.no\",\n \"OKPassword\": \"franzjager\",\n \"SpecRequest\": \"Dette skal gjøres ved alarm på denne kunden.\",\n \"NoSigsMon\": \"ActiveAny\",\n \"SinceDays\": 1,\n \"SinceHrs\": 0,\n \"SinceMins\": 30,\n \"ResetNosigsIgnored\": true,\n \"ResetNosigsDays\": 7,\n \"ResetNosigsHrs\": 0,\n \"ResetNosigsMins\": 0,\n \"InstallDateTime\": \"2023-02-20\",\n \"PanelName\": \"Ajax\",\n \"PanelSite\": \"Stue\",\n \"KeypadLocation\": \"Inngang\",\n \"SPPage\": \"Ekstra informasjon som kan være relevant.\"\n }\n}", "raw": "{\n \"client_id\": 4848,\n \"info\": {\n \"Name\": \"Anders Knutsen\",\n \"Alias\": \"000FD267\",\n \"Location\": \"Lislebyveien 58\",\n \"area_code\": \"1604\",\n \"area\": \"Fredrikstad\",\n \"BusPhone\": \"69310000\",\n \"Email\": \"post@ostsik.no\",\n \"OKPassword\": \"franzjager\",\n \"SpecRequest\": \"Dette skal gjøres ved alarm på denne kunden.\",\n \"NoSigsMon\": \"1\",\n \"SinceDays\": 1,\n \"SinceHrs\": 0,\n \"SinceMins\": 30,\n \"ResetNosigsIgnored\": true,\n \"ResetNosigsDays\": 7,\n \"ResetNosigsHrs\": 0,\n \"ResetNosigsMins\": 0,\n \"InstallDateTime\": \"2023-02-20\",\n \"PanelName\": \"Ajax\",\n \"PanelSite\": \"Stue\",\n \"KeypadLocation\": \"Inngang\",\n \"SPPage\": \"Ekstra informasjon som kan være relevant.\"\n }\n}",
"options": { "options": {
"raw": { "raw": {
"language": "json" "language": "json"
@ -186,7 +184,7 @@
], ],
"body": { "body": {
"mode": "raw", "mode": "raw",
"raw": "{\n \"client_id\": 123456789,\n \"info\": {\n \"Name\": \"optional\"\n }\n}", "raw": "{\n \"client_id\": {{clientId}},\n \"info\": {\n \"Name\": \"Anders Knutsen\",\n \"Alias\": \"000FD267\",\n \"Location\": \"Bergbyveien\",\n \"area_code\": \"1730\",\n \"area\": \"Ise\",\n \"BusPhone\": \"69310000\",\n \"Email\": \"post@ostsik.no\",\n \"OKPassword\": \"franzjager\",\n \"SpecRequest\": \"Dette skal gjøres ved alarm på denne kunden.\",\n \"NoSigsMon\": \"Disabled\",\n \"SinceDays\": 1,\n \"SinceHrs\": 0,\n \"SinceMins\": 30,\n \"ResetNosigsIgnored\": true,\n \"ResetNosigsDays\": 14,\n \"ResetNosigsHrs\": 0,\n \"ResetNosigsMins\": 0,\n \"InstallDateTime\": \"2025-12-01\",\n \"PanelName\": \"Ajax\",\n \"PanelSite\": \"Ved tv\",\n \"KeypadLocation\": \"Inngang\",\n \"SPPage\": \"Ekstra informasjon som kan være relevant.\"\n }\n}",
"options": { "options": {
"raw": { "raw": {
"language": "json" "language": "json"
@ -207,9 +205,9 @@
"response": [] "response": []
}, },
{ {
"name": "Create/Update zone", "name": "Create zone",
"request": { "request": {
"method": "PUT", "method": "POST",
"header": [ "header": [
{ {
"key": "Authorization", "key": "Authorization",
@ -219,7 +217,7 @@
], ],
"body": { "body": {
"mode": "raw", "mode": "raw",
"raw": "{\n \"client_id\": 123456789,\n \"zone_id\": 3,\n \"Zone_area\": \"RD Stue\",\n \"ModuleNo\": 0\n}\n", "raw": "{\n \"client_id\": 123456789,\n \"zone\": {\n \"ZoneNo\": 62,\n \"ZoneText\": \"Tastatur\"\n }\n}\n",
"options": { "options": {
"raw": { "raw": {
"language": "json" "language": "json"
@ -227,12 +225,44 @@
} }
}, },
"url": { "url": {
"raw": "{{baseUrl}}/clients/zones", "raw": "{{baseUrl}}/zones",
"host": [
"{{baseUrl}}"
],
"path": [
"zones"
]
},
"description": "Creates or updates a zone"
},
"response": []
},
{
"name": "Update zone",
"request": {
"method": "PATCH",
"header": [
{
"key": "Authorization",
"value": "Bearer {{apiKey}}",
"type": "text"
}
],
"body": {
"mode": "raw",
"raw": "{\n \"client_id\": 123456789,\n \"zone\": {\n \"ZoneNo\": 62,\n \"ZoneText\": \"Tastaturrrrrrr\"\n }\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "{{baseUrl}}/zones",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients",
"zones" "zones"
] ]
}, },
@ -253,7 +283,7 @@
], ],
"body": { "body": {
"mode": "raw", "mode": "raw",
"raw": "{\n \"client_id\": {{clientId}},\n \"user\": {\n \"User_Name\": \"Anders Knutsen\",\n \"MobileNo\": \"+4740642018\",\n \"MobileNoOrder\": 1,\n \"Email\": \"anders@ostsik.no\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"\",\n \"CallOrder\": 1\n }\n}", "raw": "{\n \"client_id\": 5555,\n \"user\": {\n \"User_Name\": \"Anders Knutsen\",\n \"MobileNo\": \"+4740642018\",\n \"MobileNoOrder\": 1,\n \"Email\": \"anders@ostsik.no\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"\",\n \"CallOrder\": 1\n }\n}\n",
"options": { "options": {
"raw": { "raw": {
"language": "json" "language": "json"
@ -261,12 +291,11 @@
} }
}, },
"url": { "url": {
"raw": "{{baseUrl}}/clients/users", "raw": "{{baseUrl}}/users",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients",
"users" "users"
] ]
}, },
@ -277,7 +306,7 @@
{ {
"name": "Update user", "name": "Update user",
"request": { "request": {
"method": "PUT", "method": "PATCH",
"header": [ "header": [
{ {
"key": "Authorization", "key": "Authorization",
@ -287,7 +316,7 @@
], ],
"body": { "body": {
"mode": "raw", "mode": "raw",
"raw": "{\n \"client_id\": {{clientId}},\n \"user\": {\n \"User_Name\": \"Changed Name\",\n \"MobileNo\": \"+4798765432\",\n \"MobileNoOrder\": 1,\n \"Email\": \"new@email.com\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"New instructions\",\n \"CallOrder\": 0\n }\n}", "raw": "{\n \"client_id\": {{clientId}},\n \"user\": {\n \"User_Name\": \"Anders Knutsen\",\n \"MobileNo\": \"40642018\",\n \"MobileNoOrder\": 1,\n \"Email\": \"anders@ostsik.no\",\n \"Type\": \"U\",\n \"UserNo\": 1,\n \"Instructions\": \"Do this 3\",\n \"CallOrder\": 1\n }\n}",
"options": { "options": {
"raw": { "raw": {
"language": "json" "language": "json"
@ -295,12 +324,11 @@
} }
}, },
"url": { "url": {
"raw": "{{baseUrl}}/clients/users", "raw": "{{baseUrl}}/users",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients",
"users" "users"
] ]
}, },
@ -326,7 +354,7 @@
], ],
"body": { "body": {
"mode": "raw", "mode": "raw",
"raw": "{\n \"client_id\": {{clientId}},\n \"zone_id\": 1\n}", "raw": "{\n \"client_id\": {{clientId}},\n \"zone_no\": 62\n}",
"options": { "options": {
"raw": { "raw": {
"language": "json" "language": "json"
@ -334,12 +362,11 @@
} }
}, },
"url": { "url": {
"raw": "{{baseUrl}}/clients/zones", "raw": "{{baseUrl}}/zones",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients",
"zones" "zones"
] ]
}, },
@ -368,12 +395,11 @@
} }
}, },
"url": { "url": {
"raw": "{{baseUrl}}/clients/users", "raw": "{{baseUrl}}/users",
"host": [ "host": [
"{{baseUrl}}" "{{baseUrl}}"
], ],
"path": [ "path": [
"clients",
"users" "users"
] ]
}, },
@ -394,7 +420,7 @@
], ],
"body": { "body": {
"mode": "raw", "mode": "raw",
"raw": "{\n \"client_id\": {{clientId}}\n}", "raw": "{\n \"client_id\": 1234\n}",
"options": { "options": {
"raw": { "raw": {
"language": "json" "language": "json"

View File

@ -8,7 +8,7 @@
"port": "BASE11", "port": "BASE11",
"installer_name": "Østfold Sikkerhetsservice AS", "installer_name": "Østfold Sikkerhetsservice AS",
"installer_email": "service@ostsik.no", "installer_email": "post@ostsik.no",
"use_glob_callouts": true, "use_glob_callouts": true,
"show_on_callouts": false, "show_on_callouts": false,
@ -19,9 +19,9 @@
"glob_callouts2": "TLF02BASE01", "glob_callouts2": "TLF02BASE01",
"alt_lookup": true, "alt_lookup": true,
"alt_alarm_no": "CID4BASE01", "alt_alarm_no": "SIA1000101",
"convert_type": "None", "convert_type": "None",
"siginterpret": "None", "siginterpret": "SIADecimal",
"client_groupings": [ "client_groupings": [
{ {
@ -73,4 +73,4 @@
} }
] ]
} }
] ]

2272
main.py

File diff suppressed because it is too large Load Diff

View File

@ -18,7 +18,7 @@ os.environ.setdefault("XML_DIR", str(bd / "out" / "clients"))
os.makedirs(os.environ["XML_DIR"], exist_ok=True) os.makedirs(os.environ["XML_DIR"], exist_ok=True)
try: try:
from main import app # must succeed from main import app # import FastAPI app from main.py
print("[launcher] imported main.app OK") print("[launcher] imported main.app OK")
except Exception as e: except Exception as e:
print(f"[launcher] FAILED to import main.app: {e}") print(f"[launcher] FAILED to import main.app: {e}")
@ -31,6 +31,6 @@ except Exception as e:
sys.exit(1) sys.exit(1)
if __name__ == "__main__": if __name__ == "__main__":
print("[launcher] running uvicorn on 0.0.0.0:7071") print("[launcher] running uvicorn on 0.0.0.0:8082")
uvicorn.run(app, host="0.0.0.0", port=8081, log_level="info") uvicorn.run(app, host="0.0.0.0", port=8082, log_level="info")
print("[launcher] uvicorn.run returned (server stopped)") # should only print on shutdown print("[launcher] uvicorn.run returned (server stopped)") # should only print on shutdown

306
test_change.py Normal file
View File

@ -0,0 +1,306 @@
#!/usr/bin/env python3
"""Load-test helper: randomly mutates clients, users and zones via the Patriot API."""
import random
import string
import time
from datetime import datetime, timedelta

import requests

# ================== CONFIG ==================
# NOTE(review): launcher/nginx ports changed elsewhere in this commit (8081 -> 8082);
# confirm nginx still proxies to this upstream port.
BASE_URL = "http://10.181.149.220:8081"  # through nginx
# SECURITY NOTE(review): a real API key is hardcoded and committed to version
# control — rotate it and load it from an environment variable or secret store.
API_KEY = "_2sW6roe2ZQ4V6Cldo0v295fakHT8vBHqHScfliX445tZuzxDwMRqjPeCE7FDcVVr"  # your real API key

# Auth style: choose one of these:
USE_X_API_KEY = False  # send X-API-Key header
USE_BEARER = True  # send Authorization: Bearer <API_KEY>

# <<< IMPORTANT >>>
# List of existing 4-digit client IDs you want to mutate.
# Fill this with IDs you know exist (created by your previous test/import).
TARGET_CLIENT_IDS = [
    # Example: 1234, 5678, 9012
]

# If TARGET_CLIENT_IDS is empty, we fallback to random 4-digit IDs (you'll get some 404s)
FALLBACK_RANDOM_RANGE = (1235, 1244)

# How aggressive should this be?
SLEEP_BETWEEN_OPS = 10  # seconds between operations

# Relative probabilities of operations (used as weights for random.choices in pick_operation)
OP_WEIGHTS = {
    "update_client": 0.4,
    "create_or_update_user": 0.3,
    "create_or_update_zone": 0.3,
}
# ============================================
def auth_headers():
    """Build the HTTP headers for an API call, honoring the configured auth style.

    Both X-API-Key and Bearer headers may be emitted if both flags are set;
    neither is emitted if both are off.
    """
    hdrs = {"Content-Type": "application/json"}
    if USE_X_API_KEY:
        hdrs["X-API-Key"] = API_KEY
    if USE_BEARER:
        hdrs["Authorization"] = f"Bearer {API_KEY}"
    return hdrs
def pick_client_id() -> int:
    """Return a client ID to mutate: a configured target if any, else a random fallback."""
    if not TARGET_CLIENT_IDS:
        return random.randint(*FALLBACK_RANDOM_RANGE)
    return random.choice(TARGET_CLIENT_IDS)
def random_phone():
    """Return a fake Norwegian mobile number: '+47' followed by 8 random digits."""
    digits = "".join(random.choices("0123456789", k=8))
    return "+47" + digits
def random_email(name_stub: str):
    """Derive a throwaway e-mail address from *name_stub*.

    Non-alphanumeric characters are dropped from the lowercased stub; an
    empty result falls back to the local part 'user'.
    """
    local = "".join(ch for ch in name_stub.lower() if ch.isalnum()) or "user"
    domain = random.choice(["example.com", "test.com", "mailinator.com", "demo.net"])
    return f"{local}@{domain}"
def random_string(prefix: str, length: int = 8):
    """Return *prefix* followed by *length* random alphanumeric characters."""
    alphabet = string.ascii_letters + string.digits
    suffix = "".join(random.choice(alphabet) for _ in range(length))
    return prefix + suffix
def random_client_info():
    """Generate a plausible, fully randomized client 'info' payload.

    Field names match the Patriot API's expected casing (mixed CamelCase and
    snake_case, as defined by the server).
    """
    name = random_string("Client_", 5)
    alias = random_string("AL", 4)
    # Build a fake street address.
    house_no = random.randint(1, 200)
    street = random.choice(["Gate", "Veien", "Stien", "Allé"])
    location = f"{random.choice(['Test', 'Demo', 'Fake'])} {street} {house_no}"
    return {
        "Name": name,
        "Alias": alias,
        "Location": location,
        "area_code": str(random.randint(1000, 9999)),
        "area": random.choice(["Oslo", "Sarpsborg", "Bergen", "Trondheim", "Stavanger"]),
        "BusPhone": random_phone(),
        "Email": random_email(name),
        "OKPassword": random_string("pwd_", 6),
        "SpecRequest": random.choice(
            [
                "Script-endret testkunde.",
                "Oppdatert spesialinstruks fra load-test.",
                "Ingen reelle tiltak, kun test.",
            ]
        ),
        "NoSigsMon": random.choice(["ActiveAny", "Disabled"]),
        "SinceDays": random.randint(0, 7),
        "SinceHrs": random.randint(0, 23),
        "SinceMins": random.randint(0, 59),
        "ResetNosigsIgnored": random.choice([True, False]),
        "ResetNosigsDays": random.randint(0, 14),
        "ResetNosigsHrs": random.randint(0, 23),
        "ResetNosigsMins": random.randint(0, 59),
        # A date within the last year, ISO formatted (date only).
        "InstallDateTime": (datetime.now() - timedelta(days=random.randint(0, 365))).date().isoformat(),
        "PanelName": random.choice(["Ajax", "Future"]),
        "PanelSite": random.choice(["Stue", "Gang", "Kontor", "Lager"]),
        "KeypadLocation": random.choice(["Inngang", "Bakdør", "Garasje", "2. etg"]),
        "SPPage": random.choice(
            [
                "Ekstra info endret av load-test.",
                "Test entry, kan ignoreres.",
                "API performance-test.",
            ]
        ),
    }
def random_user_payload(existing_user_no: int | None = None):
    """Build a randomized 'user' payload.

    When *existing_user_no* is given it is used verbatim (to hit an existing
    user); otherwise a random UserNo in 1..5 is chosen.
    """
    first = random.choice(["Anders", "Per", "Lise", "Kari", "Ole", "Nina"])
    last = random.choice(["Knutsen", "Olsen", "Hansen", "Johansen", "Pedersen"])
    full_name = f"{first} {last}"
    phone = random_phone()
    mail = random_email(full_name.replace(" ", "."))
    note = random.choice(
        [
            "Oppdatert instrukser.",
            "Ring ved alarm.",
            "Kun SMS.",
            "Kontakt vaktmester først.",
        ]
    )
    order = random.randint(0, 3)
    phone_order = random.randint(1, 3)
    user_no = existing_user_no if existing_user_no is not None else random.randint(1, 5)
    return {
        "User_Name": full_name,
        "MobileNo": phone,
        "MobileNoOrder": phone_order,
        "Email": mail,
        "Type": "U",
        "UserNo": user_no,
        "Instructions": note,
        "CallOrder": order,
    }
def random_zone_payload(existing_zone_no: int | None = None):
zone_names = [
"Stue",
"Kjøkken",
"Gang",
"Soverom",
"Garasje",
"Kontor",
"Lager",
"Uteområde",
]
if existing_zone_no is None:
zone_no = random.randint(1, 20)
else:
zone_no = existing_zone_no
zone_text = random.choice(zone_names) + " " + random.choice(["1", "2", "A", "B"])
return {"ZoneNo": zone_no, "ZoneText": zone_text}
def do_update_client(client_id: int):
    """PUT /clients with a fresh random info payload; return the HTTP status code.

    Errors (status >= 400) get the first 500 bytes of the body printed.
    """
    body = {
        "client_id": client_id,
        "info": random_client_info(),
    }
    resp = requests.put(f"{BASE_URL}/clients", json=body, headers=auth_headers(), timeout=10)
    print(f"[update_client] client_id={client_id} -> {resp.status_code}")
    if resp.status_code >= 400:
        print(f" body: {resp.text[:500]}")
    return resp.status_code
def do_create_or_update_user(client_id: int):
    """POST /users for *client_id*; return the HTTP status code.

    50/50 between a UserNo the seed script likely created (1-3, i.e. an
    update) and a higher number (4-10, i.e. probably a create).
    """
    if random.random() < 0.5:
        user_no = random.randint(1, 3)  # likely already created by your first script
    else:
        user_no = random.randint(4, 10)  # maybe new user
    body = {
        "client_id": client_id,
        "user": random_user_payload(existing_user_no=user_no),
    }
    resp = requests.post(f"{BASE_URL}/users", json=body, headers=auth_headers(), timeout=10)
    print(f"[user] client_id={client_id} UserNo={user_no} -> {resp.status_code}")
    if resp.status_code >= 400:
        print(f" body: {resp.text[:500]}")
    return resp.status_code
def do_create_or_update_zone(client_id: int):
    """POST /zones for *client_id*; return the HTTP status code.

    Same trick as users: 50/50 between a ZoneNo that likely exists (1-10)
    and a probably-new one (11-30).
    """
    if random.random() < 0.5:
        zone_no = random.randint(1, 10)
    else:
        zone_no = random.randint(11, 30)
    body = {
        "client_id": client_id,
        "zone": random_zone_payload(existing_zone_no=zone_no),
    }
    resp = requests.post(f"{BASE_URL}/zones", json=body, headers=auth_headers(), timeout=10)
    print(f"[zone] client_id={client_id} ZoneNo={zone_no} -> {resp.status_code}")
    if resp.status_code >= 400:
        print(f" body: {resp.text[:500]}")
    return resp.status_code
def pick_operation():
    """Choose one operation name, weighted by OP_WEIGHTS."""
    names = list(OP_WEIGHTS)
    return random.choices(names, weights=[OP_WEIGHTS[n] for n in names], k=1)[0]
def main():
    """Run the mutation loop forever: pick a client, pick an op, execute, sleep.

    Stops cleanly on Ctrl+C. HTTP-level failures are printed and skipped so a
    flaky server does not kill the load test.
    """
    if not TARGET_CLIENT_IDS:
        print(
            "WARNING: TARGET_CLIENT_IDS is empty.\n"
            " Script will use random 4-digit client IDs and you may see many 404s."
        )
    else:
        print(f"Targeting these client IDs: {TARGET_CLIENT_IDS}")
    print("Starting random modification loop. Press Ctrl+C to stop.\n")
    op_count = 0
    try:
        while True:
            client_id = pick_client_id()
            op = pick_operation()
            print(f"\n=== op #{op_count} ===")
            print(f"Client: {client_id}, operation: {op}")
            try:
                # Dispatch to the chosen mutation; each helper prints its own result.
                if op == "update_client":
                    do_update_client(client_id)
                elif op == "create_or_update_user":
                    do_create_or_update_user(client_id)
                elif op == "create_or_update_zone":
                    do_create_or_update_zone(client_id)
                else:
                    print(f"Unknown operation {op}, skipping.")
            except requests.RequestException as e:
                # Network/timeout errors: log and continue with the next op.
                print(f"HTTP error: {e}")
            op_count += 1
            time.sleep(SLEEP_BETWEEN_OPS)
    except KeyboardInterrupt:
        print("\nStopping on user request (Ctrl+C).")


if __name__ == "__main__":
    main()

434
xml_combine_for_import.py Executable file
View File

@ -0,0 +1,434 @@
#!/usr/bin/env python3
import xml.etree.ElementTree as ET
from pathlib import Path
import logging
import os
import copy
import time
import shutil
import socket
from datetime import datetime, timedelta
from io import BytesIO
from smb.SMBConnection import SMBConnection # pip install pysmb
import json
# Persistent per-client import state (JSON), shared with other scripts on this host.
CLIENT_STATE_FILE = Path("/opt/patriot_api/client_state.json")
os.makedirs(CLIENT_STATE_FILE.parent, exist_ok=True)  # ensure the directory exists before first write
def load_client_state() -> dict:
    """Read the persisted client-state JSON; return {} if missing or unreadable."""
    if not CLIENT_STATE_FILE.is_file():
        return {}
    try:
        raw = CLIENT_STATE_FILE.read_text(encoding="utf-8")
        return json.loads(raw)
    except Exception as e:
        # Corrupt/unreadable state is logged but never fatal.
        logging.error("Failed to read client state file %s: %s", CLIENT_STATE_FILE, e)
        return {}
def save_client_state(state: dict):
    """Atomically persist *state*: write a .tmp sibling, then rename over the target."""
    try:
        tmp = CLIENT_STATE_FILE.with_suffix(".tmp")
        payload = json.dumps(state, indent=2, ensure_ascii=False)
        tmp.write_text(payload, encoding="utf-8")
        os.replace(tmp, CLIENT_STATE_FILE)
    except Exception as e:
        # Failure to persist is logged; callers proceed on a best-effort basis.
        logging.error("Failed to write client state file %s: %s", CLIENT_STATE_FILE, e)
def mark_clients_pending_import(client_ids: list[str]):
    """Flag every ID in *client_ids* as 'pending_import' in the state file.

    No-op on an empty list. All IDs in a batch share one timestamp.
    """
    if not client_ids:
        return
    stamp = datetime.now().isoformat(timespec="seconds")
    state = load_client_state()
    for cid in client_ids:
        state[cid] = {
            "status": "pending_import",
            "last_batch": stamp,
        }
    save_client_state(state)
# --------- CONFIG ---------
XML_ROOT_PATH = Path("/opt/patriot_api/out/clients")  # per-client XMLs (from main_v2)
READY_DIR = Path("/opt/patriot_api/ready_for_import")  # combined XML output
COMBINED_FILENAME = "clients.xml"
LOG_FILE = "/opt/patriot_api/xml_combine.log"
ERROR_LOG_FILE = "/opt/patriot_api/import_errors.log"
# Script runs 1 minute before every hour (hh:59 — see sleep_until_next_run), so no fixed RUN_INTERVAL needed
RUN_INTERVAL = 3600  # still used as a safety fallback if needed
MAX_CLIENTS_PER_RUN = 300  # cap per run; any remainder is picked up by later runs
# SMB / Windows share config (no kernel mount needed)
SMB_ENABLED = True
SMB_SERVER_IP = "10.181.149.83"  # Windows server IP
SMB_SERVER_NAME = "PATRIOT"  # NetBIOS/hostname (can be anything if IP is used, but fill something)
SMB_SHARE_NAME = "api_import"  # Share name (from //IP/share)
# NOTE(review): the "guest" comments below no longer match the values — real
# administrator credentials are hardcoded here and committed to version control.
# SECURITY: move these to environment variables / a secrets store and rotate the password.
SMB_USERNAME = "administrator"  # empty = guest
SMB_PASSWORD = "wprs100qq!"  # empty = guest
SMB_DOMAIN = "WORKGROUP"  # often empty for guest
# Remote path inside the share where clients.xml will be stored
# e.g. "clients/clients.xml" or just "clients.xml"
SMB_REMOTE_PATH = "clients.xml"
SMB_RESULTS_FILENAME = "clients_Import_Results.txt"  # import-result log produced on the share
# --------------------------
os.makedirs(READY_DIR, exist_ok=True)
os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)
os.makedirs(os.path.dirname(ERROR_LOG_FILE), exist_ok=True)
# Log to both the file and the console with one shared format.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=[
        logging.FileHandler(LOG_FILE, encoding="utf-8"),
        logging.StreamHandler(),
    ],
)
def log_missing_ids_to_error_log(missing_ids: list[str]):
    """Append one timestamped line per missing/failed client ID to ERROR_LOG_FILE.

    No-op on an empty list; write failures are logged, never raised.
    """
    if not missing_ids:
        return
    try:
        stamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        lines = [f"{stamp} Missing or failed import for client __id={cid}\n" for cid in missing_ids]
        with open(ERROR_LOG_FILE, "a", encoding="utf-8") as fh:
            fh.writelines(lines)
    except Exception as e:
        logging.error("Failed to write to error log %s: %s", ERROR_LOG_FILE, e)
def load_combined_root(path: Path) -> ET.Element:
    """Parse the combined XML at *path*, falling back to a fresh empty <Clients> root.

    A missing file, a parse failure, or a wrong root tag all yield a brand-new
    empty <Clients> element (logged at the appropriate level).
    """
    if not path.is_file():
        logging.info("Combined XML %s does not exist yet, creating new.", path)
        return ET.Element("Clients")
    try:
        root = ET.parse(path).getroot()
    except Exception as e:
        logging.error("Failed to parse existing combined XML %s: %s; recreating.", path, e)
        return ET.Element("Clients")
    if root.tag != "Clients":
        logging.warning(
            "Combined XML %s has root <%s> instead of <Clients>; recreating.",
            path,
            root.tag,
        )
        return ET.Element("Clients")
    return root
def rows_by_id(root: ET.Element) -> dict[str, ET.Element]:
    """Map stripped <__id> text -> its direct-child <Row> element.

    Rows without an <__id> child, or with empty/whitespace-only id text, are
    skipped. Later duplicates overwrite earlier ones.
    """
    result: dict[str, ET.Element] = {}
    for row in root.findall("Row"):
        id_el = row.find("__id")
        text = (id_el.text or "").strip() if id_el is not None else ""
        if text:
            result[text] = row
    return result
def upload_to_smb(local_path: Path) -> bool:
    """
    Upload the combined XML to the Windows share using SMB.
    Before uploading:
    - If an existing clients.xml and clients_Import_Results.txt are present
      on the SMB share, verify that each __id in that clients.xml has a line
      in the results file containing that id and the phrase
      "Completed processing client".
    - Any IDs not matching are logged to ERROR_LOG_FILE.
    An existing remote clients.xml is rotated to a timestamped backup name
    before the new file is stored.
    Returns:
        True  -> upload considered successful (or SMB disabled)
        False -> upload failed, caller should NOT delete source XMLs
    """
    if not SMB_ENABLED:
        logging.info("SMB upload disabled in config; skipping upload but treating as success.")
        return True
    if not local_path.is_file():
        logging.error("SMB upload: local file %s does not exist", local_path)
        return False
    try:
        my_name = socket.gethostname() or "patriot-api"
        conn = SMBConnection(
            SMB_USERNAME,
            SMB_PASSWORD,
            my_name,
            SMB_SERVER_NAME,
            domain=SMB_DOMAIN,
            use_ntlm_v2=False,  # set True if you move to proper user+password auth
            is_direct_tcp=True,  # connect directly to port 445
        )
        logging.info("SMB: connecting to %s (%s)...", SMB_SERVER_NAME, SMB_SERVER_IP)
        if not conn.connect(SMB_SERVER_IP, 445, timeout=10):
            logging.error("SMB: failed to connect to %s", SMB_SERVER_IP)
            return False
        # Split directory and filename in remote path
        remote_dir, remote_name = os.path.split(SMB_REMOTE_PATH)
        if not remote_dir:
            remote_dir = "/"
        # Build full paths for existing clients.xml and the results file.
        # Root-of-share paths get no directory prefix.
        if remote_dir in ("", "/"):
            remote_clients_path = remote_name
            remote_results_path = SMB_RESULTS_FILENAME
        else:
            rd = remote_dir.rstrip("/")
            remote_clients_path = f"{rd}/{remote_name}"
            remote_results_path = f"{rd}/{SMB_RESULTS_FILENAME}"
        # -------- PRE-UPLOAD CHECK vs clients_Import_Results.txt --------
        try:
            xml_buf = BytesIO()
            res_buf = BytesIO()
            # Try to retrieve both files; if either is missing, skip the check
            try:
                conn.retrieveFile(SMB_SHARE_NAME, remote_clients_path, xml_buf)
                conn.retrieveFile(SMB_SHARE_NAME, remote_results_path, res_buf)
                xml_buf.seek(0)
                res_buf.seek(0)
            except Exception as e:
                # First run or results not produced yet — nothing to verify.
                logging.info(
                    "SMB pre-upload check: could not retrieve existing clients.xml or "
                    "results file (may not exist yet): %s", e
                )
            else:
                # Parse existing clients.xml to gather IDs
                try:
                    tree_remote = ET.parse(xml_buf)
                    root_remote = tree_remote.getroot()
                    remote_ids = set()
                    for row in root_remote.findall(".//Row"):
                        id_el = row.find("__id")
                        if id_el is not None and id_el.text and id_el.text.strip():
                            remote_ids.add(id_el.text.strip())
                except Exception as e:
                    logging.error("SMB pre-upload check: failed to parse remote clients.xml: %s", e)
                    remote_ids = set()
                # Read results txt lines (tolerate odd encodings from the Windows side)
                results_lines = res_buf.getvalue().decode("utf-8", errors="ignore").splitlines()
                missing_ids = []
                if remote_ids:
                    for cid in remote_ids:
                        found_success = False
                        for line in results_lines:
                            # Very generic check: line contains id AND the phrase "Completed processing client"
                            if "Completed processing client" in line and cid in line:
                                found_success = True
                                break
                        if not found_success:
                            missing_ids.append(cid)
                if missing_ids:
                    log_missing_ids_to_error_log(missing_ids)
                    logging.warning(
                        "SMB pre-upload check: %d client(s) from existing clients.xml "
                        "do not have 'Completed processing client' result lines. Logged to %s.",
                        len(missing_ids),
                        ERROR_LOG_FILE,
                    )
                else:
                    if remote_ids:
                        logging.info(
                            "SMB pre-upload check: all %d client IDs in existing clients.xml "
                            "appear as 'Completed processing client' in results file.",
                            len(remote_ids),
                        )
        except Exception as e:
            # The pre-check is advisory only: never block the upload on it.
            logging.error("SMB: unexpected error during pre-upload check: %s", e)
        # -------- HANDLE EXISTING clients.xml (rotate/rename) --------
        try:
            files = conn.listPath(SMB_SHARE_NAME, remote_dir, pattern=remote_name)
            exists = any(f.filename == remote_name for f in files)
        except Exception:
            exists = False
        if exists:
            # Rename the current remote file to clients_<timestamp>.xml before overwriting.
            ts = datetime.now().strftime("%Y%m%d_%H%M%S")
            base, ext = os.path.splitext(remote_name)
            backup_name = f"{base}_{ts}{ext}"
            if remote_dir in ("", "/"):
                old_path = remote_name
                new_path = backup_name
            else:
                old_path = f"{remote_dir.rstrip('/')}/{remote_name}"
                new_path = f"{remote_dir.rstrip('/')}/{backup_name}"
            try:
                conn.rename(SMB_SHARE_NAME, old_path, new_path)
                logging.info("SMB: existing %s renamed to %s", old_path, new_path)
            except Exception as e:
                # Rename failure is non-fatal; storeFile below will overwrite.
                logging.error("SMB: failed to rename existing %s: %s", old_path, e)
        # -------- UPLOAD NEW clients.xml --------
        with open(local_path, "rb") as f:
            logging.info(
                "SMB: uploading %s to //%s/%s/%s",
                local_path,
                SMB_SERVER_IP,
                SMB_SHARE_NAME,
                SMB_REMOTE_PATH,
            )
            conn.storeFile(SMB_SHARE_NAME, SMB_REMOTE_PATH, f)
        logging.info("SMB: upload completed successfully.")
        conn.close()
        return True
    except Exception as e:
        logging.error("SMB: error during upload of %s: %s", local_path, e)
        return False
def combine_xml_once():
    """One combine/upload cycle.

    Merges up to MAX_CLIENTS_PER_RUN per-client XMLs from XML_ROOT_PATH into a
    single <Clients> file, uploads it via SMB, marks the included client IDs
    as pending_import, and deletes the processed source files only after a
    successful upload (so a failed batch is retried on the next run).
    """
    combined_path = READY_DIR / COMBINED_FILENAME
    # Scan all per-client XMLs
    xml_files = sorted(XML_ROOT_PATH.rglob("*.xml"))  # sorted for deterministic order
    # Track files to delete after upload
    processed_files = []
    combined_rows: dict[str, ET.Element] = {}
    if xml_files:
        logging.info("Found %d new per-client XML file(s) to merge.", len(xml_files))
        # Limit how many XMLs (clients) we handle per run
        limited_files = xml_files[:MAX_CLIENTS_PER_RUN]
        if len(xml_files) > MAX_CLIENTS_PER_RUN:
            logging.info(
                "Limiting this run to %d clients; %d will be processed in later runs.",
                MAX_CLIENTS_PER_RUN,
                len(xml_files) - MAX_CLIENTS_PER_RUN,
            )
        for path in limited_files:
            logging.info(" Reading %s", path)
            try:
                tree = ET.parse(path)
                root = tree.getroot()
            except Exception as e:
                logging.error(" Failed to parse %s: %s", path, e)
                # Keep previous behavior: delete bad file
                processed_files.append(path)
                continue
            processed_files.append(path)
            # Extract rows — accept <Clients> wrappers, bare <Row> roots, or nested rows.
            if root.tag == "Clients":
                rows = root.findall("Row")
            elif root.tag == "Row":
                rows = [root]
            else:
                rows = root.findall(".//Row")
            for row in rows:
                id_el = row.find("__id")
                if id_el is None or not (id_el.text or "").strip():
                    logging.warning(" Skipping row without __id in %s", path)
                    continue
                cid = id_el.text.strip()
                logging.info(" Including client __id=%s", cid)
                # Last file wins if the same __id appears in multiple files.
                combined_rows[cid] = copy.deepcopy(row)
    else:
        logging.info("No NEW client XMLs found locally — will upload an empty Clients file.")
    # Build XML root (may be empty)
    new_root = ET.Element("Clients")
    for cid, row in combined_rows.items():
        new_root.append(row)
    # Always write combined XML (new or empty) — write to .tmp, then atomic replace.
    tmp_path = combined_path.with_suffix(".tmp")
    try:
        ET.ElementTree(new_root).write(tmp_path, encoding="utf-8", xml_declaration=True)
        os.replace(tmp_path, combined_path)
        logging.info("Wrote combined (or empty) clients.xml to %s", combined_path)
    except Exception as e:
        logging.error("Failed writing combined XML: %s", e)
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
        return
    # Rotate & upload to SMB
    upload_ok = upload_to_smb(combined_path)
    if not upload_ok:
        logging.warning(
            "SMB upload failed; keeping per-client XMLs so this batch can be retried next run."
        )
        return
    # Mark all combined client IDs as pending_import
    try:
        client_ids_in_batch = list(combined_rows.keys())  # these are the __id values
        mark_clients_pending_import(client_ids_in_batch)
        logging.info(
            "Marked %d client(s) as pending_import in %s",
            len(client_ids_in_batch),
            CLIENT_STATE_FILE,
        )
    except Exception as e:
        logging.error("Failed to update client pending_import state: %s", e)
    # Delete ONLY the processed new XMLs if upload succeeded
    for src in processed_files:
        try:
            src.unlink()
            logging.info("Deleted processed source XML: %s", src)
        except Exception as e:
            logging.error("Failed to delete %s: %s", src, e)
def sleep_until_next_run():
    """Block until one minute before the next full hour (hh:59)."""
    now = datetime.now()
    # Top of the upcoming hour, then back off one minute.
    upcoming_hour = now.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    target = upcoming_hour - timedelta(minutes=1)
    if target <= now:
        # Already inside the hh:59 window (e.g. started at 10:59:30) —
        # aim for hh:59 of the following hour instead.
        target = upcoming_hour + timedelta(hours=1) - timedelta(minutes=1)
    wait = (target - now).total_seconds()
    logging.info("Next combine run scheduled at %s (in %.0f seconds)", target, wait)
    time.sleep(wait)
def main():
    """Run the combine/upload job forever, once per hour at hh:59."""
    while True:
        sleep_until_next_run()
        logging.info("==== XML combine run ====")
        combine_xml_once()
        # Safety fallback: if anything goes wrong with time calc, we still avoid tight loop
        time.sleep(1)


if __name__ == "__main__":
    main()