Port core API routes for sites and devices

This commit is contained in:
2025-10-22 23:43:16 -06:00
parent d0fa6929b2
commit 4bc529aaf4
22 changed files with 2092 additions and 1 deletions

View File

@@ -0,0 +1,28 @@
"""Domain objects for saved device list views."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Dict, List
__all__ = ["DeviceListView"]
@dataclass(frozen=True, slots=True)
class DeviceListView:
id: int
name: str
columns: List[str]
filters: Dict[str, object]
created_at: int
updated_at: int
def to_dict(self) -> Dict[str, object]:
return {
"id": self.id,
"name": self.name,
"columns": self.columns,
"filters": self.filters,
"created_at": self.created_at,
"updated_at": self.updated_at,
}

View File

@@ -0,0 +1,291 @@
"""Device domain helpers mirroring the legacy server payloads."""
from __future__ import annotations
import json
from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Any, Dict, List, Mapping, Optional, Sequence
__all__ = [
"DEVICE_TABLE_COLUMNS",
"DEVICE_TABLE",
"DeviceSnapshot",
"assemble_device_snapshot",
"row_to_device_dict",
"serialize_device_json",
"clean_device_str",
"coerce_int",
"ts_to_iso",
"device_column_sql",
"ts_to_human",
]
# Canonical name of the devices table.
DEVICE_TABLE = "devices"
# JSON-encoded columns that decode to lists, with their empty defaults.
DEVICE_JSON_LIST_FIELDS: Mapping[str, List[Any]] = {
    "memory": [],
    "network": [],
    "software": [],
    "storage": [],
}
# JSON-encoded columns that decode to objects, with their empty defaults.
DEVICE_JSON_OBJECT_FIELDS: Mapping[str, Dict[str, Any]] = {
    "cpu": {},
}
# Ordered column list for SELECTs against the devices table; row_to_device_dict
# and the repository code rely on this exact positional order.
DEVICE_TABLE_COLUMNS: Sequence[str] = (
    "guid",
    "hostname",
    "description",
    "created_at",
    "agent_hash",
    "memory",
    "network",
    "software",
    "storage",
    "cpu",
    "device_type",
    "domain",
    "external_ip",
    "internal_ip",
    "last_reboot",
    "last_seen",
    "last_user",
    "operating_system",
    "uptime",
    "agent_id",
    "ansible_ee_ver",
    "connection_type",
    "connection_endpoint",
    "ssl_key_fingerprint",
    "token_version",
    "status",
    "key_added_at",
)
@dataclass(frozen=True)
class DeviceSnapshot:
    """Fully-assembled device payload mirroring the legacy server response.

    Combines scalar summary fields, parsed JSON detail collections
    (memory/network/software/storage/cpu), and precomputed ISO timestamps.
    """

    hostname: str
    description: str
    created_at: int
    created_at_iso: str
    agent_hash: str
    agent_guid: str
    guid: str
    memory: List[Dict[str, Any]]
    network: List[Dict[str, Any]]
    software: List[Dict[str, Any]]
    storage: List[Dict[str, Any]]
    cpu: Dict[str, Any]
    device_type: str
    domain: str
    external_ip: str
    internal_ip: str
    last_reboot: str
    last_seen: int
    last_seen_iso: str
    last_user: str
    operating_system: str
    uptime: int
    agent_id: str
    connection_type: str
    connection_endpoint: str
    # Nested convenience views: details = JSON collections, summary = scalar fields.
    details: Dict[str, Any]
    summary: Dict[str, Any]

    def to_dict(self) -> Dict[str, Any]:
        """Serialize every field to the legacy JSON payload shape (field order preserved)."""
        return {
            "hostname": self.hostname,
            "description": self.description,
            "created_at": self.created_at,
            "created_at_iso": self.created_at_iso,
            "agent_hash": self.agent_hash,
            "agent_guid": self.agent_guid,
            "guid": self.guid,
            "memory": self.memory,
            "network": self.network,
            "software": self.software,
            "storage": self.storage,
            "cpu": self.cpu,
            "device_type": self.device_type,
            "domain": self.domain,
            "external_ip": self.external_ip,
            "internal_ip": self.internal_ip,
            "last_reboot": self.last_reboot,
            "last_seen": self.last_seen,
            "last_seen_iso": self.last_seen_iso,
            "last_user": self.last_user,
            "operating_system": self.operating_system,
            "uptime": self.uptime,
            "agent_id": self.agent_id,
            "connection_type": self.connection_type,
            "connection_endpoint": self.connection_endpoint,
            "details": self.details,
            "summary": self.summary,
        }
def ts_to_iso(ts: Optional[int]) -> str:
    """Render a unix timestamp as a UTC ISO-8601 string; "" for falsy or unparseable input."""
    if not ts:
        return ""
    try:
        moment = datetime.fromtimestamp(int(ts), timezone.utc)
    except Exception:
        return ""
    return moment.isoformat()
def _ts_to_human(ts: Optional[int]) -> str:
if not ts:
return ""
try:
return datetime.utcfromtimestamp(int(ts)).strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return ""
def _parse_device_json(raw: Optional[str], default: Any) -> Any:
if raw is None:
return json.loads(json.dumps(default)) if isinstance(default, (list, dict)) else default
try:
data = json.loads(raw)
except Exception:
data = None
if isinstance(default, list):
if isinstance(data, list):
return data
return []
if isinstance(default, dict):
if isinstance(data, dict):
return data
return {}
return default
def serialize_device_json(value: Any, default: Any) -> str:
    """Encode *value* as JSON, substituting *default* when it is not a list or dict."""
    candidate = value if isinstance(value, (list, dict)) else default
    try:
        return json.dumps(candidate)
    except Exception:
        pass
    # Candidate itself was unserializable; fall back to the default, then to
    # an empty container literal matching the default's kind.
    try:
        return json.dumps(default)
    except Exception:
        return "{}" if isinstance(default, dict) else "[]"
def clean_device_str(value: Any) -> Optional[str]:
    """Coerce *value* to a stripped string; None and blank results become None.

    Any stringifiable value is accepted (ints, floats, bools all go through
    str()); objects whose __str__ raises yield None.
    """
    if value is None:
        return None
    try:
        text = str(value)
    except Exception:
        return None
    stripped = text.strip()
    return stripped if stripped else None
def coerce_int(value: Any) -> Optional[int]:
    """Best-effort int coercion; returns None for None, blank strings, or unparseable input.

    Integers and integer strings convert directly — the original
    int(float(value)) round-trip silently lost precision for integers above
    2**53. Float-like input ("3.9", 7.8) still truncates via float().
    """
    if value is None:
        return None
    if isinstance(value, str) and value.strip() == "":
        return None
    try:
        # Exact path: ints, bools, numeric strings, Decimals.
        return int(value)
    except (ValueError, TypeError):
        pass
    try:
        # Lossy fallback for float-style strings such as "3.9".
        return int(float(value))
    except (ValueError, TypeError):
        return None
def row_to_device_dict(row: Sequence[Any], columns: Sequence[str]) -> Dict[str, Any]:
    """Pair a positional DB row with its column names; zip drops surplus items on either side."""
    return dict(zip(columns, row))
def assemble_device_snapshot(record: Mapping[str, Any]) -> Dict[str, Any]:
    """Build the full legacy device payload dict from a raw DB row mapping.

    The result contains scalar summary fields (with ""/0 fallbacks), parsed
    JSON collections both at the top level and nested under "details", and
    ISO renderings of the created_at / last_seen timestamps.
    """
    # Scalar fields, normalized so missing/None values become "" (or 0 for numbers).
    summary = {
        "hostname": record.get("hostname") or "",
        "description": record.get("description") or "",
        "device_type": record.get("device_type") or "",
        "domain": record.get("domain") or "",
        "external_ip": record.get("external_ip") or "",
        "internal_ip": record.get("internal_ip") or "",
        "last_reboot": record.get("last_reboot") or "",
        "last_seen": record.get("last_seen") or 0,
        "last_user": record.get("last_user") or "",
        "operating_system": record.get("operating_system") or "",
        "uptime": record.get("uptime") or 0,
        "agent_id": record.get("agent_id") or "",
        "agent_hash": record.get("agent_hash") or "",
        # guid takes precedence over a legacy agent_guid column.
        "agent_guid": record.get("guid") or record.get("agent_guid") or "",
        "connection_type": record.get("connection_type") or "",
        "connection_endpoint": record.get("connection_endpoint") or "",
        "created_at": record.get("created_at") or 0,
    }
    created_ts = coerce_int(summary.get("created_at")) or 0
    last_seen_ts = coerce_int(summary.get("last_seen")) or 0
    uptime_val = coerce_int(summary.get("uptime")) or 0
    # Decode the JSON-encoded collection columns; malformed JSON yields the empty default.
    parsed_lists = {
        key: _parse_device_json(record.get(key), default)
        for key, default in DEVICE_JSON_LIST_FIELDS.items()
    }
    cpu_obj = _parse_device_json(record.get("cpu"), DEVICE_JSON_OBJECT_FIELDS["cpu"])
    details = {
        "memory": parsed_lists["memory"],
        "network": parsed_lists["network"],
        "software": parsed_lists["software"],
        "storage": parsed_lists["storage"],
        "cpu": cpu_obj,
    }
    # Flat payload mirrors DeviceSnapshot.to_dict(); collections are shared
    # (not copied) between the top level and "details".
    payload: Dict[str, Any] = {
        "hostname": summary["hostname"],
        "description": summary.get("description", ""),
        "created_at": created_ts,
        "created_at_iso": ts_to_iso(created_ts),
        "agent_hash": summary.get("agent_hash", ""),
        "agent_guid": summary.get("agent_guid", ""),
        "guid": summary.get("agent_guid", ""),
        "memory": parsed_lists["memory"],
        "network": parsed_lists["network"],
        "software": parsed_lists["software"],
        "storage": parsed_lists["storage"],
        "cpu": cpu_obj,
        "device_type": summary.get("device_type", ""),
        "domain": summary.get("domain", ""),
        "external_ip": summary.get("external_ip", ""),
        "internal_ip": summary.get("internal_ip", ""),
        "last_reboot": summary.get("last_reboot", ""),
        "last_seen": last_seen_ts,
        "last_seen_iso": ts_to_iso(last_seen_ts),
        "last_user": summary.get("last_user", ""),
        "operating_system": summary.get("operating_system", ""),
        "uptime": uptime_val,
        "agent_id": summary.get("agent_id", ""),
        "connection_type": summary.get("connection_type", ""),
        "connection_endpoint": summary.get("connection_endpoint", ""),
        "details": details,
        "summary": summary,
    }
    return payload
def device_column_sql(alias: Optional[str] = None) -> str:
    """Comma-joined device column list for SELECT statements, optionally alias-qualified."""
    prefix = f"{alias}." if alias else ""
    return ", ".join(prefix + column for column in DEVICE_TABLE_COLUMNS)
def ts_to_human(ts: Optional[int]) -> str:
    """Public wrapper for _ts_to_human(): unix timestamp -> "YYYY-MM-DD HH:MM:SS" UTC string."""
    return _ts_to_human(ts)

View File

@@ -0,0 +1,43 @@
"""Domain models for operator site management."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Dict, Optional
__all__ = ["SiteSummary", "SiteDeviceMapping"]
@dataclass(frozen=True, slots=True)
class SiteSummary:
"""Representation of a site record including device counts."""
id: int
name: str
description: str
created_at: int
device_count: int
def to_dict(self) -> Dict[str, object]:
return {
"id": self.id,
"name": self.name,
"description": self.description,
"created_at": self.created_at,
"device_count": self.device_count,
}
@dataclass(frozen=True, slots=True)
class SiteDeviceMapping:
"""Mapping entry describing which site a device belongs to."""
hostname: str
site_id: Optional[int]
site_name: str
def to_dict(self) -> Dict[str, object]:
return {
"site_id": self.site_id,
"site_name": self.site_name,
}

View File

@@ -6,7 +6,20 @@ from flask import Flask
from Data.Engine.services.container import EngineServiceContainer
from . import (
admin,
agents,
auth,
enrollment,
github,
health,
job_management,
tokens,
users,
sites,
devices,
credentials,
)
_REGISTRARS = (
    health.register,
@@ -18,6 +31,9 @@ _REGISTRARS = (
    auth.register,
    admin.register,
    users.register,
sites.register,
devices.register,
credentials.register,
)

View File

@@ -0,0 +1,70 @@
from __future__ import annotations
from flask import Blueprint, Flask, current_app, jsonify, request, session
from Data.Engine.services.container import EngineServiceContainer
blueprint = Blueprint("engine_credentials", __name__)
def register(app: Flask, _services: EngineServiceContainer) -> None:
    """Attach the credentials blueprint to *app* (idempotent across repeated calls)."""
    if "engine_credentials" not in app.blueprints:
        app.register_blueprint(blueprint)
def _services() -> EngineServiceContainer:
    """Resolve the engine service container stashed on the current Flask app."""
    services = current_app.extensions.get("engine_services")
    if services is None:  # pragma: no cover - defensive
        raise RuntimeError("engine services not initialized")
    return services
def _credentials_service():
    """Shorthand accessor for the credential service from the container."""
    return _services().credential_service
def _require_admin():
    """Gate helper: None when the session is an authenticated admin.

    Otherwise returns a (response, status) pair — 401 when unauthenticated,
    403 when authenticated but not an admin.
    """
    username = session.get("username")
    role = (session.get("role") or "").strip().lower()
    if not isinstance(username, str) or not username:
        return jsonify({"error": "not_authenticated"}), 401
    if role != "admin":
        return jsonify({"error": "forbidden"}), 403
    return None
@blueprint.route("/api/credentials", methods=["GET"])
def list_credentials() -> object:
    """List stored credentials, optionally filtered by site_id / connection_type (admin only)."""
    guard = _require_admin()
    if guard:
        return guard
    site_id_param = request.args.get("site_id")
    connection_type = (request.args.get("connection_type") or "").strip() or None
    try:
        # A blank or non-numeric site_id silently disables the site filter.
        site_id = int(site_id_param) if site_id_param not in (None, "") else None
    except (TypeError, ValueError):
        site_id = None
    records = _credentials_service().list_credentials(
        site_id=site_id,
        connection_type=connection_type,
    )
    return jsonify({"credentials": records})
@blueprint.route("/api/credentials", methods=["POST"])
def create_credential() -> object:  # pragma: no cover - placeholder
    """Placeholder: credential creation is not yet ported; always answers 501."""
    return jsonify({"error": "not implemented"}), 501
@blueprint.route("/api/credentials/<int:credential_id>", methods=["GET", "PUT", "DELETE"])
def credential_detail(credential_id: int) -> object:  # pragma: no cover - placeholder
    """Placeholder detail endpoint: GET, PUT, and DELETE all answer 501 until ported."""
    del credential_id  # unused until the endpoint is implemented
    return jsonify({"error": "not implemented"}), 501
__all__ = ["register", "blueprint"]

View File

@@ -0,0 +1,301 @@
from __future__ import annotations
from ipaddress import ip_address
from flask import Blueprint, Flask, current_app, jsonify, request, session
from Data.Engine.services.container import EngineServiceContainer
from Data.Engine.services.devices import RemoteDeviceError
blueprint = Blueprint("engine_devices", __name__)
def register(app: Flask, _services: EngineServiceContainer) -> None:
    """Attach the devices blueprint to *app* (idempotent across repeated calls)."""
    if "engine_devices" not in app.blueprints:
        app.register_blueprint(blueprint)
def _services() -> EngineServiceContainer:
    """Resolve the engine service container stashed on the current Flask app."""
    services = current_app.extensions.get("engine_services")
    if services is None:  # pragma: no cover - defensive
        raise RuntimeError("engine services not initialized")
    return services
def _inventory():
    """Shorthand accessor for the device inventory service."""
    return _services().device_inventory
def _views():
    """Shorthand accessor for the saved device-list view service."""
    return _services().device_view_service
def _require_admin():
    """Gate helper: None when the session is an authenticated admin.

    Otherwise returns a (response, status) pair — 401 when unauthenticated,
    403 when authenticated but not an admin.
    """
    username = session.get("username")
    role = (session.get("role") or "").strip().lower()
    if not isinstance(username, str) or not username:
        return jsonify({"error": "not_authenticated"}), 401
    if role != "admin":
        return jsonify({"error": "forbidden"}), 403
    return None
def _is_internal_request(req: request) -> bool:
remote = (req.remote_addr or "").strip()
if not remote:
return False
try:
return ip_address(remote).is_loopback
except ValueError:
return remote in {"localhost"}
@blueprint.route("/api/devices", methods=["GET"])
def list_devices() -> object:
    """Return every device snapshot known to the inventory (no auth guard here)."""
    devices = _inventory().list_devices()
    return jsonify({"devices": devices})
@blueprint.route("/api/devices/<guid>", methods=["GET"])
def get_device_by_guid(guid: str) -> object:
    """Return a single device snapshot looked up by agent GUID, or 404."""
    device = _inventory().get_device_by_guid(guid)
    if not device:
        return jsonify({"error": "not found"}), 404
    return jsonify(device)
@blueprint.route("/api/agent_devices", methods=["GET"])
def list_agent_devices() -> object:
    """Return agent-managed devices only (admin only)."""
    guard = _require_admin()
    if guard:
        return guard
    devices = _inventory().list_agent_devices()
    return jsonify({"devices": devices})
@blueprint.route("/api/ssh_devices", methods=["GET", "POST"])
def ssh_devices() -> object:
    """List (GET) or upsert (POST) SSH-managed devices; delegates to the shared handler."""
    return _remote_devices_endpoint("ssh")
@blueprint.route("/api/winrm_devices", methods=["GET", "POST"])
def winrm_devices() -> object:
    """List (GET) or upsert (POST) WinRM-managed devices; delegates to the shared handler."""
    return _remote_devices_endpoint("winrm")
@blueprint.route("/api/ssh_devices/<hostname>", methods=["PUT", "DELETE"])
def ssh_device_detail(hostname: str) -> object:
    """Update (PUT) or remove (DELETE) one SSH device; delegates to the shared handler."""
    return _remote_device_detail("ssh", hostname)
@blueprint.route("/api/winrm_devices/<hostname>", methods=["PUT", "DELETE"])
def winrm_device_detail(hostname: str) -> object:
    """Update (PUT) or remove (DELETE) one WinRM device; delegates to the shared handler."""
    return _remote_device_detail("winrm", hostname)
@blueprint.route("/api/agent/hash_list", methods=["GET"])
def agent_hash_list() -> object:
    """Internal-only endpoint returning agent hash records; non-loopback callers get 403."""
    if not _is_internal_request(request):
        remote_addr = (request.remote_addr or "unknown").strip() or "unknown"
        current_app.logger.warning(
            "/api/agent/hash_list denied non-local request from %s", remote_addr
        )
        return jsonify({"error": "forbidden"}), 403
    try:
        records = _inventory().collect_agent_hash_records()
    except Exception as exc:  # pragma: no cover - defensive logging
        current_app.logger.exception("/api/agent/hash_list error: %s", exc)
        return jsonify({"error": "internal error"}), 500
    return jsonify({"agents": records})
@blueprint.route("/api/device_list_views", methods=["GET"])
def list_device_list_views() -> object:
    """Return all saved device-list views."""
    views = _views().list_views()
    return jsonify({"views": [view.to_dict() for view in views]})
@blueprint.route("/api/device_list_views/<int:view_id>", methods=["GET"])
def get_device_list_view(view_id: int) -> object:
    """Return one saved device-list view by id, or 404."""
    view = _views().get_view(view_id)
    if not view:
        return jsonify({"error": "not found"}), 404
    return jsonify(view.to_dict())
@blueprint.route("/api/device_list_views", methods=["POST"])
def create_device_list_view() -> object:
    """Create a saved device-list view.

    Validates: non-empty name, name not the reserved "default view", columns
    a list of strings, filters an object. 409 on duplicate name, 201 on success.
    """
    payload = request.get_json(silent=True) or {}
    name = (payload.get("name") or "").strip()
    columns = payload.get("columns") or []
    filters = payload.get("filters") or {}
    if not name:
        return jsonify({"error": "name is required"}), 400
    if name.lower() == "default view":
        return jsonify({"error": "reserved name"}), 400
    if not isinstance(columns, list) or not all(isinstance(x, str) for x in columns):
        return jsonify({"error": "columns must be a list of strings"}), 400
    if not isinstance(filters, dict):
        return jsonify({"error": "filters must be an object"}), 400
    try:
        view = _views().create_view(name, columns, filters)
    except ValueError as exc:
        # The view service signals a duplicate name via this sentinel message.
        if str(exc) == "duplicate":
            return jsonify({"error": "name already exists"}), 409
        raise
    response = jsonify(view.to_dict())
    response.status_code = 201
    return response
@blueprint.route("/api/device_list_views/<int:view_id>", methods=["PUT"])
def update_device_list_view(view_id: int) -> object:
    """Partially update a saved device-list view (name / columns / filters).

    Only keys present in the JSON body are applied. 400 on validation
    failure or an empty update, 409 on duplicate name, 404 when the view
    does not exist.
    """
    payload = request.get_json(silent=True) or {}
    updates: dict = {}
    if "name" in payload:
        name_val = payload.get("name")
        if name_val is None:
            return jsonify({"error": "name cannot be empty"}), 400
        # str(x) can only be falsy when x == "", which .strip() handles, so
        # the original `str(name_val) or ""` guard was dead code.
        normalized = str(name_val).strip()
        if not normalized:
            return jsonify({"error": "name cannot be empty"}), 400
        if normalized.lower() == "default view":
            return jsonify({"error": "reserved name"}), 400
        updates["name"] = normalized
    if "columns" in payload:
        columns_val = payload.get("columns")
        if not isinstance(columns_val, list) or not all(isinstance(x, str) for x in columns_val):
            return jsonify({"error": "columns must be a list of strings"}), 400
        updates["columns"] = columns_val
    if "filters" in payload:
        filters_val = payload.get("filters")
        if filters_val is not None and not isinstance(filters_val, dict):
            return jsonify({"error": "filters must be an object"}), 400
        if filters_val is not None:
            updates["filters"] = filters_val
    if not updates:
        return jsonify({"error": "no fields to update"}), 400
    try:
        view = _views().update_view(
            view_id,
            name=updates.get("name"),
            columns=updates.get("columns"),
            filters=updates.get("filters"),
        )
    except ValueError as exc:
        # The view service signals validation failures via sentinel messages.
        code = str(exc)
        if code == "duplicate":
            return jsonify({"error": "name already exists"}), 409
        if code == "missing_name":
            return jsonify({"error": "name cannot be empty"}), 400
        if code == "reserved":
            return jsonify({"error": "reserved name"}), 400
        return jsonify({"error": "invalid payload"}), 400
    except LookupError:
        return jsonify({"error": "not found"}), 404
    return jsonify(view.to_dict())
@blueprint.route("/api/device_list_views/<int:view_id>", methods=["DELETE"])
def delete_device_list_view(view_id: int) -> object:
    """Delete one saved device-list view; 404 when it does not exist."""
    if not _views().delete_view(view_id):
        return jsonify({"error": "not found"}), 404
    return jsonify({"status": "ok"})
def _remote_devices_endpoint(connection_type: str) -> object:
    """Shared GET/POST handler for /api/ssh_devices and /api/winrm_devices (admin only).

    GET lists devices of *connection_type*; POST upserts one device from the
    JSON body (400 on missing hostname/address, 409 on conflict, 201 on success).
    """
    guard = _require_admin()
    if guard:
        return guard
    if request.method == "GET":
        devices = _inventory().list_remote_devices(connection_type)
        return jsonify({"devices": devices})
    payload = request.get_json(silent=True) or {}
    hostname = (payload.get("hostname") or "").strip()
    # Accept several legacy aliases for the connection endpoint.
    address = (
        payload.get("address")
        or payload.get("connection_endpoint")
        or payload.get("endpoint")
        or payload.get("host")
    )
    description = payload.get("description")
    os_hint = payload.get("operating_system") or payload.get("os")
    if not hostname:
        return jsonify({"error": "hostname is required"}), 400
    # NOTE(review): a non-string address (e.g. JSON number) would raise here
    # and surface as a 500 — confirm whether upstream guarantees a string.
    if not (address or "").strip():
        return jsonify({"error": "address is required"}), 400
    try:
        device = _inventory().upsert_remote_device(
            connection_type,
            hostname,
            address,
            description,
            os_hint,
            ensure_existing_type=None,
        )
    except RemoteDeviceError as exc:
        # Map service error codes directly to HTTP statuses. The original
        # precomputed `status = 409 if exc.code in {...} else 500`, but both
        # 409-eligible codes returned earlier, so that expression could only
        # ever evaluate to 500 on the path where it was used.
        if exc.code == "conflict":
            return jsonify({"error": str(exc)}), 409
        if exc.code == "address_required":
            return jsonify({"error": "address is required"}), 400
        return jsonify({"error": str(exc)}), 500
    return jsonify({"device": device}), 201
def _remote_device_detail(connection_type: str, hostname: str) -> object:
    """Shared PUT/DELETE handler for a single ssh/winrm device (admin only).

    DELETE removes the device; PUT applies a partial update (at least one of
    address/description/operating_system must be supplied).
    """
    guard = _require_admin()
    if guard:
        return guard
    normalized_host = (hostname or "").strip()
    if not normalized_host:
        return jsonify({"error": "invalid hostname"}), 400
    if request.method == "DELETE":
        try:
            _inventory().delete_remote_device(connection_type, normalized_host)
        except RemoteDeviceError as exc:
            if exc.code == "not_found":
                return jsonify({"error": "device not found"}), 404
            if exc.code == "invalid_hostname":
                return jsonify({"error": "invalid hostname"}), 400
            return jsonify({"error": str(exc)}), 500
        return jsonify({"status": "ok"})
    # PUT: partial update from the JSON body; aliases accepted for the endpoint.
    payload = request.get_json(silent=True) or {}
    address = (
        payload.get("address")
        or payload.get("connection_endpoint")
        or payload.get("endpoint")
    )
    description = payload.get("description")
    os_hint = payload.get("operating_system") or payload.get("os")
    if address is None and description is None and os_hint is None:
        return jsonify({"error": "no fields to update"}), 400
    try:
        device = _inventory().upsert_remote_device(
            connection_type,
            normalized_host,
            address if address is not None else "",
            description,
            os_hint,
            # The device must already exist with this connection type.
            ensure_existing_type=connection_type,
        )
    except RemoteDeviceError as exc:
        if exc.code == "not_found":
            return jsonify({"error": "device not found"}), 404
        if exc.code == "address_required":
            return jsonify({"error": "address is required"}), 400
        return jsonify({"error": str(exc)}), 500
    return jsonify({"device": device})
__all__ = ["register", "blueprint"]

View File

@@ -0,0 +1,112 @@
from __future__ import annotations
from flask import Blueprint, Flask, current_app, jsonify, request
from Data.Engine.services.container import EngineServiceContainer
blueprint = Blueprint("engine_sites", __name__)
def register(app: Flask, _services: EngineServiceContainer) -> None:
    """Attach the sites blueprint to *app* (idempotent across repeated calls)."""
    if "engine_sites" not in app.blueprints:
        app.register_blueprint(blueprint)
def _services() -> EngineServiceContainer:
    """Resolve the engine service container stashed on the current Flask app."""
    services = current_app.extensions.get("engine_services")
    if services is None:  # pragma: no cover - defensive
        raise RuntimeError("engine services not initialized")
    return services
def _site_service():
    """Shorthand accessor for the site service from the container."""
    return _services().site_service
@blueprint.route("/api/sites", methods=["GET"])
def list_sites() -> object:
    """Return all site summaries (including device counts)."""
    records = _site_service().list_sites()
    return jsonify({"sites": [record.to_dict() for record in records]})
@blueprint.route("/api/sites", methods=["POST"])
def create_site() -> object:
    """Create a site from the JSON body; 400 when name missing, 409 on duplicate, 201 on success."""
    payload = request.get_json(silent=True) or {}
    name = payload.get("name")
    description = payload.get("description")
    try:
        record = _site_service().create_site(name or "", description or "")
    except ValueError as exc:
        # The site service signals validation failures via sentinel messages.
        if str(exc) == "missing_name":
            return jsonify({"error": "name is required"}), 400
        if str(exc) == "duplicate":
            return jsonify({"error": "name already exists"}), 409
        raise
    response = jsonify(record.to_dict())
    response.status_code = 201
    return response
@blueprint.route("/api/sites/delete", methods=["POST"])
def delete_sites() -> object:
    """Bulk-delete sites by id list; responds with the number actually deleted."""
    payload = request.get_json(silent=True) or {}
    ids = payload.get("ids") or []
    if not isinstance(ids, list):
        return jsonify({"error": "ids must be a list"}), 400
    deleted = _site_service().delete_sites(ids)
    return jsonify({"status": "ok", "deleted": deleted})
@blueprint.route("/api/sites/device_map", methods=["GET"])
def sites_device_map() -> object:
    """Map hostnames to their site assignment, optionally limited via ?hostnames=a,b,c."""
    host_param = (request.args.get("hostnames") or "").strip()
    requested = [part.strip() for part in host_param.split(",") if part.strip()]
    # An empty filter list means "map every known device".
    mapping = _site_service().map_devices(requested or None)
    payload = {hostname: entry.to_dict() for hostname, entry in mapping.items()}
    return jsonify({"mapping": payload})
@blueprint.route("/api/sites/assign", methods=["POST"])
def assign_devices_to_site() -> object:
    """Assign a list of hostnames to a site; 400 on bad input, 404 when the site is unknown."""
    payload = request.get_json(silent=True) or {}
    site_id = payload.get("site_id")
    hostnames = payload.get("hostnames") or []
    if not isinstance(hostnames, list):
        return jsonify({"error": "hostnames must be a list of strings"}), 400
    try:
        _site_service().assign_devices(site_id, hostnames)
    except ValueError as exc:
        # The site service signals validation failures via sentinel messages.
        message = str(exc)
        if message == "invalid_site_id":
            return jsonify({"error": "invalid site_id"}), 400
        if message == "invalid_hostnames":
            return jsonify({"error": "hostnames must be a list of strings"}), 400
        raise
    except LookupError:
        return jsonify({"error": "site not found"}), 404
    return jsonify({"status": "ok"})
@blueprint.route("/api/sites/rename", methods=["POST"])
def rename_site() -> object:
    """Rename a site; 400 when the new name is missing, 409 on duplicate, 404 when unknown."""
    payload = request.get_json(silent=True) or {}
    site_id = payload.get("id")
    new_name = payload.get("new_name") or ""
    try:
        record = _site_service().rename_site(site_id, new_name)
    except ValueError as exc:
        # The site service signals validation failures via sentinel messages.
        if str(exc) == "missing_name":
            return jsonify({"error": "new_name is required"}), 400
        if str(exc) == "duplicate":
            return jsonify({"error": "name already exists"}), 409
        raise
    except LookupError:
        return jsonify({"error": "site not found"}), 404
    return jsonify(record.to_dict())
__all__ = ["register", "blueprint"]

View File

@@ -24,8 +24,12 @@ __all__ = [
try:  # pragma: no cover - optional dependency shim
    from .device_repository import SQLiteDeviceRepository
    from .enrollment_repository import SQLiteEnrollmentRepository
    from .device_inventory_repository import SQLiteDeviceInventoryRepository
    from .device_view_repository import SQLiteDeviceViewRepository
    from .credential_repository import SQLiteCredentialRepository
    from .github_repository import SQLiteGitHubRepository
    from .job_repository import SQLiteJobRepository
    from .site_repository import SQLiteSiteRepository
    from .token_repository import SQLiteRefreshTokenRepository
    from .user_repository import SQLiteUserRepository
except ModuleNotFoundError as exc:  # pragma: no cover - triggered when auth deps missing
@@ -36,8 +40,12 @@ except ModuleNotFoundError as exc: # pragma: no cover - triggered when auth dep
    SQLiteDeviceRepository = _missing_repo  # type: ignore[assignment]
    SQLiteEnrollmentRepository = _missing_repo  # type: ignore[assignment]
    SQLiteDeviceInventoryRepository = _missing_repo  # type: ignore[assignment]
    SQLiteDeviceViewRepository = _missing_repo  # type: ignore[assignment]
    SQLiteCredentialRepository = _missing_repo  # type: ignore[assignment]
    SQLiteGitHubRepository = _missing_repo  # type: ignore[assignment]
    SQLiteJobRepository = _missing_repo  # type: ignore[assignment]
    SQLiteSiteRepository = _missing_repo  # type: ignore[assignment]
    SQLiteRefreshTokenRepository = _missing_repo  # type: ignore[assignment]
else:
    __all__ += [
@@ -45,6 +53,10 @@ else:
        "SQLiteRefreshTokenRepository",
        "SQLiteJobRepository",
        "SQLiteEnrollmentRepository",
        "SQLiteDeviceInventoryRepository",
        "SQLiteDeviceViewRepository",
        "SQLiteCredentialRepository",
        "SQLiteGitHubRepository",
        "SQLiteUserRepository",
        "SQLiteSiteRepository",
    ]

View File

@@ -0,0 +1,103 @@
"""SQLite access for operator credential metadata."""
from __future__ import annotations
import json
import logging
import sqlite3
from contextlib import closing
from typing import Dict, List, Optional
from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory
__all__ = ["SQLiteCredentialRepository"]
class SQLiteCredentialRepository:
    """Read-only SQLite access to the ``credentials`` table.

    Rows are joined against ``sites`` for display names. Secret columns are
    only inspected for presence (the ``has_*`` flags) and never returned.
    """

    def __init__(
        self,
        connection_factory: SQLiteConnectionFactory,
        *,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        # Factory yielding fresh sqlite3 connections; each query opens and
        # closes its own connection via contextlib.closing.
        self._connections = connection_factory
        self._log = logger or logging.getLogger("borealis.engine.repositories.credentials")

    def list_credentials(
        self,
        *,
        site_id: Optional[int] = None,
        connection_type: Optional[str] = None,
    ) -> List[Dict[str, object]]:
        """Return credential metadata dicts, optionally filtered by site/connection type.

        ``connection_type`` matching is case-insensitive; results are ordered
        by lower-cased credential name.
        """
        sql = """
            SELECT c.id,
                   c.name,
                   c.description,
                   c.credential_type,
                   c.connection_type,
                   c.username,
                   c.site_id,
                   s.name AS site_name,
                   c.become_method,
                   c.become_username,
                   c.metadata_json,
                   c.created_at,
                   c.updated_at,
                   c.password_encrypted,
                   c.private_key_encrypted,
                   c.private_key_passphrase_encrypted,
                   c.become_password_encrypted
              FROM credentials c
              LEFT JOIN sites s ON s.id = c.site_id
        """
        clauses: List[str] = []
        params: List[object] = []
        if site_id is not None:
            clauses.append("c.site_id = ?")
            params.append(site_id)
        if connection_type:
            clauses.append("LOWER(c.connection_type) = LOWER(?)")
            params.append(connection_type)
        if clauses:
            sql += " WHERE " + " AND ".join(clauses)
        sql += " ORDER BY LOWER(c.name) ASC"
        with closing(self._connections()) as conn:
            # Row factory gives name-based column access below.
            conn.row_factory = sqlite3.Row  # type: ignore[attr-defined]
            cur = conn.cursor()
            cur.execute(sql, params)
            rows = cur.fetchall()
        results: List[Dict[str, object]] = []
        for row in rows:
            metadata_json = row["metadata_json"] if "metadata_json" in row.keys() else None
            metadata = {}
            if metadata_json:
                # Tolerate malformed / non-object metadata by falling back to {}.
                try:
                    candidate = json.loads(metadata_json)
                    if isinstance(candidate, dict):
                        metadata = candidate
                except Exception:
                    metadata = {}
            results.append(
                {
                    "id": row["id"],
                    "name": row["name"],
                    "description": row["description"] or "",
                    "credential_type": row["credential_type"] or "machine",
                    "connection_type": row["connection_type"] or "ssh",
                    "site_id": row["site_id"],
                    "site_name": row["site_name"],
                    "username": row["username"] or "",
                    "become_method": row["become_method"] or "",
                    "become_username": row["become_username"] or "",
                    "metadata": metadata,
                    "created_at": int(row["created_at"] or 0),
                    "updated_at": int(row["updated_at"] or 0),
                    # Presence flags only — encrypted secrets are never exposed.
                    "has_password": bool(row["password_encrypted"]),
                    "has_private_key": bool(row["private_key_encrypted"]),
                    "has_private_key_passphrase": bool(row["private_key_passphrase_encrypted"]),
                    "has_become_password": bool(row["become_password_encrypted"]),
                }
            )
        return results

View File

@@ -0,0 +1,253 @@
"""Device inventory operations backed by SQLite."""
from __future__ import annotations
import logging
import sqlite3
import time
from contextlib import closing
from typing import Any, Dict, List, Optional, Tuple
from Data.Engine.domain.devices import (
DEVICE_TABLE,
DEVICE_TABLE_COLUMNS,
assemble_device_snapshot,
clean_device_str,
coerce_int,
device_column_sql,
row_to_device_dict,
serialize_device_json,
)
from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory
__all__ = ["SQLiteDeviceInventoryRepository"]
class SQLiteDeviceInventoryRepository:
    """SQLite-backed device inventory: reads snapshots and upserts device rows."""

    def __init__(
        self,
        connection_factory: SQLiteConnectionFactory,
        *,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        # Factory yielding fresh sqlite3 connections; each operation opens and
        # closes its own connection via contextlib.closing.
        self._connections = connection_factory
        self._log = logger or logging.getLogger("borealis.engine.repositories.device_inventory")
    def fetch_devices(
        self,
        *,
        connection_type: Optional[str] = None,
        hostname: Optional[str] = None,
        only_agents: bool = False,
    ) -> List[Dict[str, Any]]:
        """Return device snapshots joined with their (optional) site assignment.

        Filters are case-insensitive exact matches; ``only_agents`` keeps rows
        whose connection_type is NULL or blank (i.e. agent-managed). Each
        snapshot gains site_id/site_name/site_description plus a ``status`` of
        "Online" when last_seen is within the past 300 seconds, else "Offline".
        """
        sql = f"""
            SELECT {device_column_sql('d')}, s.id, s.name, s.description
              FROM {DEVICE_TABLE} d
              LEFT JOIN device_sites ds ON ds.device_hostname = d.hostname
              LEFT JOIN sites s ON s.id = ds.site_id
        """
        clauses: List[str] = []
        params: List[Any] = []
        if connection_type:
            clauses.append("LOWER(d.connection_type) = LOWER(?)")
            params.append(connection_type)
        if hostname:
            clauses.append("LOWER(d.hostname) = LOWER(?)")
            # Lower-casing the parameter is redundant with LOWER(?) but harmless.
            params.append(hostname.lower())
        if only_agents:
            clauses.append("(d.connection_type IS NULL OR TRIM(d.connection_type) = '')")
        if clauses:
            sql += " WHERE " + " AND ".join(clauses)
        with closing(self._connections()) as conn:
            cur = conn.cursor()
            cur.execute(sql, params)
            rows = cur.fetchall()
        now = time.time()
        devices: List[Dict[str, Any]] = []
        for row in rows:
            # Positional split: device columns first, then the three site columns.
            core = row[: len(DEVICE_TABLE_COLUMNS)]
            site_id, site_name, site_description = row[len(DEVICE_TABLE_COLUMNS) :]
            record = row_to_device_dict(core, DEVICE_TABLE_COLUMNS)
            snapshot = assemble_device_snapshot(record)
            summary = snapshot.get("summary", {})
            last_seen = snapshot.get("last_seen") or 0
            status = "Offline"
            try:
                # Online = heartbeat seen within the last 5 minutes.
                if last_seen and (now - float(last_seen)) <= 300:
                    status = "Online"
            except Exception:
                pass
            devices.append(
                {
                    **snapshot,
                    "site_id": site_id,
                    "site_name": site_name or "",
                    "site_description": site_description or "",
                    "status": status,
                }
            )
        return devices
def load_snapshot(self, *, hostname: Optional[str] = None, guid: Optional[str] = None) -> Optional[Dict[str, Any]]:
if not hostname and not guid:
return None
sql = None
params: Tuple[Any, ...]
if hostname:
sql = f"SELECT {device_column_sql()} FROM {DEVICE_TABLE} WHERE hostname = ?"
params = (hostname,)
else:
sql = f"SELECT {device_column_sql()} FROM {DEVICE_TABLE} WHERE LOWER(guid) = LOWER(?)"
params = (guid,)
with closing(self._connections()) as conn:
cur = conn.cursor()
cur.execute(sql, params)
row = cur.fetchone()
if not row:
return None
record = row_to_device_dict(row, DEVICE_TABLE_COLUMNS)
return assemble_device_snapshot(record)
def upsert_device(
self,
hostname: str,
description: Optional[str],
merged_details: Dict[str, Any],
created_at: Optional[int],
*,
agent_hash: Optional[str] = None,
guid: Optional[str] = None,
) -> None:
if not hostname:
return
column_values = self._extract_device_columns(merged_details or {})
normalized_description = description if description is not None else ""
try:
normalized_description = str(normalized_description)
except Exception:
normalized_description = ""
normalized_hash = clean_device_str(agent_hash) or None
normalized_guid = clean_device_str(guid) or None
created_ts = coerce_int(created_at) or int(time.time())
sql = f"""
INSERT INTO {DEVICE_TABLE}(
hostname,
description,
created_at,
agent_hash,
guid,
memory,
network,
software,
storage,
cpu,
device_type,
domain,
external_ip,
internal_ip,
last_reboot,
last_seen,
last_user,
operating_system,
uptime,
agent_id,
ansible_ee_ver,
connection_type,
connection_endpoint
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(hostname) DO UPDATE SET
description=excluded.description,
created_at=COALESCE({DEVICE_TABLE}.created_at, excluded.created_at),
agent_hash=COALESCE(NULLIF(excluded.agent_hash, ''), {DEVICE_TABLE}.agent_hash),
guid=COALESCE(NULLIF(excluded.guid, ''), {DEVICE_TABLE}.guid),
memory=excluded.memory,
network=excluded.network,
software=excluded.software,
storage=excluded.storage,
cpu=excluded.cpu,
device_type=COALESCE(NULLIF(excluded.device_type, ''), {DEVICE_TABLE}.device_type),
domain=COALESCE(NULLIF(excluded.domain, ''), {DEVICE_TABLE}.domain),
external_ip=COALESCE(NULLIF(excluded.external_ip, ''), {DEVICE_TABLE}.external_ip),
internal_ip=COALESCE(NULLIF(excluded.internal_ip, ''), {DEVICE_TABLE}.internal_ip),
last_reboot=COALESCE(NULLIF(excluded.last_reboot, ''), {DEVICE_TABLE}.last_reboot),
last_seen=COALESCE(NULLIF(excluded.last_seen, 0), {DEVICE_TABLE}.last_seen),
last_user=COALESCE(NULLIF(excluded.last_user, ''), {DEVICE_TABLE}.last_user),
operating_system=COALESCE(NULLIF(excluded.operating_system, ''), {DEVICE_TABLE}.operating_system),
uptime=COALESCE(NULLIF(excluded.uptime, 0), {DEVICE_TABLE}.uptime),
agent_id=COALESCE(NULLIF(excluded.agent_id, ''), {DEVICE_TABLE}.agent_id),
ansible_ee_ver=COALESCE(NULLIF(excluded.ansible_ee_ver, ''), {DEVICE_TABLE}.ansible_ee_ver),
connection_type=COALESCE(NULLIF(excluded.connection_type, ''), {DEVICE_TABLE}.connection_type),
connection_endpoint=COALESCE(NULLIF(excluded.connection_endpoint, ''), {DEVICE_TABLE}.connection_endpoint)
"""
params: List[Any] = [
hostname,
normalized_description,
created_ts,
normalized_hash,
normalized_guid,
column_values.get("memory"),
column_values.get("network"),
column_values.get("software"),
column_values.get("storage"),
column_values.get("cpu"),
column_values.get("device_type"),
column_values.get("domain"),
column_values.get("external_ip"),
column_values.get("internal_ip"),
column_values.get("last_reboot"),
column_values.get("last_seen"),
column_values.get("last_user"),
column_values.get("operating_system"),
column_values.get("uptime"),
column_values.get("agent_id"),
column_values.get("ansible_ee_ver"),
column_values.get("connection_type"),
column_values.get("connection_endpoint"),
]
with closing(self._connections()) as conn:
cur = conn.cursor()
cur.execute(sql, params)
conn.commit()
def delete_device_by_hostname(self, hostname: str) -> None:
with closing(self._connections()) as conn:
cur = conn.cursor()
cur.execute("DELETE FROM device_sites WHERE device_hostname = ?", (hostname,))
cur.execute(f"DELETE FROM {DEVICE_TABLE} WHERE hostname = ?", (hostname,))
conn.commit()
def _extract_device_columns(self, details: Dict[str, Any]) -> Dict[str, Any]:
summary = details.get("summary") or {}
payload: Dict[str, Any] = {}
for field in ("memory", "network", "software", "storage"):
payload[field] = serialize_device_json(details.get(field), [])
payload["cpu"] = serialize_device_json(summary.get("cpu") or details.get("cpu"), {})
payload["device_type"] = clean_device_str(summary.get("device_type") or summary.get("type"))
payload["domain"] = clean_device_str(summary.get("domain"))
payload["external_ip"] = clean_device_str(summary.get("external_ip") or summary.get("public_ip"))
payload["internal_ip"] = clean_device_str(summary.get("internal_ip") or summary.get("private_ip"))
payload["last_reboot"] = clean_device_str(summary.get("last_reboot") or summary.get("last_boot"))
payload["last_seen"] = coerce_int(summary.get("last_seen"))
payload["last_user"] = clean_device_str(
summary.get("last_user")
or summary.get("last_user_name")
or summary.get("logged_in_user")
)
payload["operating_system"] = clean_device_str(
summary.get("operating_system") or summary.get("os")
)
payload["uptime"] = coerce_int(summary.get("uptime"))
payload["agent_id"] = clean_device_str(summary.get("agent_id"))
payload["ansible_ee_ver"] = clean_device_str(summary.get("ansible_ee_ver"))
payload["connection_type"] = clean_device_str(summary.get("connection_type"))
payload["connection_endpoint"] = clean_device_str(
summary.get("connection_endpoint") or summary.get("endpoint")
)
return payload

View File

@@ -0,0 +1,143 @@
"""SQLite persistence for device list views."""
from __future__ import annotations
import json
import logging
import sqlite3
import time
from contextlib import closing
from typing import Dict, Iterable, List, Optional
from Data.Engine.domain.device_views import DeviceListView
from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory
__all__ = ["SQLiteDeviceViewRepository"]
class SQLiteDeviceViewRepository:
    """CRUD persistence for saved device-list views.

    Every operation obtains a fresh connection from the injected factory and
    closes it on exit; result rows are converted to ``DeviceListView``
    instances, with malformed JSON columns degraded to empty defaults.
    """

    def __init__(
        self,
        connection_factory: SQLiteConnectionFactory,
        *,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        self._connections = connection_factory
        self._log = logger or logging.getLogger("borealis.engine.repositories.device_views")

    def list_views(self) -> List[DeviceListView]:
        """Return every saved view, ordered case-insensitively by name."""
        query = (
            "SELECT id, name, columns_json, filters_json, created_at, updated_at\n"
            " FROM device_list_views ORDER BY name COLLATE NOCASE ASC"
        )
        with closing(self._connections()) as db:
            cursor = db.cursor()
            cursor.execute(query)
            records = cursor.fetchall()
        return [self._row_to_view(record) for record in records]

    def get_view(self, view_id: int) -> Optional[DeviceListView]:
        """Return one view by id, or ``None`` when it does not exist."""
        query = (
            "SELECT id, name, columns_json, filters_json, created_at, updated_at\n"
            " FROM device_list_views WHERE id = ?"
        )
        with closing(self._connections()) as db:
            cursor = db.cursor()
            cursor.execute(query, (view_id,))
            record = cursor.fetchone()
        if record is None:
            return None
        return self._row_to_view(record)

    def create_view(self, name: str, columns: List[str], filters: Dict[str, object]) -> DeviceListView:
        """Insert a new view and return the stored row.

        Raises:
            ValueError: ``"duplicate"`` when the name already exists.
            RuntimeError: if the freshly inserted row cannot be re-read.
        """
        timestamp = int(time.time())
        with closing(self._connections()) as db:
            cursor = db.cursor()
            try:
                cursor.execute(
                    "INSERT INTO device_list_views(name, columns_json, filters_json, created_at, updated_at)\n"
                    "VALUES (?, ?, ?, ?, ?)",
                    (name, json.dumps(columns), json.dumps(filters), timestamp, timestamp),
                )
            except sqlite3.IntegrityError as exc:
                raise ValueError("duplicate") from exc
            new_id = cursor.lastrowid
            db.commit()
            cursor.execute(
                "SELECT id, name, columns_json, filters_json, created_at, updated_at FROM device_list_views WHERE id = ?",
                (new_id,),
            )
            record = cursor.fetchone()
        if not record:
            raise RuntimeError("view missing after insert")
        return self._row_to_view(record)

    def update_view(
        self,
        view_id: int,
        *,
        name: Optional[str] = None,
        columns: Optional[List[str]] = None,
        filters: Optional[Dict[str, object]] = None,
    ) -> DeviceListView:
        """Apply the provided fields to a view and return the fresh row.

        Raises:
            ValueError: ``"duplicate"`` on a name collision.
            LookupError: ``"not_found"`` when the id does not exist.
        """
        assignments: List[str] = []
        values: List[object] = []
        if name is not None:
            assignments.append("name = ?")
            values.append(name)
        if columns is not None:
            assignments.append("columns_json = ?")
            values.append(json.dumps(columns))
        if filters is not None:
            assignments.append("filters_json = ?")
            values.append(json.dumps(filters))
        # The timestamp is always bumped, even when no other field changed.
        assignments.append("updated_at = ?")
        values.append(int(time.time()))
        values.append(view_id)
        with closing(self._connections()) as db:
            cursor = db.cursor()
            try:
                cursor.execute(
                    f"UPDATE device_list_views SET {', '.join(assignments)} WHERE id = ?",
                    values,
                )
            except sqlite3.IntegrityError as exc:
                raise ValueError("duplicate") from exc
            if cursor.rowcount == 0:
                raise LookupError("not_found")
            db.commit()
            cursor.execute(
                "SELECT id, name, columns_json, filters_json, created_at, updated_at FROM device_list_views WHERE id = ?",
                (view_id,),
            )
            record = cursor.fetchone()
        if not record:
            raise LookupError("not_found")
        return self._row_to_view(record)

    def delete_view(self, view_id: int) -> bool:
        """Delete a view; ``True`` when a row was actually removed."""
        with closing(self._connections()) as db:
            cursor = db.cursor()
            cursor.execute("DELETE FROM device_list_views WHERE id = ?", (view_id,))
            removed = cursor.rowcount
            db.commit()
        return bool(removed)

    def _row_to_view(self, row: Optional[Iterable[object]]) -> DeviceListView:
        """Convert a result row into a ``DeviceListView``, tolerating bad JSON."""
        if row is None:
            raise ValueError("row required")
        view_id, name, columns_json, filters_json, created_at, updated_at = row
        try:
            parsed_columns = json.loads(columns_json or "[]")
        except Exception:
            parsed_columns = []
        try:
            parsed_filters = json.loads(filters_json or "{}")
        except Exception:
            parsed_filters = {}
        return DeviceListView(
            id=int(view_id),
            name=str(name or ""),
            columns=list(parsed_columns) if isinstance(parsed_columns, list) else [],
            filters=dict(parsed_filters) if isinstance(parsed_filters, dict) else {},
            created_at=int(created_at or 0),
            updated_at=int(updated_at or 0),
        )

View File

@@ -0,0 +1,189 @@
"""SQLite persistence for site management."""
from __future__ import annotations
import logging
import sqlite3
import time
from contextlib import closing
from typing import Dict, Iterable, List, Optional, Sequence
from Data.Engine.domain.sites import SiteDeviceMapping, SiteSummary
from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory
__all__ = ["SQLiteSiteRepository"]
class SQLiteSiteRepository:
    """Repository exposing site CRUD and device assignment helpers."""

    def __init__(
        self,
        connection_factory: SQLiteConnectionFactory,
        *,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        # A fresh connection is opened (and closed) per operation.
        self._connections = connection_factory
        self._log = logger or logging.getLogger("borealis.engine.repositories.sites")

    def list_sites(self) -> List[SiteSummary]:
        """Return all sites ordered by name, each with its device count."""
        with closing(self._connections()) as conn:
            cur = conn.cursor()
            cur.execute(
                """
                SELECT s.id, s.name, s.description, s.created_at,
                       COALESCE(ds.cnt, 0) AS device_count
                FROM sites s
                LEFT JOIN (
                    SELECT site_id, COUNT(*) AS cnt
                    FROM device_sites
                    GROUP BY site_id
                ) ds
                ON ds.site_id = s.id
                ORDER BY LOWER(s.name) ASC
                """
            )
            rows = cur.fetchall()
            return [self._row_to_site(row) for row in rows]

    def create_site(self, name: str, description: str) -> SiteSummary:
        """Insert a site and return its summary (device count starts at 0).

        Raises:
            ValueError: "duplicate" when the name already exists.
            RuntimeError: if the inserted row cannot be re-read.
        """
        now = int(time.time())
        with closing(self._connections()) as conn:
            cur = conn.cursor()
            try:
                cur.execute(
                    "INSERT INTO sites(name, description, created_at) VALUES (?, ?, ?)",
                    (name, description, now),
                )
            except sqlite3.IntegrityError as exc:
                raise ValueError("duplicate") from exc
            site_id = cur.lastrowid
            conn.commit()
            # A brand-new site has no assignments; select a literal 0 count.
            cur.execute(
                "SELECT id, name, description, created_at, 0 FROM sites WHERE id = ?",
                (site_id,),
            )
            row = cur.fetchone()
            if not row:
                raise RuntimeError("site not found after insert")
            return self._row_to_site(row)

    def delete_sites(self, ids: Sequence[int]) -> int:
        """Delete sites (and their device assignments) by id.

        Returns the number of site rows removed; rolls back on any database
        error so the two deletes stay consistent.
        """
        if not ids:
            return 0
        with closing(self._connections()) as conn:
            cur = conn.cursor()
            placeholders = ",".join("?" for _ in ids)
            try:
                # Remove assignments first, then the sites themselves.
                cur.execute(
                    f"DELETE FROM device_sites WHERE site_id IN ({placeholders})",
                    tuple(ids),
                )
                cur.execute(
                    f"DELETE FROM sites WHERE id IN ({placeholders})",
                    tuple(ids),
                )
            except sqlite3.DatabaseError as exc:
                conn.rollback()
                raise
            # rowcount reflects the last execute, i.e. the sites delete.
            deleted = cur.rowcount
            conn.commit()
            return deleted

    def rename_site(self, site_id: int, new_name: str) -> SiteSummary:
        """Rename a site and return its refreshed summary.

        Raises:
            ValueError: "duplicate" when the new name already exists.
            LookupError: "not_found" when the id does not exist.
        """
        with closing(self._connections()) as conn:
            cur = conn.cursor()
            try:
                cur.execute("UPDATE sites SET name = ? WHERE id = ?", (new_name, site_id))
            except sqlite3.IntegrityError as exc:
                raise ValueError("duplicate") from exc
            if cur.rowcount == 0:
                raise LookupError("not_found")
            conn.commit()
            cur.execute(
                """
                SELECT s.id, s.name, s.description, s.created_at,
                       COALESCE(ds.cnt, 0) AS device_count
                FROM sites s
                LEFT JOIN (
                    SELECT site_id, COUNT(*) AS cnt
                    FROM device_sites
                    GROUP BY site_id
                ) ds
                ON ds.site_id = s.id
                WHERE s.id = ?
                """,
                (site_id,),
            )
            row = cur.fetchone()
            if not row:
                raise LookupError("not_found")
            return self._row_to_site(row)

    def map_devices(self, hostnames: Optional[Iterable[str]] = None) -> Dict[str, SiteDeviceMapping]:
        """Map device hostnames to their site assignment.

        Args:
            hostnames: Optional subset to look up (blank entries dropped);
                when omitted, every assignment is returned.

        Returns:
            A dict keyed by hostname; unassigned devices are absent.
        """
        with closing(self._connections()) as conn:
            cur = conn.cursor()
            if hostnames:
                normalized = [hn.strip() for hn in hostnames if hn and hn.strip()]
                if not normalized:
                    return {}
                placeholders = ",".join("?" for _ in normalized)
                cur.execute(
                    f"""
                    SELECT ds.device_hostname, s.id, s.name
                    FROM device_sites ds
                    INNER JOIN sites s ON s.id = ds.site_id
                    WHERE ds.device_hostname IN ({placeholders})
                    """,
                    tuple(normalized),
                )
            else:
                cur.execute(
                    """
                    SELECT ds.device_hostname, s.id, s.name
                    FROM device_sites ds
                    INNER JOIN sites s ON s.id = ds.site_id
                    """
                )
            rows = cur.fetchall()
            mapping: Dict[str, SiteDeviceMapping] = {}
            for hostname, site_id, site_name in rows:
                mapping[str(hostname)] = SiteDeviceMapping(
                    hostname=str(hostname),
                    site_id=int(site_id) if site_id is not None else None,
                    site_name=str(site_name or ""),
                )
            return mapping

    def assign_devices(self, site_id: int, hostnames: Sequence[str]) -> None:
        """Assign hostnames to a site, moving them if already assigned.

        Raises:
            LookupError: "not_found" when the site id does not exist.
        """
        now = int(time.time())
        normalized = [hn.strip() for hn in hostnames if isinstance(hn, str) and hn.strip()]
        if not normalized:
            return
        with closing(self._connections()) as conn:
            cur = conn.cursor()
            # Validate the target site before writing any assignments.
            cur.execute("SELECT 1 FROM sites WHERE id = ?", (site_id,))
            if not cur.fetchone():
                raise LookupError("not_found")
            for hostname in normalized:
                # One site per device: a conflicting row is re-pointed.
                cur.execute(
                    """
                    INSERT INTO device_sites(device_hostname, site_id, assigned_at)
                    VALUES (?, ?, ?)
                    ON CONFLICT(device_hostname)
                    DO UPDATE SET site_id = excluded.site_id,
                                  assigned_at = excluded.assigned_at
                    """,
                    (hostname, site_id, now),
                )
            conn.commit()

    def _row_to_site(self, row: Sequence[object]) -> SiteSummary:
        """Build a SiteSummary from an (id, name, description, created_at, count) row."""
        return SiteSummary(
            id=int(row[0]),
            name=str(row[1] or ""),
            description=str(row[2] or ""),
            created_at=int(row[3] or 0),
            device_count=int(row[4] or 0),
        )

View File

@@ -24,6 +24,10 @@ __all__ = [
"GitHubService", "GitHubService",
"GitHubTokenPayload", "GitHubTokenPayload",
"EnrollmentAdminService", "EnrollmentAdminService",
"SiteService",
"DeviceInventoryService",
"DeviceViewService",
"CredentialService",
] ]
_LAZY_TARGETS: Dict[str, Tuple[str, str]] = { _LAZY_TARGETS: Dict[str, Tuple[str, str]] = {
@@ -48,6 +52,19 @@ _LAZY_TARGETS: Dict[str, Tuple[str, str]] = {
"Data.Engine.services.enrollment.admin_service", "Data.Engine.services.enrollment.admin_service",
"EnrollmentAdminService", "EnrollmentAdminService",
), ),
"SiteService": ("Data.Engine.services.sites.site_service", "SiteService"),
"DeviceInventoryService": (
"Data.Engine.services.devices.device_inventory_service",
"DeviceInventoryService",
),
"DeviceViewService": (
"Data.Engine.services.devices.device_view_service",
"DeviceViewService",
),
"CredentialService": (
"Data.Engine.services.credentials.credential_service",
"CredentialService",
),
} }

View File

@@ -13,10 +13,14 @@ from Data.Engine.integrations.github import GitHubArtifactProvider
from Data.Engine.repositories.sqlite import ( from Data.Engine.repositories.sqlite import (
SQLiteConnectionFactory, SQLiteConnectionFactory,
SQLiteDeviceRepository, SQLiteDeviceRepository,
SQLiteDeviceInventoryRepository,
SQLiteDeviceViewRepository,
SQLiteCredentialRepository,
SQLiteEnrollmentRepository, SQLiteEnrollmentRepository,
SQLiteGitHubRepository, SQLiteGitHubRepository,
SQLiteJobRepository, SQLiteJobRepository,
SQLiteRefreshTokenRepository, SQLiteRefreshTokenRepository,
SQLiteSiteRepository,
SQLiteUserRepository, SQLiteUserRepository,
) )
from Data.Engine.services.auth import ( from Data.Engine.services.auth import (
@@ -32,10 +36,14 @@ from Data.Engine.services.crypto.signing import ScriptSigner, load_signer
from Data.Engine.services.enrollment import EnrollmentService from Data.Engine.services.enrollment import EnrollmentService
from Data.Engine.services.enrollment.admin_service import EnrollmentAdminService from Data.Engine.services.enrollment.admin_service import EnrollmentAdminService
from Data.Engine.services.enrollment.nonce_cache import NonceCache from Data.Engine.services.enrollment.nonce_cache import NonceCache
from Data.Engine.services.devices import DeviceInventoryService
from Data.Engine.services.devices import DeviceViewService
from Data.Engine.services.credentials import CredentialService
from Data.Engine.services.github import GitHubService from Data.Engine.services.github import GitHubService
from Data.Engine.services.jobs import SchedulerService from Data.Engine.services.jobs import SchedulerService
from Data.Engine.services.rate_limit import SlidingWindowRateLimiter from Data.Engine.services.rate_limit import SlidingWindowRateLimiter
from Data.Engine.services.realtime import AgentRealtimeService from Data.Engine.services.realtime import AgentRealtimeService
from Data.Engine.services.sites import SiteService
__all__ = ["EngineServiceContainer", "build_service_container"] __all__ = ["EngineServiceContainer", "build_service_container"]
@@ -43,9 +51,13 @@ __all__ = ["EngineServiceContainer", "build_service_container"]
@dataclass(frozen=True, slots=True) @dataclass(frozen=True, slots=True)
class EngineServiceContainer: class EngineServiceContainer:
device_auth: DeviceAuthService device_auth: DeviceAuthService
device_inventory: DeviceInventoryService
device_view_service: DeviceViewService
credential_service: CredentialService
token_service: TokenService token_service: TokenService
enrollment_service: EnrollmentService enrollment_service: EnrollmentService
enrollment_admin_service: EnrollmentAdminService enrollment_admin_service: EnrollmentAdminService
site_service: SiteService
jwt_service: JWTService jwt_service: JWTService
dpop_validator: DPoPValidator dpop_validator: DPoPValidator
agent_realtime: AgentRealtimeService agent_realtime: AgentRealtimeService
@@ -64,10 +76,20 @@ def build_service_container(
log = logger or logging.getLogger("borealis.engine.services") log = logger or logging.getLogger("borealis.engine.services")
device_repo = SQLiteDeviceRepository(db_factory, logger=log.getChild("devices")) device_repo = SQLiteDeviceRepository(db_factory, logger=log.getChild("devices"))
device_inventory_repo = SQLiteDeviceInventoryRepository(
db_factory, logger=log.getChild("devices.inventory")
)
device_view_repo = SQLiteDeviceViewRepository(
db_factory, logger=log.getChild("devices.views")
)
credential_repo = SQLiteCredentialRepository(
db_factory, logger=log.getChild("credentials.repo")
)
token_repo = SQLiteRefreshTokenRepository(db_factory, logger=log.getChild("tokens")) token_repo = SQLiteRefreshTokenRepository(db_factory, logger=log.getChild("tokens"))
enrollment_repo = SQLiteEnrollmentRepository(db_factory, logger=log.getChild("enrollment")) enrollment_repo = SQLiteEnrollmentRepository(db_factory, logger=log.getChild("enrollment"))
job_repo = SQLiteJobRepository(db_factory, logger=log.getChild("jobs")) job_repo = SQLiteJobRepository(db_factory, logger=log.getChild("jobs"))
github_repo = SQLiteGitHubRepository(db_factory, logger=log.getChild("github_repo")) github_repo = SQLiteGitHubRepository(db_factory, logger=log.getChild("github_repo"))
site_repo = SQLiteSiteRepository(db_factory, logger=log.getChild("sites.repo"))
user_repo = SQLiteUserRepository(db_factory, logger=log.getChild("users")) user_repo = SQLiteUserRepository(db_factory, logger=log.getChild("users"))
jwt_service = load_jwt_service() jwt_service = load_jwt_service()
@@ -128,6 +150,22 @@ def build_service_container(
repository=user_repo, repository=user_repo,
logger=log.getChild("operator_accounts"), logger=log.getChild("operator_accounts"),
) )
device_inventory = DeviceInventoryService(
repository=device_inventory_repo,
logger=log.getChild("device_inventory"),
)
device_view_service = DeviceViewService(
repository=device_view_repo,
logger=log.getChild("device_views"),
)
credential_service = CredentialService(
repository=credential_repo,
logger=log.getChild("credentials"),
)
site_service = SiteService(
repository=site_repo,
logger=log.getChild("sites"),
)
github_provider = GitHubArtifactProvider( github_provider = GitHubArtifactProvider(
cache_file=settings.github.cache_file, cache_file=settings.github.cache_file,
@@ -155,6 +193,10 @@ def build_service_container(
github_service=github_service, github_service=github_service,
operator_auth_service=operator_auth_service, operator_auth_service=operator_auth_service,
operator_account_service=operator_account_service, operator_account_service=operator_account_service,
device_inventory=device_inventory,
device_view_service=device_view_service,
credential_service=credential_service,
site_service=site_service,
) )

View File

@@ -0,0 +1,3 @@
from .credential_service import CredentialService
__all__ = ["CredentialService"]

View File

@@ -0,0 +1,29 @@
"""Expose read access to stored credentials."""
from __future__ import annotations
import logging
from typing import List, Optional
from Data.Engine.repositories.sqlite.credential_repository import SQLiteCredentialRepository
__all__ = ["CredentialService"]
class CredentialService:
    """Read-only facade over the stored-credential repository."""

    def __init__(
        self,
        repository: SQLiteCredentialRepository,
        *,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        if logger is None:
            logger = logging.getLogger("borealis.engine.services.credentials")
        self._log = logger
        self._repo = repository

    def list_credentials(
        self,
        *,
        site_id: Optional[int] = None,
        connection_type: Optional[str] = None,
    ) -> List[dict]:
        """Return stored credentials, optionally filtered by site and/or type."""
        return self._repo.list_credentials(
            site_id=site_id,
            connection_type=connection_type,
        )

View File

@@ -0,0 +1,4 @@
from .device_inventory_service import DeviceInventoryService, RemoteDeviceError
from .device_view_service import DeviceViewService
__all__ = ["DeviceInventoryService", "RemoteDeviceError", "DeviceViewService"]

View File

@@ -0,0 +1,178 @@
"""Mirrors the legacy device inventory HTTP behaviour."""
from __future__ import annotations
import logging
import sqlite3
from typing import Dict, List, Optional
from Data.Engine.repositories.sqlite.device_inventory_repository import (
SQLiteDeviceInventoryRepository,
)
__all__ = ["DeviceInventoryService", "RemoteDeviceError"]
class RemoteDeviceError(Exception):
    """Error raised by remote-device operations.

    Carries a machine-readable ``code`` for HTTP-layer translation; the
    human-readable message falls back to the code when none is supplied.
    """

    def __init__(self, code: str, message: Optional[str] = None) -> None:
        # Keep the short code for programmatic handling by callers.
        self.code = code
        super().__init__(message or code)
class DeviceInventoryService:
    """Mirrors the legacy device-inventory HTTP behaviour over the repository."""

    def __init__(
        self,
        repository: SQLiteDeviceInventoryRepository,
        *,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        self._repo = repository
        self._log = logger or logging.getLogger("borealis.engine.services.devices")

    def list_devices(self) -> List[Dict[str, object]]:
        """Return every known device payload."""
        return self._repo.fetch_devices()

    def list_agent_devices(self) -> List[Dict[str, object]]:
        """Return only agent-managed devices (rows without a connection type)."""
        return self._repo.fetch_devices(only_agents=True)

    def list_remote_devices(self, connection_type: str) -> List[Dict[str, object]]:
        """Return devices with the given remote connection type (e.g. "ssh")."""
        return self._repo.fetch_devices(connection_type=connection_type)

    def get_device_by_guid(self, guid: str) -> Optional[Dict[str, object]]:
        """Resolve a device by GUID and return its full listing payload.

        The snapshot lookup only yields the raw row, so the device is
        re-fetched by hostname to pick up site and status fields.
        """
        snapshot = self._repo.load_snapshot(guid=guid)
        if not snapshot:
            return None
        devices = self._repo.fetch_devices(hostname=snapshot.get("hostname"))
        return devices[0] if devices else None

    def collect_agent_hash_records(self) -> List[Dict[str, object]]:
        """Build one agent-hash record per logical agent.

        Devices sharing any identity key (agent id, agent GUID, or hostname,
        compared case-insensitively) are merged into a single record; for
        each field the first non-empty value wins. Devices with no identity
        at all are kept as standalone records.
        """
        records: List[Dict[str, object]] = []
        key_to_index: Dict[str, int] = {}
        for device in self._repo.fetch_devices():
            summary = device.get("summary", {}) if isinstance(device, dict) else {}
            agent_id = (summary.get("agent_id") or "").strip()
            agent_guid = (summary.get("agent_guid") or "").strip()
            hostname = (summary.get("hostname") or device.get("hostname") or "").strip()
            agent_hash = (summary.get("agent_hash") or device.get("agent_hash") or "").strip()
            keys: List[str] = []
            if agent_id:
                keys.append(f"id:{agent_id.lower()}")
            if agent_guid:
                keys.append(f"guid:{agent_guid.lower()}")
            if hostname:
                keys.append(f"host:{hostname.lower()}")
            payload = {
                "agent_id": agent_id or None,
                "agent_guid": agent_guid or None,
                "hostname": hostname or None,
                "agent_hash": agent_hash or None,
                "source": "database",
            }
            if not keys:
                # No identity at all: keep as an unmergeable record.
                records.append(payload)
                continue
            existing_index = None
            for key in keys:
                if key in key_to_index:
                    existing_index = key_to_index[key]
                    break
            if existing_index is None:
                # First sighting of this identity set: register all its keys.
                existing_index = len(records)
                records.append(payload)
                for key in keys:
                    key_to_index[key] = existing_index
                continue
            # Merge into the previously seen record; first non-empty wins.
            merged = records[existing_index]
            for key in ("agent_id", "agent_guid", "hostname", "agent_hash"):
                if not merged.get(key) and payload.get(key):
                    merged[key] = payload[key]
        return records

    def upsert_remote_device(
        self,
        connection_type: str,
        hostname: str,
        address: Optional[str],
        description: Optional[str],
        os_hint: Optional[str],
        *,
        ensure_existing_type: Optional[str],
    ) -> Dict[str, object]:
        """Create or update an SSH/WinRM remote device entry.

        Args:
            connection_type: Remote type ("ssh"/"winrm"), normalised to lower
                case.
            hostname: Device hostname; required.
            address: Connection endpoint; required unless the stored row
                already carries one.
            description: Replacement description; None keeps the stored one.
            os_hint: Optional operating-system label; None keeps the stored one.
            ensure_existing_type: When set, the device must already exist with
                this connection type (update-only); when None, an existing row
                of a different type is a conflict.

        Returns:
            The freshly reloaded device payload.

        Raises:
            RemoteDeviceError: with code invalid_type, invalid_hostname,
                not_found, conflict, address_required, storage_error, or
                reload_failed.
        """
        normalized_type = (connection_type or "").strip().lower()
        if not normalized_type:
            raise RemoteDeviceError("invalid_type", "connection type required")
        normalized_host = (hostname or "").strip()
        if not normalized_host:
            raise RemoteDeviceError("invalid_hostname", "hostname is required")
        existing = self._repo.load_snapshot(hostname=normalized_host)
        existing_type = (existing or {}).get("summary", {}).get("connection_type") or ""
        existing_type = existing_type.strip().lower()
        if ensure_existing_type and existing_type != ensure_existing_type.lower():
            raise RemoteDeviceError("not_found", "device not found")
        if ensure_existing_type is None and existing_type and existing_type != normalized_type:
            raise RemoteDeviceError("conflict", "device already exists with different connection type")
        created_ts = None
        if existing:
            # Preserve the original creation timestamp across updates.
            created_ts = existing.get("summary", {}).get("created_at")
        endpoint = (address or "").strip() or (existing or {}).get("summary", {}).get("connection_endpoint") or ""
        if not endpoint:
            raise RemoteDeviceError("address_required", "address is required")
        description_val = description if description is not None else (existing or {}).get("summary", {}).get("description")
        os_value = os_hint or (existing or {}).get("summary", {}).get("operating_system")
        os_value = (os_value or "").strip()
        device_type_label = "SSH Remote" if normalized_type == "ssh" else "WinRM Remote"
        summary_payload = {
            "connection_type": normalized_type,
            "connection_endpoint": endpoint,
            # Remote devices expose the endpoint as both addresses.
            "internal_ip": endpoint,
            "external_ip": endpoint,
            "device_type": device_type_label,
            "operating_system": os_value or "",
            "last_seen": 0,
            "description": (description_val or ""),
        }
        try:
            self._repo.upsert_device(
                normalized_host,
                description_val,
                {"summary": summary_payload},
                created_ts,
            )
        except sqlite3.DatabaseError as exc:
            raise RemoteDeviceError("storage_error", str(exc)) from exc
        except Exception as exc:  # pragma: no cover - defensive
            raise RemoteDeviceError("storage_error", str(exc)) from exc
        devices = self._repo.fetch_devices(hostname=normalized_host)
        if not devices:
            raise RemoteDeviceError("reload_failed", "failed to load device after upsert")
        return devices[0]

    def delete_remote_device(self, connection_type: str, hostname: str) -> None:
        """Delete a remote device after verifying its connection type matches.

        Raises:
            RemoteDeviceError: invalid_hostname for a blank hostname;
                not_found when the device is absent or its stored type does
                not match ``connection_type``.
        """
        normalized_host = (hostname or "").strip()
        if not normalized_host:
            raise RemoteDeviceError("invalid_hostname", "invalid hostname")
        existing = self._repo.load_snapshot(hostname=normalized_host)
        if not existing:
            raise RemoteDeviceError("not_found", "device not found")
        existing_type = (existing.get("summary", {}) or {}).get("connection_type") or ""
        if (existing_type or "").strip().lower() != (connection_type or "").strip().lower():
            raise RemoteDeviceError("not_found", "device not found")
        self._repo.delete_device_by_hostname(normalized_host)

View File

@@ -0,0 +1,73 @@
"""Service exposing CRUD for saved device list views."""
from __future__ import annotations
import logging
from typing import List, Optional
from Data.Engine.domain.device_views import DeviceListView
from Data.Engine.repositories.sqlite.device_view_repository import SQLiteDeviceViewRepository
__all__ = ["DeviceViewService"]
class DeviceViewService:
    """Validation layer over the saved device-list-view repository.

    Blank names and the reserved built-in "Default View" name are rejected
    before anything touches storage.
    """

    def __init__(
        self,
        repository: SQLiteDeviceViewRepository,
        *,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        self._repo = repository
        self._log = logger or logging.getLogger("borealis.engine.services.device_views")

    def list_views(self) -> List[DeviceListView]:
        """Return every saved view."""
        return self._repo.list_views()

    def get_view(self, view_id: int) -> Optional[DeviceListView]:
        """Return one view by id, or ``None`` when absent."""
        return self._repo.get_view(view_id)

    def create_view(self, name: str, columns: List[str], filters: dict) -> DeviceListView:
        """Create a view after validating its name.

        Raises:
            ValueError: "missing_name" for a blank name, "reserved" when the
                name collides with the built-in default view, or "duplicate"
                (from the repository) on a name clash.
        """
        cleaned = (name or "").strip()
        if not cleaned:
            raise ValueError("missing_name")
        if cleaned.lower() == "default view":
            raise ValueError("reserved")
        return self._repo.create_view(cleaned, list(columns), dict(filters))

    def update_view(
        self,
        view_id: int,
        *,
        name: Optional[str] = None,
        columns: Optional[List[str]] = None,
        filters: Optional[dict] = None,
    ) -> DeviceListView:
        """Validate and apply a partial update; at least one field is required.

        Raises:
            ValueError: "missing_name", "reserved", "invalid_columns",
                "invalid_filters", or "no_fields".
            LookupError: "not_found" (from the repository).
        """
        pending: dict = {}
        if name is not None:
            cleaned = (name or "").strip()
            if not cleaned:
                raise ValueError("missing_name")
            if cleaned.lower() == "default view":
                raise ValueError("reserved")
            pending["name"] = cleaned
        if columns is not None:
            columns_ok = isinstance(columns, list) and all(
                isinstance(col, str) for col in columns
            )
            if not columns_ok:
                raise ValueError("invalid_columns")
            pending["columns"] = list(columns)
        if filters is not None:
            if not isinstance(filters, dict):
                raise ValueError("invalid_filters")
            pending["filters"] = dict(filters)
        if not pending:
            raise ValueError("no_fields")
        return self._repo.update_view(
            view_id,
            name=pending.get("name"),
            columns=pending.get("columns"),
            filters=pending.get("filters"),
        )

    def delete_view(self, view_id: int) -> bool:
        """Delete a view; ``True`` when something was removed."""
        return self._repo.delete_view(view_id)

View File

@@ -0,0 +1,3 @@
from .site_service import SiteService
__all__ = ["SiteService"]

View File

@@ -0,0 +1,73 @@
"""Site management service that mirrors the legacy Flask behaviour."""
from __future__ import annotations
import logging
from typing import Dict, Iterable, List, Optional
from Data.Engine.domain.sites import SiteDeviceMapping, SiteSummary
from Data.Engine.repositories.sqlite.site_repository import SQLiteSiteRepository
__all__ = ["SiteService"]
class SiteService:
    """Site management service that mirrors the legacy Flask behaviour.

    Validates and normalises inputs before delegating to the repository.
    Repository errors (``ValueError("duplicate")``, ``LookupError("not_found")``)
    are propagated unchanged: the previous code caught them only to re-raise
    an identical exception, which added nothing and obscured tracebacks.
    """

    def __init__(self, repository: SQLiteSiteRepository, *, logger: Optional[logging.Logger] = None) -> None:
        self._repo = repository
        self._log = logger or logging.getLogger("borealis.engine.services.sites")

    def list_sites(self) -> List[SiteSummary]:
        """Return every site with its device count."""
        return self._repo.list_sites()

    def create_site(self, name: str, description: str) -> SiteSummary:
        """Create a site after trimming its name and description.

        Raises:
            ValueError: "missing_name" when the name is blank, or
                "duplicate" (from the repository) when it already exists.
        """
        normalized_name = (name or "").strip()
        normalized_description = (description or "").strip()
        if not normalized_name:
            raise ValueError("missing_name")
        return self._repo.create_site(normalized_name, normalized_description)

    def delete_sites(self, ids: Iterable[int]) -> int:
        """Delete sites by id, returning the number removed.

        Non-numeric entries are silently skipped; when nothing usable
        remains, the repository is not touched and 0 is returned.
        """
        normalized: List[int] = []
        for value in ids:
            try:
                normalized.append(int(value))
            except Exception:
                continue
        if not normalized:
            return 0
        return self._repo.delete_sites(tuple(normalized))

    def rename_site(self, site_id: int, new_name: str) -> SiteSummary:
        """Rename a site.

        Raises:
            ValueError: "missing_name" for a blank name, or "duplicate"
                (from the repository) on a name clash.
            LookupError: "not_found" (from the repository) for an unknown id.
        """
        normalized_name = (new_name or "").strip()
        if not normalized_name:
            raise ValueError("missing_name")
        return self._repo.rename_site(int(site_id), normalized_name)

    def map_devices(self, hostnames: Optional[Iterable[str]] = None) -> Dict[str, SiteDeviceMapping]:
        """Map hostnames to their site assignment (all assignments when None)."""
        return self._repo.map_devices(hostnames)

    def assign_devices(self, site_id: int, hostnames: Iterable[str]) -> None:
        """Assign the given hostnames to a site.

        Raises:
            ValueError: "invalid_site_id" for a non-numeric id, or
                "invalid_hostnames" when no usable hostname was provided.
            LookupError: "not_found" (from the repository) for an unknown site.
        """
        try:
            numeric_id = int(site_id)
        except Exception as exc:
            raise ValueError("invalid_site_id") from exc
        normalized = [hn for hn in hostnames if isinstance(hn, str) and hn.strip()]
        if not normalized:
            raise ValueError("invalid_hostnames")
        self._repo.assign_devices(numeric_id, normalized)

View File

@@ -0,0 +1,108 @@
import sqlite3
from datetime import datetime, timezone
import pytest
pytest.importorskip("flask")
from .test_http_auth import _login, prepared_app, engine_settings
def _ensure_admin_session(client):
    # Log the test client in via the shared helper from test_http_auth so
    # subsequent requests carry an authenticated (admin) session -- the
    # admin-ness comes from _login's fixture setup, not from this wrapper.
    _login(client)
def test_sites_crud_flow(prepared_app):
    """Exercise the full site lifecycle: list, create, assign, rename, delete."""
    client = prepared_app.test_client()
    _ensure_admin_session(client)

    # The store starts empty.
    initial = client.get("/api/sites")
    assert initial.status_code == 200
    assert initial.get_json() == {"sites": []}

    # Create a site and confirm it appears in the listing.
    create_resp = client.post("/api/sites", json={"name": "HQ", "description": "Primary"})
    assert create_resp.status_code == 201
    site = create_resp.get_json()
    assert site["name"] == "HQ"
    assert len(client.get("/api/sites").get_json()["sites"]) == 1

    # Assign a device, then read the mapping back.
    assign_resp = client.post(
        "/api/sites/assign", json={"site_id": site["id"], "hostnames": ["device-1"]}
    )
    assert assign_resp.status_code == 200
    device_map = client.get("/api/sites/device_map?hostnames=device-1").get_json()["mapping"]
    assert device_map["device-1"]["site_id"] == site["id"]

    # Rename and finally delete the site.
    rename_resp = client.post("/api/sites/rename", json={"id": site["id"], "new_name": "Main"})
    assert rename_resp.status_code == 200
    assert rename_resp.get_json()["name"] == "Main"

    delete_resp = client.post("/api/sites/delete", json={"ids": [site["id"]]})
    assert delete_resp.status_code == 200
    assert delete_resp.get_json()["deleted"] == 1
def test_devices_listing(prepared_app, engine_settings):
    """Seed one device row directly into SQLite and confirm /api/devices returns it."""
    client = prepared_app.test_client()
    _ensure_admin_session(client)
    now = datetime.now(tz=timezone.utc)
    # created_at and last_seen share the same epoch-second value.
    epoch = int(now.timestamp())
    conn = sqlite3.connect(engine_settings.database.path)
    try:
        # Connection.execute creates and disposes a cursor for us.
        conn.execute(
            """
            INSERT INTO devices (
                guid,
                hostname,
                description,
                created_at,
                agent_hash,
                last_seen,
                connection_type,
                connection_endpoint
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                "11111111-1111-1111-1111-111111111111",
                "test-device",
                "Test Device",
                epoch,
                "hashvalue",
                epoch,
                "",
                "",
            ),
        )
        conn.commit()
    finally:
        # Always release the handle -- even if the INSERT fails -- so the
        # shared test database file is never left open/locked for later tests.
        conn.close()
    resp = client.get("/api/devices")
    assert resp.status_code == 200
    devices = resp.get_json()["devices"]
    assert any(device["hostname"] == "test-device" for device in devices)
def test_agent_hash_list_requires_local_request(prepared_app):
    """The agent hash-list endpoint rejects non-loopback callers with 403."""
    client = prepared_app.test_client()
    _ensure_admin_session(client)

    # A remote address is refused outright.
    remote = client.get(
        "/api/agent/hash_list", environ_overrides={"REMOTE_ADDR": "203.0.113.5"}
    )
    assert remote.status_code == 403

    # A loopback address gets the (empty) listing.
    local = client.get(
        "/api/agent/hash_list", environ_overrides={"REMOTE_ADDR": "127.0.0.1"}
    )
    assert local.status_code == 200
    assert local.get_json() == {"agents": []}
def test_credentials_list_requires_admin(prepared_app):
    """Anonymous requests are rejected with 401; a logged-in session sees the list."""
    client = prepared_app.test_client()

    anonymous = client.get("/api/credentials")
    assert anonymous.status_code == 401

    _ensure_admin_session(client)
    authed = client.get("/api/credentials")
    assert authed.status_code == 200
    assert authed.get_json() == {"credentials": []}