mirror of
https://github.com/bunny-lab-io/Borealis.git
synced 2025-10-26 15:41:58 -06:00
Add assembly endpoints and approval flows
This commit is contained in:
@@ -19,6 +19,8 @@ from . import (
|
||||
sites,
|
||||
devices,
|
||||
credentials,
|
||||
assemblies,
|
||||
server_info,
|
||||
)
|
||||
|
||||
_REGISTRARS = (
|
||||
@@ -34,6 +36,8 @@ _REGISTRARS = (
|
||||
sites.register,
|
||||
devices.register,
|
||||
credentials.register,
|
||||
assemblies.register,
|
||||
server_info.register,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -113,4 +113,61 @@ def list_device_approvals() -> object:
|
||||
return jsonify({"approvals": [record.to_dict() for record in records]})
|
||||
|
||||
|
||||
@blueprint.route("/device-approvals/<approval_id>/approve", methods=["POST"])
def approve_device_approval(approval_id: str) -> object:
    """Approve a pending device-approval record (admin only).

    Optional JSON body: ``guid`` plus ``conflict_resolution``/``resolution``.
    """
    denied = _require_admin()
    if denied:
        return denied

    body = request.get_json(silent=True) or {}
    guid = body.get("guid")
    raw_resolution = body.get("conflict_resolution") or body.get("resolution")
    resolution = None
    if isinstance(raw_resolution, str):
        resolution = raw_resolution.strip().lower()

    username = session.get("username")
    actor = username if isinstance(username, str) else None

    # Service-level ValueError codes and the HTTP status each maps to;
    # anything else is an unexpected error and is re-raised.
    error_statuses = {
        "approval_not_pending": 409,
        "conflict_resolution_required": 409,
        "invalid_guid": 400,
    }
    try:
        result = _admin_service().approve_device_approval(
            approval_id,
            actor=actor,
            guid=guid,
            conflict_resolution=resolution,
        )
    except LookupError:
        return jsonify({"error": "not_found"}), 404
    except ValueError as exc:
        code = str(exc)
        status = error_statuses.get(code)
        if status is None:
            raise
        return jsonify({"error": code}), status

    response = jsonify(result.to_dict())
    response.status_code = 200
    return response
|
||||
|
||||
|
||||
@blueprint.route("/device-approvals/<approval_id>/deny", methods=["POST"])
def deny_device_approval(approval_id: str) -> object:
    """Deny a pending device-approval record (admin only)."""
    denied = _require_admin()
    if denied:
        return denied

    username = session.get("username")
    actor = username if isinstance(username, str) else None

    try:
        result = _admin_service().deny_device_approval(approval_id, actor=actor)
    except LookupError:
        return jsonify({"error": "not_found"}), 404
    except ValueError as exc:
        # Only the "not pending" state is an expected conflict; everything
        # else indicates a programming error and propagates.
        if str(exc) != "approval_not_pending":
            raise
        return jsonify({"error": "approval_not_pending"}), 409

    return jsonify(result.to_dict())
|
||||
|
||||
|
||||
__all__ = ["register", "blueprint"]
|
||||
|
||||
182
Data/Engine/interfaces/http/assemblies.py
Normal file
182
Data/Engine/interfaces/http/assemblies.py
Normal file
@@ -0,0 +1,182 @@
|
||||
"""HTTP endpoints for assembly management."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from flask import Blueprint, Flask, current_app, jsonify, request
|
||||
|
||||
from Data.Engine.services.container import EngineServiceContainer
|
||||
|
||||
blueprint = Blueprint("engine_assemblies", __name__)
|
||||
|
||||
|
||||
def register(app: Flask, _services: EngineServiceContainer) -> None:
    """Attach the assemblies blueprint to *app* exactly once (idempotent)."""
    if "engine_assemblies" in app.blueprints:
        return
    app.register_blueprint(blueprint)
|
||||
|
||||
|
||||
def _services() -> EngineServiceContainer:
    """Fetch the engine service container registered on the current app."""
    container = current_app.extensions.get("engine_services")
    if container is None:  # pragma: no cover - defensive
        raise RuntimeError("engine services not initialized")
    return container
|
||||
|
||||
|
||||
def _assembly_service():
    """Shortcut to the assembly service on the active service container."""
    return _services().assembly_service
|
||||
|
||||
|
||||
def _value_error_response(exc: ValueError):
    """Translate a service-layer ValueError code into a 400 JSON response.

    Known codes get a human-readable message; unknown codes are echoed back
    verbatim (or "invalid request" when the exception carries no text).
    """
    messages = {
        "invalid_island": "invalid island",
        "path_required": "path required",
        "invalid_kind": "invalid kind",
        "invalid_destination": "invalid destination",
        "invalid_path": "invalid path",
        "cannot_delete_root": "cannot delete root",
    }
    code = str(exc)
    return jsonify({"error": messages.get(code, code or "invalid request")}), 400
|
||||
|
||||
|
||||
def _not_found_response(exc: FileNotFoundError):
    """Translate a service-layer FileNotFoundError code into a 404 response."""
    messages = {
        "file_not_found": "file not found",
        "folder_not_found": "folder not found",
    }
    return jsonify({"error": messages.get(str(exc), "not found")}), 404
|
||||
|
||||
|
||||
@blueprint.route("/api/assembly/list", methods=["GET"])
def list_assemblies() -> object:
    """List assemblies for the island named in the ``island`` query arg."""
    island_arg = request.args.get("island") or ""
    try:
        listing = _assembly_service().list_items(island_arg.strip())
    except ValueError as exc:
        return _value_error_response(exc)
    return jsonify(listing.to_dict())
|
||||
|
||||
|
||||
@blueprint.route("/api/assembly/load", methods=["GET"])
def load_assembly() -> object:
    """Load one assembly identified by ``island`` + ``path`` query args."""
    args = request.args
    island = (args.get("island") or "").strip()
    rel_path = (args.get("path") or "").strip()
    try:
        loaded = _assembly_service().load_item(island, rel_path)
    except ValueError as exc:
        return _value_error_response(exc)
    except FileNotFoundError as exc:
        return _not_found_response(exc)
    return jsonify(loaded.to_dict())
|
||||
|
||||
|
||||
@blueprint.route("/api/assembly/create", methods=["POST"])
def create_assembly() -> object:
    """Create a new assembly file or folder from a JSON body."""
    body = request.get_json(silent=True) or {}
    declared_type = body.get("type")
    try:
        created = _assembly_service().create_item(
            (body.get("island") or "").strip(),
            kind=(body.get("kind") or "").strip().lower(),
            rel_path=(body.get("path") or "").strip(),
            content=body.get("content"),
            item_type=declared_type if isinstance(declared_type, str) else None,
        )
    except ValueError as exc:
        return _value_error_response(exc)
    return jsonify(created.to_dict())
|
||||
|
||||
|
||||
@blueprint.route("/api/assembly/edit", methods=["POST"])
def edit_assembly() -> object:
    """Overwrite the content of an existing assembly."""
    body = request.get_json(silent=True) or {}
    declared_type = body.get("type")
    try:
        edited = _assembly_service().edit_item(
            (body.get("island") or "").strip(),
            rel_path=(body.get("path") or "").strip(),
            content=body.get("content"),
            item_type=declared_type if isinstance(declared_type, str) else None,
        )
    except ValueError as exc:
        return _value_error_response(exc)
    except FileNotFoundError as exc:
        return _not_found_response(exc)
    return jsonify(edited.to_dict())
|
||||
|
||||
|
||||
@blueprint.route("/api/assembly/rename", methods=["POST"])
def rename_assembly() -> object:
    """Rename an assembly file or folder within its parent directory."""
    body = request.get_json(silent=True) or {}
    declared_type = body.get("type")
    try:
        renamed = _assembly_service().rename_item(
            (body.get("island") or "").strip(),
            kind=(body.get("kind") or "").strip().lower(),
            rel_path=(body.get("path") or "").strip(),
            new_name=(body.get("new_name") or "").strip(),
            item_type=declared_type if isinstance(declared_type, str) else None,
        )
    except ValueError as exc:
        return _value_error_response(exc)
    except FileNotFoundError as exc:
        return _not_found_response(exc)
    return jsonify(renamed.to_dict())
|
||||
|
||||
|
||||
@blueprint.route("/api/assembly/move", methods=["POST"])
def move_assembly() -> object:
    """Move an assembly file or folder to a new relative path."""
    body = request.get_json(silent=True) or {}
    try:
        moved = _assembly_service().move_item(
            (body.get("island") or "").strip(),
            rel_path=(body.get("path") or "").strip(),
            new_path=(body.get("new_path") or "").strip(),
            kind=(body.get("kind") or "").strip().lower(),
        )
    except ValueError as exc:
        return _value_error_response(exc)
    except FileNotFoundError as exc:
        return _not_found_response(exc)
    return jsonify(moved.to_dict())
|
||||
|
||||
|
||||
@blueprint.route("/api/assembly/delete", methods=["POST"])
def delete_assembly() -> object:
    """Delete an assembly file or folder."""
    body = request.get_json(silent=True) or {}
    try:
        deleted = _assembly_service().delete_item(
            (body.get("island") or "").strip(),
            rel_path=(body.get("path") or "").strip(),
            kind=(body.get("kind") or "").strip().lower(),
        )
    except ValueError as exc:
        return _value_error_response(exc)
    except FileNotFoundError as exc:
        return _not_found_response(exc)
    return jsonify(deleted.to_dict())
|
||||
|
||||
|
||||
__all__ = ["register", "blueprint"]
|
||||
53
Data/Engine/interfaces/http/server_info.py
Normal file
53
Data/Engine/interfaces/http/server_info.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""Server metadata endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from flask import Blueprint, Flask, jsonify
|
||||
|
||||
from Data.Engine.services.container import EngineServiceContainer
|
||||
|
||||
blueprint = Blueprint("engine_server_info", __name__)
|
||||
|
||||
|
||||
def register(app: Flask, _services: EngineServiceContainer) -> None:
    """Attach the server-info blueprint to *app* exactly once (idempotent)."""
    if "engine_server_info" in app.blueprints:
        return
    app.register_blueprint(blueprint)
|
||||
|
||||
|
||||
@blueprint.route("/api/server/time", methods=["GET"])
def server_time() -> object:
    """Report the server clock as epoch, local ISO, UTC ISO, offset, and a
    human display string.

    Returns a JSON payload; takes no parameters.
    """
    # Read the clock ONCE and derive every representation from that single
    # instant. The previous implementation called datetime.now() twice, so
    # "iso" and "utc_iso" could describe two slightly different moments.
    now_utc = datetime.now(timezone.utc)
    now_local = now_utc.astimezone()
    tzinfo = now_local.tzinfo
    offset = tzinfo.utcoffset(now_local) if tzinfo else None

    def _ordinal(n: int) -> str:
        # 11th-13th are irregular; otherwise the suffix follows the last digit.
        if 11 <= (n % 100) <= 13:
            suffix = "th"
        else:
            suffix = {1: "st", 2: "nd", 3: "rd"}.get(n % 10, "th")
        return f"{n}{suffix}"

    hour12 = now_local.hour % 12 or 12
    ampm = "AM" if now_local.hour < 12 else "PM"
    display = (
        f"{now_local.strftime('%B')} {_ordinal(now_local.day)} "
        f"{now_local.strftime('%Y')} @ {hour12}:{now_local.minute:02d}{ampm}"
    )

    payload = {
        "epoch": int(now_local.timestamp()),
        "iso": now_local.isoformat(),
        "utc_iso": now_utc.isoformat().replace("+00:00", "Z"),
        "timezone": str(tzinfo) if tzinfo else "",
        "offset_seconds": int(offset.total_seconds()) if offset else 0,
        "display": display,
    }
    return jsonify(payload)
|
||||
|
||||
|
||||
__all__ = ["register", "blueprint"]
|
||||
@@ -28,6 +28,10 @@ __all__ = [
|
||||
"DeviceInventoryService",
|
||||
"DeviceViewService",
|
||||
"CredentialService",
|
||||
"AssemblyService",
|
||||
"AssemblyListing",
|
||||
"AssemblyLoadResult",
|
||||
"AssemblyMutationResult",
|
||||
]
|
||||
|
||||
_LAZY_TARGETS: Dict[str, Tuple[str, str]] = {
|
||||
@@ -65,6 +69,22 @@ _LAZY_TARGETS: Dict[str, Tuple[str, str]] = {
|
||||
"Data.Engine.services.credentials.credential_service",
|
||||
"CredentialService",
|
||||
),
|
||||
"AssemblyService": (
|
||||
"Data.Engine.services.assemblies.assembly_service",
|
||||
"AssemblyService",
|
||||
),
|
||||
"AssemblyListing": (
|
||||
"Data.Engine.services.assemblies.assembly_service",
|
||||
"AssemblyListing",
|
||||
),
|
||||
"AssemblyLoadResult": (
|
||||
"Data.Engine.services.assemblies.assembly_service",
|
||||
"AssemblyLoadResult",
|
||||
),
|
||||
"AssemblyMutationResult": (
|
||||
"Data.Engine.services.assemblies.assembly_service",
|
||||
"AssemblyMutationResult",
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
||||
10
Data/Engine/services/assemblies/__init__.py
Normal file
10
Data/Engine/services/assemblies/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Assembly management services."""
|
||||
|
||||
from .assembly_service import AssemblyService, AssemblyMutationResult, AssemblyLoadResult, AssemblyListing
|
||||
|
||||
__all__ = [
|
||||
"AssemblyService",
|
||||
"AssemblyMutationResult",
|
||||
"AssemblyLoadResult",
|
||||
"AssemblyListing",
|
||||
]
|
||||
715
Data/Engine/services/assemblies/assembly_service.py
Normal file
715
Data/Engine/services/assemblies/assembly_service.py
Normal file
@@ -0,0 +1,715 @@
|
||||
"""Filesystem-backed assembly management service."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
__all__ = [
|
||||
"AssemblyService",
|
||||
"AssemblyListing",
|
||||
"AssemblyLoadResult",
|
||||
"AssemblyMutationResult",
|
||||
]
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class AssemblyListing:
|
||||
"""Listing payload for an assembly island."""
|
||||
|
||||
root: Path
|
||||
items: List[Dict[str, Any]]
|
||||
folders: List[str]
|
||||
|
||||
def to_dict(self) -> dict[str, Any]:
|
||||
return {
|
||||
"root": str(self.root),
|
||||
"items": self.items,
|
||||
"folders": self.folders,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class AssemblyLoadResult:
|
||||
"""Container describing a loaded assembly artifact."""
|
||||
|
||||
payload: Dict[str, Any]
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
return dict(self.payload)
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class AssemblyMutationResult:
|
||||
"""Mutation acknowledgement for create/edit/rename operations."""
|
||||
|
||||
status: str = "ok"
|
||||
rel_path: Optional[str] = None
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
payload: Dict[str, Any] = {"status": self.status}
|
||||
if self.rel_path:
|
||||
payload["rel_path"] = self.rel_path
|
||||
return payload
|
||||
|
||||
|
||||
class AssemblyService:
|
||||
"""Provide CRUD helpers for workflow/script/ansible assemblies."""
|
||||
|
||||
_ISLAND_DIR_MAP = {
|
||||
"workflows": "Workflows",
|
||||
"workflow": "Workflows",
|
||||
"scripts": "Scripts",
|
||||
"script": "Scripts",
|
||||
"ansible": "Ansible_Playbooks",
|
||||
"ansible_playbooks": "Ansible_Playbooks",
|
||||
"ansible-playbooks": "Ansible_Playbooks",
|
||||
"playbooks": "Ansible_Playbooks",
|
||||
}
|
||||
|
||||
_SCRIPT_EXTENSIONS = (".json", ".ps1", ".bat", ".sh")
|
||||
_ANSIBLE_EXTENSIONS = (".json", ".yml")
|
||||
|
||||
def __init__(self, *, root: Path, logger: Optional[logging.Logger] = None) -> None:
|
||||
self._root = root.resolve()
|
||||
self._log = logger or logging.getLogger("borealis.engine.services.assemblies")
|
||||
try:
|
||||
self._root.mkdir(parents=True, exist_ok=True)
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
self._log.warning("failed to ensure assemblies root %s: %s", self._root, exc)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Public API
|
||||
# ------------------------------------------------------------------
|
||||
def list_items(self, island: str) -> AssemblyListing:
|
||||
root = self._resolve_island_root(island)
|
||||
root.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
items: List[Dict[str, Any]] = []
|
||||
folders: List[str] = []
|
||||
|
||||
isl = (island or "").strip().lower()
|
||||
if isl in {"workflows", "workflow"}:
|
||||
for dirpath, dirnames, filenames in os.walk(root):
|
||||
rel_root = os.path.relpath(dirpath, root)
|
||||
if rel_root != ".":
|
||||
folders.append(rel_root.replace(os.sep, "/"))
|
||||
for fname in filenames:
|
||||
if not fname.lower().endswith(".json"):
|
||||
continue
|
||||
abs_path = Path(dirpath) / fname
|
||||
rel_path = abs_path.relative_to(root).as_posix()
|
||||
try:
|
||||
mtime = abs_path.stat().st_mtime
|
||||
except OSError:
|
||||
mtime = 0.0
|
||||
obj = self._safe_read_json(abs_path)
|
||||
tab = self._extract_tab_name(obj)
|
||||
items.append(
|
||||
{
|
||||
"file_name": fname,
|
||||
"rel_path": rel_path,
|
||||
"type": "workflow",
|
||||
"tab_name": tab,
|
||||
"last_edited": time.strftime(
|
||||
"%Y-%m-%dT%H:%M:%S", time.localtime(mtime)
|
||||
),
|
||||
"last_edited_epoch": mtime,
|
||||
}
|
||||
)
|
||||
elif isl in {"scripts", "script"}:
|
||||
for dirpath, dirnames, filenames in os.walk(root):
|
||||
rel_root = os.path.relpath(dirpath, root)
|
||||
if rel_root != ".":
|
||||
folders.append(rel_root.replace(os.sep, "/"))
|
||||
for fname in filenames:
|
||||
if not fname.lower().endswith(self._SCRIPT_EXTENSIONS):
|
||||
continue
|
||||
abs_path = Path(dirpath) / fname
|
||||
rel_path = abs_path.relative_to(root).as_posix()
|
||||
try:
|
||||
mtime = abs_path.stat().st_mtime
|
||||
except OSError:
|
||||
mtime = 0.0
|
||||
script_type = self._detect_script_type(abs_path)
|
||||
doc = self._load_assembly_document(abs_path, "scripts", script_type)
|
||||
items.append(
|
||||
{
|
||||
"file_name": fname,
|
||||
"rel_path": rel_path,
|
||||
"type": doc.get("type", script_type),
|
||||
"name": doc.get("name"),
|
||||
"category": doc.get("category"),
|
||||
"description": doc.get("description"),
|
||||
"last_edited": time.strftime(
|
||||
"%Y-%m-%dT%H:%M:%S", time.localtime(mtime)
|
||||
),
|
||||
"last_edited_epoch": mtime,
|
||||
}
|
||||
)
|
||||
elif isl in {
|
||||
"ansible",
|
||||
"ansible_playbooks",
|
||||
"ansible-playbooks",
|
||||
"playbooks",
|
||||
}:
|
||||
for dirpath, dirnames, filenames in os.walk(root):
|
||||
rel_root = os.path.relpath(dirpath, root)
|
||||
if rel_root != ".":
|
||||
folders.append(rel_root.replace(os.sep, "/"))
|
||||
for fname in filenames:
|
||||
if not fname.lower().endswith(self._ANSIBLE_EXTENSIONS):
|
||||
continue
|
||||
abs_path = Path(dirpath) / fname
|
||||
rel_path = abs_path.relative_to(root).as_posix()
|
||||
try:
|
||||
mtime = abs_path.stat().st_mtime
|
||||
except OSError:
|
||||
mtime = 0.0
|
||||
script_type = self._detect_script_type(abs_path)
|
||||
doc = self._load_assembly_document(abs_path, "ansible", script_type)
|
||||
items.append(
|
||||
{
|
||||
"file_name": fname,
|
||||
"rel_path": rel_path,
|
||||
"type": doc.get("type", "ansible"),
|
||||
"name": doc.get("name"),
|
||||
"category": doc.get("category"),
|
||||
"description": doc.get("description"),
|
||||
"last_edited": time.strftime(
|
||||
"%Y-%m-%dT%H:%M:%S", time.localtime(mtime)
|
||||
),
|
||||
"last_edited_epoch": mtime,
|
||||
}
|
||||
)
|
||||
else:
|
||||
raise ValueError("invalid_island")
|
||||
|
||||
items.sort(key=lambda entry: entry.get("last_edited_epoch", 0.0), reverse=True)
|
||||
return AssemblyListing(root=root, items=items, folders=folders)
|
||||
|
||||
def load_item(self, island: str, rel_path: str) -> AssemblyLoadResult:
|
||||
root, abs_path, _ = self._resolve_assembly_path(island, rel_path)
|
||||
if not abs_path.is_file():
|
||||
raise FileNotFoundError("file_not_found")
|
||||
|
||||
isl = (island or "").strip().lower()
|
||||
if isl in {"workflows", "workflow"}:
|
||||
payload = self._safe_read_json(abs_path)
|
||||
return AssemblyLoadResult(payload=payload)
|
||||
|
||||
doc = self._load_assembly_document(abs_path, island)
|
||||
rel = abs_path.relative_to(root).as_posix()
|
||||
payload = {
|
||||
"file_name": abs_path.name,
|
||||
"rel_path": rel,
|
||||
"type": doc.get("type"),
|
||||
"assembly": doc,
|
||||
"content": doc.get("script"),
|
||||
}
|
||||
return AssemblyLoadResult(payload=payload)
|
||||
|
||||
def create_item(
|
||||
self,
|
||||
island: str,
|
||||
*,
|
||||
kind: str,
|
||||
rel_path: str,
|
||||
content: Any,
|
||||
item_type: Optional[str] = None,
|
||||
) -> AssemblyMutationResult:
|
||||
root, abs_path, rel_norm = self._resolve_assembly_path(island, rel_path)
|
||||
if not rel_norm:
|
||||
raise ValueError("path_required")
|
||||
|
||||
normalized_kind = (kind or "").strip().lower()
|
||||
if normalized_kind == "folder":
|
||||
abs_path.mkdir(parents=True, exist_ok=True)
|
||||
return AssemblyMutationResult()
|
||||
if normalized_kind != "file":
|
||||
raise ValueError("invalid_kind")
|
||||
|
||||
target_path = abs_path
|
||||
if not target_path.suffix:
|
||||
target_path = target_path.with_suffix(
|
||||
self._default_ext_for_island(island, item_type or "")
|
||||
)
|
||||
target_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
isl = (island or "").strip().lower()
|
||||
if isl in {"workflows", "workflow"}:
|
||||
payload = self._ensure_workflow_document(content)
|
||||
base_name = target_path.stem
|
||||
payload.setdefault("tab_name", base_name)
|
||||
self._write_json(target_path, payload)
|
||||
else:
|
||||
document = self._normalize_assembly_document(
|
||||
content,
|
||||
self._default_type_for_island(island, item_type or ""),
|
||||
target_path.stem,
|
||||
)
|
||||
self._write_json(target_path, self._prepare_assembly_storage(document))
|
||||
|
||||
rel_new = target_path.relative_to(root).as_posix()
|
||||
return AssemblyMutationResult(rel_path=rel_new)
|
||||
|
||||
def edit_item(
|
||||
self,
|
||||
island: str,
|
||||
*,
|
||||
rel_path: str,
|
||||
content: Any,
|
||||
item_type: Optional[str] = None,
|
||||
) -> AssemblyMutationResult:
|
||||
root, abs_path, _ = self._resolve_assembly_path(island, rel_path)
|
||||
if not abs_path.exists():
|
||||
raise FileNotFoundError("file_not_found")
|
||||
|
||||
target_path = abs_path
|
||||
if not target_path.suffix:
|
||||
target_path = target_path.with_suffix(
|
||||
self._default_ext_for_island(island, item_type or "")
|
||||
)
|
||||
|
||||
isl = (island or "").strip().lower()
|
||||
if isl in {"workflows", "workflow"}:
|
||||
payload = self._ensure_workflow_document(content)
|
||||
self._write_json(target_path, payload)
|
||||
else:
|
||||
document = self._normalize_assembly_document(
|
||||
content,
|
||||
self._default_type_for_island(island, item_type or ""),
|
||||
target_path.stem,
|
||||
)
|
||||
self._write_json(target_path, self._prepare_assembly_storage(document))
|
||||
|
||||
if target_path != abs_path and abs_path.exists():
|
||||
try:
|
||||
abs_path.unlink()
|
||||
except OSError: # pragma: no cover - best effort cleanup
|
||||
pass
|
||||
|
||||
rel_new = target_path.relative_to(root).as_posix()
|
||||
return AssemblyMutationResult(rel_path=rel_new)
|
||||
|
||||
def rename_item(
|
||||
self,
|
||||
island: str,
|
||||
*,
|
||||
kind: str,
|
||||
rel_path: str,
|
||||
new_name: str,
|
||||
item_type: Optional[str] = None,
|
||||
) -> AssemblyMutationResult:
|
||||
root, old_path, _ = self._resolve_assembly_path(island, rel_path)
|
||||
|
||||
normalized_kind = (kind or "").strip().lower()
|
||||
if normalized_kind not in {"file", "folder"}:
|
||||
raise ValueError("invalid_kind")
|
||||
|
||||
if normalized_kind == "folder":
|
||||
if not old_path.is_dir():
|
||||
raise FileNotFoundError("folder_not_found")
|
||||
destination = old_path.parent / new_name
|
||||
else:
|
||||
if not old_path.is_file():
|
||||
raise FileNotFoundError("file_not_found")
|
||||
candidate = Path(new_name)
|
||||
if not candidate.suffix:
|
||||
candidate = candidate.with_suffix(
|
||||
self._default_ext_for_island(island, item_type or "")
|
||||
)
|
||||
destination = old_path.parent / candidate.name
|
||||
|
||||
destination = destination.resolve()
|
||||
if not str(destination).startswith(str(root)):
|
||||
raise ValueError("invalid_destination")
|
||||
|
||||
old_path.rename(destination)
|
||||
|
||||
isl = (island or "").strip().lower()
|
||||
if normalized_kind == "file" and isl in {"workflows", "workflow"}:
|
||||
try:
|
||||
obj = self._safe_read_json(destination)
|
||||
base_name = destination.stem
|
||||
for key in ["tabName", "tab_name", "name", "title"]:
|
||||
if key in obj:
|
||||
obj[key] = base_name
|
||||
obj.setdefault("tab_name", base_name)
|
||||
self._write_json(destination, obj)
|
||||
except Exception: # pragma: no cover - best effort update
|
||||
self._log.debug("failed to normalize workflow metadata for %s", destination)
|
||||
|
||||
rel_new = destination.relative_to(root).as_posix()
|
||||
return AssemblyMutationResult(rel_path=rel_new)
|
||||
|
||||
def move_item(
|
||||
self,
|
||||
island: str,
|
||||
*,
|
||||
rel_path: str,
|
||||
new_path: str,
|
||||
kind: Optional[str] = None,
|
||||
) -> AssemblyMutationResult:
|
||||
root, old_path, _ = self._resolve_assembly_path(island, rel_path)
|
||||
_, dest_path, _ = self._resolve_assembly_path(island, new_path)
|
||||
|
||||
normalized_kind = (kind or "").strip().lower()
|
||||
if normalized_kind == "folder":
|
||||
if not old_path.is_dir():
|
||||
raise FileNotFoundError("folder_not_found")
|
||||
else:
|
||||
if not old_path.exists():
|
||||
raise FileNotFoundError("file_not_found")
|
||||
|
||||
dest_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.move(str(old_path), str(dest_path))
|
||||
return AssemblyMutationResult()
|
||||
|
||||
def delete_item(
|
||||
self,
|
||||
island: str,
|
||||
*,
|
||||
rel_path: str,
|
||||
kind: str,
|
||||
) -> AssemblyMutationResult:
|
||||
_, abs_path, rel_norm = self._resolve_assembly_path(island, rel_path)
|
||||
if not rel_norm:
|
||||
raise ValueError("cannot_delete_root")
|
||||
|
||||
normalized_kind = (kind or "").strip().lower()
|
||||
if normalized_kind == "folder":
|
||||
if not abs_path.is_dir():
|
||||
raise FileNotFoundError("folder_not_found")
|
||||
shutil.rmtree(abs_path)
|
||||
elif normalized_kind == "file":
|
||||
if not abs_path.is_file():
|
||||
raise FileNotFoundError("file_not_found")
|
||||
abs_path.unlink()
|
||||
else:
|
||||
raise ValueError("invalid_kind")
|
||||
|
||||
return AssemblyMutationResult()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ------------------------------------------------------------------
|
||||
def _resolve_island_root(self, island: str) -> Path:
|
||||
key = (island or "").strip().lower()
|
||||
subdir = self._ISLAND_DIR_MAP.get(key)
|
||||
if not subdir:
|
||||
raise ValueError("invalid_island")
|
||||
root = (self._root / subdir).resolve()
|
||||
root.mkdir(parents=True, exist_ok=True)
|
||||
return root
|
||||
|
||||
def _resolve_assembly_path(self, island: str, rel_path: str) -> Tuple[Path, Path, str]:
|
||||
root = self._resolve_island_root(island)
|
||||
rel_norm = self._normalize_relpath(rel_path)
|
||||
abs_path = (root / rel_norm).resolve()
|
||||
if not str(abs_path).startswith(str(root)):
|
||||
raise ValueError("invalid_path")
|
||||
return root, abs_path, rel_norm
|
||||
|
||||
@staticmethod
|
||||
def _normalize_relpath(value: str) -> str:
|
||||
return (value or "").replace("\\", "/").strip("/")
|
||||
|
||||
@staticmethod
|
||||
def _default_ext_for_island(island: str, item_type: str) -> str:
|
||||
isl = (island or "").strip().lower()
|
||||
if isl in {"workflows", "workflow"}:
|
||||
return ".json"
|
||||
if isl in {"ansible", "ansible_playbooks", "ansible-playbooks", "playbooks"}:
|
||||
return ".json"
|
||||
if isl in {"scripts", "script"}:
|
||||
return ".json"
|
||||
typ = (item_type or "").strip().lower()
|
||||
if typ in {"bash", "batch", "powershell"}:
|
||||
return ".json"
|
||||
return ".json"
|
||||
|
||||
@staticmethod
|
||||
def _default_type_for_island(island: str, item_type: str) -> str:
|
||||
isl = (island or "").strip().lower()
|
||||
if isl in {"ansible", "ansible_playbooks", "ansible-playbooks", "playbooks"}:
|
||||
return "ansible"
|
||||
typ = (item_type or "").strip().lower()
|
||||
if typ in {"powershell", "batch", "bash", "ansible"}:
|
||||
return typ
|
||||
return "powershell"
|
||||
|
||||
@staticmethod
|
||||
def _empty_assembly_document(default_type: str) -> Dict[str, Any]:
|
||||
return {
|
||||
"version": 1,
|
||||
"name": "",
|
||||
"description": "",
|
||||
"category": "application" if default_type.lower() == "ansible" else "script",
|
||||
"type": default_type or "powershell",
|
||||
"script": "",
|
||||
"timeout_seconds": 3600,
|
||||
"sites": {"mode": "all", "values": []},
|
||||
"variables": [],
|
||||
"files": [],
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _decode_base64_text(value: Any) -> Optional[str]:
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
return ""
|
||||
try:
|
||||
cleaned = re.sub(r"\s+", "", stripped)
|
||||
except Exception:
|
||||
cleaned = stripped
|
||||
try:
|
||||
decoded = base64.b64decode(cleaned, validate=True)
|
||||
except Exception:
|
||||
return None
|
||||
try:
|
||||
return decoded.decode("utf-8")
|
||||
except Exception:
|
||||
return decoded.decode("utf-8", errors="replace")
|
||||
|
||||
def _decode_script_content(self, value: Any, encoding_hint: str = "") -> str:
|
||||
encoding = (encoding_hint or "").strip().lower()
|
||||
if isinstance(value, str):
|
||||
if encoding in {"base64", "b64", "base-64"}:
|
||||
decoded = self._decode_base64_text(value)
|
||||
if decoded is not None:
|
||||
return decoded.replace("\r\n", "\n")
|
||||
decoded = self._decode_base64_text(value)
|
||||
if decoded is not None:
|
||||
return decoded.replace("\r\n", "\n")
|
||||
return value.replace("\r\n", "\n")
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def _encode_script_content(script_text: Any) -> str:
|
||||
if not isinstance(script_text, str):
|
||||
if script_text is None:
|
||||
script_text = ""
|
||||
else:
|
||||
script_text = str(script_text)
|
||||
normalized = script_text.replace("\r\n", "\n")
|
||||
if not normalized:
|
||||
return ""
|
||||
encoded = base64.b64encode(normalized.encode("utf-8"))
|
||||
return encoded.decode("ascii")
|
||||
|
||||
def _prepare_assembly_storage(self, document: Dict[str, Any]) -> Dict[str, Any]:
|
||||
stored: Dict[str, Any] = {}
|
||||
for key, value in (document or {}).items():
|
||||
if key == "script":
|
||||
stored[key] = self._encode_script_content(value)
|
||||
else:
|
||||
stored[key] = value
|
||||
stored["script_encoding"] = "base64"
|
||||
return stored
|
||||
|
||||
def _normalize_assembly_document(
    self,
    obj: Any,
    default_type: str,
    base_name: str,
) -> Dict[str, Any]:
    """Coerce an arbitrary payload into a well-formed assembly document.

    Starts from the empty document template and copies over only fields
    that validate; invalid or missing values keep the template defaults.
    """
    doc = self._empty_assembly_document(default_type)
    if not isinstance(obj, dict):
        obj = {}
    base = (base_name or "assembly").strip()
    doc["name"] = str(obj.get("name") or obj.get("display_name") or base)
    doc["description"] = str(obj.get("description") or "")
    category = str(obj.get("category") or doc["category"]).strip().lower()
    if category in {"script", "application"}:
        doc["category"] = category
    typ = str(obj.get("type") or obj.get("script_type") or default_type or "powershell").strip().lower()
    if typ in {"powershell", "batch", "bash", "ansible"}:
        doc["type"] = typ
    # Script body may arrive as a list of lines ("script_lines"), a
    # "script" string, or a "content" string — first match wins.
    script_val = obj.get("script")
    content_val = obj.get("content")
    script_lines = obj.get("script_lines")
    if isinstance(script_lines, list):
        try:
            doc["script"] = "\n".join(str(line) for line in script_lines)
        except Exception:
            doc["script"] = ""
    elif isinstance(script_val, str):
        doc["script"] = script_val
    elif isinstance(content_val, str):
        doc["script"] = content_val
    encoding_hint = str(
        obj.get("script_encoding") or obj.get("scriptEncoding") or ""
    ).strip().lower()
    doc["script"] = self._decode_script_content(doc.get("script"), encoding_hint)
    if encoding_hint in {"base64", "b64", "base-64"}:
        doc["script_encoding"] = "base64"
    else:
        # No explicit hint: probe the raw payload to detect base64 content.
        probe_source = ""
        if isinstance(script_val, str) and script_val:
            probe_source = script_val
        elif isinstance(content_val, str) and content_val:
            probe_source = content_val
        decoded_probe = self._decode_base64_text(probe_source) if probe_source else None
        if decoded_probe is not None:
            doc["script_encoding"] = "base64"
            doc["script"] = decoded_probe.replace("\r\n", "\n")
        else:
            doc["script_encoding"] = "plain"
    # Timeout: accept either key, clamp negatives to 0, ignore junk.
    timeout_val = obj.get("timeout_seconds", obj.get("timeout"))
    if timeout_val is not None:
        try:
            doc["timeout_seconds"] = max(0, int(timeout_val))
        except Exception:
            pass
    # Site targeting: defaults to "all" unless specific values are listed.
    sites = obj.get("sites") if isinstance(obj.get("sites"), dict) else {}
    values = sites.get("values") if isinstance(sites.get("values"), list) else []
    mode = str(sites.get("mode") or ("specific" if values else "all")).strip().lower()
    if mode not in {"all", "specific"}:
        mode = "all"
    doc["sites"] = {
        "mode": mode,
        "values": [
            str(v).strip()
            for v in values
            if isinstance(v, (str, int, float)) and str(v).strip()
        ],
    }
    # Variables: keep only named entries; unknown types fall back to "string".
    vars_in = obj.get("variables") if isinstance(obj.get("variables"), list) else []
    doc_vars: List[Dict[str, Any]] = []
    for entry in vars_in:
        if not isinstance(entry, dict):
            continue
        name = str(entry.get("name") or entry.get("key") or "").strip()
        if not name:
            continue
        vtype = str(entry.get("type") or "string").strip().lower()
        if vtype not in {"string", "number", "boolean", "credential"}:
            vtype = "string"
        default_val = entry.get("default", entry.get("default_value"))
        doc_vars.append(
            {
                "name": name,
                "label": str(entry.get("label") or ""),
                "type": vtype,
                "default": default_val,
                "required": bool(entry.get("required")),
                "description": str(entry.get("description") or ""),
            }
        )
    doc["variables"] = doc_vars
    # Attached files: require a name and string data (presumably base64 —
    # the payload is passed through untouched; confirm against callers).
    files_in = obj.get("files") if isinstance(obj.get("files"), list) else []
    doc_files: List[Dict[str, Any]] = []
    for record in files_in:
        if not isinstance(record, dict):
            continue
        fname = record.get("file_name") or record.get("name")
        data = record.get("data")
        if not fname or not isinstance(data, str):
            continue
        size_val = record.get("size")
        try:
            size_int = int(size_val)
        except Exception:
            size_int = 0
        doc_files.append(
            {
                "file_name": str(fname),
                "size": size_int,
                "mime_type": str(record.get("mime_type") or record.get("mimeType") or ""),
                "data": data,
            }
        )
    doc["files"] = doc_files
    # Version: keep the template default when the payload value is junk.
    try:
        doc["version"] = int(obj.get("version") or doc["version"])
    except Exception:
        pass
    return doc
|
||||
|
||||
def _load_assembly_document(
    self,
    abs_path: Path,
    island: str,
    type_hint: str = "",
) -> Dict[str, Any]:
    """Load an assembly document from disk.

    JSON files are parsed and normalized into the canonical document
    shape; any other file is wrapped in a fresh document whose script body
    is the raw file text (with CRLF normalized to LF).
    """
    base_name = abs_path.stem
    default_type = self._default_type_for_island(island, type_hint)
    if abs_path.suffix.lower() == ".json":
        raw = self._safe_read_json(abs_path)
        return self._normalize_assembly_document(raw, default_type, base_name)
    try:
        text = abs_path.read_text(encoding="utf-8", errors="replace")
    except Exception:
        text = ""
    document = self._empty_assembly_document(default_type)
    document["name"] = base_name
    document["script"] = (text or "").replace("\r\n", "\n")
    if default_type == "ansible":
        # Ansible payloads are surfaced under the "application" category.
        document["category"] = "application"
    return document
|
||||
|
||||
@staticmethod
|
||||
def _safe_read_json(path: Path) -> Dict[str, Any]:
|
||||
try:
|
||||
return json.loads(path.read_text(encoding="utf-8"))
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def _extract_tab_name(obj: Dict[str, Any]) -> str:
|
||||
if not isinstance(obj, dict):
|
||||
return ""
|
||||
for key in ["tabName", "tab_name", "name", "title"]:
|
||||
value = obj.get(key)
|
||||
if isinstance(value, str) and value.strip():
|
||||
return value.strip()
|
||||
return ""
|
||||
|
||||
def _detect_script_type(self, path: Path) -> str:
    """Infer a script type from a file's extension (and JSON metadata).

    JSON files are inspected for a declared "type"/"script_type" field and
    default to "powershell" when none validates; other extensions map
    directly, and unrecognized names yield "unknown".
    """
    lower = path.name.lower()
    if lower.endswith(".json") and path.is_file():
        obj = self._safe_read_json(path)
        if isinstance(obj, dict):
            declared = str(
                obj.get("type") or obj.get("script_type") or ""
            ).strip().lower()
            if declared in {"powershell", "batch", "bash", "ansible"}:
                return declared
        return "powershell"
    suffix_map = (
        (".yml", "ansible"),
        (".ps1", "powershell"),
        (".bat", "batch"),
        (".sh", "bash"),
    )
    for suffix, kind in suffix_map:
        if lower.endswith(suffix):
            return kind
    return "unknown"
|
||||
|
||||
@staticmethod
|
||||
def _ensure_workflow_document(content: Any) -> Dict[str, Any]:
|
||||
payload = content
|
||||
if isinstance(payload, str):
|
||||
try:
|
||||
payload = json.loads(payload)
|
||||
except Exception:
|
||||
payload = {}
|
||||
if not isinstance(payload, dict):
|
||||
payload = {}
|
||||
return payload
|
||||
|
||||
@staticmethod
|
||||
def _write_json(path: Path, payload: Dict[str, Any]) -> None:
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
path.write_text(json.dumps(payload, indent=2), encoding="utf-8")
|
||||
@@ -44,6 +44,7 @@ from Data.Engine.services.jobs import SchedulerService
|
||||
from Data.Engine.services.rate_limit import SlidingWindowRateLimiter
|
||||
from Data.Engine.services.realtime import AgentRealtimeService
|
||||
from Data.Engine.services.sites import SiteService
|
||||
from Data.Engine.services.assemblies import AssemblyService
|
||||
|
||||
__all__ = ["EngineServiceContainer", "build_service_container"]
|
||||
|
||||
@@ -65,6 +66,7 @@ class EngineServiceContainer:
|
||||
github_service: GitHubService
|
||||
operator_auth_service: OperatorAuthService
|
||||
operator_account_service: OperatorAccountService
|
||||
assembly_service: AssemblyService
|
||||
|
||||
|
||||
def build_service_container(
|
||||
@@ -167,6 +169,11 @@ def build_service_container(
|
||||
logger=log.getChild("sites"),
|
||||
)
|
||||
|
||||
assembly_service = AssemblyService(
|
||||
root=settings.project_root / "Assemblies",
|
||||
logger=log.getChild("assemblies"),
|
||||
)
|
||||
|
||||
github_provider = GitHubArtifactProvider(
|
||||
cache_file=settings.github.cache_file,
|
||||
default_repo=settings.github.default_repo,
|
||||
@@ -197,6 +204,7 @@ def build_service_container(
|
||||
device_view_service=device_view_service,
|
||||
credential_service=credential_service,
|
||||
site_service=site_service,
|
||||
assembly_service=assembly_service,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -8,11 +8,29 @@ import uuid
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Callable, List, Optional
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from Data.Engine.domain.device_auth import DeviceGuid, normalize_guid
|
||||
from Data.Engine.domain.device_enrollment import EnrollmentApprovalStatus
|
||||
from Data.Engine.domain.enrollment_admin import DeviceApprovalRecord, EnrollmentCodeRecord
|
||||
from Data.Engine.repositories.sqlite.enrollment_repository import SQLiteEnrollmentRepository
|
||||
from Data.Engine.repositories.sqlite.user_repository import SQLiteUserRepository
|
||||
|
||||
__all__ = ["EnrollmentAdminService"]
|
||||
__all__ = ["EnrollmentAdminService", "DeviceApprovalActionResult"]
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class DeviceApprovalActionResult:
|
||||
"""Outcome metadata returned after mutating an approval."""
|
||||
|
||||
status: str
|
||||
conflict_resolution: Optional[str] = None
|
||||
|
||||
def to_dict(self) -> dict[str, str]:
|
||||
payload = {"status": self.status}
|
||||
if self.conflict_resolution:
|
||||
payload["conflict_resolution"] = self.conflict_resolution
|
||||
return payload
|
||||
|
||||
|
||||
class EnrollmentAdminService:
|
||||
@@ -91,6 +109,36 @@ class EnrollmentAdminService:
|
||||
def list_device_approvals(self, *, status: Optional[str] = None) -> List[DeviceApprovalRecord]:
    """Return device-approval records, optionally filtered by status."""
    return self._repository.list_device_approvals(status=status)
|
||||
|
||||
def approve_device_approval(
    self,
    record_id: str,
    *,
    actor: Optional[str],
    guid: Optional[str] = None,
    conflict_resolution: Optional[str] = None,
) -> DeviceApprovalActionResult:
    """Approve a pending device approval.

    Delegates to the shared status-transition helper, which enforces the
    pending-state requirement and hostname-conflict resolution rules and
    raises LookupError/ValueError with the error codes the HTTP layer maps.
    """
    return self._set_device_approval_status(
        record_id,
        EnrollmentApprovalStatus.APPROVED,
        actor=actor,
        guid=guid,
        conflict_resolution=conflict_resolution,
    )
|
||||
|
||||
def deny_device_approval(
    self,
    record_id: str,
    *,
    actor: Optional[str],
) -> DeviceApprovalActionResult:
    """Deny a pending device approval.

    Denial never carries a guid or conflict resolution; the shared helper
    still enforces that the record exists and is pending.
    """
    return self._set_device_approval_status(
        record_id,
        EnrollmentApprovalStatus.DENIED,
        actor=actor,
        guid=None,
        conflict_resolution=None,
    )
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ------------------------------------------------------------------
|
||||
@@ -111,3 +159,87 @@ class EnrollmentAdminService:
|
||||
return 10
|
||||
return count
|
||||
|
||||
def _set_device_approval_status(
    self,
    record_id: str,
    status: EnrollmentApprovalStatus,
    *,
    actor: Optional[str],
    guid: Optional[str],
    conflict_resolution: Optional[str],
) -> DeviceApprovalActionResult:
    """Transition a pending approval to *status*, resolving hostname conflicts.

    Raises:
        LookupError("not_found"): unknown record id.
        ValueError("approval_not_pending"): record already acted on.
        ValueError("conflict_resolution_required"): hostname conflict with a
            different key fingerprint and no "overwrite"/"coexist" choice.
        ValueError("invalid_guid"): the resolved guid fails validation.
    """
    approval = self._repository.fetch_device_approval(record_id)
    if approval is None:
        raise LookupError("not_found")

    if approval.status is not EnrollmentApprovalStatus.PENDING:
        raise ValueError("approval_not_pending")

    # Prefer a caller-supplied guid; fall back to the approval's own guid.
    normalized_guid = normalize_guid(guid) or (approval.guid.value if approval.guid else "")
    resolution_normalized = (conflict_resolution or "").strip().lower() or None

    fingerprint_match = False
    conflict_guid: Optional[str] = None

    if status is EnrollmentApprovalStatus.APPROVED:
        # Re-list pending approvals so we see the computed hostname-conflict
        # metadata attached by the repository's listing view.
        pending_records = self._repository.list_device_approvals(status="pending")
        current_record = next(
            (record for record in pending_records if record.record_id == approval.record_id),
            None,
        )

        conflict = current_record.hostname_conflict if current_record else None
        if conflict:
            conflict_guid = normalize_guid(conflict.guid)
            fingerprint_match = bool(conflict.fingerprint_match)

            if fingerprint_match:
                # Same SSL key fingerprint: merge into the existing device
                # automatically, recording the implicit resolution.
                normalized_guid = conflict_guid or normalized_guid or ""
                if resolution_normalized is None:
                    resolution_normalized = "auto_merge_fingerprint"
            elif resolution_normalized == "overwrite":
                # Explicit overwrite adopts the conflicting device's guid.
                normalized_guid = conflict_guid or normalized_guid or ""
            elif resolution_normalized == "coexist":
                pass  # keep the new device separate from the conflicting one
            else:
                raise ValueError("conflict_resolution_required")

    if normalized_guid:
        try:
            guid_value = DeviceGuid(normalized_guid)
        except ValueError as exc:
            raise ValueError("invalid_guid") from exc
    else:
        guid_value = None

    # Resolve the acting user to a stable identifier; fall back to the raw
    # actor string, then to "system" when no actor is available.
    actor_identifier = None
    if actor:
        actor_identifier = self._users.resolve_identifier(actor)
        if not actor_identifier:
            actor_identifier = actor.strip() or None
    if not actor_identifier:
        actor_identifier = "system"

    self._repository.update_device_approval_status(
        approval.record_id,
        status=status,
        updated_at=self._clock(),
        approved_by=actor_identifier,
        guid=guid_value,
    )

    if status is EnrollmentApprovalStatus.APPROVED:
        self._log.info(
            "device approval %s approved resolution=%s guid=%s",
            approval.record_id,
            resolution_normalized or "",
            guid_value.value if guid_value else normalized_guid or "",
        )
    else:
        self._log.info("device approval %s denied", approval.record_id)

    return DeviceApprovalActionResult(
        status=status.value,
        conflict_resolution=resolution_normalized,
    )
|
||||
|
||||
|
||||
@@ -109,3 +109,245 @@ def test_device_approvals_listing(prepared_app, engine_settings):
|
||||
record = next(a for a in approvals if a["id"] == "approval-http")
|
||||
assert record.get("hostname_conflict", {}).get("fingerprint_match") is True
|
||||
|
||||
|
||||
def test_device_approval_requires_resolution(prepared_app, engine_settings):
    """Approving a hostname-conflicted record requires an explicit resolution.

    Seeds an active device and a pending approval that share a hostname but
    NOT a key fingerprint, then verifies: 409 without a resolution, success
    with "overwrite" (adopting the existing device's guid), and 409 on a
    repeat approval once the record is no longer pending.
    """
    client = prepared_app.test_client()
    _login(client)

    now = datetime.now(tz=timezone.utc)
    conn = sqlite3.connect(engine_settings.database.path)
    cur = conn.cursor()

    # Existing active device whose hostname will conflict with the approval.
    cur.execute(
        """
        INSERT INTO devices (
            guid,
            hostname,
            created_at,
            last_seen,
            ssl_key_fingerprint,
            status
        ) VALUES (?, ?, ?, ?, ?, 'active')
        """,
        (
            "33333333-3333-3333-3333-333333333333",
            "conflict-host",
            int(now.timestamp()),
            int(now.timestamp()),
            "existingfp",
        ),
    )

    now_iso = now.isoformat()
    # Pending approval with the same hostname but a different fingerprint.
    cur.execute(
        """
        INSERT INTO device_approvals (
            id,
            approval_reference,
            guid,
            hostname_claimed,
            ssl_key_fingerprint_claimed,
            enrollment_code_id,
            status,
            client_nonce,
            server_nonce,
            created_at,
            updated_at,
            approved_by_user_id,
            agent_pubkey_der
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
        (
            "approval-conflict",
            "REF-CONFLICT",
            None,
            "conflict-host",
            "newfinger",
            "code-conflict",
            "pending",
            base64.b64encode(b"client").decode(),
            base64.b64encode(b"server").decode(),
            now_iso,
            now_iso,
            None,
            b"pub",
        ),
    )
    conn.commit()
    conn.close()

    # No resolution supplied -> 409 conflict_resolution_required.
    resp = client.post("/api/admin/device-approvals/approval-conflict/approve", json={})
    assert resp.status_code == 409
    assert resp.get_json().get("error") == "conflict_resolution_required"

    # Explicit "overwrite" succeeds.
    resp = client.post(
        "/api/admin/device-approvals/approval-conflict/approve",
        json={"conflict_resolution": "overwrite"},
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body == {"status": "approved", "conflict_resolution": "overwrite"}

    # The approval row now carries the existing device's guid and an actor.
    conn = sqlite3.connect(engine_settings.database.path)
    cur = conn.cursor()
    cur.execute(
        "SELECT status, guid, approved_by_user_id FROM device_approvals WHERE id = ?",
        ("approval-conflict",),
    )
    row = cur.fetchone()
    conn.close()
    assert row[0] == "approved"
    assert row[1] == "33333333-3333-3333-3333-333333333333"
    assert row[2]

    # Re-approving a non-pending record is rejected.
    resp = client.post(
        "/api/admin/device-approvals/approval-conflict/approve",
        json={"conflict_resolution": "overwrite"},
    )
    assert resp.status_code == 409
    assert resp.get_json().get("error") == "approval_not_pending"
|
||||
|
||||
|
||||
def test_device_approval_auto_merge(prepared_app, engine_settings):
    """A matching SSL key fingerprint auto-merges without an explicit resolution.

    Seeds a device and a pending approval sharing hostname AND fingerprint
    ("deadbeef"); approval with no resolution should succeed and report
    "auto_merge_fingerprint", adopting the existing device's guid.
    """
    client = prepared_app.test_client()
    _login(client)

    now = datetime.now(tz=timezone.utc)
    conn = sqlite3.connect(engine_settings.database.path)
    cur = conn.cursor()

    cur.execute(
        """
        INSERT INTO devices (
            guid,
            hostname,
            created_at,
            last_seen,
            ssl_key_fingerprint,
            status
        ) VALUES (?, ?, ?, ?, ?, 'active')
        """,
        (
            "44444444-4444-4444-4444-444444444444",
            "merge-host",
            int(now.timestamp()),
            int(now.timestamp()),
            "deadbeef",
        ),
    )

    now_iso = now.isoformat()
    cur.execute(
        """
        INSERT INTO device_approvals (
            id,
            approval_reference,
            guid,
            hostname_claimed,
            ssl_key_fingerprint_claimed,
            enrollment_code_id,
            status,
            client_nonce,
            server_nonce,
            created_at,
            updated_at,
            approved_by_user_id,
            agent_pubkey_der
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
        (
            "approval-merge",
            "REF-MERGE",
            None,
            "merge-host",
            "deadbeef",
            "code-merge",
            "pending",
            base64.b64encode(b"client").decode(),
            base64.b64encode(b"server").decode(),
            now_iso,
            now_iso,
            None,
            b"pub",
        ),
    )
    conn.commit()
    conn.close()

    # No resolution supplied: fingerprint match should merge automatically.
    resp = client.post("/api/admin/device-approvals/approval-merge/approve", json={})
    assert resp.status_code == 200
    body = resp.get_json()
    assert body.get("status") == "approved"
    assert body.get("conflict_resolution") == "auto_merge_fingerprint"

    conn = sqlite3.connect(engine_settings.database.path)
    cur = conn.cursor()
    cur.execute(
        "SELECT guid, status FROM device_approvals WHERE id = ?",
        ("approval-merge",),
    )
    row = cur.fetchone()
    conn.close()
    assert row[1] == "approved"
    assert row[0] == "44444444-4444-4444-4444-444444444444"
|
||||
|
||||
|
||||
def test_device_approval_deny(prepared_app, engine_settings):
    """Denying a pending approval succeeds and persists the 'denied' status."""
    client = prepared_app.test_client()
    _login(client)

    now = datetime.now(tz=timezone.utc)
    conn = sqlite3.connect(engine_settings.database.path)
    cur = conn.cursor()

    # Pending approval with no conflicting device seeded.
    now_iso = now.isoformat()
    cur.execute(
        """
        INSERT INTO device_approvals (
            id,
            approval_reference,
            guid,
            hostname_claimed,
            ssl_key_fingerprint_claimed,
            enrollment_code_id,
            status,
            client_nonce,
            server_nonce,
            created_at,
            updated_at,
            approved_by_user_id,
            agent_pubkey_der
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
        (
            "approval-deny",
            "REF-DENY",
            None,
            "deny-host",
            "cafebabe",
            "code-deny",
            "pending",
            base64.b64encode(b"client").decode(),
            base64.b64encode(b"server").decode(),
            now_iso,
            now_iso,
            None,
            b"pub",
        ),
    )
    conn.commit()
    conn.close()

    resp = client.post("/api/admin/device-approvals/approval-deny/deny", json={})
    assert resp.status_code == 200
    # Denial responses omit conflict_resolution entirely.
    assert resp.get_json() == {"status": "denied"}

    conn = sqlite3.connect(engine_settings.database.path)
    cur = conn.cursor()
    cur.execute(
        "SELECT status FROM device_approvals WHERE id = ?",
        ("approval-deny",),
    )
    row = cur.fetchone()
    conn.close()
    assert row[0] == "denied"
|
||||
|
||||
86
Data/Engine/tests/test_http_assemblies.py
Normal file
86
Data/Engine/tests/test_http_assemblies.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import pytest
|
||||
|
||||
pytest.importorskip("flask")
|
||||
|
||||
from .test_http_auth import _login, prepared_app
|
||||
|
||||
|
||||
def test_assembly_crud_flow(prepared_app, engine_settings):
    """End-to-end assembly lifecycle: create folder/file, list, load, rename,
    move, delete, and verify the item is gone."""
    client = prepared_app.test_client()
    _login(client)

    # Create a folder to hold the assembly.
    resp = client.post(
        "/api/assembly/create",
        json={"island": "scripts", "kind": "folder", "path": "Utilities"},
    )
    assert resp.status_code == 200

    # Create a script assembly inside it.
    resp = client.post(
        "/api/assembly/create",
        json={
            "island": "scripts",
            "kind": "file",
            "path": "Utilities/sample",
            "content": {"name": "Sample", "script": "Write-Output 'Hello'", "type": "powershell"},
        },
    )
    assert resp.status_code == 200
    body = resp.get_json()
    rel_path = body.get("rel_path")
    # Files are persisted as .json documents regardless of requested name.
    assert rel_path and rel_path.endswith(".json")

    # The new file shows up in the island listing.
    resp = client.get("/api/assembly/list?island=scripts")
    assert resp.status_code == 200
    listing = resp.get_json()
    assert any(item["rel_path"] == rel_path for item in listing.get("items", []))

    # Loading round-trips the document name.
    resp = client.get(f"/api/assembly/load?island=scripts&path={rel_path}")
    assert resp.status_code == 200
    loaded = resp.get_json()
    assert loaded.get("assembly", {}).get("name") == "Sample"

    resp = client.post(
        "/api/assembly/rename",
        json={
            "island": "scripts",
            "kind": "file",
            "path": rel_path,
            "new_name": "renamed",
        },
    )
    assert resp.status_code == 200
    renamed_rel = resp.get_json().get("rel_path")
    assert renamed_rel and renamed_rel.endswith(".json")

    # Moving may create intermediate folders ("Nested").
    resp = client.post(
        "/api/assembly/move",
        json={
            "island": "scripts",
            "path": renamed_rel,
            "new_path": "Utilities/Nested/renamed.json",
            "kind": "file",
        },
    )
    assert resp.status_code == 200

    resp = client.post(
        "/api/assembly/delete",
        json={
            "island": "scripts",
            "path": "Utilities/Nested/renamed.json",
            "kind": "file",
        },
    )
    assert resp.status_code == 200

    # Deleted file no longer appears in the listing.
    resp = client.get("/api/assembly/list?island=scripts")
    remaining = resp.get_json().get("items", [])
    assert all(item["rel_path"] != "Utilities/Nested/renamed.json" for item in remaining)
|
||||
|
||||
|
||||
def test_server_time_endpoint(prepared_app):
    """The server-time endpoint (no login required) reports clock metadata."""
    response = prepared_app.test_client().get("/api/server/time")
    assert response.status_code == 200
    payload = response.get_json()
    expected_keys = {"epoch", "iso", "utc_iso", "timezone", "offset_seconds", "display"}
    assert expected_keys.issubset(payload)
|
||||
Reference in New Issue
Block a user