mirror of https://github.com/bunny-lab-io/Borealis.git
synced 2025-12-16 07:25:48 -07:00

Assembly Management Rework - Stage 5 & 6 Complete (Stage 4 Pending)
@@ -12,6 +12,8 @@
 # - POST /api/assemblies/dev-mode/switch (Token Authenticated (Admin)) - Enables or disables Dev Mode overrides for the current session.
 # - POST /api/assemblies/dev-mode/write (Token Authenticated (Admin+Dev Mode)) - Flushes queued assembly writes immediately.
 # - POST /api/assemblies/official/sync (Token Authenticated (Admin+Dev Mode)) - Rebuilds the official domain from staged JSON assemblies.
+# - POST /api/assemblies/import (Token Authenticated (Domain write permissions)) - Imports a legacy assembly JSON document into the selected domain.
+# - GET /api/assemblies/<assembly_guid>/export (Token Authenticated) - Exports an assembly as legacy JSON with metadata.
 # ======================================================

 """Assembly CRUD REST endpoints backed by AssemblyCache."""
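
To make the two new endpoints concrete, here is a minimal client sketch. It assumes a bearer-token "Authorization" header and the third-party requests library; neither the header name, the base URL, nor the token format is shown in this commit, so treat all of them as placeholders.

# Hypothetical client for the two new routes; URL, header name, and token
# are invented for illustration and not taken from this commit.
import requests

BASE = "http://127.0.0.1:5000/api/assemblies"
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credential

# Import a legacy script document into the user domain.
resp = requests.post(
    f"{BASE}/import",
    json={"domain": "user", "document": {"name": "Demo", "script": "Write-Host 'hi'"}},
    headers=HEADERS,
)
record = resp.json()  # 201 on success; includes assembly_guid and a queue snapshot

# Round-trip the same assembly back out as legacy JSON.
resp = requests.get(f"{BASE}/{record['assembly_guid']}/export", headers=HEADERS)
legacy = resp.json()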
@@ -161,6 +163,11 @@ def register_assemblies(app, adapters: "EngineServiceAdapters") -> None:
     service = AssemblyAPIService(app, adapters)
     blueprint = Blueprint("assemblies", __name__, url_prefix="/api/assemblies")

+    def _coerce_mapping(value: Any) -> Optional[Dict[str, Any]]:
+        if isinstance(value, dict):
+            return value
+        return None
+
     # ------------------------------------------------------------------
     # Collections
     # ------------------------------------------------------------------
@@ -406,6 +413,109 @@ def register_assemblies(app, adapters: "EngineServiceAdapters") -> None:
             )
         return jsonify({"error": "internal server error"}), 500

+    # ------------------------------------------------------------------
+    # Import legacy assembly JSON
+    # ------------------------------------------------------------------
+    @blueprint.route("/import", methods=["POST"])
+    def import_assembly():
+        payload = request.get_json(silent=True) or {}
+        document = payload.get("document")
+        if document is None:
+            document = payload.get("payload")
+        if document is None:
+            return jsonify({"error": "missing document"}), 400
+
+        domain = service.parse_domain(payload.get("domain")) or AssemblyDomain.USER
+        user, error = service.require_mutation_for_domain(domain)
+        pending_guid = str(payload.get("assembly_guid") or "").strip() or None
+        if error:
+            detail = error[0].get("message") or error[0].get("error") or "permission denied"
+            service._audit(
+                user=user,
+                action="import",
+                domain=domain,
+                assembly_guid=pending_guid,
+                status="denied",
+                detail=detail,
+            )
+            return jsonify(error[0]), error[1]
+
+        try:
+            record = service.runtime.import_assembly(
+                domain=domain,
+                document=document,
+                assembly_guid=pending_guid,
+                metadata_override=_coerce_mapping(payload.get("metadata")),
+                tags_override=_coerce_mapping(payload.get("tags")),
+            )
+            record["queue"] = service.runtime.queue_snapshot()
+            service._audit(
+                user=user,
+                action="import",
+                domain=domain,
+                assembly_guid=record.get("assembly_guid"),
+                status="success",
+                detail="queued",
+            )
+            return jsonify(record), 201
+        except AssemblySerializationError as exc:
+            service._audit(
+                user=user,
+                action="import",
+                domain=domain,
+                assembly_guid=pending_guid,
+                status="failed",
+                detail=str(exc),
+            )
+            return jsonify({"error": str(exc)}), 400
+        except ValueError as exc:
+            service._audit(
+                user=user,
+                action="import",
+                domain=domain,
+                assembly_guid=pending_guid,
+                status="failed",
+                detail=str(exc),
+            )
+            return jsonify({"error": str(exc)}), 400
+        except Exception:  # pragma: no cover
+            service.logger.exception("Failed to import assembly.")
+            service._audit(
+                user=user,
+                action="import",
+                domain=domain,
+                assembly_guid=pending_guid,
+                status="error",
+                detail="internal server error",
+            )
+            return jsonify({"error": "internal server error"}), 500
+
+    # ------------------------------------------------------------------
+    # Export legacy assembly JSON
+    # ------------------------------------------------------------------
+    @blueprint.route("/<string:assembly_guid>/export", methods=["GET"])
+    def export_assembly(assembly_guid: str):
+        user, error = service.require_user()
+        if error:
+            return jsonify(error[0]), error[1]
+        try:
+            data = service.runtime.export_assembly(assembly_guid)
+            data["queue"] = service.runtime.queue_snapshot()
+            service._audit(
+                user=user,
+                action="export",
+                domain=AssemblyAPIService.parse_domain(data.get("domain")),
+                assembly_guid=assembly_guid,
+                status="success",
+                detail="legacy export",
+            )
+            return jsonify(data), 200
+        except ValueError:
+            return jsonify({"error": "not found"}), 404
+        except Exception:  # pragma: no cover
+            service.logger.exception("Failed to export assembly %s.", assembly_guid)
+            return jsonify({"error": "internal server error"}), 500
+
     # ------------------------------------------------------------------
     # Dev Mode toggle
     # ------------------------------------------------------------------
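
For orientation, the export response combines the record_to_legacy_payload() fields (defined in serialization.py later in this commit) with the runtime's dirty-state fields and a queue snapshot. A hedged sketch of the shape, with every concrete value invented and the payload_type / payload_checksum formats assumed rather than taken from this commit:

# Illustrative /export response; values are made up.
example_export = {
    "assembly_guid": "3f2a0c1e-0000-0000-0000-000000000000",
    "domain": "user",
    "assembly_kind": "script",
    "assembly_type": "powershell",
    "version": 1,
    "display_name": "Disk Cleanup",
    "summary": "Frees temp space",
    "category": "script",
    "metadata": {},
    "tags": {},
    "payload": {"name": "Disk Cleanup", "script": "Write-Host 'cleaning'"},
    "payload_type": "json",           # assumption: enum values are not shown here
    "payload_guid": "3f2a0c1e-0000-0000-0000-000000000000",
    "payload_checksum": "<checksum>", # assumption about the checksum format
    "created_at": "2025-12-16T00:00:00+00:00",
    "updated_at": "2025-12-16T00:00:00+00:00",
    "is_dirty": False,
    "dirty_since": None,
    "last_persisted": "2025-12-16T00:00:00+00:00",
    "queue": {},                      # queue_snapshot() result
}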
@@ -282,7 +282,7 @@ def _add_months(dt_tuple: Tuple[int, int, int, int, int, int], months: int = 1)
     Handles month-end clamping.
     """
     from calendar import monthrange
-    from datetime import datetime
+    from datetime import datetime, timezone

     y, m, d, hh, mm, ss = dt_tuple
     m2 = m + months
@@ -292,28 +292,28 @@ def _add_months(dt_tuple: Tuple[int, int, int, int, int, int], months: int = 1)
     last_day = monthrange(y, m2)[1]
     d = min(d, last_day)
     try:
-        return int(datetime(y, m2, d, hh, mm, ss).timestamp())
+        return int(datetime(y, m2, d, hh, mm, ss, tzinfo=timezone.utc).timestamp())
     except Exception:
         # Fallback to first of month if something odd
-        return int(datetime(y, m2, 1, hh, mm, ss).timestamp())
+        return int(datetime(y, m2, 1, hh, mm, ss, tzinfo=timezone.utc).timestamp())


 def _add_years(dt_tuple: Tuple[int, int, int, int, int, int], years: int = 1) -> int:
-    from datetime import datetime
+    from datetime import datetime, timezone
     y, m, d, hh, mm, ss = dt_tuple
     y += years
     # Handle Feb 29 -> Feb 28 if needed
     try:
-        return int(datetime(y, m, d, hh, mm, ss).timestamp())
+        return int(datetime(y, m, d, hh, mm, ss, tzinfo=timezone.utc).timestamp())
     except Exception:
         # clamp day to 28
         d2 = 28 if (m == 2 and d > 28) else 1
-        return int(datetime(y, m, d2, hh, mm, ss).timestamp())
+        return int(datetime(y, m, d2, hh, mm, ss, tzinfo=timezone.utc).timestamp())


 def _to_dt_tuple(ts: int) -> Tuple[int, int, int, int, int, int]:
-    from datetime import datetime
-    dt = datetime.utcfromtimestamp(int(ts))
+    from datetime import datetime, timezone
+    dt = datetime.fromtimestamp(int(ts), tz=timezone.utc)
     return (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
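
A quick worked example of the month-end clamping, assuming the patched UTC-aware versions above: adding one month to January 31st lands on the last day of February.

# Clamping behaviour of _add_months (2024 is a leap year, so Feb has 29 days):
from datetime import datetime, timezone

ts = _add_months((2024, 1, 31, 0, 0, 0), months=1)
print(datetime.fromtimestamp(ts, tz=timezone.utc))  # 2024-02-29 00:00:00+00:00

# The tzinfo=timezone.utc change also makes results host-independent: the old
# naive datetime(...).timestamp() interpreted the tuple in the server's local zone.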
@@ -928,35 +928,37 @@ class JobScheduler:
                 "every_hour": 60 * 60,
             }
             period = period_map.get(st)
-            candidate = (last + period) if last else start_ts
-            while candidate is not None and candidate <= now_ts - 1:
-                candidate += period
+            if last is None:
+                return start_ts
+            candidate = last + period
             return candidate
         if st == "daily":
             period = 86400
-            candidate = last + period if last else start_ts
-            while candidate is not None and candidate <= now_ts - 1:
-                candidate += period
-            return candidate
+            if last is None:
+                return start_ts
+            candidate = last + period
+            return candidate if candidate <= now_ts else candidate
         if st == "weekly":
             period = 7 * 86400
-            candidate = last + period if last else start_ts
-            while candidate is not None and candidate <= now_ts - 1:
-                candidate += period
-            return candidate
+            if last is None:
+                return start_ts
+            candidate = last + period
+            return candidate if candidate <= now_ts else candidate
         if st == "monthly":
-            base = _to_dt_tuple(last) if last else _to_dt_tuple(start_ts)
-            candidate = _add_months(base, 1 if last else 0)
-            while candidate <= now_ts - 1:
-                base = _to_dt_tuple(candidate)
-                candidate = _add_months(base, 1)
+            if last is None:
+                return start_ts
+            base = _to_dt_tuple(last)
+            candidate = _add_months(base, 1)
+            if candidate <= now_ts:
+                return candidate
             return candidate
         if st == "yearly":
-            base = _to_dt_tuple(last) if last else _to_dt_tuple(start_ts)
-            candidate = _add_years(base, 1 if last else 0)
-            while candidate <= now_ts - 1:
-                base = _to_dt_tuple(candidate)
-                candidate = _add_years(base, 1)
+            if last is None:
+                return start_ts
+            base = _to_dt_tuple(last)
+            candidate = _add_years(base, 1)
+            if candidate <= now_ts:
+                return candidate
             return candidate
         return None
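The scheduler change is easiest to see with a daily schedule. Previously the code looped the candidate forward until it passed "now"; the new code returns last + period directly, so a missed slot is reported even when it is already in the past (presumably so overdue jobs fire immediately rather than being skipped). A small standalone sketch with invented timestamps:

# Old vs. new next-run computation for a daily job (timestamps invented).
period = 86400
last = 1_700_000_000          # last completed run
now_ts = last + 4 * 86400     # four days later

# Old behaviour: skip ahead to the first slot at or after now.
candidate = last + period
while candidate <= now_ts - 1:
    candidate += period
print(candidate)              # last + 4 * 86400 (== now_ts here)

# New behaviour: exactly one period after the last run, even though
# that moment has already passed.
print(last + period)          # last + 1 * 86400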
Data/Engine/services/assemblies/serialization.py (new file, 240 lines)
@@ -0,0 +1,240 @@
# ======================================================
# Data\Engine\services\assemblies\serialization.py
# Description: Converts assembly records to and from legacy JSON documents for import/export.
#
# API Endpoints (if applicable): None
# ======================================================

"""Legacy assembly serialization helpers."""

from __future__ import annotations

import json
from typing import Any, Dict, Mapping, Optional, Tuple, Union

from ...assembly_management.models import AssemblyDomain, AssemblyRecord


MAX_DOCUMENT_BYTES = 1_048_576  # 1 MiB safety limit for import payloads


class AssemblySerializationError(ValueError):
    """Raised when legacy assembly serialization/deserialization fails."""


LegacyDocument = Dict[str, Any]


def record_to_legacy_payload(
    record: AssemblyRecord,
    *,
    domain: AssemblyDomain,
    payload_text: str,
) -> Dict[str, Any]:
    """Convert an assembly record into an export-friendly legacy JSON payload."""

    payload_body: Union[LegacyDocument, str]
    try:
        payload_body = json.loads(payload_text)
    except json.JSONDecodeError:
        payload_body = payload_text

    return {
        "assembly_guid": record.assembly_guid,
        "domain": domain.value,
        "assembly_kind": record.assembly_kind,
        "assembly_type": record.assembly_type,
        "version": record.version,
        "display_name": record.display_name,
        "summary": record.summary,
        "category": record.category,
        "metadata": dict(record.metadata or {}),
        "tags": dict(record.tags or {}),
        "payload": payload_body,
        "payload_type": record.payload.payload_type.value,
        "payload_guid": record.payload.assembly_guid,
        "payload_checksum": record.payload.checksum,
        "created_at": record.created_at.isoformat(),
        "updated_at": record.updated_at.isoformat(),
    }

def prepare_import_request(
    document: Union[str, Mapping[str, Any]],
    *,
    domain: AssemblyDomain,
    assembly_guid: Optional[str] = None,
    metadata_override: Optional[Mapping[str, Any]] = None,
    tags_override: Optional[Mapping[str, Any]] = None,
) -> Tuple[str, Dict[str, Any]]:
    """
    Validate a legacy assembly document and convert it into a runtime payload suitable
    for AssemblyRuntimeService create/update calls.

    Returns the resolved assembly GUID plus the payload dictionary to pass into the runtime service.
    """

    payload_json = _coerce_document(document)
    _enforce_size_limit(payload_json)
    assembly_kind = _infer_kind(payload_json)
    if assembly_kind == "unknown":
        raise AssemblySerializationError("Unable to determine assembly kind from legacy JSON document.")

    metadata = _metadata_from_document(assembly_kind, payload_json, source_path=None)
    if metadata_override:
        metadata.update({k: v for k, v in metadata_override.items() if v is not None})

    tags = dict(tags_override or {})
    display_name = _coerce_str(
        metadata.get("display_name")
        or payload_json.get("name")
        or payload_json.get("tab_name")
        or "Imported Assembly"
    )
    summary = _coerce_optional_str(metadata.get("summary") or payload_json.get("description"))
    category = _coerce_optional_str(metadata.get("category") or payload_json.get("category"))
    assembly_type = _coerce_optional_str(metadata.get("assembly_type") or payload_json.get("type"))
    version = _coerce_positive_int(payload_json.get("version"), default=1)

    resolved_guid = _coerce_guid(assembly_guid)

    payload = {
        "assembly_guid": resolved_guid,
        "domain": domain.value,
        "assembly_kind": assembly_kind,
        "display_name": display_name,
        "summary": summary,
        "category": category,
        "assembly_type": assembly_type,
        "version": version,
        "metadata": metadata,
        "tags": tags,
        "payload": payload_json,
    }

    return resolved_guid, payload
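
A worked example of the request preparation, using an invented legacy script document:

# Minimal legacy document (invented values) run through prepare_import_request.
doc = {
    "name": "Disk Cleanup",
    "description": "Frees temp space",
    "type": "powershell",
    "script": "Write-Host 'cleaning'",
}
guid, payload = prepare_import_request(doc, domain=AssemblyDomain.USER)
# guid is None (none was supplied), so the runtime will create a new record.
# payload["assembly_kind"] == "script"        inferred from the "script" key
# payload["display_name"] == "Disk Cleanup"   taken from the document's "name"
# payload["assembly_type"] == "powershell"    from "type"
# payload["version"] == 1                     default when no "version" is present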

# ----------------------------------------------------------------------
# Helpers
# ----------------------------------------------------------------------
def _coerce_document(document: Union[str, Mapping[str, Any]]) -> LegacyDocument:
    if isinstance(document, Mapping):
        return dict(document)
    if isinstance(document, str):
        try:
            value = json.loads(document)
        except json.JSONDecodeError as exc:
            raise AssemblySerializationError("Import document is not valid JSON.") from exc
        if not isinstance(value, Mapping):
            raise AssemblySerializationError("Import document must decode to a JSON object.")
        return dict(value)
    raise AssemblySerializationError("Import document must be a JSON object or string.")


def _enforce_size_limit(document: Mapping[str, Any]) -> None:
    encoded = json.dumps(document, separators=(",", ":")).encode("utf-8")
    if len(encoded) > MAX_DOCUMENT_BYTES:
        raise AssemblySerializationError(
            f"Import document exceeds maximum allowed size of {MAX_DOCUMENT_BYTES} bytes."
        )


def _infer_kind(document: Mapping[str, Any]) -> str:
    kind_hint = _coerce_optional_str(document.get("assembly_kind") or document.get("kind"))
    if kind_hint:
        lowercase = kind_hint.lower()
        if lowercase in {"script", "workflow", "ansible"}:
            return lowercase
    if "nodes" in document and "edges" in document:
        return "workflow"
    if "script" in document:
        return "script"
    if "playbook" in document or "tasks" in document or "roles" in document:
        return "ansible"
    return "unknown"
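
The inference order matters: an explicit hint beats the structural checks, and the workflow node/edge check beats the script check. A few invented documents to illustrate:

# _infer_kind resolution order (all inputs invented):
_infer_kind({"kind": "Workflow"})        # -> "workflow"  explicit hint, case-insensitive
_infer_kind({"nodes": [], "edges": []})  # -> "workflow"  graph-shaped document
_infer_kind({"script": "echo hi"})       # -> "script"
_infer_kind({"playbook": "site.yml"})    # -> "ansible"
_infer_kind({"foo": "bar"})              # -> "unknown"   import is rejected upstream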

def _metadata_from_document(kind: str, document: Mapping[str, Any], source_path: Optional[str]) -> Dict[str, Any]:
    metadata: Dict[str, Any] = {
        "source_path": source_path,
        "display_name": None,
        "summary": None,
        "category": None,
        "assembly_type": None,
    }

    if kind == "workflow":
        metadata.update(
            {
                "display_name": document.get("tab_name") or document.get("name"),
                "summary": document.get("description"),
                "category": "workflow",
                "assembly_type": "workflow",
            }
        )
    elif kind == "script":
        metadata.update(
            {
                "display_name": document.get("name") or document.get("display_name"),
                "summary": document.get("description"),
                "category": (document.get("category") or "script"),
                "assembly_type": (document.get("type") or "powershell"),
            }
        )
    elif kind == "ansible":
        metadata.update(
            {
                "display_name": document.get("name") or document.get("display_name"),
                "summary": document.get("description"),
                "category": "ansible",
                "assembly_type": "ansible",
            }
        )

    # Carry additional legacy fields through metadata for round-trip fidelity.
    for key in ("sites", "variables", "files", "timeout_seconds", "script_encoding"):
        if key in document:
            metadata[key] = document[key]

    metadata = {key: value for key, value in metadata.items() if value is not None}
    return metadata


def _coerce_guid(value: Optional[str]) -> Optional[str]:
    if value is None:
        return None
    text = str(value).strip()
    return text or None


def _coerce_str(value: Any, default: str = "") -> str:
    if value is None:
        return default
    text = str(value).strip()
    return text if text else default


def _coerce_optional_str(value: Any) -> Optional[str]:
    if value is None:
        return None
    text = str(value).strip()
    return text or None


def _coerce_positive_int(value: Any, *, default: int) -> int:
    try:
        candidate = int(value)
        if candidate > 0:
            return candidate
    except (TypeError, ValueError):
        pass
    return default


__all__ = [
    "AssemblySerializationError",
    "MAX_DOCUMENT_BYTES",
    "prepare_import_request",
    "record_to_legacy_payload",
]
@@ -15,11 +15,17 @@
 import hashlib
 import json
 import logging
 import uuid
-from typing import Any, Dict, List, Mapping, Optional
+from typing import Any, Dict, List, Mapping, Optional, Union

 from ...assembly_management.bootstrap import AssemblyCache
 from ...assembly_management.models import AssemblyDomain, AssemblyRecord, CachedAssembly, PayloadType
 from ...assembly_management.sync import sync_official_domain
+from .serialization import (
+    AssemblySerializationError,
+    MAX_DOCUMENT_BYTES,
+    prepare_import_request,
+    record_to_legacy_payload,
+)


 class AssemblyRuntimeService:
@@ -58,6 +64,17 @@ class AssemblyRuntimeService:
         data = self._serialize_entry(entry, include_payload=True, payload_text=payload_text)
         return data

+    def export_assembly(self, assembly_guid: str) -> Dict[str, Any]:
+        entry = self._cache.get_entry(assembly_guid)
+        if not entry:
+            raise ValueError(f"Assembly '{assembly_guid}' not found")
+        payload_text = self._read_payload_text(assembly_guid)
+        export_payload = record_to_legacy_payload(entry.record, domain=entry.domain, payload_text=payload_text)
+        export_payload["is_dirty"] = entry.is_dirty
+        export_payload["dirty_since"] = entry.dirty_since.isoformat() if entry.dirty_since else None
+        export_payload["last_persisted"] = entry.last_persisted.isoformat() if entry.last_persisted else None
+        return export_payload
+
     def get_cached_entry(self, assembly_guid: str) -> Optional[CachedAssembly]:
         return self._cache.get_entry(assembly_guid)

@@ -158,6 +175,26 @@ class AssemblyRuntimeService:
         sync_official_domain(db_manager, payload_manager, staging_root, logger=self._logger)
         self._cache.reload()

+    def import_assembly(
+        self,
+        *,
+        domain: AssemblyDomain,
+        document: Union[str, Mapping[str, Any]],
+        assembly_guid: Optional[str] = None,
+        metadata_override: Optional[Mapping[str, Any]] = None,
+        tags_override: Optional[Mapping[str, Any]] = None,
+    ) -> Dict[str, Any]:
+        resolved_guid, payload = prepare_import_request(
+            document,
+            domain=domain,
+            assembly_guid=assembly_guid,
+            metadata_override=metadata_override,
+            tags_override=tags_override,
+        )
+        if resolved_guid and self._cache.get_entry(resolved_guid):
+            return self.update_assembly(resolved_guid, payload)
+        return self.create_assembly(payload)
+
     # ------------------------------------------------------------------
     # Internal helpers
     # ------------------------------------------------------------------
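
Finally, a hedged sketch of the create-versus-update dispatch in import_assembly; the runtime instance and GUID below are invented for illustration:

# Importing twice with the same explicit GUID: the first call creates the
# record, the second finds the cached entry and routes to update_assembly.
doc = {"name": "Demo", "type": "powershell", "script": "Write-Host 'hi'"}
guid = "11111111-2222-3333-4444-555555555555"  # invented GUID

first = runtime.import_assembly(domain=AssemblyDomain.USER, document=doc, assembly_guid=guid)
second = runtime.import_assembly(domain=AssemblyDomain.USER, document=doc, assembly_guid=guid)
# first -> create_assembly(payload); second -> update_assembly(guid, payload)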