Mirror of https://github.com/bunny-lab-io/Borealis.git (synced 2025-12-14 22:35:47 -07:00)
Removed Legacy Assembly JSON Files
.gitignore (vendored): 14 changed lines
@@ -23,16 +23,4 @@ database.db
# Misc Files/Folders
.vs/
__pycache__
/Update_Staging/

# Assembly Databases
/Data/Engine/Assemblies/community.db
/Data/Engine/Assemblies/community.db-shm
/Data/Engine/Assemblies/community.db-wal
/Data/Engine/Assemblies/official.db
/Data/Engine/Assemblies/official.db-shm
/Data/Engine/Assemblies/official.db-wal
/Data/Engine/Assemblies/user_created.db
/Data/Engine/Assemblies/user_created.db-shm
/Data/Engine/Assemblies/user_created.db-wal
/Data/Engine/Assemblies/Payloads/
/Update_Staging/
@@ -0,0 +1,23 @@
{
  "category": "script",
  "description": "Import/export test script.",
  "files": [],
  "name": "Import Script",
  "script": "V3JpdGUtSG9zdCAicm91bmQgdHJpcCBleHBvcnQi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 45,
  "type": "powershell",
  "variables": [
    {
      "default": "",
      "label": "Example",
      "name": "example",
      "type": "string"
    }
  ],
  "version": 2
}
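The `script` field in these fixtures is base64-encoded PowerShell, as `script_encoding` indicates. A minimal sketch of decoding it for inspection, assuming only the fields shown above (the helper name is illustrative, not part of the repo):

```python
import base64
import json

# The fixture above, inlined (trimmed) for a self-contained demo.
raw = '{"script": "V3JpdGUtSG9zdCAicm91bmQgdHJpcCBleHBvcnQi", "script_encoding": "base64"}'
document = json.loads(raw)

def decode_assembly_script(document: dict) -> str:
    """Return the plain-text script from an assembly JSON document."""
    script = document.get("script", "")
    if document.get("script_encoding") == "base64":
        return base64.b64decode(script).decode("utf-8")
    return script

print(decode_assembly_script(document))  # -> Write-Host "round trip export"
```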
@@ -0,0 +1,23 @@
{
  "category": "script",
  "description": "Import/export test script.",
  "files": [],
  "name": "Import Script",
  "script": "V3JpdGUtSG9zdCAicm91bmQgdHJpcCBleHBvcnQi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 45,
  "type": "powershell",
  "variables": [
    {
      "default": "",
      "label": "Example",
      "name": "example",
      "type": "string"
    }
  ],
  "version": 2
}
@@ -0,0 +1,16 @@
{
  "category": "script",
  "description": "Permission test script.",
  "files": [],
  "name": "Official Dev Mode Script",
  "script": "V3JpdGUtSG9zdCAicGVybWlzc2lvbnMi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 60,
  "type": "powershell",
  "variables": [],
  "version": 1
}
@@ -0,0 +1,19 @@
{
  "description": "Import/export workflow test.",
  "edges": [],
  "nodes": [
    {
      "data": {
        "label": "Input",
        "value": "example"
      },
      "id": "node-1",
      "position": {
        "x": 10,
        "y": 20
      },
      "type": "DataNode"
    }
  ],
  "tab_name": "Import Workflow"
}
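Workflow fixtures like the one above carry `nodes` (each with an `id`, `position`, and `type`) plus an `edges` list and a `tab_name`. A hedged validation sketch; the required-key set is inferred from this fixture, not from a published schema:

```python
REQUIRED_NODE_KEYS = {"id", "position", "type"}  # inferred from the fixture above, not a published schema

def summarise_workflow(document: dict) -> str:
    """Check node shape and report a one-line summary of the workflow."""
    nodes = document.get("nodes", [])
    edges = document.get("edges", [])
    for node in nodes:
        missing = REQUIRED_NODE_KEYS - node.keys()
        if missing:
            raise ValueError(f"node {node.get('id')!r} is missing keys: {sorted(missing)}")
    return f"{document.get('tab_name')}: {len(nodes)} node(s), {len(edges)} edge(s)"

doc = {
    "tab_name": "Import Workflow",
    "edges": [],
    "nodes": [{"id": "node-1", "position": {"x": 10, "y": 20}, "type": "DataNode", "data": {"label": "Input"}}],
}
print(summarise_workflow(doc))  # -> Import Workflow: 1 node(s), 0 edge(s)
```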
@@ -0,0 +1,19 @@
{
  "description": "Import/export workflow test.",
  "edges": [],
  "nodes": [
    {
      "data": {
        "label": "Input",
        "value": "example"
      },
      "id": "node-1",
      "position": {
        "x": 10,
        "y": 20
      },
      "type": "DataNode"
    }
  ],
  "tab_name": "Import Workflow"
}
@@ -0,0 +1,23 @@
{
  "category": "script",
  "description": "Import/export test script.",
  "files": [],
  "name": "Import Script",
  "script": "V3JpdGUtSG9zdCAicm91bmQgdHJpcCBleHBvcnQi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 45,
  "type": "powershell",
  "variables": [
    {
      "default": "",
      "label": "Example",
      "name": "example",
      "type": "string"
    }
  ],
  "version": 2
}
@@ -0,0 +1,23 @@
{
  "category": "script",
  "description": "Import/export test script.",
  "files": [],
  "name": "Import Script",
  "script": "V3JpdGUtSG9zdCAicm91bmQgdHJpcCBleHBvcnQi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 45,
  "type": "powershell",
  "variables": [
    {
      "default": "",
      "label": "Example",
      "name": "example",
      "type": "string"
    }
  ],
  "version": 2
}
@@ -0,0 +1,19 @@
{
  "description": "Import/export workflow test.",
  "edges": [],
  "nodes": [
    {
      "data": {
        "label": "Input",
        "value": "example"
      },
      "id": "node-1",
      "position": {
        "x": 10,
        "y": 20
      },
      "type": "DataNode"
    }
  ],
  "tab_name": "Import Workflow"
}
@@ -0,0 +1,23 @@
{
  "category": "script",
  "description": "Import/export test script.",
  "files": [],
  "name": "Import Script",
  "script": "V3JpdGUtSG9zdCAicm91bmQgdHJpcCBleHBvcnQi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 45,
  "type": "powershell",
  "variables": [
    {
      "default": "",
      "label": "Example",
      "name": "example",
      "type": "string"
    }
  ],
  "version": 2
}
@@ -0,0 +1,16 @@
{
  "category": "script",
  "description": "Permission test script.",
  "files": [],
  "name": "Official Dev Mode Script",
  "script": "V3JpdGUtSG9zdCAicGVybWlzc2lvbnMi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 60,
  "type": "powershell",
  "variables": [],
  "version": 1
}
@@ -0,0 +1,19 @@
{
  "description": "Import/export workflow test.",
  "edges": [],
  "nodes": [
    {
      "data": {
        "label": "Input",
        "value": "example"
      },
      "id": "node-1",
      "position": {
        "x": 10,
        "y": 20
      },
      "type": "DataNode"
    }
  ],
  "tab_name": "Import Workflow"
}
@@ -0,0 +1,19 @@
{
  "description": "Import/export workflow test.",
  "edges": [],
  "nodes": [
    {
      "data": {
        "label": "Input",
        "value": "example"
      },
      "id": "node-1",
      "position": {
        "x": 10,
        "y": 20
      },
      "type": "DataNode"
    }
  ],
  "tab_name": "Import Workflow"
}
@@ -0,0 +1,16 @@
{
  "category": "script",
  "description": "Permission test script.",
  "files": [],
  "name": "Official Dev Mode Script",
  "script": "V3JpdGUtSG9zdCAicGVybWlzc2lvbnMi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 60,
  "type": "powershell",
  "variables": [],
  "version": 1
}
@@ -0,0 +1,16 @@
{
  "category": "script",
  "description": "Permission test script.",
  "files": [],
  "name": "Official Dev Mode Script",
  "script": "V3JpdGUtSG9zdCAicGVybWlzc2lvbnMi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 60,
  "type": "powershell",
  "variables": [],
  "version": 1
}
@@ -0,0 +1,16 @@
{
  "category": "script",
  "description": "Permission test script.",
  "files": [],
  "name": "Official Dev Mode Script",
  "script": "V3JpdGUtSG9zdCAicGVybWlzc2lvbnMi",
  "script_encoding": "base64",
  "sites": {
    "mode": "all",
    "values": []
  },
  "timeout_seconds": 60,
  "type": "powershell",
  "variables": [],
  "version": 1
}
BIN  Data/Engine/Assemblies/community.db  (Normal file) — Binary file not shown.
BIN  Data/Engine/Assemblies/official.db  (Normal file) — Binary file not shown.
BIN  Data/Engine/Assemblies/user_created.db  (Normal file) — Binary file not shown.
@@ -18,9 +18,6 @@ from typing import Dict, List, Mapping, Optional
from .databases import AssemblyDatabaseManager
from .models import AssemblyDomain, AssemblyRecord, CachedAssembly
from .payloads import PayloadManager
from .sync import sync_official_domain


class AssemblyCache:
    """Caches assemblies in memory and coordinates background persistence."""
@@ -278,10 +275,8 @@ def initialise_assembly_runtime(
    db_manager.initialise()

    payload_manager = PayloadManager(staging_root=payload_staging, runtime_root=payload_runtime, logger=logger)
    try:
        sync_official_domain(db_manager, payload_manager, staging_root, logger=logger)
    except Exception:  # pragma: no cover - best effort during bootstrap
        (logger or logging.getLogger(__name__)).exception("Official assembly sync failed during startup.")
    # Automatic JSON-to-database imports have been retired so that staging official.db remains the single source
    # of truth.
    flush_interval = _resolve_flush_interval(config)

    return AssemblyCache.initialise(
@@ -1,215 +0,0 @@
# ======================================================
# Data\Engine\assembly_management\sync.py
# Description: Synchronises assembly databases from staged filesystem sources (official domain importer).
#
# API Endpoints (if applicable): None
# ======================================================

"""Synchronisation helpers for assembly persistence domains."""

from __future__ import annotations

import datetime as _dt
import hashlib
import json
import logging
from pathlib import Path
from typing import Iterable, Optional, Tuple

from .databases import AssemblyDatabaseManager
from .models import AssemblyDomain, AssemblyRecord, CachedAssembly, PayloadType
from .payloads import PayloadManager


_SCRIPT_DIRS = {"scripts", "script"}
_WORKFLOW_DIRS = {"workflows", "workflow"}
_ANSIBLE_DIRS = {"ansible_playbooks", "ansible-playbooks", "ansible"}


def sync_official_domain(
    db_manager: AssemblyDatabaseManager,
    payload_manager: PayloadManager,
    staging_root: Path,
    *,
    logger: Optional[logging.Logger] = None,
) -> None:
    """Repopulate the official domain database from staged JSON assemblies."""

    log = logger or logging.getLogger(__name__)
    root = staging_root.resolve()
    if not root.is_dir():
        log.warning("Assembly staging root missing during official sync: %s", root)
        return

    files = tuple(_iter_assembly_sources(root))
    if not files:
        log.info("No staged assemblies discovered for official sync; clearing domain.")
        db_manager.reset_domain(AssemblyDomain.OFFICIAL)
        return

    db_manager.reset_domain(AssemblyDomain.OFFICIAL)

    imported = 0
    skipped = 0

    for rel_path, file_path in files:
        record = _record_from_file(rel_path, file_path, payload_manager, log)
        if record is None:
            skipped += 1
            continue
        entry = CachedAssembly(
            domain=AssemblyDomain.OFFICIAL,
            record=record,
            is_dirty=False,
            last_persisted=record.updated_at,
        )
        try:
            db_manager.upsert_record(AssemblyDomain.OFFICIAL, entry)
            imported += 1
        except Exception:  # pragma: no cover - defensive logging
            skipped += 1
            log.exception("Failed to import assembly %s during official sync.", rel_path)

    log.info(
        "Official assembly sync complete: imported=%s skipped=%s source_root=%s",
        imported,
        skipped,
        root,
    )


def _iter_assembly_sources(root: Path) -> Iterable[Tuple[str, Path]]:
    for path in root.rglob("*.json"):
        if not path.is_file():
            continue
        rel_path = path.relative_to(root).as_posix()
        yield rel_path, path


def _record_from_file(
    rel_path: str,
    file_path: Path,
    payload_manager: PayloadManager,
    logger: logging.Logger,
) -> Optional[AssemblyRecord]:
    try:
        text = file_path.read_text(encoding="utf-8")
    except Exception as exc:
        logger.warning("Unable to read assembly source %s: %s", file_path, exc)
        return None

    try:
        document = json.loads(text)
    except Exception as exc:
        logger.warning("Invalid JSON for assembly %s: %s", file_path, exc)
        return None

    kind = _infer_kind(rel_path)
    if kind == "unknown":
        logger.debug("Skipping non-assembly file %s", rel_path)
        return None

    payload_type = _payload_type_for_kind(kind)
    assembly_guid = hashlib.sha1(rel_path.encode("utf-8")).hexdigest()
    descriptor = payload_manager.store_payload(payload_type, text, assembly_guid=assembly_guid, extension=".json")

    file_stat = file_path.stat()
    timestamp = _dt.datetime.utcfromtimestamp(file_stat.st_mtime).replace(microsecond=0)
    descriptor.created_at = timestamp
    descriptor.updated_at = timestamp

    assembly_path = _assembly_id_from_path(rel_path)
    document_metadata = _metadata_from_document(kind, document, rel_path)
    tags = _coerce_dict(document.get("tags"))

    record = AssemblyRecord(
        assembly_guid=assembly_guid,
        display_name=document_metadata.get("display_name") or assembly_path.rsplit("/", 1)[-1],
        summary=document_metadata.get("summary"),
        category=document_metadata.get("category"),
        assembly_kind=kind,
        assembly_type=document_metadata.get("assembly_type"),
        version=_coerce_int(document.get("version"), default=1),
        payload=descriptor,
        metadata=document_metadata,
        tags=tags,
        checksum=hashlib.sha256(text.encode("utf-8")).hexdigest(),
        created_at=timestamp,
        updated_at=timestamp,
    )
    return record


def _infer_kind(rel_path: str) -> str:
    first = rel_path.split("/", 1)[0].strip().lower()
    if first in _SCRIPT_DIRS:
        return "script"
    if first in _WORKFLOW_DIRS:
        return "workflow"
    if first in _ANSIBLE_DIRS:
        return "ansible"
    return "unknown"


def _payload_type_for_kind(kind: str) -> PayloadType:
    if kind == "workflow":
        return PayloadType.WORKFLOW
    if kind == "script":
        return PayloadType.SCRIPT
    if kind == "ansible":
        return PayloadType.BINARY
    return PayloadType.UNKNOWN


def _assembly_id_from_path(rel_path: str) -> str:
    normalised = rel_path.replace("\\", "/")
    if normalised.lower().endswith(".json"):
        normalised = normalised[:-5]
    return normalised


def _metadata_from_document(kind: str, document: dict, rel_path: str) -> dict:
    metadata = {
        "source_path": rel_path,
        "display_name": None,
        "summary": None,
        "category": None,
        "assembly_type": None,
    }

    if kind == "workflow":
        metadata["display_name"] = document.get("tab_name") or document.get("name")
        metadata["summary"] = document.get("description")
        metadata["category"] = "workflow"
        metadata["assembly_type"] = "workflow"
    elif kind == "script":
        metadata["display_name"] = document.get("name") or document.get("display_name")
        metadata["summary"] = document.get("description")
        metadata["category"] = (document.get("category") or "script").lower()
        metadata["assembly_type"] = (document.get("type") or "powershell").lower()
    elif kind == "ansible":
        metadata["display_name"] = document.get("name") or document.get("display_name") or rel_path.rsplit("/", 1)[-1]
        metadata["summary"] = document.get("description")
        metadata["category"] = "ansible"
        metadata["assembly_type"] = "ansible"

    metadata.update(
        {
            "sites": document.get("sites"),
            "variables": document.get("variables"),
            "files": document.get("files"),
        }
    )
    metadata["display_name"] = metadata.get("display_name") or rel_path.rsplit("/", 1)[-1]
    return metadata


def _coerce_int(value, *, default: int = 0) -> int:
    try:
        return int(value)
    except Exception:
        return default


def _coerce_dict(value) -> dict:
    return value if isinstance(value, dict) else {}
@@ -11,7 +11,6 @@
# - POST /api/assemblies/<assembly_guid>/clone (Token Authenticated (Admin+Dev Mode for non-user domains)) - Clones an assembly into a target domain.
# - POST /api/assemblies/dev-mode/switch (Token Authenticated (Admin)) - Enables or disables Dev Mode overrides for the current session.
# - POST /api/assemblies/dev-mode/write (Token Authenticated (Admin+Dev Mode)) - Flushes queued assembly writes immediately.
# - POST /api/assemblies/official/sync (Token Authenticated (Admin+Dev Mode)) - Rebuilds the official domain from staged JSON assemblies.
# - POST /api/assemblies/import (Token Authenticated (Domain write permissions)) - Imports a legacy assembly JSON document into the selected domain.
# - GET /api/assemblies/<assembly_guid>/export (Token Authenticated) - Exports an assembly as legacy JSON with metadata.
# ======================================================
@@ -552,21 +551,4 @@ def register_assemblies(app, adapters: "EngineServiceAdapters") -> None:
            service._audit(user=user, action="flush_queue", status="error", detail="internal server error")
            return jsonify({"error": "internal server error"}), 500

    # ------------------------------------------------------------------
    # Official sync
    # ------------------------------------------------------------------
    @blueprint.route("/official/sync", methods=["POST"])
    def sync_official():
        user, error = service.require_admin(dev_mode_required=True)
        if error:
            return jsonify(error[0]), error[1]
        try:
            service.runtime.sync_official()
            service._audit(user=user, action="sync_official", status="success")
            return jsonify({"status": "synced"}), 200
        except Exception:  # pragma: no cover
            service.logger.exception("Official assembly sync failed.")
            service._audit(user=user, action="sync_official", status="error", detail="internal server error")
            return jsonify({"error": "internal server error"}), 500

    app.register_blueprint(blueprint)
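A hedged client-side sketch of calling the sync endpoint removed above. The base URL and the exact auth header are assumptions; the route comments only say "Token Authenticated (Admin+Dev Mode)":

```python
import requests

BASE_URL = "http://localhost:5000"  # assumed Engine address, not documented in this diff
TOKEN = "<admin-session-token>"     # the Bearer header below is an assumption

# POST /api/assemblies/official/sync returns {"status": "synced"} on success (per the handler above).
resp = requests.post(
    f"{BASE_URL}/api/assemblies/official/sync",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
if resp.status_code == 200:
    print(resp.json())  # expected: {'status': 'synced'}
else:
    print("sync failed:", resp.status_code, resp.json().get("error"))
```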
@@ -19,7 +19,6 @@ from typing import Any, Dict, List, Mapping, Optional, Union

from ...assembly_management.bootstrap import AssemblyCache
from ...assembly_management.models import AssemblyDomain, AssemblyRecord, CachedAssembly, PayloadType
from ...assembly_management.sync import sync_official_domain
from .serialization import (
    AssemblySerializationError,
    MAX_DOCUMENT_BYTES,
@@ -168,13 +167,6 @@ class AssemblyRuntimeService:
    def flush_writes(self) -> None:
        self._cache.flush_now()

    def sync_official(self) -> None:
        db_manager = self._cache.database_manager
        payload_manager = self._cache.payload_manager
        staging_root = db_manager.staging_root
        sync_official_domain(db_manager, payload_manager, staging_root, logger=self._logger)
        self._cache.reload()

    def import_assembly(
        self,
        *,
@@ -1,87 +0,0 @@
# ======================================================
# Data\Engine\tools\assemblies.py
# Description: CLI helper for assembly maintenance tasks, including official domain synchronisation.
#
# API Endpoints (if applicable): None
# ======================================================

"""Assembly maintenance CLI."""

from __future__ import annotations

import argparse
import logging
import sys
from pathlib import Path
from typing import Optional

from Data.Engine.assembly_management.databases import AssemblyDatabaseManager
from Data.Engine.assembly_management.models import AssemblyDomain
from Data.Engine.assembly_management.payloads import PayloadManager
from Data.Engine.assembly_management.sync import sync_official_domain


logger = logging.getLogger("borealis.assembly.cli")


def _default_staging_root() -> Path:
    return Path(__file__).resolve().parents[3] / "Data" / "Engine" / "Assemblies"


def _default_runtime_root() -> Path:
    return Path(__file__).resolve().parents[3] / "Engine" / "Assemblies"


def cmd_sync_official(*, staging_root: Optional[Path], runtime_root: Optional[Path]) -> int:
    staging = staging_root or _default_staging_root()
    runtime = runtime_root or _default_runtime_root()
    staging.mkdir(parents=True, exist_ok=True)
    runtime.mkdir(parents=True, exist_ok=True)

    logger.info("Starting official assembly sync.")
    db_manager = AssemblyDatabaseManager(staging_root=staging, runtime_root=runtime, logger=logger)
    db_manager.initialise()
    payload_manager = PayloadManager(staging_root=staging / "Payloads", runtime_root=runtime / "Payloads", logger=logger)

    sync_official_domain(db_manager, payload_manager, staging, logger=logger)
    records = db_manager.load_all(AssemblyDomain.OFFICIAL)
    source_count = sum(1 for path in staging.rglob("*.json") if path.is_file())

    logger.info(
        "Official sync complete: %s records persisted (staging sources=%s).",
        len(records),
        source_count,
    )
    print(f"Official assemblies synced: records={len(records)} staged_json={source_count}")
    if len(records) != source_count:
        print("warning: record count does not match JSON source file count", file=sys.stderr)
        return 1
    return 0


def build_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="Borealis assembly maintenance CLI.")
    subparsers = parser.add_subparsers(dest="command")

    sync_parser = subparsers.add_parser("sync-official", help="Rebuild the official assembly database from staged JSON sources.")
    sync_parser.add_argument("--staging-root", type=Path, default=None, help="Override the staging assemblies directory.")
    sync_parser.add_argument("--runtime-root", type=Path, default=None, help="Override the runtime assemblies directory.")

    return parser


def main(argv: Optional[list[str]] = None) -> int:
    logging.basicConfig(level=logging.INFO, format="%(asctime)s-%(levelname)s: %(message)s")
    parser = build_parser()
    args = parser.parse_args(argv)

    if args.command == "sync-official":
        return cmd_sync_official(staging_root=args.staging_root, runtime_root=args.runtime_root)

    parser.print_help()
    return 1


if __name__ == "__main__":  # pragma: no cover - CLI entrypoint
    raise SystemExit(main())
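Because the module exposes `main(argv)`, the sync can be driven programmatically as well as from the shell. A minimal sketch, assuming the repo root is on `sys.path` and the file has not yet been deleted (this commit removes it); the paths are illustrative:

```python
import sys
from pathlib import Path

# Import the CLI entrypoint shown above.
from Data.Engine.tools.assemblies import main

# Equivalent to: python Data/Engine/tools/assemblies.py sync-official --staging-root ... --runtime-root ...
exit_code = main([
    "sync-official",
    "--staging-root", str(Path("Data/Engine/Assemblies")),
    "--runtime-root", str(Path("Engine/Assemblies")),
])
sys.exit(exit_code)
```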
@@ -2,6 +2,7 @@

## Database Layout
- Three SQLite databases live under `Data/Engine/Assemblies` (`official.db`, `community.db`, `user_created.db`) and mirror to `Engine/Assemblies` at runtime.
- Automatic JSON → SQLite imports for the official domain have been retired; the staged `official.db` now serves as the authoritative store unless you invoke a manual sync.
- Payload binaries/JSON are stored under `Payloads/<payload-guid>` in both the staging and runtime directories; the AssemblyCache references payload GUIDs instead of embedding large blobs.
- WAL mode with shared-cache is enabled on every connection (see the sketch after this list); queue flushes copy the refreshed `.db`, `-wal`, and `-shm` files into the runtime mirror.
- `AssemblyCache.describe()` reveals dirty/clean state per assembly, helping operators spot pending writes before shutdown or sync operations.
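A minimal sketch of the connection settings the WAL bullet describes, using Python's `sqlite3` directly. The pragma pattern is standard SQLite; the manager's actual connection code is not shown in this diff, and the `synchronous` setting is an assumption:

```python
import sqlite3

def open_assembly_db(path: str) -> sqlite3.Connection:
    # cache=shared mirrors the shared-cache behaviour described above.
    conn = sqlite3.connect(f"file:{path}?cache=shared", uri=True)
    conn.execute("PRAGMA journal_mode=WAL")    # creates the -wal/-shm side files next to the .db
    conn.execute("PRAGMA synchronous=NORMAL")  # common companion to WAL; an assumption here
    return conn

conn = open_assembly_db("Data/Engine/Assemblies/official.db")
print(conn.execute("PRAGMA journal_mode").fetchone())  # -> ('wal',)
```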
@@ -15,5 +16,5 @@
## Backup Guidance
- Regularly snapshot `Data/Engine/Assemblies` and `Data/Engine/Assemblies/Payloads` alongside the mirrored runtime copies to preserve both metadata and payload artifacts.
- Include the queue inspection endpoint (`GET /api/assemblies`) in maintenance scripts to verify no dirty entries remain before capturing backups (see the sketch after this list).
- The new CLI helper `python Data/Engine/tools/assemblies.py sync-official` refreshes the official domain from staged JSON and reports row counts, which is useful after restoring from backup or before releases.
- Maintain the staged databases directly; to publish new official assemblies, copy the curated `official.db` into `Data/Engine/Assemblies` before restarting the Engine.
- Future automation will extend to scheduled backups and staged restore helpers; until then, ensure filesystem backups capture both the SQLite databases and the payload directories atomically.
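A pre-backup check along the lines of the queue-inspection bullet. The response shape is an assumption; the `is_dirty` field name is borrowed from the `CachedAssembly` model shown earlier in this commit, and the auth header is assumed:

```python
import requests

BASE_URL = "http://localhost:5000"  # assumed Engine address
TOKEN = "<session-token>"           # the Bearer header is an assumption

resp = requests.get(f"{BASE_URL}/api/assemblies", headers={"Authorization": f"Bearer {TOKEN}"})
resp.raise_for_status()
entries = resp.json()
dirty = [e for e in entries if e.get("is_dirty")]  # field name assumed from CachedAssembly.is_dirty
if dirty:
    raise SystemExit(f"refusing to back up: {len(dirty)} assemblies still have pending writes")
print("no dirty entries; safe to snapshot Data/Engine/Assemblies")
```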