mirror of https://github.com/bunny-lab-io/Borealis.git
synced 2025-12-16 04:05:48 -07:00
Revert from Gitea Mirror Due to Catastrophic Destruction in Github
@@ -27,6 +27,7 @@ from ...auth.rate_limit import SlidingWindowRateLimiter
 from ...database import initialise_engine_database
 from ...security import signing
 from ...enrollment import NonceCache
+from ...integrations import GitHubIntegration
 from .enrollment import routes as enrollment_routes
 from .tokens import routes as token_routes
 
@@ -151,6 +152,7 @@ class EngineServiceAdapters:
     script_signer: Any = field(init=False)
     service_log: Callable[[str, str, Optional[str]], None] = field(init=False)
     device_auth_manager: DeviceAuthManager = field(init=False)
+    github_integration: GitHubIntegration = field(init=False)
 
     def __post_init__(self) -> None:
         self.db_conn_factory = _make_db_conn_factory(self.context.database_path)
@@ -181,6 +183,32 @@ class EngineServiceAdapters:
             rate_limiter=self.device_rate_limiter,
         )
 
+        config = self.context.config or {}
+        cache_root_value = config.get("cache_dir") or config.get("CACHE_DIR")
+        if cache_root_value:
+            cache_root = Path(str(cache_root_value))
+        else:
+            cache_root = Path(self.context.database_path).resolve().parent / "cache"
+        cache_file = cache_root / "repo_hash_cache.json"
+
+        default_repo = config.get("default_repo") or config.get("DEFAULT_REPO")
+        default_branch = config.get("default_branch") or config.get("DEFAULT_BRANCH")
+        ttl_raw = config.get("repo_hash_refresh") or config.get("REPO_HASH_REFRESH")
+        try:
+            default_ttl_seconds = int(ttl_raw) if ttl_raw is not None else None
+        except (TypeError, ValueError):
+            default_ttl_seconds = None
+
+        self.github_integration = GitHubIntegration(
+            cache_file=cache_file,
+            db_conn_factory=self.db_conn_factory,
+            service_log=self.service_log,
+            logger=self.context.logger,
+            default_repo=default_repo,
+            default_branch=default_branch,
+            default_ttl_seconds=default_ttl_seconds,
+        )
 
 
 def _register_tokens(app: Flask, adapters: EngineServiceAdapters) -> None:
     token_routes.register(
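For clarity, the wiring above reduces to a small amount of plain-dict plumbing. A minimal sketch, where the config values and database path are invented for illustration:

    from pathlib import Path

    config = {"CACHE_DIR": "/var/borealis/cache", "repo_hash_refresh": "300"}  # hypothetical
    database_path = "/var/borealis/db/borealis.db"                             # hypothetical

    cache_root_value = config.get("cache_dir") or config.get("CACHE_DIR")
    cache_root = Path(str(cache_root_value)) if cache_root_value else Path(database_path).resolve().parent / "cache"
    cache_file = cache_root / "repo_hash_cache.json"  # -> /var/borealis/cache/repo_hash_cache.json
    try:
        default_ttl_seconds = int(config.get("repo_hash_refresh") or config.get("REPO_HASH_REFRESH"))
    except (TypeError, ValueError):
        default_ttl_seconds = None                    # GitHubIntegration supplies its own default
    print(cache_file, default_ttl_seconds)            # -> .../repo_hash_cache.json 300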
Data/Engine/services/API/access_management/github.py (new file, 146 lines)
@@ -0,0 +1,146 @@
# ======================================================
# Data\Engine\services\API\access_management\github.py
# Description: GitHub API token management endpoints for Engine access-management parity.
#
# API Endpoints (if applicable):
# - GET /api/github/token (Token Authenticated (Admin)) - Returns stored GitHub API token details and verification status.
# - POST /api/github/token (Token Authenticated (Admin)) - Updates the stored GitHub API token and triggers verification.
# ======================================================

"""GitHub token administration endpoints for the Borealis Engine."""
from __future__ import annotations

import os
import time
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple

from flask import Blueprint, Flask, jsonify, request, session
from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer

if TYPE_CHECKING:  # pragma: no cover - typing helper
    from .. import EngineServiceAdapters


def _now_ts() -> int:
    return int(time.time())


class GitHubTokenService:
    """Admin endpoints for storing and validating GitHub REST API tokens."""

    def __init__(self, app: Flask, adapters: "EngineServiceAdapters") -> None:
        self.app = app
        self.adapters = adapters
        self.github = adapters.github_integration
        self.logger = adapters.context.logger

    def _token_serializer(self) -> URLSafeTimedSerializer:
        secret = self.app.secret_key or "borealis-dev-secret"
        return URLSafeTimedSerializer(secret, salt="borealis-auth")

    def _current_user(self) -> Optional[Dict[str, Any]]:
        username = session.get("username")
        role = session.get("role") or "User"
        if username:
            return {"username": username, "role": role}

        token = None
        auth_header = request.headers.get("Authorization") or ""
        if auth_header.lower().startswith("bearer "):
            token = auth_header.split(" ", 1)[1].strip()
        if not token:
            token = request.cookies.get("borealis_auth")
        if not token:
            return None
        try:
            data = self._token_serializer().loads(
                token,
                max_age=int(os.environ.get("BOREALIS_TOKEN_TTL_SECONDS", 60 * 60 * 24 * 30)),
            )
            username = data.get("u")
            role = data.get("r") or "User"
            if username:
                return {"username": username, "role": role}
        except Exception:  # also covers BadSignature / SignatureExpired from itsdangerous
            return None
        return None
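For reference, a sketch of the signing side these loads() calls expect. The secret, salt, and claim keys ("u", "r") mirror the handler above; the username value is illustrative:

    from itsdangerous import URLSafeTimedSerializer

    serializer = URLSafeTimedSerializer("borealis-dev-secret", salt="borealis-auth")
    token = serializer.dumps({"u": "admin", "r": "Admin"})        # what a login flow would mint
    claims = serializer.loads(token, max_age=60 * 60 * 24 * 30)   # round-trips while unexpired
    assert claims == {"u": "admin", "r": "Admin"}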

    def _require_admin(self) -> Optional[Tuple[Dict[str, Any], int]]:
        user = self._current_user()
        if not user:
            return {"error": "unauthorized"}, 401
        if (user.get("role") or "").lower() != "admin":
            return {"error": "forbidden"}, 403
        return None

    def get_token(self):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        token = self.github.load_token(force_refresh=True)
        verification = self.github.verify_token(token)
        message = verification.get("message") or ("API Token Invalid" if token else "API Token Not Configured")
        payload = {
            "token": token or "",
            "has_token": bool(token),
            "valid": bool(verification.get("valid")),
            "message": message,
            "status": verification.get("status") or ("missing" if not token else "unknown"),
            "rate_limit": verification.get("rate_limit"),
            "error": verification.get("error"),
            "checked_at": _now_ts(),
        }
        return jsonify(payload)

    def update_token(self):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        data = request.get_json(silent=True) or {}
        token = str(data.get("token") or "").strip()
        try:
            self.github.store_token(token or None)
        except RuntimeError as exc:
            self.logger.debug("Failed to store GitHub token", exc_info=True)
            return jsonify({"error": str(exc)}), 500

        verification = self.github.verify_token(token or None)
        message = verification.get("message") or ("API Token Invalid" if token else "API Token Not Configured")

        try:
            self.github.refresh_default_repo_hash(force=True)
        except Exception:
            self.logger.debug("Failed to refresh default repo hash after token update", exc_info=True)

        payload = {
            "token": token,
            "has_token": bool(token),
            "valid": bool(verification.get("valid")),
            "message": message,
            "status": verification.get("status") or ("missing" if not token else "unknown"),
            "rate_limit": verification.get("rate_limit"),
            "error": verification.get("error"),
            "checked_at": _now_ts(),
        }
        return jsonify(payload)


def register_github_token_management(app: Flask, adapters: "EngineServiceAdapters") -> None:
    """Register GitHub API token administration endpoints."""

    service = GitHubTokenService(app, adapters)
    blueprint = Blueprint("github_access", __name__)

    @blueprint.route("/api/github/token", methods=["GET"])
    def _github_token_get():
        return service.get_token()

    @blueprint.route("/api/github/token", methods=["POST"])
    def _github_token_post():
        return service.update_token()

    app.register_blueprint(blueprint)
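A usage sketch for the two routes registered above; the host and bearer token are placeholders, and the requests package is assumed:

    import requests

    base = "https://borealis.example.com"                        # placeholder Engine URL
    headers = {"Authorization": "Bearer <admin-session-token>"}  # admin operator token

    # Inspect the stored GitHub token and its verification status.
    print(requests.get(f"{base}/api/github/token", headers=headers, timeout=10).json())

    # Store a new token (an empty string clears it) and trigger re-verification.
    resp = requests.post(f"{base}/api/github/token", headers=headers,
                         json={"token": "ghp_exampleExampleExample"}, timeout=10)
    print(resp.json().get("valid"), resp.json().get("message"))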
@@ -15,6 +15,7 @@ from __future__ import annotations
 import base64
 import hashlib
 import io
+import logging
 import os
 import sqlite3
 import time
@@ -37,6 +38,13 @@ except Exception:  # pragma: no cover - optional dependency
 if TYPE_CHECKING:  # pragma: no cover - typing helper
     from Data.Engine.services.API import EngineServiceAdapters
 
+from .github import register_github_token_management
+from .multi_factor_authentication import register_mfa_management
+from .users import register_user_management
+
+_logger = logging.getLogger(__name__)
+_qr_logger_warning_emitted = False
+
 
 def _now_ts() -> int:
     return int(time.time())
@@ -71,7 +79,13 @@ def _totp_provisioning_uri(secret: str, username: str) -> Optional[str]:
 
 
 def _totp_qr_data_uri(payload: str) -> Optional[str]:
-    if not payload or qrcode is None:
+    global _qr_logger_warning_emitted
+    if not payload:
+        return None
+    if qrcode is None:
+        if not _qr_logger_warning_emitted:
+            _logger.warning("MFA QR generation skipped: 'qrcode' dependency not available.")
+            _qr_logger_warning_emitted = True
         return None
     try:
         image = qrcode.make(payload, box_size=6, border=4)
@@ -79,7 +93,10 @@ def _totp_qr_data_uri(payload: str) -> Optional[str]:
         image.save(buffer, format="PNG")
         encoded = base64.b64encode(buffer.getvalue()).decode("ascii")
         return f"data:image/png;base64,{encoded}"
-    except Exception:
+    except Exception as exc:
+        if not _qr_logger_warning_emitted:
+            _logger.warning("Failed to generate MFA QR code: %s", exc, exc_info=True)
+            _qr_logger_warning_emitted = True
         return None

@@ -416,4 +433,7 @@ def register_auth(app: Flask, adapters: "EngineServiceAdapters") -> None:
         return service.me()
 
     app.register_blueprint(blueprint)
+    register_user_management(app, adapters)
+    register_mfa_management(app, adapters)
+    register_github_token_management(app, adapters)
Data/Engine/services/API/access_management/multi_factor_authentication.py (new file, 150 lines)
@@ -0,0 +1,150 @@
# ======================================================
# Data\Engine\services\API\access_management\multi_factor_authentication.py
# Description: Multifactor administration endpoints for enabling, disabling, or resetting operator MFA state.
#
# API Endpoints (if applicable):
# - POST /api/users/<username>/mfa (Token Authenticated (Admin)) - Toggles MFA and optionally resets shared secrets.
# ======================================================

"""Multifactor administrative endpoints for the Borealis Engine."""
from __future__ import annotations

import os
import sqlite3
import time
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple

from flask import Blueprint, Flask, jsonify, request, session
from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer

if TYPE_CHECKING:  # pragma: no cover - typing helper
    from .. import EngineServiceAdapters


def _now_ts() -> int:
    return int(time.time())


class MultiFactorAdministrationService:
    """Admin-focused MFA utility wrapper."""

    def __init__(self, app: Flask, adapters: "EngineServiceAdapters") -> None:
        self.app = app
        self.adapters = adapters
        self.db_conn_factory = adapters.db_conn_factory
        self.logger = adapters.context.logger

    def _db_conn(self) -> sqlite3.Connection:
        return self.db_conn_factory()

    def _token_serializer(self) -> URLSafeTimedSerializer:
        secret = self.app.secret_key or "borealis-dev-secret"
        return URLSafeTimedSerializer(secret, salt="borealis-auth")

    def _current_user(self) -> Optional[Dict[str, Any]]:
        username = session.get("username")
        role = session.get("role") or "User"
        if username:
            return {"username": username, "role": role}

        token = None
        auth_header = request.headers.get("Authorization") or ""
        if auth_header.lower().startswith("bearer "):
            token = auth_header.split(" ", 1)[1].strip()
        if not token:
            token = request.cookies.get("borealis_auth")
        if not token:
            return None
        try:
            data = self._token_serializer().loads(
                token,
                max_age=int(os.environ.get("BOREALIS_TOKEN_TTL_SECONDS", 60 * 60 * 24 * 30)),
            )
            username = data.get("u")
            role = data.get("r") or "User"
            if username:
                return {"username": username, "role": role}
        except Exception:  # also covers BadSignature / SignatureExpired from itsdangerous
            return None
        return None

    def _require_admin(self) -> Optional[Tuple[Dict[str, Any], int]]:
        user = self._current_user()
        if not user:
            return {"error": "unauthorized"}, 401
        if (user.get("role") or "").lower() != "admin":
            return {"error": "forbidden"}, 403
        return None

    def toggle_mfa(self, username: str):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        username_norm = (username or "").strip()
        if not username_norm:
            return jsonify({"error": "invalid username"}), 400

        payload = request.get_json(silent=True) or {}
        enabled = bool(payload.get("enabled"))
        reset_secret = bool(payload.get("reset_secret", False))

        conn: Optional[sqlite3.Connection] = None
        try:
            conn = self._db_conn()
            cur = conn.cursor()
            now_ts = _now_ts()

            if enabled:
                if reset_secret:
                    cur.execute(
                        "UPDATE users SET mfa_enabled=1, mfa_secret=NULL, updated_at=? WHERE LOWER(username)=LOWER(?)",
                        (now_ts, username_norm),
                    )
                else:
                    cur.execute(
                        "UPDATE users SET mfa_enabled=1, updated_at=? WHERE LOWER(username)=LOWER(?)",
                        (now_ts, username_norm),
                    )
            else:
                if reset_secret:
                    cur.execute(
                        "UPDATE users SET mfa_enabled=0, mfa_secret=NULL, updated_at=? WHERE LOWER(username)=LOWER(?)",
                        (now_ts, username_norm),
                    )
                else:
                    cur.execute(
                        "UPDATE users SET mfa_enabled=0, updated_at=? WHERE LOWER(username)=LOWER(?)",
                        (now_ts, username_norm),
                    )

            if cur.rowcount == 0:
                return jsonify({"error": "user not found"}), 404

            conn.commit()

            me = self._current_user()
            if me and me.get("username", "").lower() == username_norm.lower() and not enabled:
                session.pop("mfa_pending", None)

            return jsonify({"status": "ok"})
        except Exception as exc:
            self.logger.debug("Failed to update MFA for %s", username_norm, exc_info=True)
            return jsonify({"error": str(exc)}), 500
        finally:
            if conn:
                conn.close()


def register_mfa_management(app: Flask, adapters: "EngineServiceAdapters") -> None:
    """Register MFA administration endpoints."""

    service = MultiFactorAdministrationService(app, adapters)
    blueprint = Blueprint("access_mgmt_mfa", __name__)

    @blueprint.route("/api/users/<username>/mfa", methods=["POST"])
    def _toggle_mfa(username: str):
        return service.toggle_mfa(username)

    app.register_blueprint(blueprint)
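A brief usage sketch for the endpoint above (host and session token are placeholders):

    import requests

    # Disable MFA for "jdoe" and discard the shared secret, forcing re-enrollment later.
    resp = requests.post(
        "https://borealis.example.com/api/users/jdoe/mfa",        # placeholder host
        headers={"Authorization": "Bearer <admin-session-token>"},
        json={"enabled": False, "reset_secret": True},
        timeout=10,
    )
    print(resp.status_code, resp.json())                          # 200 {"status": "ok"} on success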
Data/Engine/services/API/access_management/users.py
@@ -1,8 +1,317 @@
 # ======================================================
 # Data\Engine\services\API\access_management\users.py
-# Description: Placeholder for operator user management endpoints (not yet implemented).
+# Description: Operator user CRUD endpoints for the Engine auth group, mirroring the legacy server behaviour.
 #
-# API Endpoints (if applicable): None
+# API Endpoints (if applicable):
+# - GET /api/users (Token Authenticated (Admin)) - Lists operator accounts.
+# - POST /api/users (Token Authenticated (Admin)) - Creates a new operator account.
+# - DELETE /api/users/<username> (Token Authenticated (Admin)) - Deletes an operator account.
+# - POST /api/users/<username>/reset_password (Token Authenticated (Admin)) - Resets an operator password hash.
+# - POST /api/users/<username>/role (Token Authenticated (Admin)) - Updates an operator role.
 # ======================================================
 
-"""Placeholder for users API module."""
+"""Operator user management endpoints for the Borealis Engine."""
from __future__ import annotations

import os
import sqlite3
import time
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Sequence, Tuple

from flask import Blueprint, Flask, jsonify, request, session
from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer

if TYPE_CHECKING:  # pragma: no cover - typing helper
    from .. import EngineServiceAdapters


def _now_ts() -> int:
    return int(time.time())


def _row_to_user(row: Sequence[Any]) -> Mapping[str, Any]:
    """Convert a database row into a user payload."""
    return {
        "id": row[0],
        "username": row[1],
        "display_name": row[2] or row[1],
        "role": row[3] or "User",
        "last_login": row[4] or 0,
        "created_at": row[5] or 0,
        "updated_at": row[6] or 0,
        "mfa_enabled": 1 if (row[7] or 0) else 0,
    }


class UserManagementService:
    """Utility wrapper that performs admin-authenticated user CRUD operations."""

    def __init__(self, app: Flask, adapters: "EngineServiceAdapters") -> None:
        self.app = app
        self.adapters = adapters
        self.db_conn_factory = adapters.db_conn_factory
        self.logger = adapters.context.logger

    def _db_conn(self) -> sqlite3.Connection:
        return self.db_conn_factory()

    def _token_serializer(self) -> URLSafeTimedSerializer:
        secret = self.app.secret_key or "borealis-dev-secret"
        return URLSafeTimedSerializer(secret, salt="borealis-auth")

    def _current_user(self) -> Optional[Dict[str, Any]]:
        username = session.get("username")
        role = session.get("role") or "User"
        if username:
            return {"username": username, "role": role}

        token = None
        auth_header = request.headers.get("Authorization") or ""
        if auth_header.lower().startswith("bearer "):
            token = auth_header.split(" ", 1)[1].strip()
        if not token:
            token = request.cookies.get("borealis_auth")
        if not token:
            return None
        try:
            data = self._token_serializer().loads(
                token,
                max_age=int(os.environ.get("BOREALIS_TOKEN_TTL_SECONDS", 60 * 60 * 24 * 30)),
            )
            username = data.get("u")
            role = data.get("r") or "User"
            if username:
                return {"username": username, "role": role}
        except Exception:  # also covers BadSignature / SignatureExpired from itsdangerous
            return None
        return None

    def _require_admin(self) -> Optional[Tuple[Dict[str, Any], int]]:
        user = self._current_user()
        if not user:
            return {"error": "unauthorized"}, 401
        if (user.get("role") or "").lower() != "admin":
            return {"error": "forbidden"}, 403
        return None

    # ------------------------------------------------------------------ #
    # Endpoint implementations
    # ------------------------------------------------------------------ #

    def list_users(self):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        conn: Optional[sqlite3.Connection] = None
        try:
            conn = self._db_conn()
            cur = conn.cursor()
            cur.execute(
                "SELECT id, username, display_name, role, last_login, created_at, updated_at, "
                "COALESCE(mfa_enabled, 0) FROM users ORDER BY LOWER(username) ASC"
            )
            rows = cur.fetchall()
            users: List[Mapping[str, Any]] = [_row_to_user(row) for row in rows]
            return jsonify({"users": users})
        except Exception as exc:
            self.logger.debug("Failed to list users", exc_info=True)
            return jsonify({"error": str(exc)}), 500
        finally:
            if conn:
                conn.close()

    def create_user(self):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        data = request.get_json(silent=True) or {}
        username = (data.get("username") or "").strip()
        display_name = (data.get("display_name") or username).strip()
        role = (data.get("role") or "User").strip().title()
        password_sha512 = (data.get("password_sha512") or "").strip().lower()

        if not username or not password_sha512:
            return jsonify({"error": "username and password_sha512 are required"}), 400
        if role not in ("User", "Admin"):
            return jsonify({"error": "invalid role"}), 400

        now_ts = _now_ts()
        conn: Optional[sqlite3.Connection] = None
        try:
            conn = self._db_conn()
            cur = conn.cursor()
            cur.execute(
                "INSERT INTO users(username, display_name, password_sha512, role, created_at, updated_at) "
                "VALUES(?,?,?,?,?,?)",
                (username, display_name or username, password_sha512, role, now_ts, now_ts),
            )
            conn.commit()
            return jsonify({"status": "ok"})
        except sqlite3.IntegrityError:
            return jsonify({"error": "username already exists"}), 409
        except Exception as exc:
            self.logger.debug("Failed to create user %s", username, exc_info=True)
            return jsonify({"error": str(exc)}), 500
        finally:
            if conn:
                conn.close()

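The password_sha512 field is a hex-encoded SHA-512 digest computed client-side; the 128-character check in reset_password below enforces this. A sketch of producing a valid value:

    import hashlib

    password_sha512 = hashlib.sha512("s3cret-passphrase".encode("utf-8")).hexdigest()
    assert len(password_sha512) == 128  # matches the server-side length check
    body = {"username": "jdoe", "role": "User", "password_sha512": password_sha512}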
    def delete_user(self, username: str):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        username_norm = (username or "").strip()
        if not username_norm:
            return jsonify({"error": "invalid username"}), 400

        conn: Optional[sqlite3.Connection] = None
        try:
            conn = self._db_conn()
            cur = conn.cursor()

            me = self._current_user()
            if me and (me.get("username", "").lower() == username_norm.lower()):
                return (
                    jsonify({"error": "You cannot delete the user you are currently logged in as."}),
                    400,
                )

            cur.execute("SELECT COUNT(*) FROM users")
            total_users = cur.fetchone()[0] or 0
            if total_users <= 1:
                return (
                    jsonify(
                        {
                            "error": "There is only one user currently configured, you cannot delete this user until you have created another."
                        }
                    ),
                    400,
                )

            cur.execute("DELETE FROM users WHERE LOWER(username)=LOWER(?)", (username_norm,))
            deleted = cur.rowcount or 0
            conn.commit()
            if deleted == 0:
                return jsonify({"error": "user not found"}), 404
            return jsonify({"status": "ok"})
        except Exception as exc:
            self.logger.debug("Failed to delete user %s", username_norm, exc_info=True)
            return jsonify({"error": str(exc)}), 500
        finally:
            if conn:
                conn.close()

    def reset_password(self, username: str):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        data = request.get_json(silent=True) or {}
        password_sha512 = (data.get("password_sha512") or "").strip().lower()
        if not password_sha512 or len(password_sha512) != 128:
            return jsonify({"error": "invalid password hash"}), 400

        conn: Optional[sqlite3.Connection] = None
        try:
            conn = self._db_conn()
            cur = conn.cursor()
            now_ts = _now_ts()
            cur.execute(
                "UPDATE users SET password_sha512=?, updated_at=? WHERE LOWER(username)=LOWER(?)",
                (password_sha512, now_ts, username),
            )
            if cur.rowcount == 0:
                return jsonify({"error": "user not found"}), 404
            conn.commit()
            return jsonify({"status": "ok"})
        except Exception as exc:
            self.logger.debug("Failed to reset password for %s", username, exc_info=True)
            return jsonify({"error": str(exc)}), 500
        finally:
            if conn:
                conn.close()

    def change_role(self, username: str):
        requirement = self._require_admin()
        if requirement:
            payload, status = requirement
            return jsonify(payload), status

        data = request.get_json(silent=True) or {}
        role = (data.get("role") or "").strip().title()
        if role not in ("User", "Admin"):
            return jsonify({"error": "invalid role"}), 400

        conn: Optional[sqlite3.Connection] = None
        try:
            conn = self._db_conn()
            cur = conn.cursor()

            if role == "User":
                cur.execute("SELECT COUNT(*) FROM users WHERE LOWER(role)='admin'")
                admin_count = cur.fetchone()[0] or 0
                cur.execute(
                    "SELECT LOWER(role) FROM users WHERE LOWER(username)=LOWER(?)",
                    (username,),
                )
                row = cur.fetchone()
                current_role = (row[0] or "").lower() if row else ""
                if current_role == "admin" and admin_count <= 1:
                    return jsonify({"error": "cannot demote the last admin"}), 400

            now_ts = _now_ts()
            cur.execute(
                "UPDATE users SET role=?, updated_at=? WHERE LOWER(username)=LOWER(?)",
                (role, now_ts, username),
            )
            if cur.rowcount == 0:
                return jsonify({"error": "user not found"}), 404
            conn.commit()

            me = self._current_user()
            if me and me.get("username", "").lower() == (username or "").lower():
                session["role"] = role

            return jsonify({"status": "ok"})
        except Exception as exc:
            self.logger.debug("Failed to update role for %s", username, exc_info=True)
            return jsonify({"error": str(exc)}), 500
        finally:
            if conn:
                conn.close()


def register_user_management(app: Flask, adapters: "EngineServiceAdapters") -> None:
    """Register user management endpoints."""

    service = UserManagementService(app, adapters)
    blueprint = Blueprint("access_mgmt_users", __name__)

    @blueprint.route("/api/users", methods=["GET"])
    def _list_users():
        return service.list_users()

    @blueprint.route("/api/users", methods=["POST"])
    def _create_user():
        return service.create_user()

    @blueprint.route("/api/users/<username>", methods=["DELETE"])
    def _delete_user(username: str):
        return service.delete_user(username)

    @blueprint.route("/api/users/<username>/reset_password", methods=["POST"])
    def _reset_password(username: str):
        return service.reset_password(username)

    @blueprint.route("/api/users/<username>/role", methods=["POST"])
    def _change_role(username: str):
        return service.change_role(username)

    app.register_blueprint(blueprint)

@@ -13,20 +13,367 @@
 from __future__ import annotations
 
 import base64
 import json
 import os
 import re
 import time
-from typing import TYPE_CHECKING, Any, Dict, List
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
 
 from flask import Blueprint, jsonify, request
 
 from ..scheduled_jobs.management import ensure_scheduler, get_scheduler
 
 if TYPE_CHECKING:  # pragma: no cover - typing aide
     from flask import Flask
 
     from .. import EngineServiceAdapters

def _assemblies_root() -> Path:
    base = Path(__file__).resolve()
    search_roots = (base, *base.parents)
    for candidate in search_roots:
        engine_dir: Optional[Path]
        if candidate.name.lower() == "engine":
            engine_dir = candidate
        else:
            tentative = candidate / "Engine"
            engine_dir = tentative if tentative.is_dir() else None
        if not engine_dir:
            continue
        assemblies_dir = engine_dir / "Assemblies"
        if assemblies_dir.is_dir():
            return assemblies_dir.resolve()
    raise RuntimeError("Engine assemblies directory not found; expected Engine/Assemblies.")


def _scripts_root() -> Path:
    assemblies_root = _assemblies_root()
    scripts_dir = assemblies_root / "Scripts"
    if not scripts_dir.is_dir():
        raise RuntimeError("Engine scripts directory not found; expected Engine/Assemblies/Scripts.")
    return scripts_dir.resolve()


def _normalize_script_relpath(rel_path: Any) -> Optional[str]:
    """Return a canonical Scripts-relative path or ``None`` when invalid."""

    if not isinstance(rel_path, str):
        return None

    raw = rel_path.replace("\\", "/").strip()
    if not raw:
        return None

    segments: List[str] = []
    for part in raw.split("/"):
        candidate = part.strip()
        if not candidate or candidate == ".":
            continue
        if candidate == "..":
            return None
        segments.append(candidate)

    if not segments:
        return None

    first = segments[0]
    if first.lower() != "scripts":
        segments.insert(0, "Scripts")
    else:
        segments[0] = "Scripts"

    return "/".join(segments)

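Illustrative inputs and outputs for the normalization above; the behavior follows directly from the code, while the paths are invented:

    assert _normalize_script_relpath("Deploy\\install.ps1") == "Scripts/Deploy/install.ps1"
    assert _normalize_script_relpath("scripts/./tools/run.ps1") == "Scripts/tools/run.ps1"
    assert _normalize_script_relpath("../etc/passwd") is None   # traversal rejected
    assert _normalize_script_relpath("   ") is None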
def _decode_base64_text(value: Any) -> Optional[str]:
    if not isinstance(value, str):
        return None
    stripped = value.strip()
    if not stripped:
        return ""
    try:
        cleaned = re.sub(r"\s+", "", stripped)
    except Exception:
        cleaned = stripped
    try:
        decoded = base64.b64decode(cleaned, validate=True)
    except Exception:
        return None
    try:
        return decoded.decode("utf-8")
    except Exception:
        return decoded.decode("utf-8", errors="replace")


def _decode_script_content(value: Any, encoding_hint: str = "") -> str:
    encoding = (encoding_hint or "").strip().lower()
    if isinstance(value, str):
        if encoding in {"base64", "b64", "base-64"}:
            decoded = _decode_base64_text(value)
            if decoded is not None:
                return decoded.replace("\r\n", "\n")
        decoded = _decode_base64_text(value)
        if decoded is not None:
            return decoded.replace("\r\n", "\n")
        return value.replace("\r\n", "\n")
    return ""


def _canonical_env_key(name: Any) -> str:
    try:
        return re.sub(r"[^A-Za-z0-9_]", "_", str(name or "").strip()).upper()
    except Exception:
        return ""


def _env_string(value: Any) -> str:
    if isinstance(value, bool):
        return "True" if value else "False"
    if value is None:
        return ""
    return str(value)


def _powershell_literal(value: Any, var_type: str) -> str:
    typ = str(var_type or "string").lower()
    if typ == "boolean":
        if isinstance(value, bool):
            truthy = value
        elif value is None:
            truthy = False
        elif isinstance(value, (int, float)):
            truthy = value != 0
        else:
            s = str(value).strip().lower()
            if s in {"true", "1", "yes", "y", "on"}:
                truthy = True
            elif s in {"false", "0", "no", "n", "off", ""}:
                truthy = False
            else:
                truthy = bool(s)
        return "$true" if truthy else "$false"
    if typ == "number":
        if value is None or value == "":
            return "0"
        return str(value)
    s = "" if value is None else str(value)
    return "'" + s.replace("'", "''") + "'"


def _expand_env_aliases(env_map: Dict[str, str], variables: List[Dict[str, Any]]) -> Dict[str, str]:
    expanded: Dict[str, str] = dict(env_map or {})
    if not isinstance(variables, list):
        return expanded
    for var in variables:
        if not isinstance(var, dict):
            continue
        name = str(var.get("name") or "").strip()
        if not name:
            continue
        canonical = _canonical_env_key(name)
        if not canonical or canonical not in expanded:
            continue
        value = expanded[canonical]
        alias = re.sub(r"[^A-Za-z0-9_]", "_", name)
        if alias and alias not in expanded:
            expanded[alias] = value
        if alias != name and re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", name) and name not in expanded:
            expanded[name] = value
    return expanded


def _extract_variable_default(var: Dict[str, Any]) -> Any:
    for key in ("value", "default", "defaultValue", "default_value"):
        if key in var:
            val = var.get(key)
            return "" if val is None else val
    return ""


def prepare_variable_context(doc_variables: List[Dict[str, Any]], overrides: Dict[str, Any]):
    env_map: Dict[str, str] = {}
    variables: List[Dict[str, Any]] = []
    literal_lookup: Dict[str, str] = {}
    doc_names: Dict[str, bool] = {}

    overrides = overrides or {}

    if not isinstance(doc_variables, list):
        doc_variables = []

    for var in doc_variables:
        if not isinstance(var, dict):
            continue
        name = str(var.get("name") or "").strip()
        if not name:
            continue
        doc_names[name] = True
        canonical = _canonical_env_key(name)
        var_type = str(var.get("type") or "string").lower()
        default_val = _extract_variable_default(var)
        final_val = overrides[name] if name in overrides else default_val
        if canonical:
            env_map[canonical] = _env_string(final_val)
            literal_lookup[canonical] = _powershell_literal(final_val, var_type)
        if name in overrides:
            new_var = dict(var)
            new_var["value"] = overrides[name]
            variables.append(new_var)
        else:
            variables.append(var)

    for name, val in overrides.items():
        if name in doc_names:
            continue
        canonical = _canonical_env_key(name)
        if canonical:
            env_map[canonical] = _env_string(val)
            literal_lookup[canonical] = _powershell_literal(val, "string")
        variables.append({"name": name, "value": val, "type": "string"})

    env_map = _expand_env_aliases(env_map, variables)
    return env_map, variables, literal_lookup


_ENV_VAR_PATTERN = re.compile(r"(?i)\$env:(\{)?([A-Za-z0-9_\-]+)(?(1)\})")


def rewrite_powershell_script(content: str, literal_lookup: Dict[str, str]) -> str:
    if not content or not literal_lookup:
        return content

    def _replace(match: Any) -> str:
        name = match.group(2)
        canonical = _canonical_env_key(name)
        if not canonical:
            return match.group(0)
        literal = literal_lookup.get(canonical)
        if literal is None:
            return match.group(0)
        return literal

    return _ENV_VAR_PATTERN.sub(_replace, content)

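A worked example of the two helpers above, using a hypothetical variable document:

    doc_vars = [{"name": "Install Dir", "type": "string", "default": "C:\\Apps"},
                {"name": "Force", "type": "boolean", "default": False}]
    env_map, variables, literals = prepare_variable_context(doc_vars, {"Force": True})
    # env_map  == {"INSTALL_DIR": "C:\\Apps", "FORCE": "True",
    #              "Install_Dir": "C:\\Apps", "Force": "True"}   (identifier aliases added)
    # literals == {"INSTALL_DIR": "'C:\\Apps'", "FORCE": "$true"}

    script = 'New-Item $env:INSTALL_DIR; if ($env:{FORCE}) { Write-Host "forced" }'
    print(rewrite_powershell_script(script, literals))
    # -> New-Item 'C:\Apps'; if ($true) { Write-Host "forced" }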
def _load_assembly_document(abs_path: str, default_type: str) -> Dict[str, Any]:
    abs_path_str = os.fspath(abs_path)
    base_name = os.path.splitext(os.path.basename(abs_path_str))[0]
    doc: Dict[str, Any] = {
        "name": base_name,
        "description": "",
        "category": "application" if default_type == "ansible" else "script",
        "type": default_type,
        "script": "",
        "variables": [],
        "files": [],
        "timeout_seconds": 3600,
    }
    if abs_path_str.lower().endswith(".json") and os.path.isfile(abs_path_str):
        try:
            with open(abs_path_str, "r", encoding="utf-8") as fh:
                data = json.load(fh)
        except Exception:
            data = {}
        if isinstance(data, dict):
            doc["name"] = str(data.get("name") or doc["name"])
            doc["description"] = str(data.get("description") or "")
            cat = str(data.get("category") or doc["category"]).strip().lower()
            if cat in {"application", "script"}:
                doc["category"] = cat
            typ = str(data.get("type") or data.get("script_type") or default_type).strip().lower()
            if typ in {"powershell", "batch", "bash", "ansible"}:
                doc["type"] = typ
            script_val = data.get("script")
            content_val = data.get("content")
            script_lines = data.get("script_lines")
            if isinstance(script_lines, list):
                try:
                    doc["script"] = "\n".join(str(line) for line in script_lines)
                except Exception:
                    doc["script"] = ""
            elif isinstance(script_val, str):
                doc["script"] = script_val
            else:
                if isinstance(content_val, str):
                    doc["script"] = content_val
            encoding_hint = str(
                data.get("script_encoding") or data.get("scriptEncoding") or ""
            ).strip().lower()
            doc["script"] = _decode_script_content(doc.get("script"), encoding_hint)
            if encoding_hint in {"base64", "b64", "base-64"}:
                doc["script_encoding"] = "base64"
            else:
                probe_source = ""
                if isinstance(script_val, str) and script_val:
                    probe_source = script_val
                elif isinstance(content_val, str) and content_val:
                    probe_source = content_val
                decoded_probe = _decode_base64_text(probe_source) if probe_source else None
                if decoded_probe is not None:
                    doc["script_encoding"] = "base64"
                    doc["script"] = decoded_probe.replace("\r\n", "\n")
                else:
                    doc["script_encoding"] = "plain"
            try:
                timeout_raw = data.get("timeout_seconds", data.get("timeout"))
                if timeout_raw is None:
                    doc["timeout_seconds"] = 3600
                else:
                    doc["timeout_seconds"] = max(0, int(timeout_raw))
            except Exception:
                doc["timeout_seconds"] = 3600
            vars_in = data.get("variables") if isinstance(data.get("variables"), list) else []
            doc["variables"] = []
            for item in vars_in:
                if not isinstance(item, dict):
                    continue
                name = str(item.get("name") or item.get("key") or "").strip()
                if not name:
                    continue
                vtype = str(item.get("type") or "string").strip().lower()
                if vtype not in {"string", "number", "boolean", "credential"}:
                    vtype = "string"
                doc["variables"].append(
                    {
                        "name": name,
                        "label": str(item.get("label") or ""),
                        "type": vtype,
                        "default": item.get("default", item.get("default_value")),
                        "required": bool(item.get("required")),
                        "description": str(item.get("description") or ""),
                    }
                )
            files_in = data.get("files") if isinstance(data.get("files"), list) else []
            doc["files"] = []
            for file_item in files_in:
                if not isinstance(file_item, dict):
                    continue
                fname = file_item.get("file_name") or file_item.get("name")
                if not fname or not isinstance(file_item.get("data"), str):
                    continue
                try:
                    size_val = int(file_item.get("size") or 0)
                except Exception:
                    size_val = 0
                doc["files"].append(
                    {
                        "file_name": str(fname),
                        "size": size_val,
                        "mime_type": str(file_item.get("mime_type") or file_item.get("mimeType") or ""),
                        "data": file_item.get("data"),
                    }
                )
        return doc
    try:
        with open(abs_path_str, "r", encoding="utf-8", errors="replace") as fh:
            content = fh.read()
    except Exception:
        content = ""
    normalized_script = (content or "").replace("\r\n", "\n")
    doc["script"] = normalized_script
    return doc

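For illustration, a minimal JSON assembly document this loader accepts; the keys match the lookups above, while the script content is invented:

    import base64, json

    example = {
        "name": "Install Tool",
        "type": "powershell",
        "script_encoding": "base64",
        "content": base64.b64encode(b"Write-Host $env:INSTALL_DIR\r\n").decode("ascii"),
        "timeout_seconds": 600,
        "variables": [{"name": "Install Dir", "type": "string", "default": "C:\\Apps"}],
    }
    # Saved as e.g. Engine/Assemblies/Scripts/Deploy/install_tool.json, _load_assembly_document
    # would yield type "powershell", script "Write-Host $env:INSTALL_DIR\n" (CRLF normalized),
    # script_encoding "base64", and timeout_seconds 600.
    print(json.dumps(example, indent=2))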
def _normalize_hostnames(value: Any) -> List[str]:
    if not isinstance(value, list):
        return []
@@ -41,31 +388,52 @@ def _normalize_hostnames(value: Any) -> List[str]:
 def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None:
     """Register quick execution endpoints for assemblies."""
 
     ensure_scheduler(app, adapters)
     blueprint = Blueprint("assemblies_execution", __name__)
     service_log = adapters.service_log
 
     @blueprint.route("/api/scripts/quick_run", methods=["POST"])
     def scripts_quick_run():
         scheduler = get_scheduler(adapters)
         data = request.get_json(silent=True) or {}
-        rel_path = (data.get("script_path") or "").strip()
+        rel_path_input = data.get("script_path")
+        rel_path_normalized = _normalize_script_relpath(rel_path_input)
         hostnames = _normalize_hostnames(data.get("hostnames"))
         run_mode = (data.get("run_mode") or "system").strip().lower()
         admin_user = str(data.get("admin_user") or "").strip()
         admin_pass = str(data.get("admin_pass") or "").strip()
 
-        if not rel_path or not hostnames:
+        if not rel_path_normalized or not hostnames:
             return jsonify({"error": "Missing script_path or hostnames[]"}), 400
 
-        scripts_root = scheduler._scripts_root()  # type: ignore[attr-defined]
-        abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-        if (
-            not abs_path.startswith(scripts_root)
-            or not scheduler._is_valid_scripts_relpath(rel_path)  # type: ignore[attr-defined]
-            or not os.path.isfile(abs_path)
-        ):
+        rel_path_canonical = rel_path_normalized
+
+        try:
+            scripts_root = _scripts_root()
+            assemblies_root = scripts_root.parent.resolve()
+            abs_path = (assemblies_root / rel_path_canonical).resolve()
+        except Exception as exc:  # pragma: no cover - defensive guard
+            service_log(
+                "assemblies",
+                f"quick job failed to resolve script path={rel_path_input!r}: {exc}",
+                level="ERROR",
+            )
+            return jsonify({"error": "Failed to resolve script path"}), 500
+
+        scripts_root_str = str(scripts_root)
+        abs_path_str = str(abs_path)
+        try:
+            within_scripts = os.path.commonpath([scripts_root_str, abs_path_str]) == scripts_root_str
+        except ValueError:
+            within_scripts = False
+
+        if not within_scripts or not os.path.isfile(abs_path_str):
+            service_log(
+                "assemblies",
+                f"quick job requested missing or out-of-scope script input={rel_path_input!r} normalized={rel_path_canonical}",
+                level="WARNING",
+            )
             return jsonify({"error": "Script not found"}), 404
 
-        doc = scheduler._load_assembly_document(abs_path, "scripts")  # type: ignore[attr-defined]
+        doc = _load_assembly_document(abs_path, "powershell")
         script_type = (doc.get("type") or "powershell").lower()
         if script_type != "powershell":
             return jsonify({"error": f"Unsupported script type '{script_type}'. Only PowerShell is supported."}), 400
@@ -81,8 +449,8 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None:
                 continue
             overrides[name] = val
 
-        env_map, variables, literal_lookup = scheduler._prepare_variable_context(doc_variables, overrides)  # type: ignore[attr-defined]
-        content = scheduler._rewrite_powershell_script(content, literal_lookup)  # type: ignore[attr-defined]
+        env_map, variables, literal_lookup = prepare_variable_context(doc_variables, overrides)
+        content = rewrite_powershell_script(content, literal_lookup)
         normalized_script = (content or "").replace("\r\n", "\n")
         script_bytes = normalized_script.encode("utf-8")
         encoded_content = (
@@ -127,7 +495,7 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None:
                 """,
                 (
                     host,
-                    rel_path.replace(os.sep, "/"),
+                    rel_path_canonical.replace(os.sep, "/"),
                     friendly_name,
                     script_type,
                    now,
@@ -144,7 +512,7 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None:
                 "target_hostname": host,
                 "script_type": script_type,
                 "script_name": friendly_name,
-                "script_path": rel_path.replace(os.sep, "/"),
+                "script_path": rel_path_canonical.replace(os.sep, "/"),
                 "script_content": encoded_content,
                 "script_encoding": "base64",
                 "environment": env_map,
@@ -152,6 +520,8 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None:
                 "timeout_seconds": timeout_seconds,
                 "files": doc.get("files") if isinstance(doc.get("files"), list) else [],
                 "run_mode": run_mode,
+                "admin_user": admin_user,
+                "admin_pass": admin_pass,
             }
             if signature_b64:
                 payload["signature"] = signature_b64
@@ -176,7 +546,7 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None:
             results.append({"hostname": host, "job_id": job_id, "status": "Running"})
             service_log(
                 "assemblies",
-                f"quick job queued hostname={host} path={rel_path} run_mode={run_mode}",
+                f"quick job queued hostname={host} path={rel_path_canonical} run_mode={run_mode}",
             )
         except Exception as exc:
             if conn is not None:
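A hedged sketch of a request to the endpoint above. The host and credentials are placeholders, and the variables override key is an assumption, since the hunk that parses overrides is elided here:

    import requests

    payload = {
        "script_path": "Deploy/install_tool.json",   # normalized to Scripts/... server-side
        "hostnames": ["WORKSTATION-01", "WORKSTATION-02"],
        "run_mode": "system",                        # the server-side default
        "admin_user": "",                            # only meaningful for credentialed runs
        "admin_pass": "",
        "variables": {"Install Dir": "D:\\Apps"},    # assumed override field name
    }
    resp = requests.post("https://borealis.example.com/api/scripts/quick_run",
                         headers={"Authorization": "Bearer <admin-session-token>"},
                         json=payload, timeout=30)
    print(resp.json())  # per the code above, each queued host reports status "Running"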
@@ -15,6 +15,7 @@ from __future__ import annotations
 import base64
 import hashlib
 import io
+import logging
 import os
 import sqlite3
 import time
@@ -37,6 +38,9 @@ except Exception:  # pragma: no cover - optional dependency
 if TYPE_CHECKING:  # pragma: no cover - typing helper
     from . import EngineServiceAdapters
 
+_logger = logging.getLogger(__name__)
+_qr_logger_warning_emitted = False
+
 
 def _now_ts() -> int:
     return int(time.time())
@@ -71,7 +75,13 @@ def _totp_provisioning_uri(secret: str, username: str) -> Optional[str]:
 
 
 def _totp_qr_data_uri(payload: str) -> Optional[str]:
-    if not payload or qrcode is None:
+    global _qr_logger_warning_emitted
+    if not payload:
+        return None
+    if qrcode is None:
+        if not _qr_logger_warning_emitted:
+            _logger.warning("MFA QR generation skipped: 'qrcode' dependency not available.")
+            _qr_logger_warning_emitted = True
         return None
     try:
         image = qrcode.make(payload, box_size=6, border=4)
@@ -79,7 +89,10 @@ def _totp_qr_data_uri(payload: str) -> Optional[str]:
         image.save(buffer, format="PNG")
         encoded = base64.b64encode(buffer.getvalue()).decode("ascii")
         return f"data:image/png;base64,{encoded}"
-    except Exception:
+    except Exception as exc:
+        if not _qr_logger_warning_emitted:
+            _logger.warning("Failed to generate MFA QR code: %s", exc, exc_info=True)
+            _qr_logger_warning_emitted = True
         return None

@@ -29,9 +29,7 @@ from __future__ import annotations
 import json
 import logging
 import os
-import ssl
 import sqlite3
-import threading
 import time
 import uuid
 from datetime import datetime, timezone
@@ -41,20 +39,8 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
 from flask import Blueprint, jsonify, request, session, g
 from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer
 
+from ....auth.device_auth import require_device_auth
+from ....auth.guid_utils import normalize_guid
-try:
-    import requests  # type: ignore
-except ImportError:  # pragma: no cover - fallback for minimal test environments
-    class _RequestsStub:
-        class RequestException(RuntimeError):
-            """Stand-in exception when the requests module is unavailable."""
-
-        def get(self, *args: Any, **kwargs: Any) -> Any:
-            raise self.RequestException("The 'requests' library is required for repository hash lookups.")
-
-    requests = _RequestsStub()  # type: ignore
-from ....auth.device_auth import require_device_auth
 
 if TYPE_CHECKING:  # pragma: no cover - typing aide
     from .. import EngineServiceAdapters
@@ -96,6 +82,25 @@ def _status_from_last_seen(last_seen: Optional[int]) -> str:
     return "Offline"
 
 
+def _normalize_service_mode(value: Any, agent_id: Optional[str] = None) -> str:
+    try:
+        text = str(value or "").strip().lower()
+    except Exception:
+        text = ""
+    if not text and agent_id:
+        try:
+            aid = agent_id.lower()
+            if "-svc-" in aid or aid.endswith("-svc"):
+                return "system"
+        except Exception:
+            pass
+    if text in {"system", "svc", "service", "system_service"}:
+        return "system"
+    if text in {"interactive", "currentuser", "user", "current_user"}:
+        return "currentuser"
+    return "currentuser"
+
+
 def _is_internal_request(remote_addr: Optional[str]) -> bool:
     addr = (remote_addr or "").strip()
     if not addr:
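The mapping implemented by _normalize_service_mode, by example:

    assert _normalize_service_mode("Service") == "system"
    assert _normalize_service_mode("", "host-svc-1234") == "system"   # inferred from agent id
    assert _normalize_service_mode("Interactive") == "currentuser"
    assert _normalize_service_mode(None) == "currentuser"             # default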
@@ -337,257 +342,6 @@ def _device_upsert(
|
||||
cur.execute(sql, params)
|
||||
|
||||
|
||||
class RepositoryHashCache:
|
||||
"""Lightweight GitHub head cache with on-disk persistence."""
|
||||
|
||||
def __init__(self, adapters: "EngineServiceAdapters") -> None:
|
||||
self._db_conn_factory = adapters.db_conn_factory
|
||||
self._service_log = adapters.service_log
|
||||
self._logger = adapters.context.logger
|
||||
config = adapters.context.config or {}
|
||||
default_root = Path(adapters.context.database_path).resolve().parent / "cache"
|
||||
cache_root = Path(config.get("cache_dir") or default_root)
|
||||
cache_root.mkdir(parents=True, exist_ok=True)
|
||||
self._cache_file = cache_root / "repo_hash_cache.json"
|
||||
self._cache: Dict[Tuple[str, str], Tuple[str, float]] = {}
|
||||
self._lock = threading.Lock()
|
||||
self._load_cache()
|
||||
|
||||
def _load_cache(self) -> None:
|
||||
try:
|
||||
if not self._cache_file.is_file():
|
||||
return
|
||||
data = json.loads(self._cache_file.read_text(encoding="utf-8"))
|
||||
entries = data.get("entries") or {}
|
||||
for key, payload in entries.items():
|
||||
sha = payload.get("sha")
|
||||
ts = payload.get("ts")
|
||||
if not sha or ts is None:
|
||||
continue
|
||||
repo, _, branch = key.partition(":")
|
||||
if not repo or not branch:
|
||||
continue
|
||||
self._cache[(repo, branch)] = (str(sha), float(ts))
|
||||
except Exception:
|
||||
self._logger.debug("Failed to hydrate repository hash cache", exc_info=True)
|
||||
|
||||
def _persist_cache(self) -> None:
|
||||
try:
|
||||
snapshot = {
|
||||
f"{repo}:{branch}": {"sha": sha, "ts": ts}
|
||||
for (repo, branch), (sha, ts) in self._cache.items()
|
||||
if sha
|
||||
}
|
||||
payload = {"version": 1, "entries": snapshot}
|
||||
tmp_path = self._cache_file.with_suffix(".tmp")
|
||||
tmp_path.write_text(json.dumps(payload), encoding="utf-8")
|
||||
tmp_path.replace(self._cache_file)
|
||||
except Exception:
|
||||
self._logger.debug("Failed to persist repository hash cache", exc_info=True)
|
||||
|
||||
def _resolve_original_ssl_module(self):
|
||||
try:
|
||||
from eventlet import patcher # type: ignore
|
||||
|
||||
original_ssl = patcher.original("ssl")
|
||||
if original_ssl is not None:
|
||||
return original_ssl
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
module_name = getattr(ssl.SSLContext, "__module__", "")
|
||||
if module_name != "eventlet.green.ssl":
|
||||
return ssl
|
||||
return None
|
||||
|
||||
def _build_requests_session(self):
|
||||
if isinstance(requests, _RequestsStub):
|
||||
return None
|
||||
try:
|
||||
from requests import Session # type: ignore
|
||||
from requests.adapters import HTTPAdapter # type: ignore
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
original_ssl = self._resolve_original_ssl_module()
|
||||
if original_ssl is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
context = original_ssl.create_default_context()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
tls_version = getattr(original_ssl, "TLSVersion", None)
|
||||
if tls_version is not None and hasattr(context, "minimum_version"):
|
||||
try:
|
||||
context.minimum_version = tls_version.TLSv1_2
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
class _ContextAdapter(HTTPAdapter):
|
||||
def init_poolmanager(self, *args, **kwargs):
|
||||
kwargs.setdefault("ssl_context", context)
|
||||
return super().init_poolmanager(*args, **kwargs)
|
||||
|
||||
def proxy_manager_for(self, *args, **kwargs):
|
||||
kwargs.setdefault("ssl_context", context)
|
||||
return super().proxy_manager_for(*args, **kwargs)
|
||||
|
||||
session = Session()
|
||||
adapter = _ContextAdapter()
|
||||
session.mount("https://", adapter)
|
||||
return session
|
||||
|
||||
def _github_token(self, *, force_refresh: bool = False) -> Optional[str]:
|
||||
env_token = (request.headers.get("X-GitHub-Token") or "").strip()
|
||||
if env_token:
|
||||
return env_token
|
||||
token = None
|
||||
if not force_refresh:
|
||||
token = request.headers.get("Authorization")
|
||||
if token and token.lower().startswith("bearer "):
|
||||
return token.split(" ", 1)[1].strip()
|
||||
conn: Optional[sqlite3.Connection] = None
|
||||
try:
|
||||
conn = self._db_conn_factory()
|
||||
cur = conn.cursor()
|
||||
cur.execute("SELECT token FROM github_token LIMIT 1")
|
||||
row = cur.fetchone()
|
||||
if row and row[0]:
|
||||
candidate = str(row[0]).strip()
|
||||
if candidate:
|
||||
token = candidate
|
||||
except sqlite3.Error:
|
||||
token = None
|
||||
except Exception as exc:
|
||||
self._service_log("server", f"github token lookup failed: {exc}")
|
||||
token = None
|
||||
finally:
|
||||
if conn:
|
||||
conn.close()
|
||||
if token:
|
||||
return token
|
||||
fallback = os.environ.get("BOREALIS_GITHUB_TOKEN") or os.environ.get("GITHUB_TOKEN")
|
||||
return fallback.strip() if fallback else None
|
||||
|
||||
def resolve(
|
||||
self,
|
||||
repo: str,
|
||||
branch: str,
|
||||
*,
|
||||
ttl: int = 60,
|
||||
force_refresh: bool = False,
|
||||
) -> Tuple[Dict[str, Any], int]:
|
||||
ttl = max(30, min(int(ttl or 60), 3600))
|
||||
key = (repo, branch)
|
||||
now = time.time()
|
||||
with self._lock:
|
||||
cached = self._cache.get(key)
|
||||
if cached and not force_refresh:
|
||||
sha, ts = cached
|
||||
if sha and (now - ts) < ttl:
|
||||
return (
|
||||
{
|
||||
"repo": repo,
|
||||
"branch": branch,
|
||||
"sha": sha,
|
||||
"cached": True,
|
||||
"age_seconds": now - ts,
|
||||
"source": "cache",
|
||||
},
|
||||
200,
|
||||
)
|
||||
|
||||
headers = {
|
||||
"Accept": "application/vnd.github+json",
|
||||
"User-Agent": "Borealis-Engine",
|
||||
}
|
||||
token = self._github_token(force_refresh=force_refresh)
|
||||
if token:
|
||||
headers["Authorization"] = f"Bearer {token}"
|
||||
|
||||
sha: Optional[str] = None
|
||||
error: Optional[str] = None
|
||||
session = None
|
||||
try:
|
||||
session = self._build_requests_session()
|
||||
except Exception:
|
||||
session = None
|
||||
|
||||
try:
|
||||
target = session if session is not None else requests
|
||||
resp = target.get(
|
||||
f"https://api.github.com/repos/{repo}/branches/{branch}",
|
||||
headers=headers,
|
||||
timeout=20,
|
||||
)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
sha = ((data.get("commit") or {}).get("sha") or "").strip()
|
||||
else:
|
||||
error = f"GitHub head lookup failed: HTTP {resp.status_code}"
|
||||
except RecursionError as exc:
|
||||
error = f"GitHub head lookup recursion error: {exc}"
|
||||
except requests.RequestException as exc:
|
||||
error = f"GitHub head lookup raised: {exc}"
|
||||
except Exception as exc:
|
||||
error = f"GitHub head lookup unexpected error: {exc}"
|
||||
finally:
|
||||
if session is not None:
|
||||
try:
|
||||
session.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if sha:
|
||||
with self._lock:
|
||||
self._cache[key] = (sha, now)
|
||||
self._persist_cache()
|
||||
return (
|
||||
{
|
||||
"repo": repo,
|
||||
"branch": branch,
|
||||
"sha": sha,
|
||||
"cached": False,
|
||||
"age_seconds": 0.0,
|
||||
"source": "github",
|
||||
},
|
||||
200,
|
||||
)
|
||||
|
||||
        if error:
            self._service_log("server", f"/api/repo/current_hash error: {error}")

        # Serve a stale cache entry rather than failing outright.
        if cached:
            cached_sha, ts = cached
            return (
                {
                    "repo": repo,
                    "branch": branch,
                    "sha": cached_sha or None,
                    "cached": True,
                    "age_seconds": now - ts,
                    "error": error or "using cached value",
                    "source": "cache-stale",
                },
                200 if cached_sha else 503,
            )

        return (
            {
                "repo": repo,
                "branch": branch,
                "sha": None,
                "cached": False,
                "age_seconds": None,
                "error": error or "unable to resolve repository head",
                "source": "github",
            },
            503,
        )

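# A hedged usage sketch for resolve() (wiring assumed, not shown in this
# diff): it returns a JSON-ready payload plus an HTTP status, so a Flask
# route can forward the pair directly.
#
#     payload, status = adapters.github_integration.resolve(
#         "bunny-lab-io/Borealis", "main", ttl=120, force_refresh=False
#     )
#     return jsonify(payload), status
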
class DeviceManagementService:
    """Encapsulates database access for device-focused API routes."""

@@ -623,7 +377,7 @@ class DeviceManagementService:
        self.db_conn_factory = adapters.db_conn_factory
        self.service_log = adapters.service_log
        self.logger = adapters.context.logger or logging.getLogger(__name__)
        self.repo_cache = RepositoryHashCache(adapters)
        self.repo_cache = adapters.github_integration

    def _db_conn(self) -> sqlite3.Connection:
        return self.db_conn_factory()
@@ -795,6 +549,76 @@ class DeviceManagementService:
            self.logger.debug("Failed to list devices", exc_info=True)
            return {"error": str(exc)}, 500

    def list_agents(self) -> Tuple[Dict[str, Any], int]:
        try:
            devices = self._fetch_devices(only_agents=True)
            grouped: Dict[str, Dict[str, Dict[str, Any]]] = {}
            now = time.time()
            for record in devices:
                hostname = (record.get("hostname") or "").strip() or "unknown"
                agent_id = (record.get("agent_id") or "").strip()
                mode = _normalize_service_mode(record.get("service_mode"), agent_id)
                if mode != "currentuser":
                    lowered = agent_id.lower()
                    if lowered.endswith("-script"):
                        continue
                last_seen_raw = record.get("last_seen") or 0
                try:
                    last_seen = int(last_seen_raw)
                except Exception:
                    last_seen = 0
                collector_active = bool(last_seen and (now - float(last_seen)) < 130)
                agent_guid = normalize_guid(record.get("agent_guid")) if record.get("agent_guid") else ""
                status_value = record.get("status")
                if status_value in (None, ""):
                    status = "Online" if collector_active else "Offline"
                else:
                    status = str(status_value)
                payload = {
                    "hostname": hostname,
                    "agent_hostname": hostname,
                    "service_mode": mode,
                    "collector_active": collector_active,
                    "collector_active_ts": last_seen,
                    "last_seen": last_seen,
                    "status": status,
                    "agent_id": agent_id,
                    "agent_guid": agent_guid or "",
                    "agent_hash": record.get("agent_hash") or "",
                    "connection_type": record.get("connection_type") or "",
                    "connection_endpoint": record.get("connection_endpoint") or "",
                    "device_type": record.get("device_type") or "",
                    "domain": record.get("domain") or "",
                    "external_ip": record.get("external_ip") or "",
                    "internal_ip": record.get("internal_ip") or "",
                    "last_reboot": record.get("last_reboot") or "",
                    "last_user": record.get("last_user") or "",
                    "operating_system": record.get("operating_system") or "",
                    "uptime": record.get("uptime") or 0,
                    "site_id": record.get("site_id"),
                    "site_name": record.get("site_name") or "",
                    "site_description": record.get("site_description") or "",
                }
                # Keep only the freshest record per hostname/service-mode pair.
                bucket = grouped.setdefault(hostname, {})
                existing = bucket.get(mode)
                if not existing or last_seen >= existing.get("last_seen", 0):
                    bucket[mode] = payload

            agents: Dict[str, Dict[str, Any]] = {}
            for bucket in grouped.values():
                for payload in bucket.values():
                    agent_key = payload.get("agent_id") or payload.get("agent_guid")
                    if not agent_key:
                        agent_key = f"{payload['hostname']}|{payload['service_mode']}"
                    if not payload.get("agent_id"):
                        payload["agent_id"] = agent_key
                    agents[agent_key] = payload

            return {"agents": agents}, 200
        except Exception as exc:
            self.logger.debug("Failed to list agents", exc_info=True)
            return {"error": str(exc)}, 500
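
    # Illustrative response shape for list_agents() (values hypothetical;
    # most per-agent fields omitted for brevity):
    #
    #     {
    #         "agents": {
    #             "agent-9f3a": {
    #                 "hostname": "HOST-01",
    #                 "service_mode": "system",
    #                 "collector_active": True,
    #                 "status": "Online",
    #                 "last_seen": 1700000000,
    #             }
    #         }
    #     }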

    def get_device_by_guid(self, guid: str) -> Tuple[Dict[str, Any], int]:
        normalized_guid = normalize_guid(guid)
        if not normalized_guid:
@@ -1465,18 +1289,14 @@ class DeviceManagementService:
            conn.close()

    def repo_current_hash(self) -> Tuple[Dict[str, Any], int]:
        repo = (request.args.get("repo") or "bunny-lab-io/Borealis").strip()
        branch = (request.args.get("branch") or "main").strip()
        refresh_flag = (request.args.get("refresh") or "").strip().lower()
        ttl_raw = request.args.get("ttl")
        if "/" not in repo:
            return {"error": "repo must be in the form owner/name"}, 400
        try:
            ttl = int(ttl_raw) if ttl_raw else 60
        except ValueError:
            ttl = 60
        force_refresh = refresh_flag in {"1", "true", "yes", "force", "refresh"}
        payload, status = self.repo_cache.resolve(repo, branch, ttl=ttl, force_refresh=force_refresh)
        payload, status = self.repo_cache.current_repo_hash(
            request.args.get("repo"),
            request.args.get("branch"),
            ttl=request.args.get("ttl"),
            force_refresh=force_refresh,
        )
        return payload, status

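    # Hedged endpoint sketch (path taken from the error-log string used by
    # the integration; query values illustrative):
    #
    #     GET /api/repo/current_hash?repo=bunny-lab-io/Borealis&branch=main&ttl=120&refresh=1
    #
    # Repeated polls inside the TTL window are answered from the shared
    # GitHubIntegration cache rather than hitting the GitHub API.
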
    def agent_hash_list(self) -> Tuple[Dict[str, Any], int]:

@@ -1525,6 +1345,11 @@ def register_management(app, adapters: "EngineServiceAdapters") -> None:
        payload, status = service.save_agent_details()
        return jsonify(payload), status

    @blueprint.route("/api/agents", methods=["GET"])
    def _list_agents():
        payload, status = service.list_agents()
        return jsonify(payload), status

    @blueprint.route("/api/devices", methods=["GET"])
    def _list_devices():
        payload, status = service.list_devices()
@@ -1679,4 +1504,3 @@ def register_management(app, adapters: "EngineServiceAdapters") -> None:
        return jsonify(payload), status

    app.register_blueprint(blueprint)

File diff suppressed because it is too large
@@ -17,10 +17,10 @@
"""Scheduled job management integration for the Borealis Engine runtime."""
from __future__ import annotations

from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING

try:  # pragma: no cover - Engine-local legacy scheduler shim
    from . import legacy_job_scheduler  # type: ignore
try:  # pragma: no cover - legacy module import guard
    import job_scheduler as legacy_job_scheduler  # type: ignore
except Exception as exc:  # pragma: no cover - runtime guard
    legacy_job_scheduler = None  # type: ignore
    _SCHEDULER_IMPORT_ERROR = exc
@@ -36,8 +36,8 @@ if TYPE_CHECKING:  # pragma: no cover - typing aide
def _raise_scheduler_import() -> None:
    if _SCHEDULER_IMPORT_ERROR is not None:
        raise RuntimeError(
            "Legacy job scheduler module could not be imported; ensure "
            "Data/Engine/services/API/scheduled_jobs/legacy_job_scheduler.py remains available."
            "Legacy job scheduler module could not be imported; ensure Data/Server/job_scheduler.py "
            "remains available during the Engine migration."
        ) from _SCHEDULER_IMPORT_ERROR
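
# A hedged usage sketch of the import guard (call site assumed, not shown in
# this hunk): callers invoke the guard before touching the legacy module so a
# failed import surfaces as a clear RuntimeError instead of an AttributeError
# on None.
#
#     def ensure_scheduler(app, adapters):
#         _raise_scheduler_import()
#         legacy_job_scheduler.init_scheduler(app, adapters)  # hypothetical entry point
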
@@ -79,3 +79,4 @@ def register_management(app: "Flask", adapters: "EngineServiceAdapters") -> None
    """Ensure scheduled job routes are registered via the legacy scheduler."""

    ensure_scheduler(app, adapters)

@@ -1,24 +1,187 @@
# ======================================================
# Data\Engine\services\WebSocket\__init__.py
# Description: Placeholder hook for registering Engine Socket.IO namespaces.
# Description: Socket.IO handlers for Engine runtime quick job updates and realtime notifications.
#
# API Endpoints (if applicable): None
# ======================================================

"""WebSocket service stubs for the Borealis Engine runtime.

Future stages will move Socket.IO namespaces and event handlers here. Stage 1
only keeps a placeholder so the Engine bootstrapper can stub registration
without touching legacy behaviour.
"""
"""WebSocket service registration for the Borealis Engine runtime."""
from __future__ import annotations

import sqlite3
import time
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Callable, Dict, Optional

from flask_socketio import SocketIO

from ...database import initialise_engine_database
from ...server import EngineContext
from ..API import _make_db_conn_factory, _make_service_logger


def _now_ts() -> int:
    return int(time.time())


def _normalize_text(value: Any) -> str:
    if value is None:
        return ""
    if isinstance(value, bytes):
        try:
            return value.decode("utf-8")
        except Exception:
            return value.decode("utf-8", errors="replace")
    return str(value)
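
# Behaviour notes with hedged examples: bytes are decoded as UTF-8 first, and
# undecodable bytes fall back to replacement characters instead of raising.
#
#     _normalize_text(b"ok")        # -> "ok"
#     _normalize_text(b"\xff\xfe")  # -> "\ufffd\ufffd"
#     _normalize_text(None)         # -> ""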


@dataclass
class EngineRealtimeAdapters:
    context: EngineContext
    db_conn_factory: Callable[[], sqlite3.Connection] = field(init=False)
    service_log: Callable[[str, str, Optional[str]], None] = field(init=False)

    def __post_init__(self) -> None:
        initialise_engine_database(self.context.database_path, logger=self.context.logger)
        self.db_conn_factory = _make_db_conn_factory(self.context.database_path)

        # Derive the service-log directory from the configured log file,
        # falling back to the database directory.
        log_file = str(
            self.context.config.get("log_file")
            or self.context.config.get("LOG_FILE")
            or ""
        ).strip()
        if log_file:
            base = Path(log_file).resolve().parent
        else:
            base = Path(self.context.database_path).resolve().parent
        self.service_log = _make_service_logger(base, self.context.logger)


def register_realtime(socket_server: SocketIO, context: EngineContext) -> None:
    """Placeholder hook for Socket.IO namespace registration."""
    """Register Socket.IO event handlers for the Engine runtime."""

    context.logger.debug("Engine WebSocket services are not yet implemented.")
    adapters = EngineRealtimeAdapters(context)
    logger = context.logger.getChild("realtime.quick_jobs")

    @socket_server.on("quick_job_result")
    def _handle_quick_job_result(data: Any) -> None:
        if not isinstance(data, dict):
            logger.debug("quick_job_result payload ignored (non-dict): %r", data)
            return

        job_id_raw = data.get("job_id")
        try:
            job_id = int(job_id_raw)
        except (TypeError, ValueError):
            logger.debug("quick_job_result missing valid job_id: %r", job_id_raw)
            return

        status = str(data.get("status") or "").strip() or "Failed"
        stdout = _normalize_text(data.get("stdout"))
        stderr = _normalize_text(data.get("stderr"))

        conn: Optional[sqlite3.Connection] = None
        cursor = None
        broadcast_payload: Optional[Dict[str, Any]] = None

        try:
            conn = adapters.db_conn_factory()
            cursor = conn.cursor()
            # Persist the quick-job outcome on its activity_history row.
            cursor.execute(
                "UPDATE activity_history SET status=?, stdout=?, stderr=? WHERE id=?",
                (status, stdout, stderr, job_id),
            )
            if cursor.rowcount == 0:
                logger.debug("quick_job_result missing activity_history row for job_id=%s", job_id)
            conn.commit()

            # If this activity is linked to a scheduled job run, mirror the
            # status transition onto scheduled_job_runs as well.
            try:
                cursor.execute(
                    "SELECT run_id FROM scheduled_job_run_activity WHERE activity_id=?",
                    (job_id,),
                )
                link = cursor.fetchone()
            except sqlite3.Error:
                link = None

            if link:
                try:
                    run_id = int(link[0])
                    ts_now = _now_ts()
                    if status.lower() == "running":
                        cursor.execute(
                            "UPDATE scheduled_job_runs SET status='Running', updated_at=? WHERE id=?",
                            (ts_now, run_id),
                        )
                    else:
                        cursor.execute(
                            """
                            UPDATE scheduled_job_runs
                               SET status=?,
                                   finished_ts=COALESCE(finished_ts, ?),
                                   updated_at=?
                             WHERE id=?
                            """,
                            (status, ts_now, ts_now, run_id),
                        )
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.debug(
|
||||
"quick_job_result failed to update scheduled_job_runs for job_id=%s: %s",
|
||||
job_id,
|
||||
exc,
|
||||
)
|
||||
|
||||
try:
|
||||
cursor.execute(
|
||||
"SELECT id, hostname, status FROM activity_history WHERE id=?",
|
||||
(job_id,),
|
||||
)
|
||||
row = cursor.fetchone()
|
||||
except sqlite3.Error:
|
||||
row = None
|
||||
|
||||
if row:
|
||||
hostname = (row[1] or "").strip()
|
||||
if hostname:
|
||||
broadcast_payload = {
|
||||
"activity_id": int(row[0]),
|
||||
"hostname": hostname,
|
||||
"status": row[2] or status,
|
||||
"change": "updated",
|
||||
"source": "quick_job",
|
||||
}
|
||||
|
||||
adapters.service_log(
|
||||
"assemblies",
|
||||
f"quick_job_result processed job_id={job_id} status={status}",
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.warning(
|
||||
"quick_job_result handler error for job_id=%s: %s",
|
||||
job_id,
|
||||
exc,
|
||||
exc_info=True,
|
||||
)
|
||||
finally:
|
||||
if cursor is not None:
|
||||
try:
|
||||
cursor.close()
|
||||
except Exception:
|
||||
pass
|
||||
if conn is not None:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if broadcast_payload:
|
||||
try:
|
||||
socket_server.emit("device_activity_changed", broadcast_payload)
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.debug(
|
||||
"Failed to emit device_activity_changed for job_id=%s: %s",
|
||||
job_id,
|
||||
exc,
|
||||
)
|
||||
|
||||
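
# Hedged client-side sketch (event name and field names taken from the
# handler above; the connection URL is illustrative):
#
#     import socketio  # python-socketio client
#
#     client = socketio.Client()
#     client.connect("https://borealis.example")
#     client.emit("quick_job_result", {
#         "job_id": 42,
#         "status": "Success",
#         "stdout": "done",
#         "stderr": "",
#     })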