diff --git a/Data/Engine/Assemblies/Scripts/Borealis/Migrate_Agent_to_Different_Borealis_Server.json b/Data/Engine/Assemblies/Scripts/Borealis/Migrate_Agent_to_Different_Borealis_Server.json index c0691518..97dd2752 100644 --- a/Data/Engine/Assemblies/Scripts/Borealis/Migrate_Agent_to_Different_Borealis_Server.json +++ b/Data/Engine/Assemblies/Scripts/Borealis/Migrate_Agent_to_Different_Borealis_Server.json @@ -15,7 +15,7 @@ "name": "server_url", "label": "Borealis Server URL", "type": "string", - "default": "https://localhost:5000", + "default": "http://localhost:5000", "required": true, "description": "URL of where the agent is going to reach-out to moving forward." } diff --git a/Data/Engine/CODE_MIGRATION_TRACKER.md b/Data/Engine/CODE_MIGRATION_TRACKER.md new file mode 100644 index 00000000..3bc0c481 --- /dev/null +++ b/Data/Engine/CODE_MIGRATION_TRACKER.md @@ -0,0 +1,52 @@ +# Migration Prompt +You are working in the Borealis Automation Platform repo (root: ). The legacy runtime lives under Data/Server/server.py. Your objective is to introduce a new Engine runtime under Data/Engine that will progressively take over responsibilities (API first, then WebUI, then WebSocket). Execute the migration in the stages seen below (be sure to not overstep stages, we only want to work on one stage at a time, until I give approval to move onto the next stage): + +Everytime you do work, you indicate the current stage you are on by writing to the file in /Data/Engine/CODE_MIGRATION_TRACKER.md, inside of this file, you will keep an up-to-date ledger of the overall task list seen below, as well as the current stage you are on, and what task within that stage you are working on. You will keep this file up-to-date at all times whenever you make progress, and you will reference this file whenever making changes in case you forget where you were last at in the codebase migration work. 
You will never make modifications to the "# Migration Prompt" section, only the "# Borealis Engine Migration Tracker" section. + + Lastly, every time that you complete a stage, you will create a pull request named "Stage - Implemented". I will merge your pull request associated with that stage into the "main" branch of the codebase, then I will create a new gpt-5-codex conversation to keep the conversation fresh and relevant, instructing the agent to work from the next stage in-line, and I expect the Codex agent to read the aforementioned /Data/Engine/CODE_MIGRATION_TRACKER.md to understand what it has already done thus far, and what it needs to work on next. Every time that I start the new conversation, I will instruct gpt-5-codex to read /Data/Engine/CODE_MIGRATION_TRACKER.md to understand its tasks to determine what to do. + + +# Borealis Engine Migration Tracker +## Task Ledger +- [x] **Stage 1 — Establish the Engine skeleton and bootstrapper** + - [x] Add Data/Engine/__init__.py plus service subpackages with placeholder modules and docstrings. + - [x] Scaffold Data/Engine/server.py with the create_app(config) factory and stub service registration hooks. + - [x] Return a shared context object containing handles such as the database path, logger, and scheduler. + - [x] Update project tooling so the Engine runtime can be launched alongside the legacy path. +- [x] **Stage 2 — Port configuration and dependency loading into the Engine factory** + - [x] Extract configuration loading logic from Data/Server/server.py into Data/Engine/config.py helpers. + - [x] Verify context parity between Engine and legacy startup. + - [x] Initialize logging to Logs/Server/server.log when Engine mode is active. + - [x] Document Engine launch paths and configuration requirements in module docstrings. +- [x] **Stage 3 — Introduce API blueprints and service adapters** + - [x] Create domain-focused API blueprints and register_api entry point. 
+ - [x] Mirror route behaviour from the legacy server via service adapters. + - [x] Add configuration toggles for enabling API groups incrementally. +- [x] **Stage 4 — Build unit and smoke tests for Engine APIs** + - [x] Add pytest modules under Data/Engine/Unit_Tests exercising API blueprints. + - [x] Provide fixtures that mirror the legacy SQLite schema and seed data. + - [x] Assert HTTP status codes, payloads, and side effects for parity. + - [x] Integrate Engine API tests into CI/local workflows. +- [x] **Stage 5 — Bridge the legacy server to Engine APIs** + - [x] Delegate API blueprint registration to the Engine factory from the legacy server. + - [x] Replace legacy API routes with Engine-provided blueprints gated by a flag. + - [x] Emit transitional logging when Engine handles requests. +- [ ] **Stage 6 — Plan WebUI migration** + - [x] Move static/template handling into Data/Engine/services/WebUI. + - [x] Ensure that data from /Data/Server/WebUI is copied into /Engine/web-interface during engine Deployment via Borealis.ps1 + - [x] Preserve TLS-aware URL generation and caching. + - [ ] Add migration switch in the legacy server for WebUI delegation. + - [x] Extend tests to cover critical WebUI routes. + - [ ] Port device API endpoints into Engine services (device + admin coverage in progress). + - [x] Move authentication/token stack onto Engine services without legacy fallbacks. + - [x] Port enrollment request/poll flows to Engine services and drop legacy imports. +- [ ] **Stage 7 — Plan WebSocket migration** + - [ ] Extract Socket.IO handlers into Data/Engine/services/WebSocket. + - [x] Ported quick_job_result handler to keep device activity statuses in sync. + - [ ] Provide register_realtime hook for the Engine factory. + - [ ] Add integration tests or smoke checks for key events. + - [ ] Update legacy server to consume Engine WebSocket registration. 
+ +## Current Status +- **Stage:** Stage 6 — Plan WebUI migration +- **Active Task:** Continue Stage 6 device/admin API migration (focus on remaining device and admin endpoints now that auth, token, and enrollment paths are Engine-native). diff --git a/Data/Engine/Unit_Tests/test_access_management_api.py b/Data/Engine/Unit_Tests/test_access_management_api.py new file mode 100644 index 00000000..4acce344 --- /dev/null +++ b/Data/Engine/Unit_Tests/test_access_management_api.py @@ -0,0 +1,66 @@ +# ====================================================== +# Data\Engine\Unit_Tests\test_access_management_api.py +# Description: Exercises access-management endpoints covering GitHub API token administration. +# +# API Endpoints (if applicable): None +# ====================================================== + +from __future__ import annotations + +from typing import Any, Dict + +import pytest + +from Data.Engine.integrations import github as github_integration + +from .conftest import EngineTestHarness + + +def _admin_client(harness: EngineTestHarness): + client = harness.app.test_client() + with client.session_transaction() as sess: + sess["username"] = "admin" + sess["role"] = "Admin" + return client + + +def test_github_token_get_without_value(engine_harness: EngineTestHarness) -> None: + client = _admin_client(engine_harness) + response = client.get("/api/github/token") + assert response.status_code == 200 + payload = response.get_json() + assert payload["has_token"] is False + assert payload["status"] == "missing" + assert payload["token"] == "" + + +def test_github_token_update(engine_harness: EngineTestHarness, monkeypatch: pytest.MonkeyPatch) -> None: + class DummyResponse: + def __init__(self, status_code: int, payload: Dict[str, Any]): + self.status_code = status_code + self._payload = payload + self.headers = {"X-RateLimit-Limit": "5000"} + self.text = "" + + def json(self) -> Dict[str, Any]: + return self._payload + + def fake_get(url: str, headers: Any = None, 
timeout: Any = None) -> DummyResponse: + return DummyResponse(200, {"commit": {"sha": "abc123"}}) + + monkeypatch.setattr(github_integration.requests, "get", fake_get) + + client = _admin_client(engine_harness) + response = client.post("/api/github/token", json={"token": "ghp_test"}) + assert response.status_code == 200 + payload = response.get_json() + assert payload["has_token"] is True + assert payload["valid"] is True + assert payload["status"] == "ok" + assert payload["token"] == "ghp_test" + + verify_response = client.get("/api/github/token") + assert verify_response.status_code == 200 + verify_payload = verify_response.get_json() + assert verify_payload["has_token"] is True + assert verify_payload["token"] == "ghp_test" diff --git a/Data/Engine/Unit_Tests/test_devices_api.py b/Data/Engine/Unit_Tests/test_devices_api.py index a1499f6d..9b18ff97 100644 --- a/Data/Engine/Unit_Tests/test_devices_api.py +++ b/Data/Engine/Unit_Tests/test_devices_api.py @@ -10,6 +10,7 @@ from __future__ import annotations from typing import Any import pytest +from Data.Engine.integrations import github as github_integration from Data.Engine.services.API.devices import management as device_management from .conftest import EngineTestHarness @@ -98,13 +99,15 @@ def test_repo_current_hash_uses_cache(engine_harness: EngineTestHarness, monkeyp def json(self) -> Any: return self._payload + request_exception = getattr(github_integration.requests, "RequestException", RuntimeError) + def fake_get(url: str, headers: Any, timeout: int) -> DummyResponse: calls["count"] += 1 if calls["count"] == 1: return DummyResponse(200, {"commit": {"sha": "abc123"}}) - raise device_management.requests.RequestException("network error") + raise request_exception("network error") - monkeypatch.setattr(device_management.requests, "get", fake_get) + monkeypatch.setattr(github_integration.requests, "get", fake_get) client = engine_harness.app.test_client() first = 
client.get("/api/repo/current_hash?repo=test/test&branch=main") diff --git a/Data/Engine/bootstrapper.py b/Data/Engine/bootstrapper.py index 5761b305..3a5e9248 100644 --- a/Data/Engine/bootstrapper.py +++ b/Data/Engine/bootstrapper.py @@ -77,11 +77,11 @@ def _stage_web_interface_assets(logger: Optional[logging.Logger] = None, *, forc project_root = _project_root() engine_web_root = project_root / "Engine" / "web-interface" - stage_source = project_root / "Data" / "Engine" / "web-interface" + legacy_source = project_root / "Data" / "Server" / "WebUI" - if not stage_source.is_dir(): + if not legacy_source.is_dir(): raise RuntimeError( - f"Engine web interface source missing: {stage_source}" + f"Engine web interface source missing: {legacy_source}" ) index_path = engine_web_root / "index.html" @@ -92,14 +92,14 @@ def _stage_web_interface_assets(logger: Optional[logging.Logger] = None, *, forc if engine_web_root.exists(): shutil.rmtree(engine_web_root) - shutil.copytree(stage_source, engine_web_root) + shutil.copytree(legacy_source, engine_web_root) if not index_path.is_file(): raise RuntimeError( f"Engine web interface staging failed; missing {index_path}" ) - logger.info("Engine web interface staged from %s to %s", stage_source, engine_web_root) + logger.info("Engine web interface staged from %s to %s", legacy_source, engine_web_root) return engine_web_root diff --git a/Data/Engine/engine-requirements.txt b/Data/Engine/engine-requirements.txt index d093b670..30c7c5e5 100644 --- a/Data/Engine/engine-requirements.txt +++ b/Data/Engine/engine-requirements.txt @@ -7,4 +7,5 @@ cryptography PyJWT[crypto] pyotp qrcode +Pillow requests diff --git a/Data/Engine/integrations/__init__.py b/Data/Engine/integrations/__init__.py new file mode 100644 index 00000000..dd210605 --- /dev/null +++ b/Data/Engine/integrations/__init__.py @@ -0,0 +1,12 @@ +# ====================================================== +# Data\Engine\integrations\__init__.py +# Description: Integration namespace 
exposing helper utilities for external service adapters. +# +# API Endpoints (if applicable): None +# ====================================================== + +"""Integration namespace for the Borealis Engine runtime.""" + +from .github import GitHubIntegration + +__all__ = ["GitHubIntegration"] diff --git a/Data/Engine/integrations/github.py b/Data/Engine/integrations/github.py new file mode 100644 index 00000000..79c9e386 --- /dev/null +++ b/Data/Engine/integrations/github.py @@ -0,0 +1,605 @@ +# ====================================================== +# Data\Engine\integrations\github.py +# Description: GitHub REST integration providing cached repository head lookups for Engine services. +# +# API Endpoints (if applicable): None +# ====================================================== + +"""GitHub integration helpers for the Borealis Engine runtime.""" +from __future__ import annotations + +import base64 +import json +import logging +import os +import sqlite3 +import subprocess +import sys +import threading +import time +from pathlib import Path +from typing import Any, Callable, Dict, Optional, Tuple + +from flask import has_request_context, request + +try: # pragma: no cover - import guard mirrors legacy runtime behaviour + import requests # type: ignore +except ImportError: # pragma: no cover - graceful fallback for minimal environments + class _RequestsStub: + class RequestException(RuntimeError): + """Raised when the ``requests`` library is unavailable.""" + + def get(self, *args: Any, **kwargs: Any) -> Any: + raise self.RequestException("The 'requests' library is required for GitHub integrations.") + + requests = _RequestsStub() # type: ignore + +try: # pragma: no cover - optional dependency for green thread integration + from eventlet import tpool as _eventlet_tpool # type: ignore +except Exception: # pragma: no cover - optional dependency + _eventlet_tpool = None # type: ignore + +try: # pragma: no cover - optional dependency for retrieving original 
modules + from eventlet import patcher as _eventlet_patcher # type: ignore +except Exception: # pragma: no cover - optional dependency + _eventlet_patcher = None # type: ignore + +__all__ = ["GitHubIntegration"] + + +class GitHubIntegration: + """Lightweight cache for GitHub repository head lookups.""" + + MIN_TTL_SECONDS = 30 + MAX_TTL_SECONDS = 3600 + DEFAULT_TTL_SECONDS = 60 + DEFAULT_REPO = "bunny-lab-io/Borealis" + DEFAULT_BRANCH = "main" + + def __init__( + self, + *, + cache_file: Path, + db_conn_factory: Callable[[], sqlite3.Connection], + service_log: Callable[[str, str, Optional[str]], None], + logger: Optional[logging.Logger] = None, + default_repo: Optional[str] = None, + default_branch: Optional[str] = None, + default_ttl_seconds: Optional[int] = None, + ) -> None: + self._cache_file = cache_file + self._cache_file.parent.mkdir(parents=True, exist_ok=True) + self._db_conn_factory = db_conn_factory + self._service_log = service_log + self._logger = logger or logging.getLogger(__name__) + + self._lock = threading.Lock() + self._token_lock = threading.Lock() + self._cache: Dict[Tuple[str, str], Tuple[str, float]] = {} + self._token_cache: Dict[str, Any] = {"value": None, "loaded_at": 0.0, "known": False} + + self._default_repo = self._determine_default_repo(default_repo) + self._default_branch = self._determine_default_branch(default_branch) + self._default_ttl = self._determine_default_ttl(default_ttl_seconds) + + self._load_cache() + + @property + def default_repo(self) -> str: + return self._default_repo + + @property + def default_branch(self) -> str: + return self._default_branch + + def current_repo_hash( + self, + repo: Optional[str], + branch: Optional[str], + *, + ttl: Optional[Any] = None, + force_refresh: bool = False, + ) -> Tuple[Dict[str, Any], int]: + owner_repo = (repo or self._default_repo).strip() + target_branch = (branch or self._default_branch).strip() + + if "/" not in owner_repo: + return {"error": "repo must be in the form 
owner/name"}, 400 + + ttl_seconds = self._normalise_ttl(ttl) + return self._resolve(owner_repo, target_branch, ttl_seconds=ttl_seconds, force_refresh=force_refresh) + + def _determine_default_repo(self, override: Optional[str]) -> str: + candidate = (override or os.environ.get("BOREALIS_REPO") or self.DEFAULT_REPO).strip() + if "/" not in candidate: + return self.DEFAULT_REPO + return candidate + + def _determine_default_branch(self, override: Optional[str]) -> str: + candidate = (override or os.environ.get("BOREALIS_REPO_BRANCH") or self.DEFAULT_BRANCH).strip() + return candidate or self.DEFAULT_BRANCH + + def _determine_default_ttl(self, override: Optional[int]) -> int: + env_value = os.environ.get("BOREALIS_REPO_HASH_REFRESH") + candidate: Optional[int] = None + if override is not None: + candidate = override + else: + try: + candidate = int(env_value) if env_value else None + except (TypeError, ValueError): + candidate = None + if candidate is None: + candidate = self.DEFAULT_TTL_SECONDS + return self._normalise_ttl(candidate) + + def _normalise_ttl(self, ttl: Optional[Any]) -> int: + value: Optional[int] = None + if isinstance(ttl, str): + ttl = ttl.strip() + if not ttl: + ttl = None + if ttl is None: + value = self._default_ttl + else: + try: + value = int(ttl) + except (TypeError, ValueError): + value = self._default_ttl + value = value if value is not None else self._default_ttl + return max(self.MIN_TTL_SECONDS, min(value, self.MAX_TTL_SECONDS)) + + def _load_cache(self) -> None: + try: + if not self._cache_file.is_file(): + return + payload = json.loads(self._cache_file.read_text(encoding="utf-8")) + entries = payload.get("entries") + if not isinstance(entries, dict): + return + now = time.time() + with self._lock: + for key, data in entries.items(): + if not isinstance(data, dict): + continue + sha = (data.get("sha") or "").strip() + if not sha: + continue + ts_raw = data.get("ts") + try: + ts = float(ts_raw) + except (TypeError, ValueError): + ts = now 
+ repo, _, branch = key.partition(":") + if repo and branch: + self._cache[(repo, branch)] = (sha, ts) + except Exception: # pragma: no cover - defensive logging + self._logger.debug("Failed to hydrate GitHub repo hash cache", exc_info=True) + + def _persist_cache(self) -> None: + with self._lock: + snapshot = { + f"{repo}:{branch}": {"sha": sha, "ts": ts} + for (repo, branch), (sha, ts) in self._cache.items() + if sha + } + try: + if not snapshot: + try: + if self._cache_file.exists(): + self._cache_file.unlink() + except FileNotFoundError: + return + except Exception: + self._logger.debug("Failed to remove GitHub repo hash cache file", exc_info=True) + return + payload = {"version": 1, "entries": snapshot} + tmp_path = self._cache_file.with_suffix(".tmp") + self._cache_file.parent.mkdir(parents=True, exist_ok=True) + tmp_path.write_text(json.dumps(payload), encoding="utf-8") + tmp_path.replace(self._cache_file) + except Exception: # pragma: no cover - defensive logging + self._logger.debug("Failed to persist GitHub repo hash cache", exc_info=True) + + def _resolve( + self, + repo: str, + branch: str, + *, + ttl_seconds: int, + force_refresh: bool, + ) -> Tuple[Dict[str, Any], int]: + key = (repo, branch) + now = time.time() + + with self._lock: + cached = self._cache.get(key) + + cached_sha: Optional[str] = None + cached_ts: Optional[float] = None + cached_age: Optional[float] = None + if cached: + cached_sha, cached_ts = cached + cached_age = max(0.0, now - cached_ts) + + if cached_sha and not force_refresh and cached_age is not None and cached_age < ttl_seconds: + return self._build_payload(repo, branch, cached_sha, True, cached_age, "cache", None), 200 + + sha, error = self._fetch_repo_head(repo, branch, force_refresh=force_refresh) + if sha: + with self._lock: + self._cache[key] = (sha, now) + self._persist_cache() + return self._build_payload(repo, branch, sha, False, 0.0, "github", None), 200 + + if error: + self._service_log("server", 
f"/api/repo/current_hash error: {error}") + + if cached_sha is not None: + payload = self._build_payload( + repo, + branch, + cached_sha or None, + True, + cached_age, + "cache-stale", + error or "using cached value", + ) + return payload, (200 if cached_sha else 503) + + payload = self._build_payload( + repo, + branch, + None, + False, + None, + "github", + error or "unable to resolve repository head", + ) + return payload, 503 + + def _build_payload( + self, + repo: str, + branch: str, + sha: Optional[str], + cached: bool, + age_seconds: Optional[float], + source: str, + error: Optional[str], + ) -> Dict[str, Any]: + payload: Dict[str, Any] = { + "repo": repo, + "branch": branch, + "sha": (sha.strip() if isinstance(sha, str) else None) or None, + "cached": cached, + "age_seconds": age_seconds, + "source": source, + } + if error: + payload["error"] = error + return payload + + def _fetch_repo_head(self, repo: str, branch: str, *, force_refresh: bool) -> Tuple[Optional[str], Optional[str]]: + headers = { + "Accept": "application/vnd.github+json", + "User-Agent": "Borealis-Engine", + } + token = self._github_token(force_refresh=force_refresh) + if token: + headers["Authorization"] = f"Bearer {token}" + + + try: + response = self._http_get( + f"https://api.github.com/repos/{repo}/branches/{branch}", + headers=headers, + timeout=20, + ) + status = getattr(response, "status_code", None) + if status == 200: + try: + data = response.json() + except Exception as exc: + return None, f"GitHub REST API repo head decode error: {exc}" + sha = ((data.get("commit") or {}).get("sha") or "").strip() + if sha: + return sha, None + return None, "GitHub REST API repo head missing commit SHA" + snippet = "" + try: + text = getattr(response, "text", "") + snippet = text[:200] if isinstance(text, str) else "" + except Exception: + snippet = "" + error = f"GitHub REST API repo head lookup failed: HTTP {status}" + if snippet: + error = f"{error} {snippet}" + return None, error + except 
requests.RequestException as exc: # type: ignore[attr-defined] + return None, f"GitHub REST API repo head lookup raised: {exc}" + except RecursionError as exc: # pragma: no cover - defensive guard + return None, f"GitHub REST API repo head lookup recursion error: {exc}" + except Exception as exc: # pragma: no cover - defensive guard + return None, f"GitHub REST API repo head lookup unexpected error: {exc}" + def _github_token(self, *, force_refresh: bool) -> Optional[str]: + if has_request_context(): + header_token = (request.headers.get("X-GitHub-Token") or "").strip() + if header_token: + return header_token + if not force_refresh: + auth_header = request.headers.get("Authorization") or "" + if auth_header.lower().startswith("bearer "): + candidate = auth_header.split(" ", 1)[1].strip() + if candidate: + return candidate + + now = time.time() + with self._token_lock: + if ( + not force_refresh + and self._token_cache.get("known") + and now - (self._token_cache.get("loaded_at") or 0.0) < 15.0 + ): + cached_token = self._token_cache.get("value") + return cached_token if cached_token else None + + token = self._load_token_from_db(force_refresh=force_refresh) + self._set_cached_token(token) + if token: + return token + + fallback = os.environ.get("BOREALIS_GITHUB_TOKEN") or os.environ.get("GITHUB_TOKEN") + fallback = (fallback or "").strip() + return fallback or None + + def _set_cached_token(self, token: Optional[str]) -> None: + with self._token_lock: + self._token_cache["value"] = token if token else None + self._token_cache["loaded_at"] = time.time() + self._token_cache["known"] = True + + def load_token(self, *, force_refresh: bool = False) -> Optional[str]: + token = self._load_token_from_db(force_refresh=force_refresh) + self._set_cached_token(token) + return token + + def store_token(self, token: Optional[str]) -> None: + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn_factory() + cur = conn.cursor() + cur.execute("DELETE FROM 
github_token") + if token: + cur.execute("INSERT INTO github_token (token) VALUES (?)", (token,)) + conn.commit() + except Exception as exc: + if conn is not None: + try: + conn.rollback() + except Exception: + pass + raise RuntimeError(f"Failed to store token: {exc}") from exc + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + self._set_cached_token(token if token else None) + + def verify_token(self, token: Optional[str]) -> Dict[str, Any]: + if not token: + return { + "valid": False, + "message": "API Token Not Configured", + "status": "missing", + "rate_limit": None, + } + + headers = { + "Accept": "application/vnd.github+json", + "User-Agent": "Borealis-Engine", + "Authorization": f"Bearer {token}", + } + try: + response = self._http_get( + f"https://api.github.com/repos/{self._default_repo}/branches/{self._default_branch}", + headers=headers, + timeout=20, + ) + limit_header = response.headers.get("X-RateLimit-Limit") + try: + limit_value = int(limit_header) if limit_header is not None else None + except (TypeError, ValueError): + limit_value = None + + if response.status_code == 200: + if limit_value is not None and limit_value >= 5000: + return { + "valid": True, + "message": "API Authentication Successful", + "status": "ok", + "rate_limit": limit_value, + } + return { + "valid": False, + "message": "API Token Invalid", + "status": "insufficient", + "rate_limit": limit_value, + "error": "Authenticated request did not elevate GitHub rate limits", + } + + if response.status_code == 401: + return { + "valid": False, + "message": "API Token Invalid", + "status": "invalid", + "rate_limit": limit_value, + "error": getattr(response, "text", "")[:200], + } + + return { + "valid": False, + "message": f"GitHub API error (HTTP {response.status_code})", + "status": "error", + "rate_limit": limit_value, + "error": getattr(response, "text", "")[:200], + } + except Exception as exc: + return { + "valid": False, + "message": f"API Token 
validation error: {exc}", + "status": "error", + "rate_limit": None, + "error": str(exc), + } + + def refresh_default_repo_hash(self, *, force: bool = False) -> Tuple[Dict[str, Any], int]: + return self._resolve( + self._default_repo, + self._default_branch, + ttl_seconds=self._default_ttl, + force_refresh=force, + ) + + def _http_get(self, url: str, *, headers: Dict[str, str], timeout: int) -> Any: + try: + if _eventlet_tpool is not None: + try: + return _eventlet_tpool.execute(requests.get, url, headers=headers, timeout=timeout) + except Exception: + pass + return requests.get(url, headers=headers, timeout=timeout) + except Exception: + return self._http_get_subprocess(url, headers=headers, timeout=timeout) + + def _http_get_subprocess(self, url: str, *, headers: Dict[str, str], timeout: int) -> Any: + script = """ +import base64 +import json +import sys +import urllib.request + +url = sys.argv[1] +headers = json.loads(sys.argv[2]) +timeout = float(sys.argv[3]) +req = urllib.request.Request(url, headers=headers, method="GET") +try: + with urllib.request.urlopen(req, timeout=timeout) as resp: + body = resp.read() + payload = { + "status": resp.status, + "headers": dict(resp.getheaders()), + "body": base64.b64encode(body).decode("ascii"), + "encoding": "base64", + } + sys.stdout.write(json.dumps(payload)) +except Exception as exc: + error_payload = {"error": str(exc)} + sys.stdout.write(json.dumps(error_payload)) + sys.exit(1) +""" + proc = subprocess.run( + [sys.executable, "-c", script, url, json.dumps(headers), str(float(timeout))], + capture_output=True, + text=True, + ) + output = proc.stdout.strip() or proc.stderr.strip() + try: + data = json.loads(output or "{}") + except json.JSONDecodeError as exc: + raise RuntimeError(f"GitHub subprocess returned invalid JSON: {output!r}") from exc + + if proc.returncode != 0: + error_msg = data.get("error") if isinstance(data, dict) else output + raise RuntimeError(f"GitHub subprocess request failed: {error_msg}") + + 
status_code = data.get("status") + raw_headers = data.get("headers") or {} + body_encoded = data.get("body") or "" + encoding = data.get("encoding") + if encoding == "base64": + body_bytes = base64.b64decode(body_encoded.encode("ascii")) + else: + body_bytes = (body_encoded or "").encode("utf-8") + + class _SubprocessResponse: + def __init__(self, status: int, headers: Dict[str, str], body: bytes): + self.status_code = status + self.headers = headers + self._body = body + self.text = body.decode("utf-8", errors="replace") + + def json(self) -> Any: + if not self._body: + return {} + return json.loads(self.text) + + if status_code is None: + raise RuntimeError(f"GitHub subprocess returned no status code: {data}") + + return _SubprocessResponse(int(status_code), {str(k): str(v) for k, v in raw_headers.items()}, body_bytes) + + def _resolve_original_ssl_module(self): + if _eventlet_patcher is not None: + try: + original = _eventlet_patcher.original("ssl") + if original is not None: + return original + except Exception: + pass + return ssl + + def _resolve_original_socket_module(self): + if _eventlet_patcher is not None: + try: + original = _eventlet_patcher.original("socket") + if original is not None: + return original + except Exception: + pass + import socket as socket_module # Local import for fallback + + return socket_module + + def _resolve_original_raw_socket_module(self): + if _eventlet_patcher is not None: + try: + original = _eventlet_patcher.original("_socket") + if original is not None: + return original + except Exception: + pass + try: + import _socket as raw_socket_module # type: ignore + + return raw_socket_module + except Exception: + return self._resolve_original_socket_module() + + def _load_token_from_db(self, *, force_refresh: bool = False) -> Optional[str]: + if force_refresh: + with self._token_lock: + self._token_cache["known"] = False + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn_factory() + cursor = conn.cursor() 
+ cursor.execute("SELECT token FROM github_token LIMIT 1") + row = cursor.fetchone() + if row and row[0]: + candidate = str(row[0]).strip() + return candidate or None + return None + except sqlite3.OperationalError: + return None + except Exception as exc: + self._service_log("server", f"github token lookup failed: {exc}") + return None + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass diff --git a/Data/Engine/services/API/__init__.py b/Data/Engine/services/API/__init__.py index 459d435c..44a43ffe 100644 --- a/Data/Engine/services/API/__init__.py +++ b/Data/Engine/services/API/__init__.py @@ -27,6 +27,7 @@ from ...auth.rate_limit import SlidingWindowRateLimiter from ...database import initialise_engine_database from ...security import signing from ...enrollment import NonceCache +from ...integrations import GitHubIntegration from .enrollment import routes as enrollment_routes from .tokens import routes as token_routes @@ -151,6 +152,7 @@ class EngineServiceAdapters: script_signer: Any = field(init=False) service_log: Callable[[str, str, Optional[str]], None] = field(init=False) device_auth_manager: DeviceAuthManager = field(init=False) + github_integration: GitHubIntegration = field(init=False) def __post_init__(self) -> None: self.db_conn_factory = _make_db_conn_factory(self.context.database_path) @@ -181,6 +183,32 @@ class EngineServiceAdapters: rate_limiter=self.device_rate_limiter, ) + config = self.context.config or {} + cache_root_value = config.get("cache_dir") or config.get("CACHE_DIR") + if cache_root_value: + cache_root = Path(str(cache_root_value)) + else: + cache_root = Path(self.context.database_path).resolve().parent / "cache" + cache_file = cache_root / "repo_hash_cache.json" + + default_repo = config.get("default_repo") or config.get("DEFAULT_REPO") + default_branch = config.get("default_branch") or config.get("DEFAULT_BRANCH") + ttl_raw = config.get("repo_hash_refresh") or config.get("REPO_HASH_REFRESH") + try: + 
default_ttl_seconds = int(ttl_raw) if ttl_raw is not None else None + except (TypeError, ValueError): + default_ttl_seconds = None + + self.github_integration = GitHubIntegration( + cache_file=cache_file, + db_conn_factory=self.db_conn_factory, + service_log=self.service_log, + logger=self.context.logger, + default_repo=default_repo, + default_branch=default_branch, + default_ttl_seconds=default_ttl_seconds, + ) + def _register_tokens(app: Flask, adapters: EngineServiceAdapters) -> None: token_routes.register( diff --git a/Data/Engine/services/API/access_management/github.py b/Data/Engine/services/API/access_management/github.py new file mode 100644 index 00000000..741cd284 --- /dev/null +++ b/Data/Engine/services/API/access_management/github.py @@ -0,0 +1,146 @@ +# ====================================================== +# Data\Engine\services\API\access_management\github.py +# Description: GitHub API token management endpoints for Engine access-management parity. +# +# API Endpoints (if applicable): +# - GET /api/github/token (Token Authenticated (Admin)) - Returns stored GitHub API token details and verification status. +# - POST /api/github/token (Token Authenticated (Admin)) - Updates the stored GitHub API token and triggers verification. +# ====================================================== + +"""GitHub token administration endpoints for the Borealis Engine.""" +from __future__ import annotations + +import os +import time +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple + +from flask import Blueprint, Flask, jsonify, request, session +from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer + +if TYPE_CHECKING: # pragma: no cover - typing helper + from .. 
import EngineServiceAdapters + + +def _now_ts() -> int: + return int(time.time()) + + +class GitHubTokenService: + """Admin endpoints for storing and validating GitHub REST API tokens.""" + + def __init__(self, app: Flask, adapters: "EngineServiceAdapters") -> None: + self.app = app + self.adapters = adapters + self.github = adapters.github_integration + self.logger = adapters.context.logger + + def _token_serializer(self) -> URLSafeTimedSerializer: + secret = self.app.secret_key or "borealis-dev-secret" + return URLSafeTimedSerializer(secret, salt="borealis-auth") + + def _current_user(self) -> Optional[Dict[str, Any]]: + username = session.get("username") + role = session.get("role") or "User" + if username: + return {"username": username, "role": role} + + token = None + auth_header = request.headers.get("Authorization") or "" + if auth_header.lower().startswith("bearer "): + token = auth_header.split(" ", 1)[1].strip() + if not token: + token = request.cookies.get("borealis_auth") + if not token: + return None + try: + data = self._token_serializer().loads( + token, + max_age=int(os.environ.get("BOREALIS_TOKEN_TTL_SECONDS", 60 * 60 * 24 * 30)), + ) + username = data.get("u") + role = data.get("r") or "User" + if username: + return {"username": username, "role": role} + except (BadSignature, SignatureExpired, Exception): + return None + return None + + def _require_admin(self) -> Optional[Tuple[Dict[str, Any], int]]: + user = self._current_user() + if not user: + return {"error": "unauthorized"}, 401 + if (user.get("role") or "").lower() != "admin": + return {"error": "forbidden"}, 403 + return None + + def get_token(self): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + token = self.github.load_token(force_refresh=True) + verification = self.github.verify_token(token) + message = verification.get("message") or ("API Token Invalid" if token else "API Token Not Configured") + payload = 
{ + "token": token or "", + "has_token": bool(token), + "valid": bool(verification.get("valid")), + "message": message, + "status": verification.get("status") or ("missing" if not token else "unknown"), + "rate_limit": verification.get("rate_limit"), + "error": verification.get("error"), + "checked_at": _now_ts(), + } + return jsonify(payload) + + def update_token(self): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + data = request.get_json(silent=True) or {} + token = str(data.get("token") or "").strip() + try: + self.github.store_token(token or None) + except RuntimeError as exc: + self.logger.debug("Failed to store GitHub token", exc_info=True) + return jsonify({"error": str(exc)}), 500 + + verification = self.github.verify_token(token or None) + message = verification.get("message") or ("API Token Invalid" if token else "API Token Not Configured") + + try: + self.github.refresh_default_repo_hash(force=True) + except Exception: + self.logger.debug("Failed to refresh default repo hash after token update", exc_info=True) + + payload = { + "token": token, + "has_token": bool(token), + "valid": bool(verification.get("valid")), + "message": message, + "status": verification.get("status") or ("missing" if not token else "unknown"), + "rate_limit": verification.get("rate_limit"), + "error": verification.get("error"), + "checked_at": _now_ts(), + } + return jsonify(payload) + + +def register_github_token_management(app: Flask, adapters: "EngineServiceAdapters") -> None: + """Register GitHub API token administration endpoints.""" + + service = GitHubTokenService(app, adapters) + blueprint = Blueprint("github_access", __name__) + + @blueprint.route("/api/github/token", methods=["GET"]) + def _github_token_get(): + return service.get_token() + + @blueprint.route("/api/github/token", methods=["POST"]) + def _github_token_post(): + return service.update_token() + + 
app.register_blueprint(blueprint) diff --git a/Data/Engine/services/API/access_management/login.py b/Data/Engine/services/API/access_management/login.py index af571555..08464e89 100644 --- a/Data/Engine/services/API/access_management/login.py +++ b/Data/Engine/services/API/access_management/login.py @@ -15,6 +15,7 @@ from __future__ import annotations import base64 import hashlib import io +import logging import os import sqlite3 import time @@ -37,6 +38,13 @@ except Exception: # pragma: no cover - optional dependency if TYPE_CHECKING: # pragma: no cover - typing helper from Data.Engine.services.API import EngineServiceAdapters +from .github import register_github_token_management +from .multi_factor_authentication import register_mfa_management +from .users import register_user_management + +_logger = logging.getLogger(__name__) +_qr_logger_warning_emitted = False + def _now_ts() -> int: return int(time.time()) @@ -71,7 +79,13 @@ def _totp_provisioning_uri(secret: str, username: str) -> Optional[str]: def _totp_qr_data_uri(payload: str) -> Optional[str]: - if not payload or qrcode is None: + global _qr_logger_warning_emitted + if not payload: + return None + if qrcode is None: + if not _qr_logger_warning_emitted: + _logger.warning("MFA QR generation skipped: 'qrcode' dependency not available.") + _qr_logger_warning_emitted = True return None try: image = qrcode.make(payload, box_size=6, border=4) @@ -79,7 +93,10 @@ def _totp_qr_data_uri(payload: str) -> Optional[str]: image.save(buffer, format="PNG") encoded = base64.b64encode(buffer.getvalue()).decode("ascii") return f"data:image/png;base64,{encoded}" - except Exception: + except Exception as exc: + if not _qr_logger_warning_emitted: + _logger.warning("Failed to generate MFA QR code: %s", exc, exc_info=True) + _qr_logger_warning_emitted = True return None @@ -416,4 +433,7 @@ def register_auth(app: Flask, adapters: "EngineServiceAdapters") -> None: return service.me() app.register_blueprint(blueprint) + 
register_user_management(app, adapters) + register_mfa_management(app, adapters) + register_github_token_management(app, adapters) diff --git a/Data/Engine/services/API/access_management/multi_factor_authentication.py b/Data/Engine/services/API/access_management/multi_factor_authentication.py new file mode 100644 index 00000000..2a7b1690 --- /dev/null +++ b/Data/Engine/services/API/access_management/multi_factor_authentication.py @@ -0,0 +1,150 @@ +# ====================================================== +# Data\Engine\services\API\access_management\multi_factor_authentication.py +# Description: Multifactor administration endpoints for enabling, disabling, or resetting operator MFA state. +# +# API Endpoints (if applicable): +# - POST /api/users//mfa (Token Authenticated (Admin)) - Toggles MFA and optionally resets shared secrets. +# ====================================================== + +"""Multifactor administrative endpoints for the Borealis Engine.""" +from __future__ import annotations + +import os +import sqlite3 +import time +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple + +from flask import Blueprint, Flask, jsonify, request, session +from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer + +if TYPE_CHECKING: # pragma: no cover - typing helper + from .. 
import EngineServiceAdapters + + +def _now_ts() -> int: + return int(time.time()) + + +class MultiFactorAdministrationService: + """Admin-focused MFA utility wrapper.""" + + def __init__(self, app: Flask, adapters: "EngineServiceAdapters") -> None: + self.app = app + self.adapters = adapters + self.db_conn_factory = adapters.db_conn_factory + self.logger = adapters.context.logger + + def _db_conn(self) -> sqlite3.Connection: + return self.db_conn_factory() + + def _token_serializer(self) -> URLSafeTimedSerializer: + secret = self.app.secret_key or "borealis-dev-secret" + return URLSafeTimedSerializer(secret, salt="borealis-auth") + + def _current_user(self) -> Optional[Dict[str, Any]]: + username = session.get("username") + role = session.get("role") or "User" + if username: + return {"username": username, "role": role} + + token = None + auth_header = request.headers.get("Authorization") or "" + if auth_header.lower().startswith("bearer "): + token = auth_header.split(" ", 1)[1].strip() + if not token: + token = request.cookies.get("borealis_auth") + if not token: + return None + try: + data = self._token_serializer().loads( + token, + max_age=int(os.environ.get("BOREALIS_TOKEN_TTL_SECONDS", 60 * 60 * 24 * 30)), + ) + username = data.get("u") + role = data.get("r") or "User" + if username: + return {"username": username, "role": role} + except (BadSignature, SignatureExpired, Exception): + return None + return None + + def _require_admin(self) -> Optional[Tuple[Dict[str, Any], int]]: + user = self._current_user() + if not user: + return {"error": "unauthorized"}, 401 + if (user.get("role") or "").lower() != "admin": + return {"error": "forbidden"}, 403 + return None + + def toggle_mfa(self, username: str): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + username_norm = (username or "").strip() + if not username_norm: + return jsonify({"error": "invalid username"}), 400 + + payload = 
request.get_json(silent=True) or {} + enabled = bool(payload.get("enabled")) + reset_secret = bool(payload.get("reset_secret", False)) + + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn() + cur = conn.cursor() + now_ts = _now_ts() + + if enabled: + if reset_secret: + cur.execute( + "UPDATE users SET mfa_enabled=1, mfa_secret=NULL, updated_at=? WHERE LOWER(username)=LOWER(?)", + (now_ts, username_norm), + ) + else: + cur.execute( + "UPDATE users SET mfa_enabled=1, updated_at=? WHERE LOWER(username)=LOWER(?)", + (now_ts, username_norm), + ) + else: + if reset_secret: + cur.execute( + "UPDATE users SET mfa_enabled=0, mfa_secret=NULL, updated_at=? WHERE LOWER(username)=LOWER(?)", + (now_ts, username_norm), + ) + else: + cur.execute( + "UPDATE users SET mfa_enabled=0, updated_at=? WHERE LOWER(username)=LOWER(?)", + (now_ts, username_norm), + ) + + if cur.rowcount == 0: + return jsonify({"error": "user not found"}), 404 + + conn.commit() + + me = self._current_user() + if me and me.get("username", "").lower() == username_norm.lower() and not enabled: + session.pop("mfa_pending", None) + + return jsonify({"status": "ok"}) + except Exception as exc: + self.logger.debug("Failed to update MFA for %s", username_norm, exc_info=True) + return jsonify({"error": str(exc)}), 500 + finally: + if conn: + conn.close() + + +def register_mfa_management(app: Flask, adapters: "EngineServiceAdapters") -> None: + """Register MFA administration endpoints.""" + + service = MultiFactorAdministrationService(app, adapters) + blueprint = Blueprint("access_mgmt_mfa", __name__) + + @blueprint.route("/api/users//mfa", methods=["POST"]) + def _toggle_mfa(username: str): + return service.toggle_mfa(username) + + app.register_blueprint(blueprint) diff --git a/Data/Engine/services/API/access_management/users.py b/Data/Engine/services/API/access_management/users.py index 410d863c..813526a9 100644 --- a/Data/Engine/services/API/access_management/users.py +++ 
b/Data/Engine/services/API/access_management/users.py @@ -1,8 +1,317 @@ # ====================================================== # Data\Engine\services\API\access_management\users.py -# Description: Placeholder for operator user management endpoints (not yet implemented). +# Description: Operator user CRUD endpoints for the Engine auth group, mirroring the legacy server behaviour. # -# API Endpoints (if applicable): None +# API Endpoints (if applicable): +# - GET /api/users (Token Authenticated (Admin)) - Lists operator accounts. +# - POST /api/users (Token Authenticated (Admin)) - Creates a new operator account. +# - DELETE /api/users/ (Token Authenticated (Admin)) - Deletes an operator account. +# - POST /api/users//reset_password (Token Authenticated (Admin)) - Resets an operator password hash. +# - POST /api/users//role (Token Authenticated (Admin)) - Updates an operator role. # ====================================================== -"""Placeholder for users API module.""" +"""Operator user management endpoints for the Borealis Engine.""" +from __future__ import annotations + +import os +import sqlite3 +import time +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Sequence, Tuple + +from flask import Blueprint, Flask, jsonify, request, session +from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer + +if TYPE_CHECKING: # pragma: no cover - typing helper + from .. 
import EngineServiceAdapters + + +def _now_ts() -> int: + return int(time.time()) + + +def _row_to_user(row: Sequence[Any]) -> Mapping[str, Any]: + """Convert a database row into a user payload.""" + return { + "id": row[0], + "username": row[1], + "display_name": row[2] or row[1], + "role": row[3] or "User", + "last_login": row[4] or 0, + "created_at": row[5] or 0, + "updated_at": row[6] or 0, + "mfa_enabled": 1 if (row[7] or 0) else 0, + } + + +class UserManagementService: + """Utility wrapper that performs admin-authenticated user CRUD operations.""" + + def __init__(self, app: Flask, adapters: "EngineServiceAdapters") -> None: + self.app = app + self.adapters = adapters + self.db_conn_factory = adapters.db_conn_factory + self.logger = adapters.context.logger + + def _db_conn(self) -> sqlite3.Connection: + return self.db_conn_factory() + + def _token_serializer(self) -> URLSafeTimedSerializer: + secret = self.app.secret_key or "borealis-dev-secret" + return URLSafeTimedSerializer(secret, salt="borealis-auth") + + def _current_user(self) -> Optional[Dict[str, Any]]: + username = session.get("username") + role = session.get("role") or "User" + if username: + return {"username": username, "role": role} + + token = None + auth_header = request.headers.get("Authorization") or "" + if auth_header.lower().startswith("bearer "): + token = auth_header.split(" ", 1)[1].strip() + if not token: + token = request.cookies.get("borealis_auth") + if not token: + return None + try: + data = self._token_serializer().loads( + token, + max_age=int(os.environ.get("BOREALIS_TOKEN_TTL_SECONDS", 60 * 60 * 24 * 30)), + ) + username = data.get("u") + role = data.get("r") or "User" + if username: + return {"username": username, "role": role} + except (BadSignature, SignatureExpired, Exception): + return None + return None + + def _require_admin(self) -> Optional[Tuple[Dict[str, Any], int]]: + user = self._current_user() + if not user: + return {"error": "unauthorized"}, 401 + if 
(user.get("role") or "").lower() != "admin": + return {"error": "forbidden"}, 403 + return None + + # ------------------------------------------------------------------ # + # Endpoint implementations + # ------------------------------------------------------------------ # + + def list_users(self): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn() + cur = conn.cursor() + cur.execute( + "SELECT id, username, display_name, role, last_login, created_at, updated_at, " + "COALESCE(mfa_enabled, 0) FROM users ORDER BY LOWER(username) ASC" + ) + rows = cur.fetchall() + users: List[Mapping[str, Any]] = [_row_to_user(row) for row in rows] + return jsonify({"users": users}) + except Exception as exc: + self.logger.debug("Failed to list users", exc_info=True) + return jsonify({"error": str(exc)}), 500 + finally: + if conn: + conn.close() + + def create_user(self): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + data = request.get_json(silent=True) or {} + username = (data.get("username") or "").strip() + display_name = (data.get("display_name") or username).strip() + role = (data.get("role") or "User").strip().title() + password_sha512 = (data.get("password_sha512") or "").strip().lower() + + if not username or not password_sha512: + return jsonify({"error": "username and password_sha512 are required"}), 400 + if role not in ("User", "Admin"): + return jsonify({"error": "invalid role"}), 400 + + now_ts = _now_ts() + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn() + cur = conn.cursor() + cur.execute( + "INSERT INTO users(username, display_name, password_sha512, role, created_at, updated_at) " + "VALUES(?,?,?,?,?,?)", + (username, display_name or username, password_sha512, role, now_ts, now_ts), + ) + conn.commit() + return 
jsonify({"status": "ok"}) + except sqlite3.IntegrityError: + return jsonify({"error": "username already exists"}), 409 + except Exception as exc: + self.logger.debug("Failed to create user %s", username, exc_info=True) + return jsonify({"error": str(exc)}), 500 + finally: + if conn: + conn.close() + + def delete_user(self, username: str): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + username_norm = (username or "").strip() + if not username_norm: + return jsonify({"error": "invalid username"}), 400 + + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn() + cur = conn.cursor() + + me = self._current_user() + if me and (me.get("username", "").lower() == username_norm.lower()): + return ( + jsonify({"error": "You cannot delete the user you are currently logged in as."}), + 400, + ) + + cur.execute("SELECT COUNT(*) FROM users") + total_users = cur.fetchone()[0] or 0 + if total_users <= 1: + return ( + jsonify( + { + "error": "There is only one user currently configured, you cannot delete this user until you have created another." 
+ } + ), + 400, + ) + + cur.execute("DELETE FROM users WHERE LOWER(username)=LOWER(?)", (username_norm,)) + deleted = cur.rowcount or 0 + conn.commit() + if deleted == 0: + return jsonify({"error": "user not found"}), 404 + return jsonify({"status": "ok"}) + except Exception as exc: + self.logger.debug("Failed to delete user %s", username_norm, exc_info=True) + return jsonify({"error": str(exc)}), 500 + finally: + if conn: + conn.close() + + def reset_password(self, username: str): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + data = request.get_json(silent=True) or {} + password_sha512 = (data.get("password_sha512") or "").strip().lower() + if not password_sha512 or len(password_sha512) != 128: + return jsonify({"error": "invalid password hash"}), 400 + + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn() + cur = conn.cursor() + now_ts = _now_ts() + cur.execute( + "UPDATE users SET password_sha512=?, updated_at=? 
WHERE LOWER(username)=LOWER(?)", + (password_sha512, now_ts, username), + ) + if cur.rowcount == 0: + return jsonify({"error": "user not found"}), 404 + conn.commit() + return jsonify({"status": "ok"}) + except Exception as exc: + self.logger.debug("Failed to reset password for %s", username, exc_info=True) + return jsonify({"error": str(exc)}), 500 + finally: + if conn: + conn.close() + + def change_role(self, username: str): + requirement = self._require_admin() + if requirement: + payload, status = requirement + return jsonify(payload), status + + data = request.get_json(silent=True) or {} + role = (data.get("role") or "").strip().title() + if role not in ("User", "Admin"): + return jsonify({"error": "invalid role"}), 400 + + conn: Optional[sqlite3.Connection] = None + try: + conn = self._db_conn() + cur = conn.cursor() + + if role == "User": + cur.execute("SELECT COUNT(*) FROM users WHERE LOWER(role)='admin'") + admin_count = cur.fetchone()[0] or 0 + cur.execute( + "SELECT LOWER(role) FROM users WHERE LOWER(username)=LOWER(?)", + (username,), + ) + row = cur.fetchone() + current_role = (row[0] or "").lower() if row else "" + if current_role == "admin" and admin_count <= 1: + return jsonify({"error": "cannot demote the last admin"}), 400 + + now_ts = _now_ts() + cur.execute( + "UPDATE users SET role=?, updated_at=? 
WHERE LOWER(username)=LOWER(?)", + (role, now_ts, username), + ) + if cur.rowcount == 0: + return jsonify({"error": "user not found"}), 404 + conn.commit() + + me = self._current_user() + if me and me.get("username", "").lower() == (username or "").lower(): + session["role"] = role + + return jsonify({"status": "ok"}) + except Exception as exc: + self.logger.debug("Failed to update role for %s", username, exc_info=True) + return jsonify({"error": str(exc)}), 500 + finally: + if conn: + conn.close() + + +def register_user_management(app: Flask, adapters: "EngineServiceAdapters") -> None: + """Register user management endpoints.""" + + service = UserManagementService(app, adapters) + blueprint = Blueprint("access_mgmt_users", __name__) + + @blueprint.route("/api/users", methods=["GET"]) + def _list_users(): + return service.list_users() + + @blueprint.route("/api/users", methods=["POST"]) + def _create_user(): + return service.create_user() + + @blueprint.route("/api/users/", methods=["DELETE"]) + def _delete_user(username: str): + return service.delete_user(username) + + @blueprint.route("/api/users//reset_password", methods=["POST"]) + def _reset_password(username: str): + return service.reset_password(username) + + @blueprint.route("/api/users//role", methods=["POST"]) + def _change_role(username: str): + return service.change_role(username) + + app.register_blueprint(blueprint) diff --git a/Data/Engine/services/API/assemblies/execution.py b/Data/Engine/services/API/assemblies/execution.py index eec87497..d604b45f 100644 --- a/Data/Engine/services/API/assemblies/execution.py +++ b/Data/Engine/services/API/assemblies/execution.py @@ -13,20 +13,367 @@ from __future__ import annotations import base64 +import json import os +import re import time -from typing import TYPE_CHECKING, Any, Dict, List +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, List, Optional from flask import Blueprint, jsonify, request -from ..scheduled_jobs.management import 
ensure_scheduler, get_scheduler - if TYPE_CHECKING: # pragma: no cover - typing aide from flask import Flask from .. import EngineServiceAdapters +def _assemblies_root() -> Path: + base = Path(__file__).resolve() + search_roots = (base, *base.parents) + for candidate in search_roots: + engine_dir: Optional[Path] + if candidate.name.lower() == "engine": + engine_dir = candidate + else: + tentative = candidate / "Engine" + engine_dir = tentative if tentative.is_dir() else None + if not engine_dir: + continue + assemblies_dir = engine_dir / "Assemblies" + if assemblies_dir.is_dir(): + return assemblies_dir.resolve() + raise RuntimeError("Engine assemblies directory not found; expected Engine/Assemblies.") + + +def _scripts_root() -> Path: + assemblies_root = _assemblies_root() + scripts_dir = assemblies_root / "Scripts" + if not scripts_dir.is_dir(): + raise RuntimeError("Engine scripts directory not found; expected Engine/Assemblies/Scripts.") + return scripts_dir.resolve() + + +def _normalize_script_relpath(rel_path: Any) -> Optional[str]: + """Return a canonical Scripts-relative path or ``None`` when invalid.""" + + if not isinstance(rel_path, str): + return None + + raw = rel_path.replace("\\", "/").strip() + if not raw: + return None + + segments: List[str] = [] + for part in raw.split("/"): + candidate = part.strip() + if not candidate or candidate == ".": + continue + if candidate == "..": + return None + segments.append(candidate) + + if not segments: + return None + + first = segments[0] + if first.lower() != "scripts": + segments.insert(0, "Scripts") + else: + segments[0] = "Scripts" + + return "/".join(segments) + + +def _decode_base64_text(value: Any) -> Optional[str]: + if not isinstance(value, str): + return None + stripped = value.strip() + if not stripped: + return "" + try: + cleaned = re.sub(r"\s+", "", stripped) + except Exception: + cleaned = stripped + try: + decoded = base64.b64decode(cleaned, validate=True) + except Exception: + return None + 
try: + return decoded.decode("utf-8") + except Exception: + return decoded.decode("utf-8", errors="replace") + + +def _decode_script_content(value: Any, encoding_hint: str = "") -> str: + encoding = (encoding_hint or "").strip().lower() + if isinstance(value, str): + if encoding in {"base64", "b64", "base-64"}: + decoded = _decode_base64_text(value) + if decoded is not None: + return decoded.replace("\r\n", "\n") + decoded = _decode_base64_text(value) + if decoded is not None: + return decoded.replace("\r\n", "\n") + return value.replace("\r\n", "\n") + return "" + + +def _canonical_env_key(name: Any) -> str: + try: + return re.sub(r"[^A-Za-z0-9_]", "_", str(name or "").strip()).upper() + except Exception: + return "" + + +def _env_string(value: Any) -> str: + if isinstance(value, bool): + return "True" if value else "False" + if value is None: + return "" + return str(value) + + +def _powershell_literal(value: Any, var_type: str) -> str: + typ = str(var_type or "string").lower() + if typ == "boolean": + if isinstance(value, bool): + truthy = value + elif value is None: + truthy = False + elif isinstance(value, (int, float)): + truthy = value != 0 + else: + s = str(value).strip().lower() + if s in {"true", "1", "yes", "y", "on"}: + truthy = True + elif s in {"false", "0", "no", "n", "off", ""}: + truthy = False + else: + truthy = bool(s) + return "$true" if truthy else "$false" + if typ == "number": + if value is None or value == "": + return "0" + return str(value) + s = "" if value is None else str(value) + return "'" + s.replace("'", "''") + "'" + + +def _expand_env_aliases(env_map: Dict[str, str], variables: List[Dict[str, Any]]) -> Dict[str, str]: + expanded: Dict[str, str] = dict(env_map or {}) + if not isinstance(variables, list): + return expanded + for var in variables: + if not isinstance(var, dict): + continue + name = str(var.get("name") or "").strip() + if not name: + continue + canonical = _canonical_env_key(name) + if not canonical or canonical not 
in expanded: + continue + value = expanded[canonical] + alias = re.sub(r"[^A-Za-z0-9_]", "_", name) + if alias and alias not in expanded: + expanded[alias] = value + if alias != name and re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", name) and name not in expanded: + expanded[name] = value + return expanded + + +def _extract_variable_default(var: Dict[str, Any]) -> Any: + for key in ("value", "default", "defaultValue", "default_value"): + if key in var: + val = var.get(key) + return "" if val is None else val + return "" + + +def prepare_variable_context(doc_variables: List[Dict[str, Any]], overrides: Dict[str, Any]): + env_map: Dict[str, str] = {} + variables: List[Dict[str, Any]] = [] + literal_lookup: Dict[str, str] = {} + doc_names: Dict[str, bool] = {} + + overrides = overrides or {} + + if not isinstance(doc_variables, list): + doc_variables = [] + + for var in doc_variables: + if not isinstance(var, dict): + continue + name = str(var.get("name") or "").strip() + if not name: + continue + doc_names[name] = True + canonical = _canonical_env_key(name) + var_type = str(var.get("type") or "string").lower() + default_val = _extract_variable_default(var) + final_val = overrides[name] if name in overrides else default_val + if canonical: + env_map[canonical] = _env_string(final_val) + literal_lookup[canonical] = _powershell_literal(final_val, var_type) + if name in overrides: + new_var = dict(var) + new_var["value"] = overrides[name] + variables.append(new_var) + else: + variables.append(var) + + for name, val in overrides.items(): + if name in doc_names: + continue + canonical = _canonical_env_key(name) + if canonical: + env_map[canonical] = _env_string(val) + literal_lookup[canonical] = _powershell_literal(val, "string") + variables.append({"name": name, "value": val, "type": "string"}) + + env_map = _expand_env_aliases(env_map, variables) + return env_map, variables, literal_lookup + + +_ENV_VAR_PATTERN = re.compile(r"(?i)\$env:(\{)?([A-Za-z0-9_\-]+)(?(1)\})") + + +def 
rewrite_powershell_script(content: str, literal_lookup: Dict[str, str]) -> str: + if not content or not literal_lookup: + return content + + def _replace(match: Any) -> str: + name = match.group(2) + canonical = _canonical_env_key(name) + if not canonical: + return match.group(0) + literal = literal_lookup.get(canonical) + if literal is None: + return match.group(0) + return literal + + return _ENV_VAR_PATTERN.sub(_replace, content) + + +def _load_assembly_document(abs_path: str, default_type: str) -> Dict[str, Any]: + abs_path_str = os.fspath(abs_path) + base_name = os.path.splitext(os.path.basename(abs_path_str))[0] + doc: Dict[str, Any] = { + "name": base_name, + "description": "", + "category": "application" if default_type == "ansible" else "script", + "type": default_type, + "script": "", + "variables": [], + "files": [], + "timeout_seconds": 3600, + } + if abs_path_str.lower().endswith(".json") and os.path.isfile(abs_path_str): + try: + with open(abs_path_str, "r", encoding="utf-8") as fh: + data = json.load(fh) + except Exception: + data = {} + if isinstance(data, dict): + doc["name"] = str(data.get("name") or doc["name"]) + doc["description"] = str(data.get("description") or "") + cat = str(data.get("category") or doc["category"]).strip().lower() + if cat in {"application", "script"}: + doc["category"] = cat + typ = str(data.get("type") or data.get("script_type") or default_type).strip().lower() + if typ in {"powershell", "batch", "bash", "ansible"}: + doc["type"] = typ + script_val = data.get("script") + content_val = data.get("content") + script_lines = data.get("script_lines") + if isinstance(script_lines, list): + try: + doc["script"] = "\n".join(str(line) for line in script_lines) + except Exception: + doc["script"] = "" + elif isinstance(script_val, str): + doc["script"] = script_val + else: + if isinstance(content_val, str): + doc["script"] = content_val + encoding_hint = str( + data.get("script_encoding") or data.get("scriptEncoding") or "" + 
).strip().lower() + doc["script"] = _decode_script_content(doc.get("script"), encoding_hint) + if encoding_hint in {"base64", "b64", "base-64"}: + doc["script_encoding"] = "base64" + else: + probe_source = "" + if isinstance(script_val, str) and script_val: + probe_source = script_val + elif isinstance(content_val, str) and content_val: + probe_source = content_val + decoded_probe = _decode_base64_text(probe_source) if probe_source else None + if decoded_probe is not None: + doc["script_encoding"] = "base64" + doc["script"] = decoded_probe.replace("\r\n", "\n") + else: + doc["script_encoding"] = "plain" + try: + timeout_raw = data.get("timeout_seconds", data.get("timeout")) + if timeout_raw is None: + doc["timeout_seconds"] = 3600 + else: + doc["timeout_seconds"] = max(0, int(timeout_raw)) + except Exception: + doc["timeout_seconds"] = 3600 + vars_in = data.get("variables") if isinstance(data.get("variables"), list) else [] + doc["variables"] = [] + for item in vars_in: + if not isinstance(item, dict): + continue + name = str(item.get("name") or item.get("key") or "").strip() + if not name: + continue + vtype = str(item.get("type") or "string").strip().lower() + if vtype not in {"string", "number", "boolean", "credential"}: + vtype = "string" + doc["variables"].append( + { + "name": name, + "label": str(item.get("label") or ""), + "type": vtype, + "default": item.get("default", item.get("default_value")), + "required": bool(item.get("required")), + "description": str(item.get("description") or ""), + } + ) + files_in = data.get("files") if isinstance(data.get("files"), list) else [] + doc["files"] = [] + for file_item in files_in: + if not isinstance(file_item, dict): + continue + fname = file_item.get("file_name") or file_item.get("name") + if not fname or not isinstance(file_item.get("data"), str): + continue + try: + size_val = int(file_item.get("size") or 0) + except Exception: + size_val = 0 + doc["files"].append( + { + "file_name": str(fname), + "size": 
size_val, + "mime_type": str(file_item.get("mime_type") or file_item.get("mimeType") or ""), + "data": file_item.get("data"), + } + ) + return doc + try: + with open(abs_path_str, "r", encoding="utf-8", errors="replace") as fh: + content = fh.read() + except Exception: + content = "" + normalized_script = (content or "").replace("\r\n", "\n") + doc["script"] = normalized_script + return doc + + def _normalize_hostnames(value: Any) -> List[str]: if not isinstance(value, list): return [] @@ -41,31 +388,52 @@ def _normalize_hostnames(value: Any) -> List[str]: def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None: """Register quick execution endpoints for assemblies.""" - ensure_scheduler(app, adapters) blueprint = Blueprint("assemblies_execution", __name__) service_log = adapters.service_log @blueprint.route("/api/scripts/quick_run", methods=["POST"]) def scripts_quick_run(): - scheduler = get_scheduler(adapters) data = request.get_json(silent=True) or {} - rel_path = (data.get("script_path") or "").strip() + rel_path_input = data.get("script_path") + rel_path_normalized = _normalize_script_relpath(rel_path_input) hostnames = _normalize_hostnames(data.get("hostnames")) run_mode = (data.get("run_mode") or "system").strip().lower() + admin_user = str(data.get("admin_user") or "").strip() + admin_pass = str(data.get("admin_pass") or "").strip() - if not rel_path or not hostnames: + if not rel_path_normalized or not hostnames: return jsonify({"error": "Missing script_path or hostnames[]"}), 400 - scripts_root = scheduler._scripts_root() # type: ignore[attr-defined] - abs_path = os.path.abspath(os.path.join(scripts_root, rel_path)) - if ( - not abs_path.startswith(scripts_root) - or not scheduler._is_valid_scripts_relpath(rel_path) # type: ignore[attr-defined] - or not os.path.isfile(abs_path) - ): + rel_path_canonical = rel_path_normalized + + try: + scripts_root = _scripts_root() + assemblies_root = scripts_root.parent.resolve() + abs_path = 
(assemblies_root / rel_path_canonical).resolve() + except Exception as exc: # pragma: no cover - defensive guard + service_log( + "assemblies", + f"quick job failed to resolve script path={rel_path_input!r}: {exc}", + level="ERROR", + ) + return jsonify({"error": "Failed to resolve script path"}), 500 + + scripts_root_str = str(scripts_root) + abs_path_str = str(abs_path) + try: + within_scripts = os.path.commonpath([scripts_root_str, abs_path_str]) == scripts_root_str + except ValueError: + within_scripts = False + + if not within_scripts or not os.path.isfile(abs_path_str): + service_log( + "assemblies", + f"quick job requested missing or out-of-scope script input={rel_path_input!r} normalized={rel_path_canonical}", + level="WARNING", + ) return jsonify({"error": "Script not found"}), 404 - doc = scheduler._load_assembly_document(abs_path, "scripts") # type: ignore[attr-defined] + doc = _load_assembly_document(abs_path, "powershell") script_type = (doc.get("type") or "powershell").lower() if script_type != "powershell": return jsonify({"error": f"Unsupported script type '{script_type}'. 
Only PowerShell is supported."}), 400 @@ -81,8 +449,8 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None: continue overrides[name] = val - env_map, variables, literal_lookup = scheduler._prepare_variable_context(doc_variables, overrides) # type: ignore[attr-defined] - content = scheduler._rewrite_powershell_script(content, literal_lookup) # type: ignore[attr-defined] + env_map, variables, literal_lookup = prepare_variable_context(doc_variables, overrides) + content = rewrite_powershell_script(content, literal_lookup) normalized_script = (content or "").replace("\r\n", "\n") script_bytes = normalized_script.encode("utf-8") encoded_content = ( @@ -127,7 +495,7 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None: """, ( host, - rel_path.replace(os.sep, "/"), + rel_path_canonical.replace(os.sep, "/"), friendly_name, script_type, now, @@ -144,7 +512,7 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None: "target_hostname": host, "script_type": script_type, "script_name": friendly_name, - "script_path": rel_path.replace(os.sep, "/"), + "script_path": rel_path_canonical.replace(os.sep, "/"), "script_content": encoded_content, "script_encoding": "base64", "environment": env_map, @@ -152,6 +520,8 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None: "timeout_seconds": timeout_seconds, "files": doc.get("files") if isinstance(doc.get("files"), list) else [], "run_mode": run_mode, + "admin_user": admin_user, + "admin_pass": admin_pass, } if signature_b64: payload["signature"] = signature_b64 @@ -176,7 +546,7 @@ def register_execution(app: "Flask", adapters: "EngineServiceAdapters") -> None: results.append({"hostname": host, "job_id": job_id, "status": "Running"}) service_log( "assemblies", - f"quick job queued hostname={host} path={rel_path} run_mode={run_mode}", + f"quick job queued hostname={host} path={rel_path_canonical} run_mode={run_mode}", ) except 
Exception as exc: if conn is not None: diff --git a/Data/Engine/services/API/authentication.py b/Data/Engine/services/API/authentication.py index d57eb87b..f1d98ffa 100644 --- a/Data/Engine/services/API/authentication.py +++ b/Data/Engine/services/API/authentication.py @@ -15,6 +15,7 @@ from __future__ import annotations import base64 import hashlib import io +import logging import os import sqlite3 import time @@ -37,6 +38,9 @@ except Exception: # pragma: no cover - optional dependency if TYPE_CHECKING: # pragma: no cover - typing helper from . import EngineServiceAdapters +_logger = logging.getLogger(__name__) +_qr_logger_warning_emitted = False + def _now_ts() -> int: return int(time.time()) @@ -71,7 +75,13 @@ def _totp_provisioning_uri(secret: str, username: str) -> Optional[str]: def _totp_qr_data_uri(payload: str) -> Optional[str]: - if not payload or qrcode is None: + global _qr_logger_warning_emitted + if not payload: + return None + if qrcode is None: + if not _qr_logger_warning_emitted: + _logger.warning("MFA QR generation skipped: 'qrcode' dependency not available.") + _qr_logger_warning_emitted = True return None try: image = qrcode.make(payload, box_size=6, border=4) @@ -79,7 +89,10 @@ def _totp_qr_data_uri(payload: str) -> Optional[str]: image.save(buffer, format="PNG") encoded = base64.b64encode(buffer.getvalue()).decode("ascii") return f"data:image/png;base64,{encoded}" - except Exception: + except Exception as exc: + if not _qr_logger_warning_emitted: + _logger.warning("Failed to generate MFA QR code: %s", exc, exc_info=True) + _qr_logger_warning_emitted = True return None diff --git a/Data/Engine/services/API/devices/management.py b/Data/Engine/services/API/devices/management.py index 5a8d0bc5..4ab1c6af 100644 --- a/Data/Engine/services/API/devices/management.py +++ b/Data/Engine/services/API/devices/management.py @@ -29,9 +29,7 @@ from __future__ import annotations import json import logging import os -import ssl import sqlite3 -import threading 
import time import uuid from datetime import datetime, timezone @@ -41,20 +39,8 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from flask import Blueprint, jsonify, request, session, g from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer -from ....auth.device_auth import require_device_auth from ....auth.guid_utils import normalize_guid - -try: - import requests # type: ignore -except ImportError: # pragma: no cover - fallback for minimal test environments - class _RequestsStub: - class RequestException(RuntimeError): - """Stand-in exception when the requests module is unavailable.""" - - def get(self, *args: Any, **kwargs: Any) -> Any: - raise self.RequestException("The 'requests' library is required for repository hash lookups.") - - requests = _RequestsStub() # type: ignore +from ....auth.device_auth import require_device_auth if TYPE_CHECKING: # pragma: no cover - typing aide from .. import EngineServiceAdapters @@ -96,6 +82,25 @@ def _status_from_last_seen(last_seen: Optional[int]) -> str: return "Offline" +def _normalize_service_mode(value: Any, agent_id: Optional[str] = None) -> str: + try: + text = str(value or "").strip().lower() + except Exception: + text = "" + if not text and agent_id: + try: + aid = agent_id.lower() + if "-svc-" in aid or aid.endswith("-svc"): + return "system" + except Exception: + pass + if text in {"system", "svc", "service", "system_service"}: + return "system" + if text in {"interactive", "currentuser", "user", "current_user"}: + return "currentuser" + return "currentuser" + + def _is_internal_request(remote_addr: Optional[str]) -> bool: addr = (remote_addr or "").strip() if not addr: @@ -337,257 +342,6 @@ def _device_upsert( cur.execute(sql, params) -class RepositoryHashCache: - """Lightweight GitHub head cache with on-disk persistence.""" - - def __init__(self, adapters: "EngineServiceAdapters") -> None: - self._db_conn_factory = adapters.db_conn_factory - self._service_log = 
adapters.service_log - self._logger = adapters.context.logger - config = adapters.context.config or {} - default_root = Path(adapters.context.database_path).resolve().parent / "cache" - cache_root = Path(config.get("cache_dir") or default_root) - cache_root.mkdir(parents=True, exist_ok=True) - self._cache_file = cache_root / "repo_hash_cache.json" - self._cache: Dict[Tuple[str, str], Tuple[str, float]] = {} - self._lock = threading.Lock() - self._load_cache() - - def _load_cache(self) -> None: - try: - if not self._cache_file.is_file(): - return - data = json.loads(self._cache_file.read_text(encoding="utf-8")) - entries = data.get("entries") or {} - for key, payload in entries.items(): - sha = payload.get("sha") - ts = payload.get("ts") - if not sha or ts is None: - continue - repo, _, branch = key.partition(":") - if not repo or not branch: - continue - self._cache[(repo, branch)] = (str(sha), float(ts)) - except Exception: - self._logger.debug("Failed to hydrate repository hash cache", exc_info=True) - - def _persist_cache(self) -> None: - try: - snapshot = { - f"{repo}:{branch}": {"sha": sha, "ts": ts} - for (repo, branch), (sha, ts) in self._cache.items() - if sha - } - payload = {"version": 1, "entries": snapshot} - tmp_path = self._cache_file.with_suffix(".tmp") - tmp_path.write_text(json.dumps(payload), encoding="utf-8") - tmp_path.replace(self._cache_file) - except Exception: - self._logger.debug("Failed to persist repository hash cache", exc_info=True) - - def _resolve_original_ssl_module(self): - try: - from eventlet import patcher # type: ignore - - original_ssl = patcher.original("ssl") - if original_ssl is not None: - return original_ssl - except Exception: - pass - - module_name = getattr(ssl.SSLContext, "__module__", "") - if module_name != "eventlet.green.ssl": - return ssl - return None - - def _build_requests_session(self): - if isinstance(requests, _RequestsStub): - return None - try: - from requests import Session # type: ignore - from 
requests.adapters import HTTPAdapter # type: ignore - except Exception: - return None - - original_ssl = self._resolve_original_ssl_module() - if original_ssl is None: - return None - - try: - context = original_ssl.create_default_context() - except Exception: - return None - - tls_version = getattr(original_ssl, "TLSVersion", None) - if tls_version is not None and hasattr(context, "minimum_version"): - try: - context.minimum_version = tls_version.TLSv1_2 - except Exception: - pass - - class _ContextAdapter(HTTPAdapter): - def init_poolmanager(self, *args, **kwargs): - kwargs.setdefault("ssl_context", context) - return super().init_poolmanager(*args, **kwargs) - - def proxy_manager_for(self, *args, **kwargs): - kwargs.setdefault("ssl_context", context) - return super().proxy_manager_for(*args, **kwargs) - - session = Session() - adapter = _ContextAdapter() - session.mount("https://", adapter) - return session - - def _github_token(self, *, force_refresh: bool = False) -> Optional[str]: - env_token = (request.headers.get("X-GitHub-Token") or "").strip() - if env_token: - return env_token - token = None - if not force_refresh: - token = request.headers.get("Authorization") - if token and token.lower().startswith("bearer "): - return token.split(" ", 1)[1].strip() - conn: Optional[sqlite3.Connection] = None - try: - conn = self._db_conn_factory() - cur = conn.cursor() - cur.execute("SELECT token FROM github_token LIMIT 1") - row = cur.fetchone() - if row and row[0]: - candidate = str(row[0]).strip() - if candidate: - token = candidate - except sqlite3.Error: - token = None - except Exception as exc: - self._service_log("server", f"github token lookup failed: {exc}") - token = None - finally: - if conn: - conn.close() - if token: - return token - fallback = os.environ.get("BOREALIS_GITHUB_TOKEN") or os.environ.get("GITHUB_TOKEN") - return fallback.strip() if fallback else None - - def resolve( - self, - repo: str, - branch: str, - *, - ttl: int = 60, - force_refresh: 
bool = False, - ) -> Tuple[Dict[str, Any], int]: - ttl = max(30, min(int(ttl or 60), 3600)) - key = (repo, branch) - now = time.time() - with self._lock: - cached = self._cache.get(key) - if cached and not force_refresh: - sha, ts = cached - if sha and (now - ts) < ttl: - return ( - { - "repo": repo, - "branch": branch, - "sha": sha, - "cached": True, - "age_seconds": now - ts, - "source": "cache", - }, - 200, - ) - - headers = { - "Accept": "application/vnd.github+json", - "User-Agent": "Borealis-Engine", - } - token = self._github_token(force_refresh=force_refresh) - if token: - headers["Authorization"] = f"Bearer {token}" - - sha: Optional[str] = None - error: Optional[str] = None - session = None - try: - session = self._build_requests_session() - except Exception: - session = None - - try: - target = session if session is not None else requests - resp = target.get( - f"https://api.github.com/repos/{repo}/branches/{branch}", - headers=headers, - timeout=20, - ) - if resp.status_code == 200: - data = resp.json() - sha = ((data.get("commit") or {}).get("sha") or "").strip() - else: - error = f"GitHub head lookup failed: HTTP {resp.status_code}" - except RecursionError as exc: - error = f"GitHub head lookup recursion error: {exc}" - except requests.RequestException as exc: - error = f"GitHub head lookup raised: {exc}" - except Exception as exc: - error = f"GitHub head lookup unexpected error: {exc}" - finally: - if session is not None: - try: - session.close() - except Exception: - pass - - if sha: - with self._lock: - self._cache[key] = (sha, now) - self._persist_cache() - return ( - { - "repo": repo, - "branch": branch, - "sha": sha, - "cached": False, - "age_seconds": 0.0, - "source": "github", - }, - 200, - ) - - if error: - self._service_log("server", f"/api/repo/current_hash error: {error}") - - if cached: - cached_sha, ts = cached - return ( - { - "repo": repo, - "branch": branch, - "sha": cached_sha or None, - "cached": True, - "age_seconds": now - ts, - 
"error": error or "using cached value", - "source": "cache-stale", - }, - 200 if cached_sha else 503, - ) - - return ( - { - "repo": repo, - "branch": branch, - "sha": None, - "cached": False, - "age_seconds": None, - "error": error or "unable to resolve repository head", - "source": "github", - }, - 503, - ) - - class DeviceManagementService: """Encapsulates database access for device-focused API routes.""" @@ -623,7 +377,7 @@ class DeviceManagementService: self.db_conn_factory = adapters.db_conn_factory self.service_log = adapters.service_log self.logger = adapters.context.logger or logging.getLogger(__name__) - self.repo_cache = RepositoryHashCache(adapters) + self.repo_cache = adapters.github_integration def _db_conn(self) -> sqlite3.Connection: return self.db_conn_factory() @@ -795,6 +549,76 @@ class DeviceManagementService: self.logger.debug("Failed to list devices", exc_info=True) return {"error": str(exc)}, 500 + def list_agents(self) -> Tuple[Dict[str, Any], int]: + try: + devices = self._fetch_devices(only_agents=True) + grouped: Dict[str, Dict[str, Dict[str, Any]]] = {} + now = time.time() + for record in devices: + hostname = (record.get("hostname") or "").strip() or "unknown" + agent_id = (record.get("agent_id") or "").strip() + mode = _normalize_service_mode(record.get("service_mode"), agent_id) + if mode != "currentuser": + lowered = agent_id.lower() + if lowered.endswith("-script"): + continue + last_seen_raw = record.get("last_seen") or 0 + try: + last_seen = int(last_seen_raw) + except Exception: + last_seen = 0 + collector_active = bool(last_seen and (now - float(last_seen)) < 130) + agent_guid = normalize_guid(record.get("agent_guid")) if record.get("agent_guid") else "" + status_value = record.get("status") + if status_value in (None, ""): + status = "Online" if collector_active else "Offline" + else: + status = str(status_value) + payload = { + "hostname": hostname, + "agent_hostname": hostname, + "service_mode": mode, + "collector_active": 
collector_active, + "collector_active_ts": last_seen, + "last_seen": last_seen, + "status": status, + "agent_id": agent_id, + "agent_guid": agent_guid or "", + "agent_hash": record.get("agent_hash") or "", + "connection_type": record.get("connection_type") or "", + "connection_endpoint": record.get("connection_endpoint") or "", + "device_type": record.get("device_type") or "", + "domain": record.get("domain") or "", + "external_ip": record.get("external_ip") or "", + "internal_ip": record.get("internal_ip") or "", + "last_reboot": record.get("last_reboot") or "", + "last_user": record.get("last_user") or "", + "operating_system": record.get("operating_system") or "", + "uptime": record.get("uptime") or 0, + "site_id": record.get("site_id"), + "site_name": record.get("site_name") or "", + "site_description": record.get("site_description") or "", + } + bucket = grouped.setdefault(hostname, {}) + existing = bucket.get(mode) + if not existing or last_seen >= existing.get("last_seen", 0): + bucket[mode] = payload + + agents: Dict[str, Dict[str, Any]] = {} + for bucket in grouped.values(): + for payload in bucket.values(): + agent_key = payload.get("agent_id") or payload.get("agent_guid") + if not agent_key: + agent_key = f"{payload['hostname']}|{payload['service_mode']}" + if not payload.get("agent_id"): + payload["agent_id"] = agent_key + agents[agent_key] = payload + + return {"agents": agents}, 200 + except Exception as exc: + self.logger.debug("Failed to list agents", exc_info=True) + return {"error": str(exc)}, 500 + def get_device_by_guid(self, guid: str) -> Tuple[Dict[str, Any], int]: normalized_guid = normalize_guid(guid) if not normalized_guid: @@ -1465,18 +1289,14 @@ class DeviceManagementService: conn.close() def repo_current_hash(self) -> Tuple[Dict[str, Any], int]: - repo = (request.args.get("repo") or "bunny-lab-io/Borealis").strip() - branch = (request.args.get("branch") or "main").strip() refresh_flag = (request.args.get("refresh") or "").strip().lower() 
- ttl_raw = request.args.get("ttl") - if "/" not in repo: - return {"error": "repo must be in the form owner/name"}, 400 - try: - ttl = int(ttl_raw) if ttl_raw else 60 - except ValueError: - ttl = 60 force_refresh = refresh_flag in {"1", "true", "yes", "force", "refresh"} - payload, status = self.repo_cache.resolve(repo, branch, ttl=ttl, force_refresh=force_refresh) + payload, status = self.repo_cache.current_repo_hash( + request.args.get("repo"), + request.args.get("branch"), + ttl=request.args.get("ttl"), + force_refresh=force_refresh, + ) return payload, status def agent_hash_list(self) -> Tuple[Dict[str, Any], int]: @@ -1525,6 +1345,11 @@ def register_management(app, adapters: "EngineServiceAdapters") -> None: payload, status = service.save_agent_details() return jsonify(payload), status + @blueprint.route("/api/agents", methods=["GET"]) + def _list_agents(): + payload, status = service.list_agents() + return jsonify(payload), status + @blueprint.route("/api/devices", methods=["GET"]) def _list_devices(): payload, status = service.list_devices() @@ -1679,4 +1504,3 @@ def register_management(app, adapters: "EngineServiceAdapters") -> None: return jsonify(payload), status app.register_blueprint(blueprint) - diff --git a/Data/Engine/services/API/scheduled_jobs/management.py b/Data/Engine/services/API/scheduled_jobs/management.py index 66ca6518..ef78d0f3 100644 --- a/Data/Engine/services/API/scheduled_jobs/management.py +++ b/Data/Engine/services/API/scheduled_jobs/management.py @@ -17,10 +17,10 @@ """Scheduled job management integration for the Borealis Engine runtime.""" from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING -try: # pragma: no cover - Engine-local legacy scheduler shim - from . 
import legacy_job_scheduler # type: ignore +try: # pragma: no cover - legacy module import guard + import job_scheduler as legacy_job_scheduler # type: ignore except Exception as exc: # pragma: no cover - runtime guard legacy_job_scheduler = None # type: ignore _SCHEDULER_IMPORT_ERROR = exc @@ -36,8 +36,8 @@ if TYPE_CHECKING: # pragma: no cover - typing aide def _raise_scheduler_import() -> None: if _SCHEDULER_IMPORT_ERROR is not None: raise RuntimeError( - "Legacy job scheduler module could not be imported; ensure " - "Data/Engine/services/API/scheduled_jobs/legacy_job_scheduler.py remains available." + "Legacy job scheduler module could not be imported; ensure Data/Server/job_scheduler.py " + "remains available during the Engine migration." ) from _SCHEDULER_IMPORT_ERROR @@ -79,3 +79,4 @@ def register_management(app: "Flask", adapters: "EngineServiceAdapters") -> None """Ensure scheduled job routes are registered via the legacy scheduler.""" ensure_scheduler(app, adapters) + diff --git a/Data/Engine/services/WebSocket/__init__.py b/Data/Engine/services/WebSocket/__init__.py index db8c0e2f..af235fdd 100644 --- a/Data/Engine/services/WebSocket/__init__.py +++ b/Data/Engine/services/WebSocket/__init__.py @@ -1,24 +1,187 @@ # ====================================================== # Data\Engine\services\WebSocket\__init__.py -# Description: Placeholder hook for registering Engine Socket.IO namespaces. +# Description: Socket.IO handlers for Engine runtime quick job updates and realtime notifications. # # API Endpoints (if applicable): None # ====================================================== -"""WebSocket service stubs for the Borealis Engine runtime. - -Future stages will move Socket.IO namespaces and event handlers here. Stage 1 -only keeps a placeholder so the Engine bootstrapper can stub registration -without touching legacy behaviour. 
-""" +"""WebSocket service registration for the Borealis Engine runtime.""" from __future__ import annotations +import sqlite3 +import time +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Callable, Dict, Optional + from flask_socketio import SocketIO +from ...database import initialise_engine_database from ...server import EngineContext +from ..API import _make_db_conn_factory, _make_service_logger + + +def _now_ts() -> int: + return int(time.time()) + + +def _normalize_text(value: Any) -> str: + if value is None: + return "" + if isinstance(value, bytes): + try: + return value.decode("utf-8") + except Exception: + return value.decode("utf-8", errors="replace") + return str(value) + + +@dataclass +class EngineRealtimeAdapters: + context: EngineContext + db_conn_factory: Callable[[], sqlite3.Connection] = field(init=False) + service_log: Callable[[str, str, Optional[str]], None] = field(init=False) + + def __post_init__(self) -> None: + initialise_engine_database(self.context.database_path, logger=self.context.logger) + self.db_conn_factory = _make_db_conn_factory(self.context.database_path) + + log_file = str( + self.context.config.get("log_file") + or self.context.config.get("LOG_FILE") + or "" + ).strip() + if log_file: + base = Path(log_file).resolve().parent + else: + base = Path(self.context.database_path).resolve().parent + self.service_log = _make_service_logger(base, self.context.logger) def register_realtime(socket_server: SocketIO, context: EngineContext) -> None: - """Placeholder hook for Socket.IO namespace registration.""" + """Register Socket.IO event handlers for the Engine runtime.""" - context.logger.debug("Engine WebSocket services are not yet implemented.") + adapters = EngineRealtimeAdapters(context) + logger = context.logger.getChild("realtime.quick_jobs") + + @socket_server.on("quick_job_result") + def _handle_quick_job_result(data: Any) -> None: + if not isinstance(data, dict): + 
logger.debug("quick_job_result payload ignored (non-dict): %r", data) + return + + job_id_raw = data.get("job_id") + try: + job_id = int(job_id_raw) + except (TypeError, ValueError): + logger.debug("quick_job_result missing valid job_id: %r", job_id_raw) + return + + status = str(data.get("status") or "").strip() or "Failed" + stdout = _normalize_text(data.get("stdout")) + stderr = _normalize_text(data.get("stderr")) + + conn: Optional[sqlite3.Connection] = None + cursor = None + broadcast_payload: Optional[Dict[str, Any]] = None + + try: + conn = adapters.db_conn_factory() + cursor = conn.cursor() + cursor.execute( + "UPDATE activity_history SET status=?, stdout=?, stderr=? WHERE id=?", + (status, stdout, stderr, job_id), + ) + if cursor.rowcount == 0: + logger.debug("quick_job_result missing activity_history row for job_id=%s", job_id) + conn.commit() + + try: + cursor.execute( + "SELECT run_id FROM scheduled_job_run_activity WHERE activity_id=?", + (job_id,), + ) + link = cursor.fetchone() + except sqlite3.Error: + link = None + + if link: + try: + run_id = int(link[0]) + ts_now = _now_ts() + if status.lower() == "running": + cursor.execute( + "UPDATE scheduled_job_runs SET status='Running', updated_at=? WHERE id=?", + (ts_now, run_id), + ) + else: + cursor.execute( + """ + UPDATE scheduled_job_runs + SET status=?, + finished_ts=COALESCE(finished_ts, ?), + updated_at=? + WHERE id=? 
+ """, + (status, ts_now, ts_now, run_id), + ) + conn.commit() + except Exception as exc: # pragma: no cover - defensive guard + logger.debug( + "quick_job_result failed to update scheduled_job_runs for job_id=%s: %s", + job_id, + exc, + ) + + try: + cursor.execute( + "SELECT id, hostname, status FROM activity_history WHERE id=?", + (job_id,), + ) + row = cursor.fetchone() + except sqlite3.Error: + row = None + + if row: + hostname = (row[1] or "").strip() + if hostname: + broadcast_payload = { + "activity_id": int(row[0]), + "hostname": hostname, + "status": row[2] or status, + "change": "updated", + "source": "quick_job", + } + + adapters.service_log( + "assemblies", + f"quick_job_result processed job_id={job_id} status={status}", + ) + except Exception as exc: # pragma: no cover - defensive guard + logger.warning( + "quick_job_result handler error for job_id=%s: %s", + job_id, + exc, + exc_info=True, + ) + finally: + if cursor is not None: + try: + cursor.close() + except Exception: + pass + if conn is not None: + try: + conn.close() + except Exception: + pass + + if broadcast_payload: + try: + socket_server.emit("device_activity_changed", broadcast_payload) + except Exception as exc: # pragma: no cover - defensive guard + logger.debug( + "Failed to emit device_activity_changed for job_id=%s: %s", + job_id, + exc, + ) diff --git a/Data/Engine/web-interface/src/Login.jsx b/Data/Engine/web-interface/src/Login.jsx index 375bc61b..593a3d81 100644 --- a/Data/Engine/web-interface/src/Login.jsx +++ b/Data/Engine/web-interface/src/Login.jsx @@ -229,11 +229,13 @@ export default function Login({ onLogin }) { Scan the QR code with your authenticator app, then enter the 6-digit code to complete setup for {username}. {setupQr ? ( - MFA enrollment QR code + + MFA enrollment QR code + ) : null} {formattedSecret ? 
( None: + blueprint = Blueprint("admin", __name__) + + def _now() -> datetime: + return datetime.now(tz=timezone.utc) + + def _iso(dt: datetime) -> str: + return dt.isoformat() + + def _lookup_user_id(cur: sqlite3.Cursor, username: str) -> Optional[str]: + if not username: + return None + cur.execute( + "SELECT id FROM users WHERE LOWER(username) = LOWER(?)", + (username,), + ) + row = cur.fetchone() + if row: + return str(row[0]) + return None + + def _hostname_conflict( + cur: sqlite3.Cursor, + hostname: Optional[str], + pending_guid: Optional[str], + ) -> Optional[Dict[str, Any]]: + if not hostname: + return None + cur.execute( + """ + SELECT d.guid, d.ssl_key_fingerprint, ds.site_id, s.name + FROM devices d + LEFT JOIN device_sites ds ON ds.device_hostname = d.hostname + LEFT JOIN sites s ON s.id = ds.site_id + WHERE d.hostname = ? + """, + (hostname,), + ) + row = cur.fetchone() + if not row: + return None + existing_guid = normalize_guid(row[0]) + existing_fingerprint = (row[1] or "").strip().lower() + pending_norm = normalize_guid(pending_guid) + if existing_guid and pending_norm and existing_guid == pending_norm: + return None + site_id_raw = row[2] + site_id = None + if site_id_raw is not None: + try: + site_id = int(site_id_raw) + except (TypeError, ValueError): + site_id = None + site_name = row[3] or "" + return { + "guid": existing_guid or None, + "ssl_key_fingerprint": existing_fingerprint or None, + "site_id": site_id, + "site_name": site_name, + } + + def _suggest_alternate_hostname( + cur: sqlite3.Cursor, + hostname: Optional[str], + pending_guid: Optional[str], + ) -> Optional[str]: + base = (hostname or "").strip() + if not base: + return None + base = base[:253] + candidate = base + pending_norm = normalize_guid(pending_guid) + suffix = 1 + while True: + cur.execute( + "SELECT guid FROM devices WHERE hostname = ?", + (candidate,), + ) + row = cur.fetchone() + if not row: + return candidate + existing_guid = normalize_guid(row[0]) + if 
pending_norm and existing_guid == pending_norm: + return candidate + candidate = f"{base}-{suffix}" + suffix += 1 + if suffix > 50: + return pending_norm or candidate + + @blueprint.before_request + def _check_admin(): + result = require_admin() + if result is not None: + return result + return None + + @blueprint.route("/api/admin/enrollment-codes", methods=["GET"]) + def list_enrollment_codes(): + status_filter = request.args.get("status") + conn = db_conn_factory() + try: + cur = conn.cursor() + sql = """ + SELECT id, + code, + expires_at, + created_by_user_id, + used_at, + used_by_guid, + max_uses, + use_count, + last_used_at + FROM enrollment_install_codes + """ + params: List[str] = [] + now_iso = _iso(_now()) + if status_filter == "active": + sql += " WHERE use_count < max_uses AND expires_at > ?" + params.append(now_iso) + elif status_filter == "expired": + sql += " WHERE use_count < max_uses AND expires_at <= ?" + params.append(now_iso) + elif status_filter == "used": + sql += " WHERE use_count >= max_uses" + sql += " ORDER BY expires_at ASC" + cur.execute(sql, params) + rows = cur.fetchall() + finally: + conn.close() + + records = [] + for row in rows: + records.append( + { + "id": row[0], + "code": row[1], + "expires_at": row[2], + "created_by_user_id": row[3], + "used_at": row[4], + "used_by_guid": row[5], + "max_uses": row[6], + "use_count": row[7], + "last_used_at": row[8], + } + ) + return jsonify({"codes": records}) + + @blueprint.route("/api/admin/enrollment-codes", methods=["POST"]) + def create_enrollment_code(): + payload = request.get_json(force=True, silent=True) or {} + ttl_hours = int(payload.get("ttl_hours") or 1) + if ttl_hours not in VALID_TTL_HOURS: + return jsonify({"error": "invalid_ttl"}), 400 + + max_uses_value = payload.get("max_uses") + if max_uses_value is None: + max_uses_value = payload.get("allowed_uses") + try: + max_uses = int(max_uses_value) + except Exception: + max_uses = 2 + if max_uses < 1: + max_uses = 1 + if max_uses > 
10: + max_uses = 10 + + user = current_user() or {} + username = user.get("username") or "" + + conn = db_conn_factory() + try: + cur = conn.cursor() + created_by = _lookup_user_id(cur, username) or username or "system" + code_value = _generate_install_code() + issued_at = _now() + expires_at = issued_at + timedelta(hours=ttl_hours) + record_id = str(uuid.uuid4()) + cur.execute( + """ + INSERT INTO enrollment_install_codes ( + id, code, expires_at, created_by_user_id, max_uses, use_count + ) + VALUES (?, ?, ?, ?, ?, 0) + """, + (record_id, code_value, _iso(expires_at), created_by, max_uses), + ) + cur.execute( + """ + INSERT INTO enrollment_install_codes_persistent ( + id, + code, + created_at, + expires_at, + created_by_user_id, + used_at, + used_by_guid, + max_uses, + last_known_use_count, + last_used_at, + is_active, + archived_at, + consumed_at + ) + VALUES (?, ?, ?, ?, ?, NULL, NULL, ?, 0, NULL, 1, NULL, NULL) + ON CONFLICT(id) DO UPDATE + SET code = excluded.code, + created_at = excluded.created_at, + expires_at = excluded.expires_at, + created_by_user_id = excluded.created_by_user_id, + max_uses = excluded.max_uses, + last_known_use_count = 0, + used_at = NULL, + used_by_guid = NULL, + last_used_at = NULL, + is_active = 1, + archived_at = NULL, + consumed_at = NULL + """, + (record_id, code_value, _iso(issued_at), _iso(expires_at), created_by, max_uses), + ) + conn.commit() + finally: + conn.close() + + log( + "server", + f"installer code created id={record_id} by={username} ttl={ttl_hours}h max_uses={max_uses}", + ) + return jsonify( + { + "id": record_id, + "code": code_value, + "expires_at": _iso(expires_at), + "max_uses": max_uses, + "use_count": 0, + "last_used_at": None, + } + ) + + @blueprint.route("/api/admin/enrollment-codes/", methods=["DELETE"]) + def delete_enrollment_code(code_id: str): + conn = db_conn_factory() + try: + cur = conn.cursor() + cur.execute( + "DELETE FROM enrollment_install_codes WHERE id = ? 
AND use_count = 0", + (code_id,), + ) + deleted = cur.rowcount + if deleted: + archive_ts = _iso(_now()) + cur.execute( + """ + UPDATE enrollment_install_codes_persistent + SET is_active = 0, + archived_at = COALESCE(archived_at, ?) + WHERE id = ? + """, + (archive_ts, code_id), + ) + conn.commit() + finally: + conn.close() + + if not deleted: + return jsonify({"error": "not_found"}), 404 + log("server", f"installer code deleted id={code_id}") + return jsonify({"status": "deleted"}) + + @blueprint.route("/api/admin/device-approvals", methods=["GET"]) + def list_device_approvals(): + status_raw = request.args.get("status") + status = (status_raw or "").strip().lower() + approvals: List[Dict[str, Any]] = [] + conn = db_conn_factory() + try: + cur = conn.cursor() + params: List[str] = [] + sql = """ + SELECT + da.id, + da.approval_reference, + da.guid, + da.hostname_claimed, + da.ssl_key_fingerprint_claimed, + da.enrollment_code_id, + da.status, + da.client_nonce, + da.server_nonce, + da.created_at, + da.updated_at, + da.approved_by_user_id, + u.username AS approved_by_username + FROM device_approvals AS da + LEFT JOIN users AS u + ON ( + CAST(da.approved_by_user_id AS TEXT) = CAST(u.id AS TEXT) + OR LOWER(da.approved_by_user_id) = LOWER(u.username) + ) + """ + if status and status != "all": + sql += " WHERE LOWER(da.status) = ?" 
+ params.append(status) + sql += " ORDER BY da.created_at ASC" + cur.execute(sql, params) + rows = cur.fetchall() + for row in rows: + record_guid = row[2] + hostname = row[3] + fingerprint_claimed = row[4] + claimed_fp_norm = (fingerprint_claimed or "").strip().lower() + conflict_raw = _hostname_conflict(cur, hostname, record_guid) + fingerprint_match = False + requires_prompt = False + conflict = None + if conflict_raw: + conflict_fp = (conflict_raw.get("ssl_key_fingerprint") or "").strip().lower() + fingerprint_match = bool(conflict_fp and claimed_fp_norm) and conflict_fp == claimed_fp_norm + requires_prompt = not fingerprint_match + conflict = { + **conflict_raw, + "fingerprint_match": fingerprint_match, + "requires_prompt": requires_prompt, + } + alternate_hostname = ( + _suggest_alternate_hostname(cur, hostname, record_guid) + if conflict_raw and requires_prompt + else None + ) + approvals.append( + { + "id": row[0], + "approval_reference": row[1], + "guid": record_guid, + "hostname_claimed": hostname, + "ssl_key_fingerprint_claimed": fingerprint_claimed, + "enrollment_code_id": row[5], + "status": row[6], + "client_nonce": row[7], + "server_nonce": row[8], + "created_at": row[9], + "updated_at": row[10], + "approved_by_user_id": row[11], + "hostname_conflict": conflict, + "alternate_hostname": alternate_hostname, + "conflict_requires_prompt": requires_prompt, + "fingerprint_match": fingerprint_match, + "approved_by_username": row[12], + } + ) + finally: + conn.close() + + return jsonify({"approvals": approvals}) + + def _set_approval_status( + approval_id: str, + status: str, + *, + guid: Optional[str] = None, + resolution: Optional[str] = None, + ): + user = current_user() or {} + username = user.get("username") or "" + + conn = db_conn_factory() + try: + cur = conn.cursor() + cur.execute( + """ + SELECT status, + guid, + hostname_claimed, + ssl_key_fingerprint_claimed + FROM device_approvals + WHERE id = ? 
+ """, + (approval_id,), + ) + row = cur.fetchone() + if not row: + return {"error": "not_found"}, 404 + existing_status = (row[0] or "").strip().lower() + if existing_status != "pending": + return {"error": "approval_not_pending"}, 409 + stored_guid = row[1] + hostname_claimed = row[2] + fingerprint_claimed = (row[3] or "").strip().lower() + + guid_effective = normalize_guid(guid) if guid else normalize_guid(stored_guid) + resolution_effective = (resolution.strip().lower() if isinstance(resolution, str) else None) + + conflict = None + if status == "approved": + conflict = _hostname_conflict(cur, hostname_claimed, guid_effective) + if conflict: + conflict_fp = (conflict.get("ssl_key_fingerprint") or "").strip().lower() + fingerprint_match = bool(conflict_fp and fingerprint_claimed) and conflict_fp == fingerprint_claimed + if fingerprint_match: + guid_effective = conflict.get("guid") or guid_effective + if not resolution_effective: + resolution_effective = "auto_merge_fingerprint" + elif resolution_effective == "overwrite": + guid_effective = conflict.get("guid") or guid_effective + elif resolution_effective == "coexist": + pass + else: + return { + "error": "conflict_resolution_required", + "hostname": hostname_claimed, + }, 409 + + guid_to_store = guid_effective or normalize_guid(stored_guid) or None + + approved_by = _lookup_user_id(cur, username) or username or "system" + cur.execute( + """ + UPDATE device_approvals + SET status = ?, + guid = ?, + approved_by_user_id = ?, + updated_at = ? + WHERE id = ? 
+ """, + ( + status, + guid_to_store, + approved_by, + _iso(_now()), + approval_id, + ), + ) + conn.commit() + finally: + conn.close() + resolution_note = f" ({resolution_effective})" if resolution_effective else "" + log("server", f"device approval {approval_id} -> {status}{resolution_note} by {username}") + payload: Dict[str, Any] = {"status": status} + if resolution_effective: + payload["conflict_resolution"] = resolution_effective + return payload, 200 + + @blueprint.route("/api/admin/device-approvals//approve", methods=["POST"]) + def approve_device(approval_id: str): + payload = request.get_json(force=True, silent=True) or {} + guid = payload.get("guid") + if guid: + guid = str(guid).strip() + resolution_val = payload.get("conflict_resolution") + resolution = None + if isinstance(resolution_val, str): + cleaned = resolution_val.strip().lower() + if cleaned: + resolution = cleaned + result, status_code = _set_approval_status( + approval_id, + "approved", + guid=guid, + resolution=resolution, + ) + return jsonify(result), status_code + + @blueprint.route("/api/admin/device-approvals//deny", methods=["POST"]) + def deny_device(approval_id: str): + result, status_code = _set_approval_status(approval_id, "denied") + return jsonify(result), status_code + + app.register_blueprint(blueprint) + + +def _generate_install_code() -> str: + raw = secrets.token_hex(16).upper() + return "-".join(raw[i : i + 4] for i in range(0, len(raw), 4)) diff --git a/Data/Server/Modules/agents/__init__.py b/Data/Server/Modules/agents/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Data/Server/Modules/agents/__init__.py @@ -0,0 +1 @@ + diff --git a/Data/Server/Modules/agents/routes.py b/Data/Server/Modules/agents/routes.py new file mode 100644 index 00000000..990f684b --- /dev/null +++ b/Data/Server/Modules/agents/routes.py @@ -0,0 +1,218 @@ +from __future__ import annotations + +import json +import time +import sqlite3 +from typing import Any, Callable, 
Dict, Optional + +from flask import Blueprint, jsonify, request, g + +from Modules.auth.device_auth import DeviceAuthManager, require_device_auth +from Modules.crypto.signing import ScriptSigner +from Modules.guid_utils import normalize_guid + +AGENT_CONTEXT_HEADER = "X-Borealis-Agent-Context" + + +def _canonical_context(value: Optional[str]) -> Optional[str]: + if not value: + return None + cleaned = "".join(ch for ch in str(value) if ch.isalnum() or ch in ("_", "-")) + if not cleaned: + return None + return cleaned.upper() + + +def register( + app, + *, + db_conn_factory: Callable[[], Any], + auth_manager: DeviceAuthManager, + log: Callable[[str, str, Optional[str]], None], + script_signer: ScriptSigner, +) -> None: + blueprint = Blueprint("agents", __name__) + + def _json_or_none(value) -> Optional[str]: + if value is None: + return None + try: + return json.dumps(value) + except Exception: + return None + + def _context_hint(ctx=None) -> Optional[str]: + if ctx is not None and getattr(ctx, "service_mode", None): + return _canonical_context(getattr(ctx, "service_mode", None)) + return _canonical_context(request.headers.get(AGENT_CONTEXT_HEADER)) + + def _auth_context(): + ctx = getattr(g, "device_auth", None) + if ctx is None: + log("server", f"device auth context missing for {request.path}", _context_hint()) + return ctx + + @blueprint.route("/api/agent/heartbeat", methods=["POST"]) + @require_device_auth(auth_manager) + def heartbeat(): + ctx = _auth_context() + if ctx is None: + return jsonify({"error": "auth_context_missing"}), 500 + payload = request.get_json(force=True, silent=True) or {} + context_label = _context_hint(ctx) + + now_ts = int(time.time()) + updates: Dict[str, Optional[str]] = {"last_seen": now_ts} + + hostname = payload.get("hostname") + if isinstance(hostname, str) and hostname.strip(): + updates["hostname"] = hostname.strip() + + inventory = payload.get("inventory") if isinstance(payload.get("inventory"), dict) else {} + for key in 
("memory", "network", "software", "storage", "cpu"): + if key in inventory and inventory[key] is not None: + encoded = _json_or_none(inventory[key]) + if encoded is not None: + updates[key] = encoded + + metrics = payload.get("metrics") if isinstance(payload.get("metrics"), dict) else {} + def _maybe_str(field: str) -> Optional[str]: + val = metrics.get(field) + if isinstance(val, str): + return val.strip() + return None + + if "last_user" in metrics and metrics["last_user"]: + updates["last_user"] = str(metrics["last_user"]) + if "operating_system" in metrics and metrics["operating_system"]: + updates["operating_system"] = str(metrics["operating_system"]) + if "uptime" in metrics and metrics["uptime"] is not None: + try: + updates["uptime"] = int(metrics["uptime"]) + except Exception: + pass + for field in ("external_ip", "internal_ip", "device_type"): + if field in payload and payload[field]: + updates[field] = str(payload[field]) + + conn = db_conn_factory() + try: + cur = conn.cursor() + + def _apply_updates() -> int: + if not updates: + return 0 + columns = ", ".join(f"{col} = ?" for col in updates.keys()) + values = list(updates.values()) + normalized_guid = normalize_guid(ctx.guid) + selected_guid: Optional[str] = None + if normalized_guid: + cur.execute( + "SELECT guid FROM devices WHERE UPPER(guid) = ?", + (normalized_guid,), + ) + rows = cur.fetchall() + for (stored_guid,) in rows or []: + if stored_guid == ctx.guid: + selected_guid = stored_guid + break + if not selected_guid and rows: + selected_guid = rows[0][0] + target_guid = selected_guid or ctx.guid + cur.execute( + f"UPDATE devices SET {columns} WHERE guid = ?", + values + [target_guid], + ) + updated = cur.rowcount + if updated > 0 and normalized_guid and target_guid != normalized_guid: + try: + cur.execute( + "UPDATE devices SET guid = ? 
WHERE guid = ?", + (normalized_guid, target_guid), + ) + except sqlite3.IntegrityError: + pass + return updated + + try: + rowcount = _apply_updates() + except sqlite3.IntegrityError as exc: + if "devices.hostname" in str(exc) and "UNIQUE" in str(exc).upper(): + # Another device already claims this hostname; keep the existing + # canonical hostname assigned during enrollment to avoid breaking + # the unique constraint and continue updating the remaining fields. + existing_guid_for_hostname: Optional[str] = None + if "hostname" in updates: + try: + cur.execute( + "SELECT guid FROM devices WHERE hostname = ?", + (updates["hostname"],), + ) + row = cur.fetchone() + if row and row[0]: + existing_guid_for_hostname = normalize_guid(row[0]) + except Exception: + existing_guid_for_hostname = None + if "hostname" in updates: + updates.pop("hostname", None) + try: + rowcount = _apply_updates() + except sqlite3.IntegrityError: + raise + else: + try: + current_guid = normalize_guid(ctx.guid) + except Exception: + current_guid = ctx.guid + if ( + existing_guid_for_hostname + and current_guid + and existing_guid_for_hostname == current_guid + ): + pass # Same device contexts; no log needed. 
+ else: + log( + "server", + "heartbeat hostname collision ignored for guid=" + f"{ctx.guid}", + context_label, + ) + else: + raise + + if rowcount == 0: + log("server", f"heartbeat missing device record guid={ctx.guid}", context_label) + return jsonify({"error": "device_not_registered"}), 404 + conn.commit() + finally: + conn.close() + + return jsonify({"status": "ok", "poll_after_ms": 15000}) + + @blueprint.route("/api/agent/script/request", methods=["POST"]) + @require_device_auth(auth_manager) + def script_request(): + ctx = _auth_context() + if ctx is None: + return jsonify({"error": "auth_context_missing"}), 500 + if ctx.status != "active": + return jsonify( + { + "status": "quarantined", + "poll_after_ms": 60000, + "sig_alg": "ed25519", + "signing_key": script_signer.public_base64_spki(), + } + ) + + # Placeholder: actual dispatch logic will integrate with job scheduler. + return jsonify( + { + "status": "idle", + "poll_after_ms": 30000, + "sig_alg": "ed25519", + "signing_key": script_signer.public_base64_spki(), + } + ) + + app.register_blueprint(blueprint) diff --git a/Data/Server/Modules/auth/__init__.py b/Data/Server/Modules/auth/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Data/Server/Modules/auth/__init__.py @@ -0,0 +1 @@ + diff --git a/Data/Server/Modules/auth/device_auth.py b/Data/Server/Modules/auth/device_auth.py new file mode 100644 index 00000000..64600a60 --- /dev/null +++ b/Data/Server/Modules/auth/device_auth.py @@ -0,0 +1,310 @@ +from __future__ import annotations + +import functools +import sqlite3 +import time +from contextlib import closing +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Any, Callable, Dict, Optional + +import jwt +from flask import g, jsonify, request + +from Modules.auth.dpop import DPoPValidator, DPoPVerificationError, DPoPReplayError +from Modules.auth.rate_limit import SlidingWindowRateLimiter +from Modules.guid_utils import 
normalize_guid + +AGENT_CONTEXT_HEADER = "X-Borealis-Agent-Context" + + +def _canonical_context(value: Optional[str]) -> Optional[str]: + if not value: + return None + cleaned = "".join(ch for ch in str(value) if ch.isalnum() or ch in ("_", "-")) + if not cleaned: + return None + return cleaned.upper() + + +@dataclass +class DeviceAuthContext: + guid: str + ssl_key_fingerprint: str + token_version: int + access_token: str + claims: Dict[str, Any] + dpop_jkt: Optional[str] + status: str + service_mode: Optional[str] + + +class DeviceAuthError(Exception): + status_code = 401 + error_code = "unauthorized" + + def __init__( + self, + message: str = "unauthorized", + *, + status_code: Optional[int] = None, + retry_after: Optional[float] = None, + ): + super().__init__(message) + if status_code is not None: + self.status_code = status_code + self.message = message + self.retry_after = retry_after + + +class DeviceAuthManager: + def __init__( + self, + *, + db_conn_factory: Callable[[], Any], + jwt_service, + dpop_validator: Optional[DPoPValidator], + log: Callable[[str, str, Optional[str]], None], + rate_limiter: Optional[SlidingWindowRateLimiter] = None, + ) -> None: + self._db_conn_factory = db_conn_factory + self._jwt_service = jwt_service + self._dpop_validator = dpop_validator + self._log = log + self._rate_limiter = rate_limiter + + def authenticate(self) -> DeviceAuthContext: + auth_header = request.headers.get("Authorization", "") + if not auth_header.startswith("Bearer "): + raise DeviceAuthError("missing_authorization") + token = auth_header[len("Bearer ") :].strip() + if not token: + raise DeviceAuthError("missing_authorization") + + try: + claims = self._jwt_service.decode(token) + except jwt.ExpiredSignatureError: + raise DeviceAuthError("token_expired") + except Exception: + raise DeviceAuthError("invalid_token") + + raw_guid = str(claims.get("guid") or "").strip() + guid = normalize_guid(raw_guid) + fingerprint = str(claims.get("ssl_key_fingerprint") or 
"").lower().strip() + token_version = int(claims.get("token_version") or 0) + if not guid or not fingerprint or token_version <= 0: + raise DeviceAuthError("invalid_claims") + + if self._rate_limiter: + decision = self._rate_limiter.check(f"fp:{fingerprint}", 60, 60.0) + if not decision.allowed: + raise DeviceAuthError( + "rate_limited", + status_code=429, + retry_after=decision.retry_after, + ) + + context_label = _canonical_context(request.headers.get(AGENT_CONTEXT_HEADER)) + + with closing(self._db_conn_factory()) as conn: + cur = conn.cursor() + cur.execute( + """ + SELECT guid, ssl_key_fingerprint, token_version, status + FROM devices + WHERE UPPER(guid) = ? + """, + (guid,), + ) + rows = cur.fetchall() + row = None + for candidate in rows or []: + candidate_guid = normalize_guid(candidate[0]) + if candidate_guid == guid: + row = candidate + break + if row is None and rows: + row = rows[0] + + if not row: + row = self._recover_device_record( + conn, guid, fingerprint, token_version, context_label + ) + + if not row: + raise DeviceAuthError("device_not_found", status_code=403) + + db_guid, db_fp, db_token_version, status = row + db_guid_normalized = normalize_guid(db_guid) + + if not db_guid_normalized or db_guid_normalized != guid: + raise DeviceAuthError("device_guid_mismatch", status_code=403) + + db_fp = (db_fp or "").lower().strip() + if db_fp and db_fp != fingerprint: + raise DeviceAuthError("fingerprint_mismatch", status_code=403) + + if db_token_version and db_token_version > token_version: + raise DeviceAuthError("token_version_revoked", status_code=401) + + status_normalized = (status or "active").strip().lower() + allowed_statuses = {"active", "quarantined"} + if status_normalized not in allowed_statuses: + raise DeviceAuthError("device_revoked", status_code=403) + if status_normalized == "quarantined": + self._log( + "server", + f"device {guid} is quarantined; limited access for {request.path}", + context_label, + ) + + dpop_jkt: Optional[str] = 
None + dpop_proof = request.headers.get("DPoP") + if dpop_proof: + if not self._dpop_validator: + raise DeviceAuthError("dpop_not_supported", status_code=400) + try: + htu = request.url + dpop_jkt = self._dpop_validator.verify(request.method, htu, dpop_proof, token) + except DPoPReplayError: + raise DeviceAuthError("dpop_replayed", status_code=400) + except DPoPVerificationError: + raise DeviceAuthError("dpop_invalid", status_code=400) + + ctx = DeviceAuthContext( + guid=guid, + ssl_key_fingerprint=fingerprint, + token_version=token_version, + access_token=token, + claims=claims, + dpop_jkt=dpop_jkt, + status=status_normalized, + service_mode=context_label, + ) + return ctx + + def _recover_device_record( + self, + conn: sqlite3.Connection, + guid: str, + fingerprint: str, + token_version: int, + context_label: Optional[str], + ) -> Optional[tuple]: + """Attempt to recreate a missing device row for an authenticated token.""" + + guid = normalize_guid(guid) + fingerprint = (fingerprint or "").strip() + if not guid or not fingerprint: + return None + + cur = conn.cursor() + now_ts = int(time.time()) + try: + now_iso = datetime.now(tz=timezone.utc).isoformat() + except Exception: + now_iso = datetime.utcnow().isoformat() # pragma: no cover + + base_hostname = f"RECOVERED-{guid[:12].upper()}" if guid else "RECOVERED" + + for attempt in range(6): + hostname = base_hostname if attempt == 0 else f"{base_hostname}-{attempt}" + try: + cur.execute( + """ + INSERT INTO devices ( + guid, + hostname, + created_at, + last_seen, + ssl_key_fingerprint, + token_version, + status, + key_added_at + ) + VALUES (?, ?, ?, ?, ?, ?, 'active', ?) + """, + ( + guid, + hostname, + now_ts, + now_ts, + fingerprint, + max(token_version or 1, 1), + now_iso, + ), + ) + except sqlite3.IntegrityError as exc: + # Hostname collision – try again with a suffixed placeholder. 
+ message = str(exc).lower() + if "hostname" in message and "unique" in message: + continue + self._log( + "server", + f"device auth failed to recover guid={guid} due to integrity error: {exc}", + context_label, + ) + conn.rollback() + return None + except Exception as exc: # pragma: no cover - defensive logging + self._log( + "server", + f"device auth unexpected error recovering guid={guid}: {exc}", + context_label, + ) + conn.rollback() + return None + else: + conn.commit() + break + else: + # Exhausted attempts because of hostname collisions. + self._log( + "server", + f"device auth could not recover guid={guid}; hostname collisions persisted", + context_label, + ) + conn.rollback() + return None + + cur.execute( + """ + SELECT guid, ssl_key_fingerprint, token_version, status + FROM devices + WHERE guid = ? + """, + (guid,), + ) + row = cur.fetchone() + if not row: + self._log( + "server", + f"device auth recovery for guid={guid} committed but row still missing", + context_label, + ) + return row + + +def require_device_auth(manager: DeviceAuthManager): + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + ctx = manager.authenticate() + except DeviceAuthError as exc: + response = jsonify({"error": exc.message}) + response.status_code = exc.status_code + retry_after = getattr(exc, "retry_after", None) + if retry_after: + try: + response.headers["Retry-After"] = str(max(1, int(retry_after))) + except Exception: + response.headers["Retry-After"] = "1" + return response + + g.device_auth = ctx + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/Data/Server/Modules/auth/dpop.py b/Data/Server/Modules/auth/dpop.py new file mode 100644 index 00000000..1049b9ab --- /dev/null +++ b/Data/Server/Modules/auth/dpop.py @@ -0,0 +1,109 @@ +""" +DPoP proof verification helpers. 
+""" + +from __future__ import annotations + +import hashlib +import time +from threading import Lock +from typing import Dict, Optional + +import jwt + +_DP0P_MAX_SKEW = 300.0 # seconds + + +class DPoPVerificationError(Exception): + pass + + +class DPoPReplayError(DPoPVerificationError): + pass + + +class DPoPValidator: + def __init__(self) -> None: + self._observed_jti: Dict[str, float] = {} + self._lock = Lock() + + def verify( + self, + method: str, + htu: str, + proof: str, + access_token: Optional[str] = None, + ) -> str: + """ + Verify the presented DPoP proof. Returns the JWK thumbprint on success. + """ + + if not proof: + raise DPoPVerificationError("DPoP proof missing") + + try: + header = jwt.get_unverified_header(proof) + except Exception as exc: + raise DPoPVerificationError("invalid DPoP header") from exc + + jwk = header.get("jwk") + alg = header.get("alg") + if not jwk or not isinstance(jwk, dict): + raise DPoPVerificationError("missing jwk in DPoP header") + if alg not in ("EdDSA", "ES256", "ES384", "ES512"): + raise DPoPVerificationError(f"unsupported DPoP alg {alg}") + + try: + key = jwt.PyJWK(jwk) + public_key = key.key + except Exception as exc: + raise DPoPVerificationError("invalid jwk in DPoP header") from exc + + try: + claims = jwt.decode( + proof, + public_key, + algorithms=[alg], + options={"require": ["htm", "htu", "jti", "iat"]}, + ) + except Exception as exc: + raise DPoPVerificationError("invalid DPoP signature") from exc + + htm = claims.get("htm") + proof_htu = claims.get("htu") + jti = claims.get("jti") + iat = claims.get("iat") + ath = claims.get("ath") + + if not isinstance(htm, str) or htm.lower() != method.lower(): + raise DPoPVerificationError("DPoP htm mismatch") + if not isinstance(proof_htu, str) or proof_htu != htu: + raise DPoPVerificationError("DPoP htu mismatch") + if not isinstance(jti, str): + raise DPoPVerificationError("DPoP jti missing") + if not isinstance(iat, (int, float)): + raise DPoPVerificationError("DPoP 
iat missing") + + now = time.time() + if abs(now - float(iat)) > _DP0P_MAX_SKEW: + raise DPoPVerificationError("DPoP proof outside allowed skew") + + if ath and access_token: + expected_ath = jwt.utils.base64url_encode( + hashlib.sha256(access_token.encode("utf-8")).digest() + ).decode("ascii") + if expected_ath != ath: + raise DPoPVerificationError("DPoP ath mismatch") + + with self._lock: + expiry = self._observed_jti.get(jti) + if expiry and expiry > now: + raise DPoPReplayError("DPoP proof replay detected") + self._observed_jti[jti] = now + _DP0P_MAX_SKEW + # Opportunistic cleanup + stale = [key for key, exp in self._observed_jti.items() if exp <= now] + for key in stale: + self._observed_jti.pop(key, None) + + thumbprint = jwt.PyJWK(jwk).thumbprint() + return thumbprint.decode("ascii") diff --git a/Data/Server/Modules/auth/jwt_service.py b/Data/Server/Modules/auth/jwt_service.py new file mode 100644 index 00000000..ab5640b2 --- /dev/null +++ b/Data/Server/Modules/auth/jwt_service.py @@ -0,0 +1,140 @@ +""" +JWT access-token helpers backed by an Ed25519 signing key. 
+""" + +from __future__ import annotations + +import hashlib +import time +from datetime import datetime, timezone +from typing import Any, Dict, Optional + +import jwt +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ed25519 + +from Modules.runtime import ensure_runtime_dir, runtime_path + +_KEY_DIR = runtime_path("auth_keys") +_KEY_FILE = _KEY_DIR / "borealis-jwt-ed25519.key" +_LEGACY_KEY_FILE = runtime_path("keys") / "borealis-jwt-ed25519.key" + + +class JWTService: + def __init__(self, private_key: ed25519.Ed25519PrivateKey, key_id: str): + self._private_key = private_key + self._public_key = private_key.public_key() + self._key_id = key_id + + @property + def key_id(self) -> str: + return self._key_id + + def issue_access_token( + self, + guid: str, + ssl_key_fingerprint: str, + token_version: int, + expires_in: int = 900, + extra_claims: Optional[Dict[str, Any]] = None, + ) -> str: + now = int(time.time()) + payload: Dict[str, Any] = { + "sub": f"device:{guid}", + "guid": guid, + "ssl_key_fingerprint": ssl_key_fingerprint, + "token_version": int(token_version), + "iat": now, + "nbf": now, + "exp": now + int(expires_in), + } + if extra_claims: + payload.update(extra_claims) + + token = jwt.encode( + payload, + self._private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ), + algorithm="EdDSA", + headers={"kid": self._key_id}, + ) + return token + + def decode(self, token: str, *, audience: Optional[str] = None) -> Dict[str, Any]: + options = {"require": ["exp", "iat", "sub"]} + public_pem = self._public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + return jwt.decode( + token, + public_pem, + algorithms=["EdDSA"], + audience=audience, + options=options, + ) + + def public_jwk(self) -> Dict[str, Any]: + public_bytes = 
self._public_key.public_bytes( + encoding=serialization.Encoding.Raw, + format=serialization.PublicFormat.Raw, + ) + # PyJWT expects base64url without padding. + jwk_x = jwt.utils.base64url_encode(public_bytes).decode("ascii") + return {"kty": "OKP", "crv": "Ed25519", "kid": self._key_id, "alg": "EdDSA", "use": "sig", "x": jwk_x} + + +def load_service() -> JWTService: + private_key = _load_or_create_private_key() + public_bytes = private_key.public_key().public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + key_id = hashlib.sha256(public_bytes).hexdigest()[:16] + return JWTService(private_key, key_id) + + +def _load_or_create_private_key() -> ed25519.Ed25519PrivateKey: + ensure_runtime_dir("auth_keys") + _migrate_legacy_key_if_present() + + if _KEY_FILE.exists(): + with _KEY_FILE.open("rb") as fh: + return serialization.load_pem_private_key(fh.read(), password=None) + + if _LEGACY_KEY_FILE.exists(): + with _LEGACY_KEY_FILE.open("rb") as fh: + return serialization.load_pem_private_key(fh.read(), password=None) + + private_key = ed25519.Ed25519PrivateKey.generate() + pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + with _KEY_FILE.open("wb") as fh: + fh.write(pem) + try: + if _KEY_FILE.exists() and hasattr(_KEY_FILE, "chmod"): + _KEY_FILE.chmod(0o600) + except Exception: + pass + return private_key + + +def _migrate_legacy_key_if_present() -> None: + if not _LEGACY_KEY_FILE.exists() or _KEY_FILE.exists(): + return + + try: + ensure_runtime_dir("auth_keys") + try: + _LEGACY_KEY_FILE.replace(_KEY_FILE) + except Exception: + _KEY_FILE.write_bytes(_LEGACY_KEY_FILE.read_bytes()) + except Exception: + return + diff --git a/Data/Server/Modules/auth/rate_limit.py b/Data/Server/Modules/auth/rate_limit.py new file mode 100644 index 00000000..5b0c9232 --- /dev/null +++ 
b/Data/Server/Modules/auth/rate_limit.py @@ -0,0 +1,41 @@ +""" +Tiny in-memory rate limiter suitable for single-process development servers. +""" + +from __future__ import annotations + +import time +from collections import deque +from dataclasses import dataclass +from threading import Lock +from typing import Deque, Dict, Tuple + + +@dataclass +class RateLimitDecision: + allowed: bool + retry_after: float + + +class SlidingWindowRateLimiter: + def __init__(self) -> None: + self._buckets: Dict[str, Deque[float]] = {} + self._lock = Lock() + + def check(self, key: str, limit: int, window_seconds: float) -> RateLimitDecision: + now = time.monotonic() + with self._lock: + bucket = self._buckets.get(key) + if bucket is None: + bucket = deque() + self._buckets[key] = bucket + + while bucket and now - bucket[0] > window_seconds: + bucket.popleft() + + if len(bucket) >= limit: + retry_after = max(0.0, window_seconds - (now - bucket[0])) + return RateLimitDecision(False, retry_after) + + bucket.append(now) + return RateLimitDecision(True, 0.0) diff --git a/Data/Server/Modules/crypto/__init__.py b/Data/Server/Modules/crypto/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Data/Server/Modules/crypto/__init__.py @@ -0,0 +1 @@ + diff --git a/Data/Server/Modules/crypto/certificates.py b/Data/Server/Modules/crypto/certificates.py new file mode 100644 index 00000000..d5f18a61 --- /dev/null +++ b/Data/Server/Modules/crypto/certificates.py @@ -0,0 +1,372 @@ +""" +Server TLS certificate management. + +Borealis now issues a dedicated root CA and a leaf server certificate so that +agents can pin the CA without requiring a re-enrollment every time the server +certificate is refreshed. The CA is persisted alongside the server key so that +existing deployments can be upgraded in-place. 
+""" + +from __future__ import annotations + +import os +import ssl +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import Optional, Tuple + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.x509.oid import ExtendedKeyUsageOID, NameOID + +from Modules.runtime import ensure_server_certificates_dir, runtime_path, server_certificates_path + +_CERT_DIR = server_certificates_path() +_CERT_FILE = _CERT_DIR / "borealis-server-cert.pem" +_KEY_FILE = _CERT_DIR / "borealis-server-key.pem" +_BUNDLE_FILE = _CERT_DIR / "borealis-server-bundle.pem" +_CA_KEY_FILE = _CERT_DIR / "borealis-root-ca-key.pem" +_CA_CERT_FILE = _CERT_DIR / "borealis-root-ca.pem" + +_LEGACY_CERT_DIR = runtime_path("certs") +_LEGACY_CERT_FILE = _LEGACY_CERT_DIR / "borealis-server-cert.pem" +_LEGACY_KEY_FILE = _LEGACY_CERT_DIR / "borealis-server-key.pem" +_LEGACY_BUNDLE_FILE = _LEGACY_CERT_DIR / "borealis-server-bundle.pem" + +_ROOT_COMMON_NAME = "Borealis Root CA" +_ORG_NAME = "Borealis" +_ROOT_VALIDITY = timedelta(days=365 * 100) +_SERVER_VALIDITY = timedelta(days=365 * 5) + + +def ensure_certificate(common_name: str = "Borealis Server") -> Tuple[Path, Path, Path]: + """ + Ensure the root CA, server certificate, and bundle exist on disk. + + Returns (cert_path, key_path, bundle_path). 
+ """ + + ensure_server_certificates_dir() + _migrate_legacy_material_if_present() + + ca_key, ca_cert, ca_regenerated = _ensure_root_ca() + + server_cert = _load_certificate(_CERT_FILE) + needs_regen = ca_regenerated or _server_certificate_needs_regeneration(server_cert, ca_cert) + if needs_regen: + server_cert = _generate_server_certificate(common_name, ca_key, ca_cert) + + if server_cert is None: + server_cert = _generate_server_certificate(common_name, ca_key, ca_cert) + + _write_bundle(server_cert, ca_cert) + + return _CERT_FILE, _KEY_FILE, _BUNDLE_FILE + + +def _migrate_legacy_material_if_present() -> None: + # Promote legacy runtime certificates (Server/Borealis/certs) into the new location. + if not _CERT_FILE.exists() or not _KEY_FILE.exists(): + legacy_cert = _LEGACY_CERT_FILE + legacy_key = _LEGACY_KEY_FILE + if legacy_cert.exists() and legacy_key.exists(): + try: + ensure_server_certificates_dir() + if not _CERT_FILE.exists(): + _safe_copy(legacy_cert, _CERT_FILE) + if not _KEY_FILE.exists(): + _safe_copy(legacy_key, _KEY_FILE) + except Exception: + pass + + +def _ensure_root_ca() -> Tuple[ec.EllipticCurvePrivateKey, x509.Certificate, bool]: + regenerated = False + + ca_key: Optional[ec.EllipticCurvePrivateKey] = None + ca_cert: Optional[x509.Certificate] = None + + if _CA_KEY_FILE.exists() and _CA_CERT_FILE.exists(): + try: + ca_key = _load_private_key(_CA_KEY_FILE) + ca_cert = _load_certificate(_CA_CERT_FILE) + if ca_cert is not None and ca_key is not None: + expiry = _cert_not_after(ca_cert) + subject = ca_cert.subject + subject_cn = "" + try: + subject_cn = subject.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value # type: ignore[index] + except Exception: + subject_cn = "" + try: + basic = ca_cert.extensions.get_extension_for_class(x509.BasicConstraints).value # type: ignore[attr-defined] + is_ca = bool(basic.ca) + except Exception: + is_ca = False + if ( + expiry <= datetime.now(tz=timezone.utc) + or not is_ca + or subject_cn != 
_ROOT_COMMON_NAME + ): + regenerated = True + else: + regenerated = True + except Exception: + regenerated = True + else: + regenerated = True + + if regenerated or ca_key is None or ca_cert is None: + ca_key = ec.generate_private_key(ec.SECP384R1()) + public_key = ca_key.public_key() + + now = datetime.now(tz=timezone.utc) + builder = ( + x509.CertificateBuilder() + .subject_name( + x509.Name( + [ + x509.NameAttribute(NameOID.COMMON_NAME, _ROOT_COMMON_NAME), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, _ORG_NAME), + ] + ) + ) + .issuer_name( + x509.Name( + [ + x509.NameAttribute(NameOID.COMMON_NAME, _ROOT_COMMON_NAME), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, _ORG_NAME), + ] + ) + ) + .public_key(public_key) + .serial_number(x509.random_serial_number()) + .not_valid_before(now - timedelta(minutes=5)) + .not_valid_after(now + _ROOT_VALIDITY) + .add_extension(x509.BasicConstraints(ca=True, path_length=None), critical=True) + .add_extension( + x509.KeyUsage( + digital_signature=True, + content_commitment=False, + key_encipherment=False, + data_encipherment=False, + key_agreement=False, + key_cert_sign=True, + crl_sign=True, + encipher_only=False, + decipher_only=False, + ), + critical=True, + ) + .add_extension( + x509.SubjectKeyIdentifier.from_public_key(public_key), + critical=False, + ) + ) + + builder = builder.add_extension( + x509.AuthorityKeyIdentifier.from_issuer_public_key(public_key), + critical=False, + ) + + ca_cert = builder.sign(private_key=ca_key, algorithm=hashes.SHA384()) + + _CA_KEY_FILE.write_bytes( + ca_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + _CA_CERT_FILE.write_bytes(ca_cert.public_bytes(serialization.Encoding.PEM)) + + _tighten_permissions(_CA_KEY_FILE) + _tighten_permissions(_CA_CERT_FILE) + else: + regenerated = False + + return ca_key, ca_cert, regenerated + + +def 
_server_certificate_needs_regeneration( + server_cert: Optional[x509.Certificate], + ca_cert: x509.Certificate, +) -> bool: + if server_cert is None: + return True + + try: + if server_cert.issuer != ca_cert.subject: + return True + except Exception: + return True + + try: + expiry = _cert_not_after(server_cert) + if expiry <= datetime.now(tz=timezone.utc): + return True + except Exception: + return True + + try: + basic = server_cert.extensions.get_extension_for_class(x509.BasicConstraints).value # type: ignore[attr-defined] + if basic.ca: + return True + except Exception: + return True + + try: + eku = server_cert.extensions.get_extension_for_class(x509.ExtendedKeyUsage).value # type: ignore[attr-defined] + if ExtendedKeyUsageOID.SERVER_AUTH not in eku: + return True + except Exception: + return True + + return False + + +def _generate_server_certificate( + common_name: str, + ca_key: ec.EllipticCurvePrivateKey, + ca_cert: x509.Certificate, +) -> x509.Certificate: + private_key = ec.generate_private_key(ec.SECP384R1()) + public_key = private_key.public_key() + + now = datetime.now(tz=timezone.utc) + ca_expiry = _cert_not_after(ca_cert) + candidate_expiry = now + _SERVER_VALIDITY + not_after = min(ca_expiry - timedelta(days=1), candidate_expiry) + + builder = ( + x509.CertificateBuilder() + .subject_name( + x509.Name( + [ + x509.NameAttribute(NameOID.COMMON_NAME, common_name), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, _ORG_NAME), + ] + ) + ) + .issuer_name(ca_cert.subject) + .public_key(public_key) + .serial_number(x509.random_serial_number()) + .not_valid_before(now - timedelta(minutes=5)) + .not_valid_after(not_after) + .add_extension( + x509.SubjectAlternativeName( + [ + x509.DNSName("localhost"), + x509.DNSName("127.0.0.1"), + x509.DNSName("::1"), + ] + ), + critical=False, + ) + .add_extension(x509.BasicConstraints(ca=False, path_length=None), critical=True) + .add_extension( + x509.KeyUsage( + digital_signature=True, + content_commitment=False, + 
key_encipherment=False, + data_encipherment=False, + key_agreement=False, + key_cert_sign=False, + crl_sign=False, + encipher_only=False, + decipher_only=False, + ), + critical=True, + ) + .add_extension( + x509.ExtendedKeyUsage([ExtendedKeyUsageOID.SERVER_AUTH]), + critical=False, + ) + .add_extension( + x509.SubjectKeyIdentifier.from_public_key(public_key), + critical=False, + ) + .add_extension( + x509.AuthorityKeyIdentifier.from_issuer_public_key(ca_key.public_key()), + critical=False, + ) + ) + + certificate = builder.sign(private_key=ca_key, algorithm=hashes.SHA384()) + + _KEY_FILE.write_bytes( + private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + _CERT_FILE.write_bytes(certificate.public_bytes(serialization.Encoding.PEM)) + + _tighten_permissions(_KEY_FILE) + _tighten_permissions(_CERT_FILE) + + return certificate + + +def _write_bundle(server_cert: x509.Certificate, ca_cert: x509.Certificate) -> None: + try: + server_pem = server_cert.public_bytes(serialization.Encoding.PEM).decode("utf-8").strip() + ca_pem = ca_cert.public_bytes(serialization.Encoding.PEM).decode("utf-8").strip() + except Exception: + return + + bundle = f"{server_pem}\n{ca_pem}\n" + _BUNDLE_FILE.write_text(bundle, encoding="utf-8") + _tighten_permissions(_BUNDLE_FILE) + + +def _safe_copy(src: Path, dst: Path) -> None: + try: + dst.write_bytes(src.read_bytes()) + except Exception: + pass + + +def _tighten_permissions(path: Path) -> None: + try: + if os.name == "posix": + path.chmod(0o600) + except Exception: + pass + + +def _load_private_key(path: Path) -> ec.EllipticCurvePrivateKey: + with path.open("rb") as fh: + return serialization.load_pem_private_key(fh.read(), password=None) + + +def _load_certificate(path: Path) -> Optional[x509.Certificate]: + try: + return x509.load_pem_x509_certificate(path.read_bytes()) + except Exception: + return None + + +def 
_cert_not_after(cert: x509.Certificate) -> datetime: + try: + return cert.not_valid_after_utc # type: ignore[attr-defined] + except AttributeError: + value = cert.not_valid_after + if value.tzinfo is None: + return value.replace(tzinfo=timezone.utc) + return value + + +def build_ssl_context() -> ssl.SSLContext: + cert_path, key_path, bundle_path = ensure_certificate() + context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + context.minimum_version = ssl.TLSVersion.TLSv1_3 + context.load_cert_chain(certfile=str(bundle_path), keyfile=str(key_path)) + return context + + +def certificate_paths() -> Tuple[str, str, str]: + cert_path, key_path, bundle_path = ensure_certificate() + return str(cert_path), str(key_path), str(bundle_path) diff --git a/Data/Server/Modules/crypto/keys.py b/Data/Server/Modules/crypto/keys.py new file mode 100644 index 00000000..d3e6e1b7 --- /dev/null +++ b/Data/Server/Modules/crypto/keys.py @@ -0,0 +1,71 @@ +""" +Utility helpers for working with Ed25519 keys and fingerprints. +""" + +from __future__ import annotations + +import base64 +import hashlib +import re +from typing import Tuple + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.serialization import load_der_public_key +from cryptography.hazmat.primitives.asymmetric import ed25519 + + +def generate_ed25519_keypair() -> Tuple[ed25519.Ed25519PrivateKey, bytes]: + """ + Generate a new Ed25519 keypair. + + Returns the private key object and the public key encoded as SubjectPublicKeyInfo DER bytes. + """ + + private_key = ed25519.Ed25519PrivateKey.generate() + public_key = private_key.public_key().public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + return private_key, public_key + + +def normalize_base64(data: str) -> str: + """ + Collapse whitespace and normalise URL-safe encodings so we can reliably decode. 
+ """ + + cleaned = re.sub(r"\\s+", "", data or "") + return cleaned.replace("-", "+").replace("_", "/") + + +def spki_der_from_base64(spki_b64: str) -> bytes: + return base64.b64decode(normalize_base64(spki_b64), validate=True) + + +def base64_from_spki_der(spki_der: bytes) -> str: + return base64.b64encode(spki_der).decode("ascii") + + +def fingerprint_from_spki_der(spki_der: bytes) -> str: + digest = hashlib.sha256(spki_der).hexdigest() + return digest.lower() + + +def fingerprint_from_base64_spki(spki_b64: str) -> str: + return fingerprint_from_spki_der(spki_der_from_base64(spki_b64)) + + +def private_key_to_pem(private_key: ed25519.Ed25519PrivateKey) -> bytes: + return private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + + +def public_key_to_pem(public_spki_der: bytes) -> bytes: + public_key = load_der_public_key(public_spki_der) + return public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) diff --git a/Data/Server/Modules/crypto/signing.py b/Data/Server/Modules/crypto/signing.py new file mode 100644 index 00000000..1c6ff7b4 --- /dev/null +++ b/Data/Server/Modules/crypto/signing.py @@ -0,0 +1,125 @@ +""" +Code-signing helpers for delivering scripts to agents. 
+""" + +from __future__ import annotations + +from pathlib import Path +from typing import Tuple + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ed25519 + +from Modules.runtime import ( + ensure_server_certificates_dir, + server_certificates_path, + runtime_path, +) + +from .keys import base64_from_spki_der + +_KEY_DIR = server_certificates_path("Code-Signing") +_SIGNING_KEY_FILE = _KEY_DIR / "borealis-script-ed25519.key" +_SIGNING_PUB_FILE = _KEY_DIR / "borealis-script-ed25519.pub" +_LEGACY_KEY_FILE = runtime_path("keys") / "borealis-script-ed25519.key" +_LEGACY_PUB_FILE = runtime_path("keys") / "borealis-script-ed25519.pub" +_OLD_RUNTIME_KEY_DIR = runtime_path("script_signing_keys") +_OLD_RUNTIME_KEY_FILE = _OLD_RUNTIME_KEY_DIR / "borealis-script-ed25519.key" +_OLD_RUNTIME_PUB_FILE = _OLD_RUNTIME_KEY_DIR / "borealis-script-ed25519.pub" + + +class ScriptSigner: + def __init__(self, private_key: ed25519.Ed25519PrivateKey): + self._private = private_key + self._public = private_key.public_key() + + def sign(self, payload: bytes) -> bytes: + return self._private.sign(payload) + + def public_spki_der(self) -> bytes: + return self._public.public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + + def public_base64_spki(self) -> str: + return base64_from_spki_der(self.public_spki_der()) + + +def load_signer() -> ScriptSigner: + private_key = _load_or_create() + return ScriptSigner(private_key) + + +def _load_or_create() -> ed25519.Ed25519PrivateKey: + ensure_server_certificates_dir("Code-Signing") + _migrate_legacy_material_if_present() + + if _SIGNING_KEY_FILE.exists(): + with _SIGNING_KEY_FILE.open("rb") as fh: + return serialization.load_pem_private_key(fh.read(), password=None) + + if _LEGACY_KEY_FILE.exists(): + with _LEGACY_KEY_FILE.open("rb") as fh: + return serialization.load_pem_private_key(fh.read(), password=None) + + private_key = 
ed25519.Ed25519PrivateKey.generate() + pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + with _SIGNING_KEY_FILE.open("wb") as fh: + fh.write(pem) + try: + if hasattr(_SIGNING_KEY_FILE, "chmod"): + _SIGNING_KEY_FILE.chmod(0o600) + except Exception: + pass + + pub_der = private_key.public_key().public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + _SIGNING_PUB_FILE.write_bytes(pub_der) + + return private_key + + +def _migrate_legacy_material_if_present() -> None: + if _SIGNING_KEY_FILE.exists(): + return + + # First migrate from legacy runtime path embedded in Server runtime. + try: + if _OLD_RUNTIME_KEY_FILE.exists() and not _SIGNING_KEY_FILE.exists(): + ensure_server_certificates_dir("Code-Signing") + try: + _OLD_RUNTIME_KEY_FILE.replace(_SIGNING_KEY_FILE) + except Exception: + _SIGNING_KEY_FILE.write_bytes(_OLD_RUNTIME_KEY_FILE.read_bytes()) + if _OLD_RUNTIME_PUB_FILE.exists() and not _SIGNING_PUB_FILE.exists(): + try: + _OLD_RUNTIME_PUB_FILE.replace(_SIGNING_PUB_FILE) + except Exception: + _SIGNING_PUB_FILE.write_bytes(_OLD_RUNTIME_PUB_FILE.read_bytes()) + except Exception: + pass + + if not _LEGACY_KEY_FILE.exists() or _SIGNING_KEY_FILE.exists(): + return + + try: + ensure_server_certificates_dir("Code-Signing") + try: + _LEGACY_KEY_FILE.replace(_SIGNING_KEY_FILE) + except Exception: + _SIGNING_KEY_FILE.write_bytes(_LEGACY_KEY_FILE.read_bytes()) + + if _LEGACY_PUB_FILE.exists() and not _SIGNING_PUB_FILE.exists(): + try: + _LEGACY_PUB_FILE.replace(_SIGNING_PUB_FILE) + except Exception: + _SIGNING_PUB_FILE.write_bytes(_LEGACY_PUB_FILE.read_bytes()) + except Exception: + return diff --git a/Data/Server/Modules/db_migrations.py b/Data/Server/Modules/db_migrations.py new file mode 100644 index 00000000..1e99275e --- /dev/null +++ b/Data/Server/Modules/db_migrations.py @@ -0,0 +1,488 
@@ +""" +Database migration helpers for Borealis. + +This module centralises schema evolution so the main server module can stay +focused on request handling. The migration functions are intentionally +idempotent — they can run repeatedly without changing state once the schema +matches the desired shape. +""" + +from __future__ import annotations + +import sqlite3 +import uuid +from datetime import datetime, timezone +from typing import List, Optional, Sequence, Tuple + + +DEVICE_TABLE = "devices" + + +def apply_all(conn: sqlite3.Connection) -> None: + """ + Run all known schema migrations against the provided sqlite3 connection. + """ + + _ensure_devices_table(conn) + _ensure_device_aux_tables(conn) + _ensure_refresh_token_table(conn) + _ensure_install_code_table(conn) + _ensure_install_code_persistence_table(conn) + _ensure_device_approval_table(conn) + + conn.commit() + + +def _ensure_devices_table(conn: sqlite3.Connection) -> None: + cur = conn.cursor() + if not _table_exists(cur, DEVICE_TABLE): + _create_devices_table(cur) + return + + column_info = _table_info(cur, DEVICE_TABLE) + col_names = [c[1] for c in column_info] + pk_cols = [c[1] for c in column_info if c[5]] + + needs_rebuild = pk_cols != ["guid"] + required_columns = { + "guid": "TEXT", + "hostname": "TEXT", + "description": "TEXT", + "created_at": "INTEGER", + "agent_hash": "TEXT", + "memory": "TEXT", + "network": "TEXT", + "software": "TEXT", + "storage": "TEXT", + "cpu": "TEXT", + "device_type": "TEXT", + "domain": "TEXT", + "external_ip": "TEXT", + "internal_ip": "TEXT", + "last_reboot": "TEXT", + "last_seen": "INTEGER", + "last_user": "TEXT", + "operating_system": "TEXT", + "uptime": "INTEGER", + "agent_id": "TEXT", + "ansible_ee_ver": "TEXT", + "connection_type": "TEXT", + "connection_endpoint": "TEXT", + "ssl_key_fingerprint": "TEXT", + "token_version": "INTEGER", + "status": "TEXT", + "key_added_at": "TEXT", + } + + missing_columns = [col for col in required_columns if col not in col_names] 
+ if missing_columns: + needs_rebuild = True + + if needs_rebuild: + _rebuild_devices_table(conn, column_info) + else: + _ensure_column_defaults(cur) + + _ensure_device_indexes(cur) + + +def _ensure_device_aux_tables(conn: sqlite3.Connection) -> None: + cur = conn.cursor() + cur.execute( + """ + CREATE TABLE IF NOT EXISTS device_keys ( + id TEXT PRIMARY KEY, + guid TEXT NOT NULL, + ssl_key_fingerprint TEXT NOT NULL, + added_at TEXT NOT NULL, + retired_at TEXT + ) + """ + ) + cur.execute( + """ + CREATE UNIQUE INDEX IF NOT EXISTS uq_device_keys_guid_fingerprint + ON device_keys(guid, ssl_key_fingerprint) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_device_keys_guid + ON device_keys(guid) + """ + ) + + +def _ensure_refresh_token_table(conn: sqlite3.Connection) -> None: + cur = conn.cursor() + cur.execute( + """ + CREATE TABLE IF NOT EXISTS refresh_tokens ( + id TEXT PRIMARY KEY, + guid TEXT NOT NULL, + token_hash TEXT NOT NULL, + dpop_jkt TEXT, + created_at TEXT NOT NULL, + expires_at TEXT NOT NULL, + revoked_at TEXT, + last_used_at TEXT + ) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_refresh_tokens_guid + ON refresh_tokens(guid) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_refresh_tokens_expires_at + ON refresh_tokens(expires_at) + """ + ) + + +def _ensure_install_code_table(conn: sqlite3.Connection) -> None: + cur = conn.cursor() + cur.execute( + """ + CREATE TABLE IF NOT EXISTS enrollment_install_codes ( + id TEXT PRIMARY KEY, + code TEXT NOT NULL UNIQUE, + expires_at TEXT NOT NULL, + created_by_user_id TEXT, + used_at TEXT, + used_by_guid TEXT, + max_uses INTEGER NOT NULL DEFAULT 1, + use_count INTEGER NOT NULL DEFAULT 0, + last_used_at TEXT + ) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_eic_expires_at + ON enrollment_install_codes(expires_at) + """ + ) + + columns = {row[1] for row in _table_info(cur, "enrollment_install_codes")} + if "max_uses" not in columns: + cur.execute( + """ + 
ALTER TABLE enrollment_install_codes + ADD COLUMN max_uses INTEGER NOT NULL DEFAULT 1 + """ + ) + if "use_count" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes + ADD COLUMN use_count INTEGER NOT NULL DEFAULT 0 + """ + ) + if "last_used_at" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes + ADD COLUMN last_used_at TEXT + """ + ) + + +def _ensure_install_code_persistence_table(conn: sqlite3.Connection) -> None: + cur = conn.cursor() + cur.execute( + """ + CREATE TABLE IF NOT EXISTS enrollment_install_codes_persistent ( + id TEXT PRIMARY KEY, + code TEXT NOT NULL UNIQUE, + created_at TEXT NOT NULL, + expires_at TEXT NOT NULL, + created_by_user_id TEXT, + used_at TEXT, + used_by_guid TEXT, + max_uses INTEGER NOT NULL DEFAULT 1, + last_known_use_count INTEGER NOT NULL DEFAULT 0, + last_used_at TEXT, + is_active INTEGER NOT NULL DEFAULT 1, + archived_at TEXT, + consumed_at TEXT + ) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_eicp_active + ON enrollment_install_codes_persistent(is_active, expires_at) + """ + ) + cur.execute( + """ + CREATE UNIQUE INDEX IF NOT EXISTS uq_eicp_code + ON enrollment_install_codes_persistent(code) + """ + ) + + columns = {row[1] for row in _table_info(cur, "enrollment_install_codes_persistent")} + if "last_known_use_count" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes_persistent + ADD COLUMN last_known_use_count INTEGER NOT NULL DEFAULT 0 + """ + ) + if "archived_at" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes_persistent + ADD COLUMN archived_at TEXT + """ + ) + if "consumed_at" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes_persistent + ADD COLUMN consumed_at TEXT + """ + ) + if "is_active" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes_persistent + ADD COLUMN is_active INTEGER NOT NULL DEFAULT 1 + """ + ) + if "used_at" not in columns: + cur.execute( 
+ """ + ALTER TABLE enrollment_install_codes_persistent + ADD COLUMN used_at TEXT + """ + ) + if "used_by_guid" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes_persistent + ADD COLUMN used_by_guid TEXT + """ + ) + if "last_used_at" not in columns: + cur.execute( + """ + ALTER TABLE enrollment_install_codes_persistent + ADD COLUMN last_used_at TEXT + """ + ) + + +def _ensure_device_approval_table(conn: sqlite3.Connection) -> None: + cur = conn.cursor() + cur.execute( + """ + CREATE TABLE IF NOT EXISTS device_approvals ( + id TEXT PRIMARY KEY, + approval_reference TEXT NOT NULL UNIQUE, + guid TEXT, + hostname_claimed TEXT NOT NULL, + ssl_key_fingerprint_claimed TEXT NOT NULL, + enrollment_code_id TEXT NOT NULL, + status TEXT NOT NULL, + client_nonce TEXT NOT NULL, + server_nonce TEXT NOT NULL, + agent_pubkey_der BLOB NOT NULL, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL, + approved_by_user_id TEXT + ) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_da_status + ON device_approvals(status) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_da_fp_status + ON device_approvals(ssl_key_fingerprint_claimed, status) + """ + ) + + +def _create_devices_table(cur: sqlite3.Cursor) -> None: + cur.execute( + """ + CREATE TABLE devices ( + guid TEXT PRIMARY KEY, + hostname TEXT, + description TEXT, + created_at INTEGER, + agent_hash TEXT, + memory TEXT, + network TEXT, + software TEXT, + storage TEXT, + cpu TEXT, + device_type TEXT, + domain TEXT, + external_ip TEXT, + internal_ip TEXT, + last_reboot TEXT, + last_seen INTEGER, + last_user TEXT, + operating_system TEXT, + uptime INTEGER, + agent_id TEXT, + ansible_ee_ver TEXT, + connection_type TEXT, + connection_endpoint TEXT, + ssl_key_fingerprint TEXT, + token_version INTEGER DEFAULT 1, + status TEXT DEFAULT 'active', + key_added_at TEXT + ) + """ + ) + _ensure_device_indexes(cur) + + +def _ensure_device_indexes(cur: sqlite3.Cursor) -> None: + cur.execute( + """ 
+ CREATE UNIQUE INDEX IF NOT EXISTS uq_devices_hostname + ON devices(hostname) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_devices_ssl_key + ON devices(ssl_key_fingerprint) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_devices_status + ON devices(status) + """ + ) + + +def _ensure_column_defaults(cur: sqlite3.Cursor) -> None: + cur.execute( + """ + UPDATE devices + SET token_version = COALESCE(token_version, 1) + WHERE token_version IS NULL + """ + ) + cur.execute( + """ + UPDATE devices + SET status = COALESCE(status, 'active') + WHERE status IS NULL OR status = '' + """ + ) + + +def _rebuild_devices_table(conn: sqlite3.Connection, column_info: Sequence[Tuple]) -> None: + cur = conn.cursor() + cur.execute("PRAGMA foreign_keys=OFF") + cur.execute("BEGIN IMMEDIATE") + + cur.execute("ALTER TABLE devices RENAME TO devices_legacy") + _create_devices_table(cur) + + legacy_columns = [c[1] for c in column_info] + cur.execute(f"SELECT {', '.join(legacy_columns)} FROM devices_legacy") + rows = cur.fetchall() + + insert_sql = ( + """ + INSERT OR REPLACE INTO devices ( + guid, hostname, description, created_at, agent_hash, memory, + network, software, storage, cpu, device_type, domain, external_ip, + internal_ip, last_reboot, last_seen, last_user, operating_system, + uptime, agent_id, ansible_ee_ver, connection_type, connection_endpoint, + ssl_key_fingerprint, token_version, status, key_added_at + ) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ """ + ) + + for row in rows: + record = dict(zip(legacy_columns, row)) + guid = _normalized_guid(record.get("guid")) + if not guid: + guid = str(uuid.uuid4()) + hostname = record.get("hostname") + created_at = record.get("created_at") + key_added_at = record.get("key_added_at") + if key_added_at is None: + key_added_at = _default_key_added_at(created_at) + + params: Tuple = ( + guid, + hostname, + record.get("description"), + created_at, + record.get("agent_hash"), + record.get("memory"), + record.get("network"), + record.get("software"), + record.get("storage"), + record.get("cpu"), + record.get("device_type"), + record.get("domain"), + record.get("external_ip"), + record.get("internal_ip"), + record.get("last_reboot"), + record.get("last_seen"), + record.get("last_user"), + record.get("operating_system"), + record.get("uptime"), + record.get("agent_id"), + record.get("ansible_ee_ver"), + record.get("connection_type"), + record.get("connection_endpoint"), + record.get("ssl_key_fingerprint"), + record.get("token_version") or 1, + record.get("status") or "active", + key_added_at, + ) + cur.execute(insert_sql, params) + + cur.execute("DROP TABLE devices_legacy") + cur.execute("COMMIT") + cur.execute("PRAGMA foreign_keys=ON") + + +def _default_key_added_at(created_at: Optional[int]) -> Optional[str]: + if created_at: + try: + dt = datetime.fromtimestamp(int(created_at), tz=timezone.utc) + return dt.isoformat() + except Exception: + pass + return datetime.now(tz=timezone.utc).isoformat() + + +def _table_exists(cur: sqlite3.Cursor, name: str) -> bool: + cur.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?", + (name,), + ) + return cur.fetchone() is not None + + +def _table_info(cur: sqlite3.Cursor, name: str) -> List[Tuple]: + cur.execute(f"PRAGMA table_info({name})") + return cur.fetchall() + + +def _normalized_guid(value: Optional[str]) -> str: + if not value: + return "" + return str(value).strip() diff --git 
a/Data/Server/Modules/enrollment/__init__.py b/Data/Server/Modules/enrollment/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Data/Server/Modules/enrollment/__init__.py @@ -0,0 +1 @@ + diff --git a/Data/Server/Modules/enrollment/nonce_store.py b/Data/Server/Modules/enrollment/nonce_store.py new file mode 100644 index 00000000..bcdb962e --- /dev/null +++ b/Data/Server/Modules/enrollment/nonce_store.py @@ -0,0 +1,35 @@ +""" +Short-lived nonce cache to defend against replay attacks during enrollment. +""" + +from __future__ import annotations + +import time +from threading import Lock +from typing import Dict + + +class NonceCache: + def __init__(self, ttl_seconds: float = 300.0) -> None: + self._ttl = ttl_seconds + self._entries: Dict[str, float] = {} + self._lock = Lock() + + def consume(self, key: str) -> bool: + """ + Attempt to consume the nonce identified by `key`. + + Returns True on first use within TTL, False if already consumed. + """ + + now = time.monotonic() + with self._lock: + expire_at = self._entries.get(key) + if expire_at and expire_at > now: + return False + self._entries[key] = now + self._ttl + # Opportunistic cleanup to keep the dict small + stale = [nonce for nonce, expiry in self._entries.items() if expiry <= now] + for nonce in stale: + self._entries.pop(nonce, None) + return True diff --git a/Data/Server/Modules/enrollment/routes.py b/Data/Server/Modules/enrollment/routes.py new file mode 100644 index 00000000..948bd33b --- /dev/null +++ b/Data/Server/Modules/enrollment/routes.py @@ -0,0 +1,759 @@ +from __future__ import annotations + +import base64 +import secrets +import sqlite3 +import uuid +from datetime import datetime, timezone, timedelta +import time +from typing import Any, Callable, Dict, Optional, Tuple + +AGENT_CONTEXT_HEADER = "X-Borealis-Agent-Context" + + +def _canonical_context(value: Optional[str]) -> Optional[str]: + if not value: + return None + cleaned = "".join(ch for ch in str(value) if 
ch.isalnum() or ch in ("_", "-")) + if not cleaned: + return None + return cleaned.upper() + +from flask import Blueprint, jsonify, request + +from Modules.auth.rate_limit import SlidingWindowRateLimiter +from Modules.crypto import keys as crypto_keys +from Modules.enrollment.nonce_store import NonceCache +from Modules.guid_utils import normalize_guid +from cryptography.hazmat.primitives import serialization + + +def register( + app, + *, + db_conn_factory: Callable[[], sqlite3.Connection], + log: Callable[[str, str, Optional[str]], None], + jwt_service, + tls_bundle_path: str, + ip_rate_limiter: SlidingWindowRateLimiter, + fp_rate_limiter: SlidingWindowRateLimiter, + nonce_cache: NonceCache, + script_signer, +) -> None: + blueprint = Blueprint("enrollment", __name__) + + def _now() -> datetime: + return datetime.now(tz=timezone.utc) + + def _iso(dt: datetime) -> str: + return dt.isoformat() + + def _remote_addr() -> str: + forwarded = request.headers.get("X-Forwarded-For") + if forwarded: + return forwarded.split(",")[0].strip() + addr = request.remote_addr or "unknown" + return addr.strip() + + def _signing_key_b64() -> str: + if not script_signer: + return "" + try: + return script_signer.public_base64_spki() + except Exception: + return "" + + def _rate_limited( + key: str, + limiter: SlidingWindowRateLimiter, + limit: int, + window_s: float, + context_hint: Optional[str], + ): + decision = limiter.check(key, limit, window_s) + if not decision.allowed: + log( + "server", + f"enrollment rate limited key={key} limit={limit}/{window_s}s retry_after={decision.retry_after:.2f}", + context_hint, + ) + response = jsonify({"error": "rate_limited", "retry_after": decision.retry_after}) + response.status_code = 429 + response.headers["Retry-After"] = f"{int(decision.retry_after) or 1}" + return response + return None + + def _load_install_code(cur: sqlite3.Cursor, code_value: str) -> Optional[Dict[str, Any]]: + cur.execute( + """ + SELECT id, + code, + expires_at, + 
used_at, + used_by_guid, + max_uses, + use_count, + last_used_at + FROM enrollment_install_codes + WHERE code = ? + """, + (code_value,), + ) + row = cur.fetchone() + if not row: + return None + keys = [ + "id", + "code", + "expires_at", + "used_at", + "used_by_guid", + "max_uses", + "use_count", + "last_used_at", + ] + record = dict(zip(keys, row)) + return record + + def _install_code_valid( + record: Dict[str, Any], fingerprint: str, cur: sqlite3.Cursor + ) -> Tuple[bool, Optional[str]]: + if not record: + return False, None + expires_at = record.get("expires_at") + if not isinstance(expires_at, str): + return False, None + try: + expiry = datetime.fromisoformat(expires_at) + except Exception: + return False, None + if expiry <= _now(): + return False, None + try: + max_uses = int(record.get("max_uses") or 1) + except Exception: + max_uses = 1 + if max_uses < 1: + max_uses = 1 + try: + use_count = int(record.get("use_count") or 0) + except Exception: + use_count = 0 + if use_count < max_uses: + return True, None + + guid = normalize_guid(record.get("used_by_guid")) + if not guid: + return False, None + cur.execute( + "SELECT ssl_key_fingerprint FROM devices WHERE UPPER(guid) = ?", + (guid,), + ) + row = cur.fetchone() + if not row: + return False, None + stored_fp = (row[0] or "").strip().lower() + if not stored_fp: + return False, None + if stored_fp == (fingerprint or "").strip().lower(): + return True, guid + return False, None + + def _normalize_host(hostname: str, guid: str, cur: sqlite3.Cursor) -> str: + guid_norm = normalize_guid(guid) + base = (hostname or "").strip() or guid_norm + base = base[:253] + candidate = base + suffix = 1 + while True: + cur.execute( + "SELECT guid FROM devices WHERE hostname = ?", + (candidate,), + ) + row = cur.fetchone() + if not row: + return candidate + existing_guid = normalize_guid(row[0]) + if existing_guid == guid_norm: + return candidate + candidate = f"{base}-{suffix}" + suffix += 1 + if suffix > 50: + return 
guid_norm + + def _store_device_key(cur: sqlite3.Cursor, guid: str, fingerprint: str) -> None: + guid_norm = normalize_guid(guid) + added_at = _iso(_now()) + cur.execute( + """ + INSERT OR IGNORE INTO device_keys (id, guid, ssl_key_fingerprint, added_at) + VALUES (?, ?, ?, ?) + """, + (str(uuid.uuid4()), guid_norm, fingerprint, added_at), + ) + cur.execute( + """ + UPDATE device_keys + SET retired_at = ? + WHERE guid = ? + AND ssl_key_fingerprint != ? + AND retired_at IS NULL + """, + (_iso(_now()), guid_norm, fingerprint), + ) + + def _ensure_device_record(cur: sqlite3.Cursor, guid: str, hostname: str, fingerprint: str) -> Dict[str, Any]: + guid_norm = normalize_guid(guid) + cur.execute( + """ + SELECT guid, hostname, token_version, status, ssl_key_fingerprint, key_added_at + FROM devices + WHERE UPPER(guid) = ? + """, + (guid_norm,), + ) + row = cur.fetchone() + if row: + keys = [ + "guid", + "hostname", + "token_version", + "status", + "ssl_key_fingerprint", + "key_added_at", + ] + record = dict(zip(keys, row)) + record["guid"] = normalize_guid(record.get("guid")) + stored_fp = (record.get("ssl_key_fingerprint") or "").strip().lower() + new_fp = (fingerprint or "").strip().lower() + if not stored_fp and new_fp: + cur.execute( + "UPDATE devices SET ssl_key_fingerprint = ?, key_added_at = ? WHERE guid = ?", + (fingerprint, _iso(_now()), record["guid"]), + ) + record["ssl_key_fingerprint"] = fingerprint + elif new_fp and stored_fp != new_fp: + now_iso = _iso(_now()) + try: + current_version = int(record.get("token_version") or 1) + except Exception: + current_version = 1 + new_version = max(current_version + 1, 1) + cur.execute( + """ + UPDATE devices + SET ssl_key_fingerprint = ?, + key_added_at = ?, + token_version = ?, + status = 'active' + WHERE guid = ? + """, + (fingerprint, now_iso, new_version, record["guid"]), + ) + cur.execute( + """ + UPDATE refresh_tokens + SET revoked_at = ? + WHERE guid = ? 
+ AND revoked_at IS NULL + """, + (now_iso, record["guid"]), + ) + record["ssl_key_fingerprint"] = fingerprint + record["token_version"] = new_version + record["status"] = "active" + record["key_added_at"] = now_iso + return record + + resolved_hostname = _normalize_host(hostname, guid_norm, cur) + created_at = int(time.time()) + key_added_at = _iso(_now()) + cur.execute( + """ + INSERT INTO devices ( + guid, hostname, created_at, last_seen, ssl_key_fingerprint, + token_version, status, key_added_at + ) + VALUES (?, ?, ?, ?, ?, 1, 'active', ?) + """, + ( + guid_norm, + resolved_hostname, + created_at, + created_at, + fingerprint, + key_added_at, + ), + ) + return { + "guid": guid_norm, + "hostname": resolved_hostname, + "token_version": 1, + "status": "active", + "ssl_key_fingerprint": fingerprint, + "key_added_at": key_added_at, + } + + def _hash_refresh_token(token: str) -> str: + import hashlib + + return hashlib.sha256(token.encode("utf-8")).hexdigest() + + def _issue_refresh_token(cur: sqlite3.Cursor, guid: str) -> Dict[str, Any]: + token = secrets.token_urlsafe(48) + now = _now() + expires_at = now.replace(microsecond=0) + timedelta(days=30) + cur.execute( + """ + INSERT INTO refresh_tokens (id, guid, token_hash, created_at, expires_at) + VALUES (?, ?, ?, ?, ?) 
+ """, + ( + str(uuid.uuid4()), + guid, + _hash_refresh_token(token), + _iso(now), + _iso(expires_at), + ), + ) + return {"token": token, "expires_at": expires_at} + + @blueprint.route("/api/agent/enroll/request", methods=["POST"]) + def enrollment_request(): + remote = _remote_addr() + context_hint = _canonical_context(request.headers.get(AGENT_CONTEXT_HEADER)) + + rate_error = _rate_limited(f"ip:{remote}", ip_rate_limiter, 40, 60.0, context_hint) + if rate_error: + return rate_error + + payload = request.get_json(force=True, silent=True) or {} + hostname = str(payload.get("hostname") or "").strip() + enrollment_code = str(payload.get("enrollment_code") or "").strip() + agent_pubkey_b64 = payload.get("agent_pubkey") + client_nonce_b64 = payload.get("client_nonce") + + log( + "server", + "enrollment request received " + f"ip={remote} hostname={hostname or ''} code_mask={_mask_code(enrollment_code)} " + f"pubkey_len={len(agent_pubkey_b64 or '')} nonce_len={len(client_nonce_b64 or '')}", + context_hint, + ) + + if not hostname: + log("server", f"enrollment rejected missing_hostname ip={remote}", context_hint) + return jsonify({"error": "hostname_required"}), 400 + if not enrollment_code: + log("server", f"enrollment rejected missing_code ip={remote} host={hostname}", context_hint) + return jsonify({"error": "enrollment_code_required"}), 400 + if not isinstance(agent_pubkey_b64, str): + log("server", f"enrollment rejected missing_pubkey ip={remote} host={hostname}", context_hint) + return jsonify({"error": "agent_pubkey_required"}), 400 + if not isinstance(client_nonce_b64, str): + log("server", f"enrollment rejected missing_nonce ip={remote} host={hostname}", context_hint) + return jsonify({"error": "client_nonce_required"}), 400 + + try: + agent_pubkey_der = crypto_keys.spki_der_from_base64(agent_pubkey_b64) + except Exception: + log("server", f"enrollment rejected invalid_pubkey ip={remote} host={hostname}", context_hint) + return jsonify({"error": 
"invalid_agent_pubkey"}), 400 + + if len(agent_pubkey_der) < 10: + log("server", f"enrollment rejected short_pubkey ip={remote} host={hostname}", context_hint) + return jsonify({"error": "invalid_agent_pubkey"}), 400 + + try: + client_nonce_bytes = base64.b64decode(client_nonce_b64, validate=True) + except Exception: + log("server", f"enrollment rejected invalid_nonce ip={remote} host={hostname}", context_hint) + return jsonify({"error": "invalid_client_nonce"}), 400 + if len(client_nonce_bytes) < 16: + log("server", f"enrollment rejected short_nonce ip={remote} host={hostname}", context_hint) + return jsonify({"error": "invalid_client_nonce"}), 400 + + fingerprint = crypto_keys.fingerprint_from_spki_der(agent_pubkey_der) + rate_error = _rate_limited(f"fp:{fingerprint}", fp_rate_limiter, 12, 60.0, context_hint) + if rate_error: + return rate_error + + conn = db_conn_factory() + try: + cur = conn.cursor() + install_code = _load_install_code(cur, enrollment_code) + valid_code, reuse_guid = _install_code_valid(install_code, fingerprint, cur) + if not valid_code: + log( + "server", + "enrollment request invalid_code " + f"host={hostname} fingerprint={fingerprint[:12]} code_mask={_mask_code(enrollment_code)}", + context_hint, + ) + return jsonify({"error": "invalid_enrollment_code"}), 400 + + approval_reference: str + record_id: str + server_nonce_bytes = secrets.token_bytes(32) + server_nonce_b64 = base64.b64encode(server_nonce_bytes).decode("ascii") + now = _iso(_now()) + + cur.execute( + """ + SELECT id, approval_reference + FROM device_approvals + WHERE ssl_key_fingerprint_claimed = ? + AND status = 'pending' + """, + (fingerprint,), + ) + existing = cur.fetchone() + if existing: + record_id = existing[0] + approval_reference = existing[1] + cur.execute( + """ + UPDATE device_approvals + SET hostname_claimed = ?, + guid = ?, + enrollment_code_id = ?, + client_nonce = ?, + server_nonce = ?, + agent_pubkey_der = ?, + updated_at = ? + WHERE id = ? 
+ """, + ( + hostname, + reuse_guid, + install_code["id"], + client_nonce_b64, + server_nonce_b64, + agent_pubkey_der, + now, + record_id, + ), + ) + else: + record_id = str(uuid.uuid4()) + approval_reference = str(uuid.uuid4()) + cur.execute( + """ + INSERT INTO device_approvals ( + id, approval_reference, guid, hostname_claimed, + ssl_key_fingerprint_claimed, enrollment_code_id, + status, client_nonce, server_nonce, agent_pubkey_der, + created_at, updated_at + ) + VALUES (?, ?, ?, ?, ?, ?, 'pending', ?, ?, ?, ?, ?) + """, + ( + record_id, + approval_reference, + reuse_guid, + hostname, + fingerprint, + install_code["id"], + client_nonce_b64, + server_nonce_b64, + agent_pubkey_der, + now, + now, + ), + ) + + conn.commit() + finally: + conn.close() + + response = { + "status": "pending", + "approval_reference": approval_reference, + "server_nonce": server_nonce_b64, + "poll_after_ms": 3000, + "server_certificate": _load_tls_bundle(tls_bundle_path), + "signing_key": _signing_key_b64(), + } + log( + "server", + f"enrollment request queued fingerprint={fingerprint[:12]} host={hostname} ip={remote}", + context_hint, + ) + return jsonify(response) + + @blueprint.route("/api/agent/enroll/poll", methods=["POST"]) + def enrollment_poll(): + payload = request.get_json(force=True, silent=True) or {} + approval_reference = payload.get("approval_reference") + client_nonce_b64 = payload.get("client_nonce") + proof_sig_b64 = payload.get("proof_sig") + context_hint = _canonical_context(request.headers.get(AGENT_CONTEXT_HEADER)) + + log( + "server", + "enrollment poll received " + f"ref={approval_reference} client_nonce_len={len(client_nonce_b64 or '')}" + f" proof_sig_len={len(proof_sig_b64 or '')}", + context_hint, + ) + + if not isinstance(approval_reference, str) or not approval_reference: + log("server", "enrollment poll rejected missing_reference", context_hint) + return jsonify({"error": "approval_reference_required"}), 400 + if not isinstance(client_nonce_b64, str): + 
log("server", f"enrollment poll rejected missing_nonce ref={approval_reference}", context_hint) + return jsonify({"error": "client_nonce_required"}), 400 + if not isinstance(proof_sig_b64, str): + log("server", f"enrollment poll rejected missing_sig ref={approval_reference}", context_hint) + return jsonify({"error": "proof_sig_required"}), 400 + + try: + client_nonce_bytes = base64.b64decode(client_nonce_b64, validate=True) + except Exception: + log("server", f"enrollment poll invalid_client_nonce ref={approval_reference}", context_hint) + return jsonify({"error": "invalid_client_nonce"}), 400 + + try: + proof_sig = base64.b64decode(proof_sig_b64, validate=True) + except Exception: + log("server", f"enrollment poll invalid_sig ref={approval_reference}", context_hint) + return jsonify({"error": "invalid_proof_sig"}), 400 + + conn = db_conn_factory() + try: + cur = conn.cursor() + cur.execute( + """ + SELECT id, guid, hostname_claimed, ssl_key_fingerprint_claimed, + enrollment_code_id, status, client_nonce, server_nonce, + agent_pubkey_der, created_at, updated_at, approved_by_user_id + FROM device_approvals + WHERE approval_reference = ? 
+ """, + (approval_reference,), + ) + row = cur.fetchone() + if not row: + log("server", f"enrollment poll unknown_reference ref={approval_reference}", context_hint) + return jsonify({"status": "unknown"}), 404 + + ( + record_id, + guid, + hostname_claimed, + fingerprint, + enrollment_code_id, + status, + client_nonce_stored, + server_nonce_b64, + agent_pubkey_der, + created_at, + updated_at, + approved_by, + ) = row + + if client_nonce_stored != client_nonce_b64: + log("server", f"enrollment poll nonce_mismatch ref={approval_reference}", context_hint) + return jsonify({"error": "nonce_mismatch"}), 400 + + try: + server_nonce_bytes = base64.b64decode(server_nonce_b64, validate=True) + except Exception: + log("server", f"enrollment poll invalid_server_nonce ref={approval_reference}", context_hint) + return jsonify({"error": "server_nonce_invalid"}), 400 + + message = server_nonce_bytes + approval_reference.encode("utf-8") + client_nonce_bytes + + try: + public_key = serialization.load_der_public_key(agent_pubkey_der) + except Exception: + log("server", f"enrollment poll pubkey_load_failed ref={approval_reference}", context_hint) + public_key = None + + if public_key is None: + log("server", f"enrollment poll invalid_pubkey ref={approval_reference}", context_hint) + return jsonify({"error": "agent_pubkey_invalid"}), 400 + + try: + public_key.verify(proof_sig, message) + except Exception: + log("server", f"enrollment poll invalid_proof ref={approval_reference}", context_hint) + return jsonify({"error": "invalid_proof"}), 400 + + if status == "pending": + log( + "server", + f"enrollment poll pending ref={approval_reference} host={hostname_claimed}" + f" fingerprint={fingerprint[:12]}", + context_hint, + ) + return jsonify({"status": "pending", "poll_after_ms": 5000}) + if status == "denied": + log( + "server", + f"enrollment poll denied ref={approval_reference} host={hostname_claimed}", + context_hint, + ) + return jsonify({"status": "denied", "reason": 
"operator_denied"}) + if status == "expired": + log( + "server", + f"enrollment poll expired ref={approval_reference} host={hostname_claimed}", + context_hint, + ) + return jsonify({"status": "expired"}) + if status == "completed": + log( + "server", + f"enrollment poll already_completed ref={approval_reference} host={hostname_claimed}", + context_hint, + ) + return jsonify({"status": "approved", "detail": "finalized"}) + + if status != "approved": + log( + "server", + f"enrollment poll unexpected_status={status} ref={approval_reference}", + context_hint, + ) + return jsonify({"status": status or "unknown"}), 400 + + nonce_key = f"{approval_reference}:{base64.b64encode(proof_sig).decode('ascii')}" + if not nonce_cache.consume(nonce_key): + log( + "server", + f"enrollment poll replay_detected ref={approval_reference} fingerprint={fingerprint[:12]}", + context_hint, + ) + return jsonify({"error": "proof_replayed"}), 409 + + # Finalize enrollment + effective_guid = normalize_guid(guid) if guid else normalize_guid(str(uuid.uuid4())) + now_iso = _iso(_now()) + + device_record = _ensure_device_record(cur, effective_guid, hostname_claimed, fingerprint) + _store_device_key(cur, effective_guid, fingerprint) + + # Mark install code used + if enrollment_code_id: + cur.execute( + "SELECT use_count, max_uses FROM enrollment_install_codes WHERE id = ?", + (enrollment_code_id,), + ) + usage_row = cur.fetchone() + try: + prior_count = int(usage_row[0]) if usage_row else 0 + except Exception: + prior_count = 0 + try: + allowed_uses = int(usage_row[1]) if usage_row else 1 + except Exception: + allowed_uses = 1 + if allowed_uses < 1: + allowed_uses = 1 + new_count = prior_count + 1 + consumed = new_count >= allowed_uses + cur.execute( + """ + UPDATE enrollment_install_codes + SET use_count = ?, + used_by_guid = ?, + last_used_at = ?, + used_at = CASE WHEN ? THEN ? ELSE used_at END + WHERE id = ? 
+ """, + ( + new_count, + effective_guid, + now_iso, + 1 if consumed else 0, + now_iso, + enrollment_code_id, + ), + ) + cur.execute( + """ + UPDATE enrollment_install_codes_persistent + SET last_known_use_count = ?, + used_by_guid = ?, + last_used_at = ?, + used_at = CASE WHEN ? THEN ? ELSE used_at END, + is_active = CASE WHEN ? THEN 0 ELSE is_active END, + consumed_at = CASE WHEN ? THEN COALESCE(consumed_at, ?) ELSE consumed_at END, + archived_at = CASE WHEN ? THEN COALESCE(archived_at, ?) ELSE archived_at END + WHERE id = ? + """, + ( + new_count, + effective_guid, + now_iso, + 1 if consumed else 0, + now_iso, + 1 if consumed else 0, + 1 if consumed else 0, + now_iso, + 1 if consumed else 0, + now_iso, + enrollment_code_id, + ), + ) + + # Update approval record with final state + cur.execute( + """ + UPDATE device_approvals + SET guid = ?, + status = 'completed', + updated_at = ? + WHERE id = ? + """, + (effective_guid, now_iso, record_id), + ) + + refresh_info = _issue_refresh_token(cur, effective_guid) + access_token = jwt_service.issue_access_token( + effective_guid, + fingerprint, + device_record.get("token_version") or 1, + ) + + conn.commit() + finally: + conn.close() + + log( + "server", + f"enrollment finalized guid={effective_guid} fingerprint={fingerprint[:12]} host={hostname_claimed}", + context_hint, + ) + return jsonify( + { + "status": "approved", + "guid": effective_guid, + "access_token": access_token, + "expires_in": 900, + "refresh_token": refresh_info["token"], + "token_type": "Bearer", + "server_certificate": _load_tls_bundle(tls_bundle_path), + "signing_key": _signing_key_b64(), + } + ) + + app.register_blueprint(blueprint) + + +def _load_tls_bundle(path: str) -> str: + try: + with open(path, "r", encoding="utf-8") as fh: + return fh.read() + except Exception: + return "" + + +def _mask_code(code: str) -> str: + if not code: + return "" + trimmed = str(code).strip() + if len(trimmed) <= 6: + return "***" + return 
f"{trimmed[:3]}***{trimmed[-3:]}" diff --git a/Data/Server/Modules/guid_utils.py b/Data/Server/Modules/guid_utils.py new file mode 100644 index 00000000..74791253 --- /dev/null +++ b/Data/Server/Modules/guid_utils.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import string +import uuid +from typing import Optional + + +def normalize_guid(value: Optional[str]) -> str: + """ + Canonicalize GUID strings so the server treats different casings/formats uniformly. + """ + candidate = (value or "").strip() + if not candidate: + return "" + candidate = candidate.strip("{}") + try: + return str(uuid.UUID(candidate)).upper() + except Exception: + cleaned = "".join(ch for ch in candidate if ch in string.hexdigits or ch == "-") + cleaned = cleaned.strip("-") + if cleaned: + try: + return str(uuid.UUID(cleaned)).upper() + except Exception: + pass + return candidate.upper() diff --git a/Data/Server/Modules/jobs/__init__.py b/Data/Server/Modules/jobs/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Data/Server/Modules/jobs/__init__.py @@ -0,0 +1 @@ + diff --git a/Data/Server/Modules/jobs/prune.py b/Data/Server/Modules/jobs/prune.py new file mode 100644 index 00000000..f86b7245 --- /dev/null +++ b/Data/Server/Modules/jobs/prune.py @@ -0,0 +1,110 @@ +from __future__ import annotations + +from datetime import datetime, timedelta, timezone +from typing import Callable, List, Optional + +import eventlet +from flask_socketio import SocketIO + + +def start_prune_job( + socketio: SocketIO, + *, + db_conn_factory: Callable[[], any], + log: Callable[[str, str, Optional[str]], None], +) -> None: + def _job_loop(): + while True: + try: + _run_once(db_conn_factory, log) + except Exception as exc: + log("server", f"prune job failure: {exc}") + eventlet.sleep(24 * 60 * 60) + + socketio.start_background_task(_job_loop) + + +def _run_once(db_conn_factory: Callable[[], any], log: Callable[[str, str, Optional[str]], None]) -> None: + now = 
datetime.now(tz=timezone.utc) + now_iso = now.isoformat() + stale_before = (now - timedelta(hours=24)).isoformat() + conn = db_conn_factory() + try: + cur = conn.cursor() + persistent_table_exists = False + try: + cur.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name='enrollment_install_codes_persistent'" + ) + persistent_table_exists = cur.fetchone() is not None + except Exception: + persistent_table_exists = False + + expired_ids: List[str] = [] + if persistent_table_exists: + cur.execute( + """ + SELECT id + FROM enrollment_install_codes + WHERE use_count = 0 + AND expires_at < ? + """, + (now_iso,), + ) + expired_ids = [str(row[0]) for row in cur.fetchall() if row and row[0]] + cur.execute( + """ + DELETE FROM enrollment_install_codes + WHERE use_count = 0 + AND expires_at < ? + """, + (now_iso,), + ) + codes_pruned = cur.rowcount or 0 + if expired_ids: + placeholders = ",".join("?" for _ in expired_ids) + try: + cur.execute( + f""" + UPDATE enrollment_install_codes_persistent + SET is_active = 0, + archived_at = COALESCE(archived_at, ?) + WHERE id IN ({placeholders}) + """, + (now_iso, *expired_ids), + ) + except Exception: + # Best-effort archival; continue if the persistence table is absent. + pass + + cur.execute( + """ + UPDATE device_approvals + SET status = 'expired', + updated_at = ? + WHERE status = 'pending' + AND ( + EXISTS ( + SELECT 1 + FROM enrollment_install_codes c + WHERE c.id = device_approvals.enrollment_code_id + AND ( + c.expires_at < ? + OR c.use_count >= c.max_uses + ) + ) + OR created_at < ? 
+ ) + """, + (now_iso, now_iso, stale_before), + ) + approvals_marked = cur.rowcount or 0 + + conn.commit() + finally: + conn.close() + + if codes_pruned: + log("server", f"prune job removed {codes_pruned} expired enrollment codes") + if approvals_marked: + log("server", f"prune job expired {approvals_marked} device approvals") diff --git a/Data/Server/Modules/runtime.py b/Data/Server/Modules/runtime.py new file mode 100644 index 00000000..40a841ff --- /dev/null +++ b/Data/Server/Modules/runtime.py @@ -0,0 +1,168 @@ +"""Utility helpers for locating runtime storage paths. + +The Borealis repository keeps the authoritative source code under ``Data/`` +so that the bootstrap scripts can copy those assets into sibling ``Server/`` +and ``Agent/`` directories for execution. Runtime artefacts such as TLS +certificates or signing keys must therefore live outside ``Data`` to avoid +polluting the template tree. This module centralises the path selection so +other modules can rely on a consistent location regardless of whether they +are executed from the copied runtime directory or directly from ``Data`` +during development. +""" + +from __future__ import annotations + +import os +from functools import lru_cache +from pathlib import Path +from typing import Optional + + +def _env_path(name: str) -> Optional[Path]: + """Return a resolved ``Path`` for the given environment variable.""" + + value = os.environ.get(name) + if not value: + return None + try: + return Path(value).expanduser().resolve() + except Exception: + return None + + +@lru_cache(maxsize=None) +def project_root() -> Path: + """Best-effort detection of the repository root.""" + + env = _env_path("BOREALIS_PROJECT_ROOT") + if env: + return env + + current = Path(__file__).resolve() + for parent in current.parents: + if (parent / "Borealis.ps1").exists() or (parent / ".git").is_dir(): + return parent + + # Fallback to the ancestor that corresponds to ``/`` when the module + # lives under ``Data/Server/Modules``. 
+ try: + return current.parents[4] + except IndexError: + return current.parent + + +@lru_cache(maxsize=None) +def server_runtime_root() -> Path: + """Location where the running server stores mutable artefacts.""" + + env = _env_path("BOREALIS_SERVER_ROOT") + if env: + return env + + root = project_root() + runtime = root / "Server" / "Borealis" + return runtime + + +def runtime_path(*parts: str) -> Path: + """Return a path relative to the server runtime root.""" + + return server_runtime_root().joinpath(*parts) + + +def ensure_runtime_dir(*parts: str) -> Path: + """Create (if required) and return a runtime directory.""" + + path = runtime_path(*parts) + path.mkdir(parents=True, exist_ok=True) + return path + + +@lru_cache(maxsize=None) +def certificates_root() -> Path: + """Base directory for persisted certificate material.""" + + env = _env_path("BOREALIS_CERTIFICATES_ROOT") or _env_path("BOREALIS_CERT_ROOT") + if env: + env.mkdir(parents=True, exist_ok=True) + return env + + root = project_root() / "Certificates" + root.mkdir(parents=True, exist_ok=True) + # Ensure expected subdirectories exist for agent and server material. 
+ try: + (root / "Server").mkdir(parents=True, exist_ok=True) + (root / "Agent").mkdir(parents=True, exist_ok=True) + except Exception: + pass + return root + + +@lru_cache(maxsize=None) +def server_certificates_root() -> Path: + """Base directory for server certificate material.""" + + env = _env_path("BOREALIS_SERVER_CERT_ROOT") + if env: + env.mkdir(parents=True, exist_ok=True) + return env + + root = certificates_root() / "Server" + root.mkdir(parents=True, exist_ok=True) + return root + + +@lru_cache(maxsize=None) +def agent_certificates_root() -> Path: + """Base directory for agent certificate material.""" + + env = _env_path("BOREALIS_AGENT_CERT_ROOT") + if env: + env.mkdir(parents=True, exist_ok=True) + return env + + root = certificates_root() / "Agent" + root.mkdir(parents=True, exist_ok=True) + return root + + +def certificates_path(*parts: str) -> Path: + """Return a path under the certificates root.""" + + return certificates_root().joinpath(*parts) + + +def ensure_certificates_dir(*parts: str) -> Path: + """Create (if required) and return a certificates subdirectory.""" + + path = certificates_path(*parts) + path.mkdir(parents=True, exist_ok=True) + return path + + +def server_certificates_path(*parts: str) -> Path: + """Return a path under the server certificates root.""" + + return server_certificates_root().joinpath(*parts) + + +def ensure_server_certificates_dir(*parts: str) -> Path: + """Create (if required) and return a server certificates subdirectory.""" + + path = server_certificates_path(*parts) + path.mkdir(parents=True, exist_ok=True) + return path + + +def agent_certificates_path(*parts: str) -> Path: + """Return a path under the agent certificates root.""" + + return agent_certificates_root().joinpath(*parts) + + +def ensure_agent_certificates_dir(*parts: str) -> Path: + """Create (if required) and return an agent certificates subdirectory.""" + + path = agent_certificates_path(*parts) + path.mkdir(parents=True, exist_ok=True) + return 
path diff --git a/Data/Server/Modules/tokens/__init__.py b/Data/Server/Modules/tokens/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Data/Server/Modules/tokens/__init__.py @@ -0,0 +1 @@ + diff --git a/Data/Server/Modules/tokens/routes.py b/Data/Server/Modules/tokens/routes.py new file mode 100644 index 00000000..80058363 --- /dev/null +++ b/Data/Server/Modules/tokens/routes.py @@ -0,0 +1,138 @@ + +from __future__ import annotations + +import hashlib +import sqlite3 +from datetime import datetime, timezone +from typing import Callable + +from flask import Blueprint, jsonify, request + +from Modules.auth.dpop import DPoPValidator, DPoPVerificationError, DPoPReplayError + + +def register( + app, + *, + db_conn_factory: Callable[[], sqlite3.Connection], + jwt_service, + dpop_validator: DPoPValidator, +) -> None: + blueprint = Blueprint("tokens", __name__) + + def _hash_token(token: str) -> str: + return hashlib.sha256(token.encode("utf-8")).hexdigest() + + def _iso_now() -> str: + return datetime.now(tz=timezone.utc).isoformat() + + def _parse_iso(ts: str) -> datetime: + return datetime.fromisoformat(ts) + + @blueprint.route("/api/agent/token/refresh", methods=["POST"]) + def refresh(): + payload = request.get_json(force=True, silent=True) or {} + guid = str(payload.get("guid") or "").strip() + refresh_token = str(payload.get("refresh_token") or "").strip() + + if not guid or not refresh_token: + return jsonify({"error": "invalid_request"}), 400 + + conn = db_conn_factory() + try: + cur = conn.cursor() + cur.execute( + """ + SELECT id, guid, token_hash, dpop_jkt, created_at, expires_at, revoked_at + FROM refresh_tokens + WHERE guid = ? + AND token_hash = ? 
+ """, + (guid, _hash_token(refresh_token)), + ) + row = cur.fetchone() + if not row: + return jsonify({"error": "invalid_refresh_token"}), 401 + + record_id, row_guid, _token_hash, stored_jkt, created_at, expires_at, revoked_at = row + if row_guid != guid: + return jsonify({"error": "invalid_refresh_token"}), 401 + if revoked_at: + return jsonify({"error": "refresh_token_revoked"}), 401 + if expires_at: + try: + if _parse_iso(expires_at) <= datetime.now(tz=timezone.utc): + return jsonify({"error": "refresh_token_expired"}), 401 + except Exception: + pass + + cur.execute( + """ + SELECT guid, ssl_key_fingerprint, token_version, status + FROM devices + WHERE guid = ? + """, + (guid,), + ) + device_row = cur.fetchone() + if not device_row: + return jsonify({"error": "device_not_found"}), 404 + + device_guid, fingerprint, token_version, status = device_row + status_norm = (status or "active").strip().lower() + if status_norm in {"revoked", "decommissioned"}: + return jsonify({"error": "device_revoked"}), 403 + + dpop_proof = request.headers.get("DPoP") + jkt = stored_jkt or "" + if dpop_proof: + try: + jkt = dpop_validator.verify(request.method, request.url, dpop_proof, access_token=None) + except DPoPReplayError: + return jsonify({"error": "dpop_replayed"}), 400 + except DPoPVerificationError: + return jsonify({"error": "dpop_invalid"}), 400 + elif stored_jkt: + # The agent does not yet emit DPoP proofs; allow recovery by clearing + # the stored binding so refreshes can succeed. This preserves + # backward compatibility while the client gains full DPoP support. 
+ try: + app.logger.warning( + "Clearing stored DPoP binding for guid=%s due to missing proof", + guid, + ) + except Exception: + pass + cur.execute( + "UPDATE refresh_tokens SET dpop_jkt = NULL WHERE id = ?", + (record_id,), + ) + + new_access_token = jwt_service.issue_access_token( + guid, + fingerprint or "", + token_version or 1, + ) + + cur.execute( + """ + UPDATE refresh_tokens + SET last_used_at = ?, + dpop_jkt = COALESCE(NULLIF(?, ''), dpop_jkt) + WHERE id = ? + """, + (_iso_now(), jkt, record_id), + ) + conn.commit() + finally: + conn.close() + + return jsonify( + { + "access_token": new_access_token, + "expires_in": 900, + "token_type": "Bearer", + } + ) + + app.register_blueprint(blueprint) diff --git a/Data/Server/Package-Borealis-Server.ps1 b/Data/Server/Package-Borealis-Server.ps1 new file mode 100644 index 00000000..26b05a60 --- /dev/null +++ b/Data/Server/Package-Borealis-Server.ps1 @@ -0,0 +1,88 @@ +#////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/Server/Package-Borealis-Server.ps1 + +# ------------- Configuration ------------- +# (all paths are made absolute via Join-Path and $scriptDir) +$scriptDir = Split-Path $MyInvocation.MyCommand.Definition -Parent +$projectRoot = Resolve-Path (Join-Path $scriptDir "..\..") # go up two levels to \Borealis +$packagingDir = Join-Path $scriptDir "Packaging_Server" +$venvDir = Join-Path $packagingDir "Pyinstaller_Virtual_Environment" +$distDir = Join-Path $packagingDir "dist" +$buildDir = Join-Path $packagingDir "build" +$specPath = $packagingDir + +$serverScript = Join-Path $scriptDir "server.py" +$outputName = "Borealis-Server" +$finalExeName = "$outputName.exe" +$requirementsPath = Join-Path $scriptDir "server-requirements.txt" +$iconPath = Join-Path $scriptDir "Borealis.ico" + +# Static assets to bundle: +# - the compiled React build under Server/web-interface/build +$staticBuildSrc = Join-Path $projectRoot "Server\web-interface\build" +$staticBuildDst = 
"web-interface/build" +# - Tesseract-OCR folder must be nested under 'Borealis/Python_API_Endpoints/Tesseract-OCR' +$ocrSrc = Join-Path $scriptDir "Python_API_Endpoints\Tesseract-OCR" +$ocrDst = "Borealis/Python_API_Endpoints/Tesseract-OCR" +$soundsSrc = Join-Path $scriptDir "Sounds" +$soundsDst = "Sounds" + +# Embedded Python shipped under Dependencies\Python\python.exe +$embeddedPython = Join-Path $projectRoot "Dependencies\Python\python.exe" + +# ------------- Prepare packaging folder ------------- +if (-Not (Test-Path $packagingDir)) { + New-Item -ItemType Directory -Path $packagingDir | Out-Null +} + +# 1) Create or upgrade virtual environment +if (-Not (Test-Path (Join-Path $venvDir "Scripts\python.exe"))) { + Write-Host "[SETUP] Creating virtual environment at $venvDir" + & $embeddedPython -m venv --upgrade-deps $venvDir +} + +# helper to invoke venv's python +$venvPy = Join-Path $venvDir "Scripts\python.exe" + +# 2) Bootstrap & upgrade pip +Write-Host "[INFO] Bootstrapping pip" +& $venvPy -m ensurepip --upgrade +& $venvPy -m pip install --upgrade pip + +# 3) Install server dependencies +Write-Host "[INFO] Installing server dependencies" +& $venvPy -m pip install -r $requirementsPath +# Ensure dnspython is available for Eventlet's greendns support +& $venvPy -m pip install dnspython + +# 4) Install PyInstaller +Write-Host "[INFO] Installing PyInstaller" +& $venvPy -m pip install pyinstaller + +# 5) Clean previous artifacts +Write-Host "[INFO] Cleaning previous artifacts" +Remove-Item -Recurse -Force $distDir, $buildDir, "$specPath\$outputName.spec" -ErrorAction SilentlyContinue + +# 6) Run PyInstaller, bundling server code and assets +# Collect all Eventlet and DNS submodules to avoid missing dynamic imports +Write-Host "[INFO] Running PyInstaller" +& $venvPy -m PyInstaller ` + --onefile ` + --name $outputName ` + --icon $iconPath ` + --collect-submodules eventlet ` + --collect-submodules dns ` + --distpath $distDir ` + --workpath $buildDir ` + --specpath 
$specPath ` + --add-data "$staticBuildSrc;$staticBuildDst" ` + --add-data "$ocrSrc;$ocrDst" ` + --add-data "$soundsSrc;$soundsDst" ` + $serverScript + +# 7) Copy the final EXE back to Data/Server +if (Test-Path (Join-Path $distDir $finalExeName)) { + Copy-Item (Join-Path $distDir $finalExeName) (Join-Path $scriptDir $finalExeName) -Force + Write-Host "[SUCCESS] Server packaged at $finalExeName" +} else { + Write-Host "[FAILURE] Packaging failed." -ForegroundColor Red +} diff --git a/Data/Server/Python_API_Endpoints/ocr_engines.py b/Data/Server/Python_API_Endpoints/ocr_engines.py new file mode 100644 index 00000000..22e9c6e2 --- /dev/null +++ b/Data/Server/Python_API_Endpoints/ocr_engines.py @@ -0,0 +1,104 @@ +#////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/Python_API_Endpoints/ocr_engines.py + +import os +import io +import sys +import base64 +import torch +import pytesseract +import easyocr +import numpy as np +import platform +from PIL import Image + +# --------------------------------------------------------------------- +# Configure cross-platform Tesseract path +# --------------------------------------------------------------------- +SYSTEM = platform.system() + +def get_tesseract_folder(): + if getattr(sys, 'frozen', False): + # PyInstaller EXE + base_path = sys._MEIPASS + return os.path.join(base_path, "Borealis", "Python_API_Endpoints", "Tesseract-OCR") + else: + # Normal Python environment + base_dir = os.path.dirname(os.path.abspath(__file__)) + return os.path.join(base_dir, "Tesseract-OCR") + +if SYSTEM == "Windows": + TESSERACT_FOLDER = get_tesseract_folder() + TESSERACT_EXE = os.path.join(TESSERACT_FOLDER, "tesseract.exe") + TESSDATA_DIR = os.path.join(TESSERACT_FOLDER, "tessdata") + + if not os.path.isfile(TESSERACT_EXE): + raise EnvironmentError(f"Missing tesseract.exe at expected path: {TESSERACT_EXE}") + + pytesseract.pytesseract.tesseract_cmd = TESSERACT_EXE + os.environ["TESSDATA_PREFIX"] = TESSDATA_DIR 
+else: + # Assume Linux/macOS with system-installed Tesseract + pytesseract.pytesseract.tesseract_cmd = "tesseract" + +# --------------------------------------------------------------------- +# EasyOCR Global Instances +# --------------------------------------------------------------------- +easyocr_reader_cpu = None +easyocr_reader_gpu = None + +def initialize_ocr_engines(): + global easyocr_reader_cpu, easyocr_reader_gpu + if easyocr_reader_cpu is None: + easyocr_reader_cpu = easyocr.Reader(['en'], gpu=False) + if easyocr_reader_gpu is None: + easyocr_reader_gpu = easyocr.Reader(['en'], gpu=torch.cuda.is_available()) + +# --------------------------------------------------------------------- +# Main OCR Handler +# --------------------------------------------------------------------- +def run_ocr_on_base64(image_b64: str, engine: str = "tesseract", backend: str = "cpu") -> list[str]: + if not image_b64: + raise ValueError("No base64 image data provided.") + + try: + raw_bytes = base64.b64decode(image_b64) + image = Image.open(io.BytesIO(raw_bytes)).convert("RGB") + except Exception as e: + raise ValueError(f"Invalid base64 image input: {e}") + + engine = engine.lower().strip() + backend = backend.lower().strip() + + if engine in ["tesseract", "tesseractocr"]: + try: + text = pytesseract.image_to_string(image, config="--psm 6 --oem 1") + except pytesseract.TesseractNotFoundError: + raise RuntimeError("Tesseract binary not found or not available on this platform.") + elif engine == "easyocr": + initialize_ocr_engines() + reader = easyocr_reader_gpu if backend == "gpu" else easyocr_reader_cpu + result = reader.readtext(np.array(image), detail=1) + + # Group by Y position (line-aware sorting) + result = sorted(result, key=lambda r: r[0][0][1]) + lines = [] + current_line = [] + last_y = None + line_threshold = 10 + + for (bbox, text, _) in result: + y = bbox[0][1] + if last_y is None or abs(y - last_y) < line_threshold: + current_line.append(text) + else: + 
lines.append(" ".join(current_line)) + current_line = [text] + last_y = y + + if current_line: + lines.append(" ".join(current_line)) + text = "\n".join(lines) + else: + raise ValueError(f"OCR engine '{engine}' not recognized.") + + return [line.strip() for line in text.splitlines() if line.strip()] diff --git a/Data/Server/Python_API_Endpoints/script_engines.py b/Data/Server/Python_API_Endpoints/script_engines.py new file mode 100644 index 00000000..7c789b19 --- /dev/null +++ b/Data/Server/Python_API_Endpoints/script_engines.py @@ -0,0 +1,57 @@ +import os +import subprocess +import sys +import platform + + +def run_powershell_script(script_path: str): + """ + Execute a PowerShell script with ExecutionPolicy Bypass. + + Returns (returncode, stdout, stderr) + """ + if not script_path or not os.path.isfile(script_path): + raise FileNotFoundError(f"Script not found: {script_path}") + + if not script_path.lower().endswith(".ps1"): + raise ValueError("run_powershell_script only accepts .ps1 files") + + system = platform.system() + + # Choose powershell binary + ps_bin = None + if system == "Windows": + # Prefer Windows PowerShell + ps_bin = os.path.expandvars(r"%SystemRoot%\\System32\\WindowsPowerShell\\v1.0\\powershell.exe") + if not os.path.isfile(ps_bin): + ps_bin = "powershell.exe" + else: + # PowerShell Core (pwsh) may exist cross-platform + ps_bin = "pwsh" + + # Build command + # -ExecutionPolicy Bypass (Windows only), -NoProfile, -File "script" + cmd = [ps_bin] + if system == "Windows": + cmd += ["-ExecutionPolicy", "Bypass"] + cmd += ["-NoProfile", "-File", script_path] + + # Hide window on Windows + creationflags = 0 + startupinfo = None + if system == "Windows": + creationflags = 0x08000000 # CREATE_NO_WINDOW + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + + proc = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + creationflags=creationflags, + 
startupinfo=startupinfo, + ) + out, err = proc.communicate() + return proc.returncode, out or "", err or "" + diff --git a/Data/Server/Sounds/Short_Beep.wav b/Data/Server/Sounds/Short_Beep.wav new file mode 100644 index 00000000..015e1f64 Binary files /dev/null and b/Data/Server/Sounds/Short_Beep.wav differ diff --git a/Data/Server/WebUI/index.html b/Data/Server/WebUI/index.html new file mode 100644 index 00000000..35c37add --- /dev/null +++ b/Data/Server/WebUI/index.html @@ -0,0 +1,22 @@ + + + + + + + + + + + + + Borealis + + + +
+ + + + + \ No newline at end of file diff --git a/Data/Server/WebUI/package.json b/Data/Server/WebUI/package.json new file mode 100644 index 00000000..49cffbb9 --- /dev/null +++ b/Data/Server/WebUI/package.json @@ -0,0 +1,50 @@ +{ + "name": "borealis-webui", + "version": "1.0.0", + "private": true, + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "@emotion/react": "11.14.0", + "@emotion/styled": "11.14.0", + "@fortawesome/fontawesome-free": "7.1.0", + "@fontsource/ibm-plex-sans": "5.0.17", + "@mui/icons-material": "7.0.2", + "@mui/material": "7.0.2", + "@mui/x-date-pickers": "8.11.3", + "@mui/x-tree-view": "8.10.0", + "ag-grid-community": "34.2.0", + "ag-grid-react": "34.2.0", + "dayjs": "1.11.18", + "normalize.css": "8.0.1", + "prismjs": "1.30.0", + "react-simple-code-editor": "0.13.1", + "react": "19.1.0", + "react-color": "2.19.3", + "react-dom": "19.1.0", + "react-resizable": "3.0.5", + "react-markdown": "8.0.6", + "reactflow": "11.11.4", + "react-simple-keyboard": "3.8.62", + "socket.io-client": "4.8.1" + }, + "devDependencies": { + "@vitejs/plugin-react": "^4.0.0", + "vite": "^5.0.0" + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/Data/Server/WebUI/public/Borealis_Logo.png b/Data/Server/WebUI/public/Borealis_Logo.png new file mode 100644 index 00000000..bf68420f Binary files /dev/null and b/Data/Server/WebUI/public/Borealis_Logo.png differ diff --git a/Data/Server/WebUI/public/Borealis_Logo_Full.png b/Data/Server/WebUI/public/Borealis_Logo_Full.png new file mode 100644 index 00000000..c800ceaa Binary files /dev/null and b/Data/Server/WebUI/public/Borealis_Logo_Full.png differ diff --git a/Data/Server/WebUI/public/favicon.ico b/Data/Server/WebUI/public/favicon.ico new file mode 100644 index 00000000..901a213f Binary files /dev/null 
and b/Data/Server/WebUI/public/favicon.ico differ diff --git a/Data/Server/WebUI/src/Access_Management/Credential_Editor.jsx b/Data/Server/WebUI/src/Access_Management/Credential_Editor.jsx new file mode 100644 index 00000000..88b293d0 --- /dev/null +++ b/Data/Server/WebUI/src/Access_Management/Credential_Editor.jsx @@ -0,0 +1,549 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { + Box, + Button, + Dialog, + DialogActions, + DialogContent, + DialogTitle, + FormControl, + InputLabel, + MenuItem, + Select, + TextField, + Typography, + IconButton, + Tooltip, + CircularProgress +} from "@mui/material"; +import UploadIcon from "@mui/icons-material/UploadFile"; +import ClearIcon from "@mui/icons-material/Clear"; + +const CREDENTIAL_TYPES = [ + { value: "machine", label: "Machine" }, + { value: "domain", label: "Domain" }, + { value: "token", label: "Token" } +]; + +const CONNECTION_TYPES = [ + { value: "ssh", label: "SSH" }, + { value: "winrm", label: "WinRM" } +]; + +const BECOME_METHODS = [ + { value: "", label: "None" }, + { value: "sudo", label: "sudo" }, + { value: "su", label: "su" }, + { value: "runas", label: "runas" }, + { value: "enable", label: "enable" } +]; + +function emptyForm() { + return { + name: "", + description: "", + site_id: "", + credential_type: "machine", + connection_type: "ssh", + username: "", + password: "", + private_key: "", + private_key_passphrase: "", + become_method: "", + become_username: "", + become_password: "" + }; +} + +function normalizeSiteId(value) { + if (value === null || typeof value === "undefined" || value === "") return ""; + const num = Number(value); + if (Number.isNaN(num)) return ""; + return String(num); +} + +export default function CredentialEditor({ + open, + mode = "create", + credential, + onClose, + onSaved +}) { + const isEdit = mode === "edit" && credential && credential.id; + const [form, setForm] = useState(emptyForm); + const [sites, setSites] = useState([]); + const [loading, 
setLoading] = useState(false); + const [error, setError] = useState(""); + const [passwordDirty, setPasswordDirty] = useState(false); + const [privateKeyDirty, setPrivateKeyDirty] = useState(false); + const [passphraseDirty, setPassphraseDirty] = useState(false); + const [becomePasswordDirty, setBecomePasswordDirty] = useState(false); + const [clearPassword, setClearPassword] = useState(false); + const [clearPrivateKey, setClearPrivateKey] = useState(false); + const [clearPassphrase, setClearPassphrase] = useState(false); + const [clearBecomePassword, setClearBecomePassword] = useState(false); + const [fetchingDetail, setFetchingDetail] = useState(false); + + const credentialId = credential?.id; + + useEffect(() => { + if (!open) return; + let canceled = false; + (async () => { + try { + const resp = await fetch("/api/sites"); + if (!resp.ok) return; + const data = await resp.json(); + if (canceled) return; + const parsed = Array.isArray(data?.sites) + ? data.sites + .filter((s) => s && s.id) + .map((s) => ({ + id: s.id, + name: s.name || `Site ${s.id}` + })) + : []; + parsed.sort((a, b) => String(a.name || "").localeCompare(String(b.name || ""))); + setSites(parsed); + } catch { + if (!canceled) setSites([]); + } + })(); + return () => { + canceled = true; + }; + }, [open]); + + useEffect(() => { + if (!open) return; + setError(""); + setPasswordDirty(false); + setPrivateKeyDirty(false); + setPassphraseDirty(false); + setBecomePasswordDirty(false); + setClearPassword(false); + setClearPrivateKey(false); + setClearPassphrase(false); + setClearBecomePassword(false); + if (isEdit && credentialId) { + const applyData = (detail) => { + const next = emptyForm(); + next.name = detail?.name || ""; + next.description = detail?.description || ""; + next.site_id = normalizeSiteId(detail?.site_id); + next.credential_type = (detail?.credential_type || "machine").toLowerCase(); + next.connection_type = (detail?.connection_type || "ssh").toLowerCase(); + next.username = 
detail?.username || ""; + next.become_method = (detail?.become_method || "").toLowerCase(); + next.become_username = detail?.become_username || ""; + setForm(next); + }; + + if (credential?.name) { + applyData(credential); + } else { + setFetchingDetail(true); + (async () => { + try { + const resp = await fetch(`/api/credentials/${credentialId}`); + if (resp.ok) { + const data = await resp.json(); + applyData(data?.credential || {}); + } + } catch { + /* ignore */ + } finally { + setFetchingDetail(false); + } + })(); + } + } else { + setForm(emptyForm()); + } + }, [open, isEdit, credentialId, credential]); + + const currentCredentialFlags = useMemo(() => ({ + hasPassword: Boolean(credential?.has_password), + hasPrivateKey: Boolean(credential?.has_private_key), + hasPrivateKeyPassphrase: Boolean(credential?.has_private_key_passphrase), + hasBecomePassword: Boolean(credential?.has_become_password) + }), [credential]); + + const disableSave = loading || fetchingDetail; + + const updateField = (key) => (event) => { + const value = event?.target?.value ?? 
""; + setForm((prev) => ({ ...prev, [key]: value })); + if (key === "password") { + setPasswordDirty(true); + setClearPassword(false); + } else if (key === "private_key") { + setPrivateKeyDirty(true); + setClearPrivateKey(false); + } else if (key === "private_key_passphrase") { + setPassphraseDirty(true); + setClearPassphrase(false); + } else if (key === "become_password") { + setBecomePasswordDirty(true); + setClearBecomePassword(false); + } + }; + + const handlePrivateKeyUpload = async (event) => { + const file = event.target.files?.[0]; + if (!file) return; + try { + const text = await file.text(); + setForm((prev) => ({ ...prev, private_key: text })); + setPrivateKeyDirty(true); + setClearPrivateKey(false); + } catch { + setError("Unable to read private key file."); + } finally { + event.target.value = ""; + } + }; + + const handleCancel = () => { + if (loading) return; + onClose && onClose(); + }; + + const validate = () => { + if (!form.name.trim()) { + setError("Credential name is required."); + return false; + } + setError(""); + return true; + }; + + const buildPayload = () => { + const payload = { + name: form.name.trim(), + description: form.description.trim(), + credential_type: (form.credential_type || "machine").toLowerCase(), + connection_type: (form.connection_type || "ssh").toLowerCase(), + username: form.username.trim(), + become_method: form.become_method.trim(), + become_username: form.become_username.trim() + }; + const siteId = normalizeSiteId(form.site_id); + if (siteId) { + payload.site_id = Number(siteId); + } else { + payload.site_id = null; + } + if (passwordDirty) { + payload.password = form.password; + } + if (privateKeyDirty) { + payload.private_key = form.private_key; + } + if (passphraseDirty) { + payload.private_key_passphrase = form.private_key_passphrase; + } + if (becomePasswordDirty) { + payload.become_password = form.become_password; + } + if (clearPassword) payload.clear_password = true; + if (clearPrivateKey) 
payload.clear_private_key = true; + if (clearPassphrase) payload.clear_private_key_passphrase = true; + if (clearBecomePassword) payload.clear_become_password = true; + return payload; + }; + + const handleSave = async () => { + if (!validate()) return; + setLoading(true); + setError(""); + const payload = buildPayload(); + try { + const resp = await fetch( + isEdit ? `/api/credentials/${credentialId}` : "/api/credentials", + { + method: isEdit ? "PUT" : "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload) + } + ); + const data = await resp.json(); + if (!resp.ok) { + throw new Error(data?.error || `Request failed (${resp.status})`); + } + onSaved && onSaved(data?.credential || null); + } catch (err) { + setError(String(err.message || err)); + } finally { + setLoading(false); + } + }; + + const title = isEdit ? "Edit Credential" : "Create Credential"; + const helperStyle = { fontSize: 12, color: "#8a8a8a", mt: 0.5 }; + + return ( + + {title} + + {fetchingDetail && ( + + + Loading credential details… + + )} + {error && ( + + {error} + + )} + + + + + Site + + + + Credential Type + + + + Connection + + + + + + + {isEdit && currentCredentialFlags.hasPassword && !passwordDirty && !clearPassword && ( + + setClearPassword(true)} sx={{ color: "#ff8080" }}> + + + + )} + + {isEdit && currentCredentialFlags.hasPassword && !passwordDirty && !clearPassword && ( + Stored password will remain unless you change or clear it. + )} + {clearPassword && ( + Password will be removed when saving. + )} + + + + + {isEdit && currentCredentialFlags.hasPrivateKey && !privateKeyDirty && !clearPrivateKey && ( + + setClearPrivateKey(true)} sx={{ color: "#ff8080" }}> + + + + )} + + {isEdit && currentCredentialFlags.hasPrivateKey && !privateKeyDirty && !clearPrivateKey && ( + Private key is stored. Upload or paste a new one to replace, or clear it. + )} + {clearPrivateKey && ( + Private key will be removed when saving. 
+ )} + + + + {isEdit && currentCredentialFlags.hasPrivateKeyPassphrase && !passphraseDirty && !clearPassphrase && ( + + setClearPassphrase(true)} sx={{ color: "#ff8080" }}> + + + + )} + + {isEdit && currentCredentialFlags.hasPrivateKeyPassphrase && !passphraseDirty && !clearPassphrase && ( + A passphrase is stored for this key. + )} + {clearPassphrase && ( + Key passphrase will be removed when saving. + )} + + + + Privilege Escalation + + + + + + + {isEdit && currentCredentialFlags.hasBecomePassword && !becomePasswordDirty && !clearBecomePassword && ( + + setClearBecomePassword(true)} sx={{ color: "#ff8080" }}> + + + + )} + + {isEdit && currentCredentialFlags.hasBecomePassword && !becomePasswordDirty && !clearBecomePassword && ( + Escalation password is stored. + )} + {clearBecomePassword && ( + Escalation password will be removed when saving. + )} + + + + + + + ); +} diff --git a/Data/Server/WebUI/src/Access_Management/Credential_List.jsx b/Data/Server/WebUI/src/Access_Management/Credential_List.jsx new file mode 100644 index 00000000..878ed8c3 --- /dev/null +++ b/Data/Server/WebUI/src/Access_Management/Credential_List.jsx @@ -0,0 +1,464 @@ +import React, { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { + Box, + Button, + IconButton, + Menu, + MenuItem, + Paper, + Typography, + CircularProgress, +} from "@mui/material"; +import MoreVertIcon from "@mui/icons-material/MoreVert"; +import AddIcon from "@mui/icons-material/Add"; +import RefreshIcon from "@mui/icons-material/Refresh"; +import LockIcon from "@mui/icons-material/Lock"; +import WifiIcon from "@mui/icons-material/Wifi"; +import ComputerIcon from "@mui/icons-material/Computer"; +import { AgGridReact } from "ag-grid-react"; +import { ModuleRegistry, AllCommunityModule, themeQuartz } from "ag-grid-community"; +import CredentialEditor from "./Credential_Editor.jsx"; +import { ConfirmDeleteDialog } from "../Dialogs.jsx"; + +ModuleRegistry.registerModules([AllCommunityModule]); + 
+const myTheme = themeQuartz.withParams({ + accentColor: "#FFA6FF", + backgroundColor: "#1f2836", + browserColorScheme: "dark", + chromeBackgroundColor: { + ref: "foregroundColor", + mix: 0.07, + onto: "backgroundColor" + }, + fontFamily: { + googleFont: "IBM Plex Sans" + }, + foregroundColor: "#FFF", + headerFontSize: 14 +}); + +const themeClassName = myTheme.themeName || "ag-theme-quartz"; +const gridFontFamily = '"IBM Plex Sans", "Helvetica Neue", Arial, sans-serif'; +const iconFontFamily = '"Quartz Regular"'; + +function formatTs(ts) { + if (!ts) return "-"; + const date = new Date(Number(ts) * 1000); + if (Number.isNaN(date?.getTime())) return "-"; + return `${date.toLocaleDateString()} ${date.toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" })}`; +} + +function titleCase(value) { + if (!value) return "-"; + const lower = String(value).toLowerCase(); + return lower.replace(/(^|\s)\w/g, (c) => c.toUpperCase()); +} + +function connectionIcon(connection) { + const val = (connection || "").toLowerCase(); + if (val === "ssh") return ; + if (val === "winrm") return ; + return ; +} + +export default function CredentialList({ isAdmin = false }) { + const [rows, setRows] = useState([]); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(""); + const [menuAnchor, setMenuAnchor] = useState(null); + const [menuRow, setMenuRow] = useState(null); + const [editorOpen, setEditorOpen] = useState(false); + const [editorMode, setEditorMode] = useState("create"); + const [editingCredential, setEditingCredential] = useState(null); + const [deleteTarget, setDeleteTarget] = useState(null); + const [deleteBusy, setDeleteBusy] = useState(false); + const gridApiRef = useRef(null); + + const openMenu = useCallback((event, row) => { + setMenuAnchor(event.currentTarget); + setMenuRow(row); + }, []); + + const closeMenu = useCallback(() => { + setMenuAnchor(null); + setMenuRow(null); + }, []); + + const connectionCellRenderer = 
useCallback((params) => { + const row = params.data || {}; + const label = titleCase(row.connection_type); + return ( + + {connectionIcon(row.connection_type)} + + {label} + + + ); + }, []); + + const actionCellRenderer = useCallback( + (params) => { + const row = params.data; + if (!row) return null; + const handleClick = (event) => { + event.preventDefault(); + event.stopPropagation(); + openMenu(event, row); + }; + return ( + + + + ); + }, + [openMenu] + ); + + const columnDefs = useMemo( + () => [ + { + headerName: "Name", + field: "name", + sort: "asc", + cellRenderer: (params) => params.value || "-" + }, + { + headerName: "Credential Type", + field: "credential_type", + valueGetter: (params) => titleCase(params.data?.credential_type) + }, + { + headerName: "Connection", + field: "connection_type", + cellRenderer: connectionCellRenderer + }, + { + headerName: "Site", + field: "site_name", + cellRenderer: (params) => params.value || "-" + }, + { + headerName: "Username", + field: "username", + cellRenderer: (params) => params.value || "-" + }, + { + headerName: "Updated", + field: "updated_at", + valueGetter: (params) => + formatTs(params.data?.updated_at || params.data?.created_at) + }, + { + headerName: "", + field: "__actions__", + minWidth: 70, + maxWidth: 80, + sortable: false, + filter: false, + resizable: false, + suppressMenu: true, + cellRenderer: actionCellRenderer, + pinned: "right" + } + ], + [actionCellRenderer, connectionCellRenderer] + ); + + const defaultColDef = useMemo( + () => ({ + sortable: true, + filter: "agTextColumnFilter", + resizable: true, + flex: 1, + minWidth: 140, + cellStyle: { + display: "flex", + alignItems: "center", + color: "#f5f7fa", + fontFamily: gridFontFamily, + fontSize: "13px" + }, + headerClass: "credential-grid-header" + }), + [] + ); + + const getRowId = useCallback( + (params) => + params.data?.id || + params.data?.name || + params.data?.username || + String(params.rowIndex ?? 
""), + [] + ); + + const fetchCredentials = useCallback(async () => { + setLoading(true); + setError(""); + try { + const resp = await fetch("/api/credentials"); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + const list = Array.isArray(data?.credentials) ? data.credentials : []; + list.sort((a, b) => String(a?.name || "").localeCompare(String(b?.name || ""))); + setRows(list); + } catch (err) { + setRows([]); + setError(String(err.message || err)); + } finally { + setLoading(false); + } + }, []); + + useEffect(() => { + fetchCredentials(); + }, [fetchCredentials]); + + const handleCreate = () => { + setEditorMode("create"); + setEditingCredential(null); + setEditorOpen(true); + }; + + const handleEdit = (row) => { + closeMenu(); + setEditorMode("edit"); + setEditingCredential(row); + setEditorOpen(true); + }; + + const handleDelete = (row) => { + closeMenu(); + setDeleteTarget(row); + }; + + const doDelete = async () => { + if (!deleteTarget?.id) return; + setDeleteBusy(true); + try { + const resp = await fetch(`/api/credentials/${deleteTarget.id}`, { method: "DELETE" }); + if (!resp.ok) { + const data = await resp.json().catch(() => ({})); + throw new Error(data?.error || `HTTP ${resp.status}`); + } + setDeleteTarget(null); + await fetchCredentials(); + } catch (err) { + setError(String(err.message || err)); + } finally { + setDeleteBusy(false); + } + }; + + const handleEditorSaved = async () => { + setEditorOpen(false); + setEditingCredential(null); + await fetchCredentials(); + }; + + const handleGridReady = useCallback((params) => { + gridApiRef.current = params.api; + }, []); + + useEffect(() => { + const api = gridApiRef.current; + if (!api) return; + if (loading) { + api.showLoadingOverlay(); + } else if (!rows.length) { + api.showNoRowsOverlay(); + } else { + api.hideOverlay(); + } + }, [loading, rows]); + + if (!isAdmin) { + return ( + + + Access denied + + + You do not have permission to manage credentials. 
+ + + ); + } + + return ( + <> + + + + + Credentials + + + Stored credentials for remote automation tasks and Ansible playbook runs. + + + + + + + + {loading && ( + + + Loading credentials… + + )} + {error && ( + + {error} + + )} + + + + + + + + + + handleEdit(menuRow)}>Edit + handleDelete(menuRow)} sx={{ color: "#ff8080" }}> + Delete + + + + { + setEditorOpen(false); + setEditingCredential(null); + }} + onSaved={handleEditorSaved} + /> + + setDeleteTarget(null)} + onConfirm={doDelete} + confirmDisabled={deleteBusy} + message={ + deleteTarget + ? `Delete credential '${deleteTarget.name || ""}'? Any jobs referencing it will require an update.` + : "" + } + /> + + ); +} diff --git a/Data/Server/WebUI/src/Access_Management/Github_API_Token.jsx b/Data/Server/WebUI/src/Access_Management/Github_API_Token.jsx new file mode 100644 index 00000000..9c4d541e --- /dev/null +++ b/Data/Server/WebUI/src/Access_Management/Github_API_Token.jsx @@ -0,0 +1,325 @@ +import React, { useCallback, useEffect, useMemo, useState } from "react"; +import { + Box, + Button, + CircularProgress, + InputAdornment, + Link, + Paper, + TextField, + Typography +} from "@mui/material"; +import RefreshIcon from "@mui/icons-material/Refresh"; +import SaveIcon from "@mui/icons-material/Save"; +import VisibilityIcon from "@mui/icons-material/Visibility"; +import VisibilityOffIcon from "@mui/icons-material/VisibilityOff"; + +const paperSx = { + m: 2, + p: 0, + bgcolor: "#1e1e1e", + color: "#f5f7fa", + display: "flex", + flexDirection: "column", + flexGrow: 1, + minWidth: 0, + minHeight: 320 +}; + +const fieldSx = { + mt: 2, + "& .MuiOutlinedInput-root": { + bgcolor: "#181818", + color: "#f5f7fa", + "& fieldset": { borderColor: "#2a2a2a" }, + "&:hover fieldset": { borderColor: "#58a6ff" }, + "&.Mui-focused fieldset": { borderColor: "#58a6ff" } + }, + "& .MuiInputLabel-root": { color: "#bbb" }, + "& .MuiInputLabel-root.Mui-focused": { color: "#7db7ff" } +}; + +export default function GithubAPIToken({ isAdmin 
= false }) { + const [loading, setLoading] = useState(false); + const [saving, setSaving] = useState(false); + const [token, setToken] = useState(""); + const [inputValue, setInputValue] = useState(""); + const [fetchError, setFetchError] = useState(""); + const [showToken, setShowToken] = useState(false); + const [verification, setVerification] = useState({ + message: "", + valid: null, + status: "", + rateLimit: null, + error: "" + }); + + const hydrate = useCallback(async () => { + setLoading(true); + setFetchError(""); + try { + const resp = await fetch("/api/github/token"); + const data = await resp.json(); + if (!resp.ok) { + throw new Error(data?.error || `HTTP ${resp.status}`); + } + const storedToken = typeof data?.token === "string" ? data.token : ""; + setToken(storedToken); + setInputValue(storedToken); + setShowToken(false); + setVerification({ + message: typeof data?.message === "string" ? data.message : "", + valid: data?.valid === true, + status: typeof data?.status === "string" ? data.status : "", + rateLimit: typeof data?.rate_limit === "number" ? data.rate_limit : null, + error: typeof data?.error === "string" ? data.error : "" + }); + } catch (err) { + const message = err && typeof err.message === "string" ? err.message : String(err); + setFetchError(message); + setToken(""); + setInputValue(""); + setVerification({ message: "", valid: null, status: "", rateLimit: null, error: "" }); + } finally { + setLoading(false); + } + }, []); + + useEffect(() => { + if (!isAdmin) return; + hydrate(); + }, [hydrate, isAdmin]); + + const handleSave = useCallback(async () => { + setSaving(true); + setFetchError(""); + try { + const resp = await fetch("/api/github/token", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ token: inputValue }) + }); + const data = await resp.json(); + if (!resp.ok) { + throw new Error(data?.error || `HTTP ${resp.status}`); + } + const storedToken = typeof data?.token === "string" ? 
data.token : ""; + setToken(storedToken); + setInputValue(storedToken); + setShowToken(false); + setVerification({ + message: typeof data?.message === "string" ? data.message : "", + valid: data?.valid === true, + status: typeof data?.status === "string" ? data.status : "", + rateLimit: typeof data?.rate_limit === "number" ? data.rate_limit : null, + error: typeof data?.error === "string" ? data.error : "" + }); + } catch (err) { + const message = err && typeof err.message === "string" ? err.message : String(err); + setFetchError(message); + } finally { + setSaving(false); + } + }, [inputValue]); + + const dirty = useMemo(() => inputValue !== token, [inputValue, token]); + + const verificationMessage = useMemo(() => { + if (dirty) { + return { text: "Token has not been saved yet — Save to verify.", color: "#f0c36d" }; + } + const message = verification.message || ""; + if (!message) { + return { text: "", color: "#bbb" }; + } + if (verification.valid) { + return { text: message, color: "#7dffac" }; + } + if ((verification.status || "").toLowerCase() === "missing") { + return { text: message, color: "#bbb" }; + } + return { text: message, color: "#ff8080" }; + }, [dirty, verification]); + + const toggleReveal = useCallback(() => { + setShowToken((prev) => !prev); + }, []); + + if (!isAdmin) { + return ( + + + Access denied + + + You do not have permission to manage the GitHub API token. + + + ); + } + + return ( + + + + + Github API Token + + + Using a Github "Personal Access Token" increases the Github API rate limits from 60/hr to 5,000/hr. This is important for production Borealis usage as it likes to hit its unauthenticated API limits sometimes despite my best efforts. +

Navigate to{' '} + + https://github.com/settings/tokens + {' '} + ❯ Personal Access Tokens ❯ Tokens (Classic) ❯ Generate New Token ❯ New Personal Access Token (Classic) +
+ +

+ + Note:{' '} + + Borealis Automation Platform + + + + Scope:{' '} + + public_repo + + + + Expiration:{' '} + + No Expiration + + +
+
+ + setInputValue(event.target.value)} + fullWidth + variant="outlined" + sx={fieldSx} + disabled={saving || loading} + type={showToken ? "text" : "password"} + InputProps={{ + endAdornment: ( + + + + + ) + }} + /> + + + + {(verificationMessage.text || (!dirty && verification.rateLimit)) && ( + + {verificationMessage.text && `${verificationMessage.text} `} + {!dirty && + verification.rateLimit && + `- Hourly Request Rate Limit: ${verification.rateLimit.toLocaleString()}`} + + )} + + + {loading && ( + + + Loading token… + + )} + + {fetchError && ( + + {fetchError} + + )} + +
+ ); +} diff --git a/Data/Server/WebUI/src/Access_Management/Users.jsx b/Data/Server/WebUI/src/Access_Management/Users.jsx new file mode 100644 index 00000000..794131fd --- /dev/null +++ b/Data/Server/WebUI/src/Access_Management/Users.jsx @@ -0,0 +1,680 @@ +import React, { useEffect, useMemo, useState, useCallback } from "react"; +import { + Paper, + Box, + Typography, + Table, + TableBody, + TableCell, + TableHead, + TableRow, + TableSortLabel, + IconButton, + Menu, + MenuItem, + Button, + Dialog, + DialogTitle, + DialogContent, + DialogContentText, + DialogActions, + TextField, + Select, + FormControl, + InputLabel, + Checkbox, + Popover +} from "@mui/material"; +import MoreVertIcon from "@mui/icons-material/MoreVert"; +import FilterListIcon from "@mui/icons-material/FilterList"; +import { ConfirmDeleteDialog } from "../Dialogs.jsx"; + +/* ---------- Formatting helpers to keep this page in lockstep with Device_List ---------- */ +const tablePaperSx = { m: 2, p: 0, bgcolor: "#1e1e1e" }; +const tableSx = { + minWidth: 820, + "& th, & td": { + color: "#ddd", + borderColor: "#2a2a2a", + fontSize: 13, + py: 0.75 + }, + "& th .MuiTableSortLabel-root": { color: "#ddd" }, + "& th .MuiTableSortLabel-root.Mui-active": { color: "#ddd" } +}; +const menuPaperSx = { bgcolor: "#1e1e1e", color: "#fff", fontSize: "13px" }; +const filterFieldSx = { + input: { color: "#fff" }, + minWidth: 220, + "& .MuiOutlinedInput-root": { + "& fieldset": { borderColor: "#555" }, + "&:hover fieldset": { borderColor: "#888" } + } +}; +/* -------------------------------------------------------------------- */ + +function formatTs(tsSec) { + if (!tsSec) return "-"; + const d = new Date((tsSec || 0) * 1000); + const date = d.toLocaleDateString("en-US", { month: "2-digit", day: "2-digit", year: "numeric" }); + const time = d.toLocaleTimeString("en-US", { hour: "numeric", minute: "2-digit" }); + return `${date} @ ${time}`; +} + +async function sha512(text) { + const enc = new TextEncoder(); + const 
data = enc.encode(text || ""); + const buf = await crypto.subtle.digest("SHA-512", data); + const arr = Array.from(new Uint8Array(buf)); + return arr.map((b) => b.toString(16).padStart(2, "0")).join(""); +} + +export default function UserManagement({ isAdmin = false }) { + const [rows, setRows] = useState([]); // {username, display_name, role, last_login} + const [orderBy, setOrderBy] = useState("username"); + const [order, setOrder] = useState("asc"); + const [menuAnchor, setMenuAnchor] = useState(null); + const [menuUser, setMenuUser] = useState(null); + const [resetOpen, setResetOpen] = useState(false); + const [resetTarget, setResetTarget] = useState(null); + const [newPassword, setNewPassword] = useState(""); + const [createOpen, setCreateOpen] = useState(false); + const [createForm, setCreateForm] = useState({ username: "", display_name: "", password: "", role: "User" }); + const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false); + const [deleteTarget, setDeleteTarget] = useState(null); + const [confirmChangeRoleOpen, setConfirmChangeRoleOpen] = useState(false); + const [changeRoleTarget, setChangeRoleTarget] = useState(null); + const [changeRoleNext, setChangeRoleNext] = useState(null); + const [warnOpen, setWarnOpen] = useState(false); + const [warnMessage, setWarnMessage] = useState(""); + const [me, setMe] = useState(null); + const [mfaBusyUser, setMfaBusyUser] = useState(null); + const [resetMfaOpen, setResetMfaOpen] = useState(false); + const [resetMfaTarget, setResetMfaTarget] = useState(null); + + // Columns and filters + const columns = useMemo(() => ([ + { id: "display_name", label: "Display Name" }, + { id: "username", label: "User Name" }, + { id: "last_login", label: "Last Login" }, + { id: "role", label: "User Role" }, + { id: "mfa_enabled", label: "MFA" }, + { id: "actions", label: "" } + ]), []); + const [filters, setFilters] = useState({}); // id -> string + const [filterAnchor, setFilterAnchor] = useState(null); // { id, anchorEl } 
+ const openFilter = (id) => (e) => setFilterAnchor({ id, anchorEl: e.currentTarget }); + const closeFilter = () => setFilterAnchor(null); + const onFilterChange = (id) => (e) => setFilters((prev) => ({ ...prev, [id]: e.target.value })); + + const fetchUsers = useCallback(async () => { + try { + const res = await fetch("/api/users", { credentials: "include" }); + const data = await res.json(); + if (Array.isArray(data?.users)) { + setRows( + data.users.map((u) => ({ + ...u, + mfa_enabled: u && typeof u.mfa_enabled !== "undefined" ? (u.mfa_enabled ? 1 : 0) : 0 + })) + ); + } else { + setRows([]); + } + } catch { + setRows([]); + } + }, []); + + useEffect(() => { + if (!isAdmin) return; + (async () => { + try { + const resp = await fetch("/api/auth/me", { credentials: "include" }); + if (resp.ok) { + const who = await resp.json(); + setMe(who); + } + } catch {} + })(); + fetchUsers(); + }, [fetchUsers, isAdmin]); + + const handleSort = (col) => { + if (orderBy === col) setOrder(order === "asc" ? "desc" : "asc"); + else { setOrderBy(col); setOrder("asc"); } + }; + + const filteredSorted = useMemo(() => { + const applyFilters = (r) => { + for (const [key, val] of Object.entries(filters || {})) { + if (!val) continue; + const needle = String(val).toLowerCase(); + let hay = ""; + if (key === "last_login") hay = String(formatTs(r.last_login)); + else hay = String(r[key] ?? ""); + if (!hay.toLowerCase().includes(needle)) return false; + } + return true; + }; + + const dir = order === "asc" ? 1 : -1; + const arr = rows.filter(applyFilters); + arr.sort((a, b) => { + if (orderBy === "last_login") return ((a.last_login || 0) - (b.last_login || 0)) * dir; + if (orderBy === "mfa_enabled") return ((a.mfa_enabled ? 1 : 0) - (b.mfa_enabled ? 1 : 0)) * dir; + return String(a[orderBy] ?? "").toLowerCase() + .localeCompare(String(b[orderBy] ?? 
"").toLowerCase()) * dir; + }); + return arr; + }, [rows, filters, orderBy, order]); + + const openMenu = (evt, user) => { + setMenuAnchor({ mouseX: evt.clientX, mouseY: evt.clientY, anchorEl: evt.currentTarget }); + setMenuUser(user); + }; + const closeMenu = () => { setMenuAnchor(null); setMenuUser(null); }; + + const confirmDelete = (user) => { + if (!user) return; + if (me && user.username && String(me.username).toLowerCase() === String(user.username).toLowerCase()) { + setWarnMessage("You cannot delete the user you are currently logged in as."); + setWarnOpen(true); + return; + } + setDeleteTarget(user); + setConfirmDeleteOpen(true); + }; + + const doDelete = async () => { + const user = deleteTarget; + setConfirmDeleteOpen(false); + if (!user) return; + try { + const resp = await fetch(`/api/users/${encodeURIComponent(user.username)}`, { method: "DELETE", credentials: "include" }); + const data = await resp.json(); + if (!resp.ok) { + setWarnMessage(data?.error || "Failed to delete user"); + setWarnOpen(true); + return; + } + await fetchUsers(); + } catch (e) { + console.error(e); + setWarnMessage("Failed to delete user"); + setWarnOpen(true); + } + }; + + const openChangeRole = (user) => { + if (!user) return; + if (me && user.username && String(me.username).toLowerCase() === String(user.username).toLowerCase()) { + setWarnMessage("You cannot change your own role."); + setWarnOpen(true); + return; + } + const nextRole = (String(user.role || "User").toLowerCase() === "admin") ? 
"User" : "Admin"; + setChangeRoleTarget(user); + setChangeRoleNext(nextRole); + setConfirmChangeRoleOpen(true); + }; + + const doChangeRole = async () => { + const user = changeRoleTarget; + const nextRole = changeRoleNext; + setConfirmChangeRoleOpen(false); + if (!user || !nextRole) return; + try { + const resp = await fetch(`/api/users/${encodeURIComponent(user.username)}/role`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + credentials: "include", + body: JSON.stringify({ role: nextRole }) + }); + const data = await resp.json(); + if (!resp.ok) { + setWarnMessage(data?.error || "Failed to change role"); + setWarnOpen(true); + return; + } + await fetchUsers(); + } catch (e) { + console.error(e); + setWarnMessage("Failed to change role"); + setWarnOpen(true); + } + }; + + const openResetMfa = (user) => { + if (!user) return; + setResetMfaTarget(user); + setResetMfaOpen(true); + }; + + const doResetMfa = async () => { + const user = resetMfaTarget; + setResetMfaOpen(false); + setResetMfaTarget(null); + if (!user) return; + const username = user.username; + const keepEnabled = Boolean(user.mfa_enabled); + setMfaBusyUser(username); + try { + const resp = await fetch(`/api/users/${encodeURIComponent(username)}/mfa`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + credentials: "include", + body: JSON.stringify({ enabled: keepEnabled, reset_secret: true }) + }); + const data = await resp.json(); + if (!resp.ok) { + setWarnMessage(data?.error || "Failed to reset MFA for this user."); + setWarnOpen(true); + return; + } + await fetchUsers(); + } catch (err) { + console.error(err); + setWarnMessage("Failed to reset MFA for this user."); + setWarnOpen(true); + } finally { + setMfaBusyUser(null); + } + }; + + const toggleMfa = async (user, enabled) => { + if (!user) return; + const previous = Boolean(user.mfa_enabled); + const nextFlag = enabled ? 
1 : 0; + setRows((prev) => + prev.map((r) => + String(r.username).toLowerCase() === String(user.username).toLowerCase() + ? { ...r, mfa_enabled: nextFlag } + : r + ) + ); + setMfaBusyUser(user.username); + try { + const resp = await fetch(`/api/users/${encodeURIComponent(user.username)}/mfa`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + credentials: "include", + body: JSON.stringify({ enabled }) + }); + const data = await resp.json(); + if (!resp.ok) { + setRows((prev) => + prev.map((r) => + String(r.username).toLowerCase() === String(user.username).toLowerCase() + ? { ...r, mfa_enabled: previous ? 1 : 0 } + : r + ) + ); + setWarnMessage(data?.error || "Failed to update MFA settings."); + setWarnOpen(true); + return; + } + await fetchUsers(); + } catch (e) { + console.error(e); + setRows((prev) => + prev.map((r) => + String(r.username).toLowerCase() === String(user.username).toLowerCase() + ? { ...r, mfa_enabled: previous ? 1 : 0 } + : r + ) + ); + setWarnMessage("Failed to update MFA settings."); + setWarnOpen(true); + } finally { + setMfaBusyUser(null); + } + }; + + const doResetPassword = async () => { + const user = resetTarget; + if (!user) return; + const pw = newPassword || ""; + if (!pw.trim()) return; + try { + const hash = await sha512(pw); + const resp = await fetch(`/api/users/${encodeURIComponent(user.username)}/reset_password`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + credentials: "include", + body: JSON.stringify({ password_sha512: hash }) + }); + const data = await resp.json(); + if (!resp.ok) { + alert(data?.error || "Failed to reset password"); + return; + } + setResetOpen(false); + setResetTarget(null); + setNewPassword(""); + } catch (e) { + console.error(e); + alert("Failed to reset password"); + } + }; + + const openReset = (user) => { + if (!user) return; + setResetTarget(user); + setResetOpen(true); + setNewPassword(""); + }; + + const openCreate = () => { setCreateOpen(true); 
setCreateForm({ username: "", display_name: "", password: "", role: "User" }); }; + const doCreate = async () => { + const u = (createForm.username || "").trim(); + const dn = (createForm.display_name || u).trim(); + const pw = (createForm.password || "").trim(); + const role = (createForm.role || "User"); + if (!u || !pw) return; + try { + const hash = await sha512(pw); + const resp = await fetch("/api/users", { + method: "POST", + headers: { "Content-Type": "application/json" }, + credentials: "include", + body: JSON.stringify({ username: u, display_name: dn, password_sha512: hash, role }) + }); + const data = await resp.json(); + if (!resp.ok) { + alert(data?.error || "Failed to create user"); + return; + } + setCreateOpen(false); + await fetchUsers(); + } catch (e) { + console.error(e); + alert("Failed to create user"); + } + }; + + if (!isAdmin) return null; + + return ( + <> + + + + + User Management + + + Manage authorized users of the Borealis Automation Platform. + + + + + + + + + {/* Leading checkbox gutter to match Devices table rhythm */} + + {columns.map((col) => ( + + {col.id !== "actions" ? ( + + handleSort(col.id)} + > + {col.label} + + + + + + ) : null} + + ))} + + + + + {filteredSorted.map((u) => ( + + {/* Body gutter to stay aligned with header */} + + {u.display_name || u.username} + {u.username} + {formatTs(u.last_login)} + {u.role || "User"} + + { + event.stopPropagation(); + toggleMfa(u, event.target.checked); + }} + onClick={(event) => event.stopPropagation()} + sx={{ + color: "#888", + "&.Mui-checked": { color: "#58a6ff" } + }} + inputProps={{ "aria-label": `Toggle MFA for ${u.username}` }} + /> + + + openMenu(e, u)} sx={{ color: "#ccc" }}> + + + + + ))} + {filteredSorted.length === 0 && ( + + + No users found. + + + )} + +
+ + {/* Filter popover (styled to match Device_List) */} + + {filterAnchor && ( + + c.id === filterAnchor.id)?.label || ""}`} + value={filters[filterAnchor.id] || ""} + onChange={onFilterChange(filterAnchor.id)} + onKeyDown={(e) => { if (e.key === "Escape") closeFilter(); }} + sx={filterFieldSx} + /> + + + )} + + + + { const u = menuUser; closeMenu(); confirmDelete(u); }} + > + Delete User + + { const u = menuUser; closeMenu(); openReset(u); }}>Reset Password + { const u = menuUser; closeMenu(); openChangeRole(u); }} + > + Change Role + + { const u = menuUser; closeMenu(); openResetMfa(u); }}> + Reset MFA + + + + setResetOpen(false)} PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }}> + Reset Password + + + Enter a new password for {resetTarget?.username}. + + setNewPassword(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + + setCreateOpen(false)} PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }}> + Create User + + setCreateForm((p) => ({ ...p, username: e.target.value }))} + sx={{ + "& .MuiOutlinedInput-root": { backgroundColor: "#2a2a2a", color: "#ccc", "& fieldset": { borderColor: "#444" }, "&:hover fieldset": { borderColor: "#666" } }, + label: { color: "#aaa" }, mt: 1 + }} + /> + setCreateForm((p) => ({ ...p, display_name: e.target.value }))} + sx={{ + "& .MuiOutlinedInput-root": { backgroundColor: "#2a2a2a", color: "#ccc", "& fieldset": { borderColor: "#444" }, "&:hover fieldset": { borderColor: "#666" } }, + label: { color: "#aaa" }, mt: 1 + }} + /> + setCreateForm((p) => ({ ...p, password: e.target.value }))} + sx={{ + "& .MuiOutlinedInput-root": { backgroundColor: "#2a2a2a", color: "#ccc", "& fieldset": { borderColor: "#444" }, "&:hover fieldset": { borderColor: "#666" } }, + label: { color: "#aaa" }, mt: 1 + }} + /> + + Role + + + + + + 
+ + +
+ + setConfirmDeleteOpen(false)} + onConfirm={doDelete} + /> + setConfirmChangeRoleOpen(false)} + onConfirm={doChangeRole} + /> + { setResetMfaOpen(false); setResetMfaTarget(null); }} + onConfirm={doResetMfa} + /> + setWarnOpen(false)} + onConfirm={() => setWarnOpen(false)} + /> + + ); +} diff --git a/Data/Server/WebUI/src/Admin/Server_Info.jsx b/Data/Server/WebUI/src/Admin/Server_Info.jsx new file mode 100644 index 00000000..d2188394 --- /dev/null +++ b/Data/Server/WebUI/src/Admin/Server_Info.jsx @@ -0,0 +1,73 @@ +import React, { useEffect, useState } from "react"; +import { Paper, Box, Typography, Button } from "@mui/material"; +import { GitHub as GitHubIcon, InfoOutlined as InfoIcon } from "@mui/icons-material"; +import { CreditsDialog } from "../Dialogs.jsx"; + +export default function ServerInfo({ isAdmin = false }) { + const [serverTime, setServerTime] = useState(null); + const [error, setError] = useState(null); + const [aboutOpen, setAboutOpen] = useState(false); + + useEffect(() => { + if (!isAdmin) return; + let isMounted = true; + const fetchTime = async () => { + try { + const resp = await fetch('/api/server/time'); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + if (isMounted) { + setServerTime(data?.display || data?.iso || null); + setError(null); + } + } catch (e) { + if (isMounted) setError(String(e)); + } + }; + fetchTime(); + const id = setInterval(fetchTime, 60000); // update once per minute + return () => { isMounted = false; clearInterval(id); }; + }, [isAdmin]); + + if (!isAdmin) return null; + + return ( + + + Server Info + Basic server information will appear here for informative and debug purposes. + + Server Time + + {error ? 
`Error: ${error}` : (serverTime || 'Loading...')} + + + + + Project Links + + + + + + + setAboutOpen(false)} /> + + ); +} diff --git a/Data/Server/WebUI/src/App.jsx b/Data/Server/WebUI/src/App.jsx new file mode 100644 index 00000000..071c5a8d --- /dev/null +++ b/Data/Server/WebUI/src/App.jsx @@ -0,0 +1,1392 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/App.jsx + +//Shared Imports +import React, { useState, useEffect, useCallback, useRef } from "react"; +import { ReactFlowProvider } from "reactflow"; +import "reactflow/dist/style.css"; +import { + CloseAllDialog, RenameTabDialog, TabContextMenu, NotAuthorizedDialog +} from "./Dialogs"; +import NavigationSidebar from "./Navigation_Sidebar"; + +// Styling Imports +import { + AppBar, Toolbar, Typography, Box, Menu, MenuItem, Button, + CssBaseline, ThemeProvider, createTheme, Breadcrumbs + } from "@mui/material"; + import { + KeyboardArrowDown as KeyboardArrowDownIcon, + Logout as LogoutIcon, + NavigateNext as NavigateNextIcon + } from "@mui/icons-material"; + import ClickAwayListener from "@mui/material/ClickAwayListener"; + import SearchIcon from "@mui/icons-material/Search"; + import ArrowDropDownIcon from "@mui/icons-material/ArrowDropDown"; + import ArrowDropUpIcon from "@mui/icons-material/ArrowDropUp"; + +// Workflow Editor Imports +import FlowTabs from "./Flow_Editor/Flow_Tabs"; +import FlowEditor from "./Flow_Editor/Flow_Editor"; +import NodeSidebar from "./Flow_Editor/Node_Sidebar"; +import StatusBar from "./Status_Bar"; + +// Borealis Page Imports +import Login from "./Login.jsx"; +import SiteList from "./Sites/Site_List"; +import DeviceList from "./Devices/Device_List"; +import DeviceDetails from "./Devices/Device_Details"; +import AgentDevices from "./Devices/Agent_Devices.jsx"; +import SSHDevices from "./Devices/SSH_Devices.jsx"; +import WinRMDevices from "./Devices/WinRM_Devices.jsx"; +import AssemblyList from "./Assemblies/Assembly_List"; +import 
AssemblyEditor from "./Assemblies/Assembly_Editor"; +import ScheduledJobsList from "./Scheduling/Scheduled_Jobs_List"; +import CreateJob from "./Scheduling/Create_Job.jsx"; +import CredentialList from "./Access_Management/Credential_List.jsx"; +import UserManagement from "./Access_Management/Users.jsx"; +import GithubAPIToken from "./Access_Management/Github_API_Token.jsx"; +import ServerInfo from "./Admin/Server_Info.jsx"; +import EnrollmentCodes from "./Devices/Enrollment_Codes.jsx"; +import DeviceApprovals from "./Devices/Device_Approvals.jsx"; + +// Networking Imports +import { io } from "socket.io-client"; +if (!window.BorealisSocket) { + window.BorealisSocket = io(window.location.origin, { transports: ["websocket"] }); +} +if (!window.BorealisUpdateRate) { + window.BorealisUpdateRate = 200; +} + +/////////////////////////////////////////////////////////////////////////////////////////////////// + +// Load node modules dynamically +const modules = import.meta.glob('./Nodes/**/*.jsx', { eager: true }); +const nodeTypes = {}; +const categorizedNodes = {}; +Object.entries(modules).forEach(([path, mod]) => { + const comp = mod.default; + if (!comp) return; + const { type, component } = comp; + if (!type || !component) return; + const parts = path.replace('./Nodes/', '').split('/'); + const category = parts[0]; + if (!categorizedNodes[category]) categorizedNodes[category] = []; + categorizedNodes[category].push(comp); + nodeTypes[type] = component; +}); + +const darkTheme = createTheme({ + palette: { + mode: "dark", + background: { default: "#121212", paper: "#1e1e1e" }, + text: { primary: "#ffffff" } + }, + components: { + MuiTooltip: { + styleOverrides: { + tooltip: { backgroundColor: "#2a2a2a", color: "#ccc", fontSize: "0.75rem", border: "1px solid #444" }, + arrow: { color: "#2a2a2a" } + } + } + } +}); + +const LOCAL_STORAGE_KEY = "borealis_persistent_state"; + + export default function App() { + const [tabs, setTabs] = useState([{ id: "flow_1", tab_name: "Flow 
1", nodes: [], edges: [] }]); + const [activeTabId, setActiveTabId] = useState("flow_1"); + const [currentPage, setCurrentPageState] = useState("devices"); + const [selectedDevice, setSelectedDevice] = useState(null); + + const [userMenuAnchorEl, setUserMenuAnchorEl] = useState(null); + const [confirmCloseOpen, setConfirmCloseOpen] = useState(false); + const [renameDialogOpen, setRenameDialogOpen] = useState(false); + const [renameTabId, setRenameTabId] = useState(null); + const [renameValue, setRenameValue] = useState(""); + const [tabMenuAnchor, setTabMenuAnchor] = useState(null); + const [tabMenuTabId, setTabMenuTabId] = useState(null); + const fileInputRef = useRef(null); + const [user, setUser] = useState(null); + const [userRole, setUserRole] = useState(null); + const [userDisplayName, setUserDisplayName] = useState(null); + const [editingJob, setEditingJob] = useState(null); + const [jobsRefreshToken, setJobsRefreshToken] = useState(0); + const [assemblyEditorState, setAssemblyEditorState] = useState(null); // { path, mode, context, nonce } + const [sessionResolved, setSessionResolved] = useState(false); + const initialPathRef = useRef(window.location.pathname + window.location.search); + const pendingPathRef = useRef(null); + const [notAuthorizedOpen, setNotAuthorizedOpen] = useState(false); + + // Top-bar search state + const SEARCH_CATEGORIES = [ + { key: "hostname", label: "Hostname", scope: "device", placeholder: "Search Hostname" }, + { key: "internal_ip", label: "Internal IP", scope: "device", placeholder: "Search Internal IP" }, + { key: "external_ip", label: "External IP", scope: "device", placeholder: "Search External IP" }, + { key: "description", label: "Description", scope: "device", placeholder: "Search Description" }, + { key: "last_user", label: "Last User", scope: "device", placeholder: "Search Last User" }, + { key: "serial_number", label: "Serial Number (Soon)", scope: "device", placeholder: "Search Serial Number" }, + { key: "site_name", 
label: "Site Name", scope: "site", placeholder: "Search Site Name" }, + { key: "site_description", label: "Site Description", scope: "site", placeholder: "Search Site Description" }, + ]; + const [searchCategory, setSearchCategory] = useState("hostname"); + const [searchOpen, setSearchOpen] = useState(false); + const [searchQuery, setSearchQuery] = useState(""); + const [searchMenuEl, setSearchMenuEl] = useState(null); + const [suggestions, setSuggestions] = useState({ devices: [], sites: [], q: "", field: "" }); + const searchAnchorRef = useRef(null); + const searchDebounceRef = useRef(null); + + // Gentle highlight helper for matched substrings + const highlightText = useCallback((text, query) => { + const t = String(text ?? ""); + const q = String(query ?? "").trim(); + if (!q) return t; + try { + const esc = q.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + const re = new RegExp(`(${esc})`, "ig"); + const parts = t.split(re); + return parts.map((part, i) => + part.toLowerCase() === q.toLowerCase() + ? ( + {part} + ) + : {part} + ); + } catch { + return t; + } + }, []); + + const pageToPath = useCallback( + (page, options = {}) => { + switch (page) { + case "login": + return "/login"; + case "sites": + return "/sites"; + case "devices": + return "/devices"; + case "agent_devices": + return "/devices/agent"; + case "ssh_devices": + return "/devices/ssh"; + case "winrm_devices": + return "/devices/winrm"; + case "device_details": { + const device = + options.device || + selectedDevice || + (options.deviceId + ? 
{ agent_guid: options.deviceId, hostname: options.deviceName || options.deviceId } + : null); + const deviceId = + device?.agent_guid || + device?.guid || + device?.summary?.agent_guid || + device?.hostname || + device?.id; + if (deviceId) { + return `/device/${encodeURIComponent(deviceId)}`; + } + return "/devices"; + } + case "jobs": + return "/scheduling"; + case "create_job": + return "/scheduling/create_job"; + case "workflows": + return "/workflows"; + case "workflow-editor": + return "/workflows/editor"; + case "assemblies": + return "/assemblies"; + case "scripts": + case "ansible_editor": { + const mode = page === "ansible_editor" ? "ansible" : "scripts"; + const params = new URLSearchParams(); + if (mode === "ansible") { + params.set("mode", "ansible"); + } + const state = options.assemblyState || assemblyEditorState; + if (state?.path) { + params.set("path", state.path); + } + const query = params.toString(); + return query ? `/assemblies/editor?${query}` : "/assemblies/editor"; + } + case "access_credentials": + return "/access_management/credentials"; + case "access_github_token": + return "/access_management/github_token"; + case "access_users": + return "/access_management/users"; + case "server_info": + return "/admin/server_info"; + case "admin_enrollment_codes": + return "/admin/enrollment-codes"; + case "admin_device_approvals": + return "/admin/device-approvals"; + default: + return "/devices"; + } + }, + [assemblyEditorState, selectedDevice] + ); + + const interpretPath = useCallback((rawPath) => { + try { + const url = new URL(rawPath || "/", window.location.origin); + let path = url.pathname || "/"; + if (path.length > 1 && path.endsWith("/")) { + path = path.slice(0, -1); + } + const segments = path.split("/").filter(Boolean); + const params = url.searchParams; + + if (path === "/login") return { page: "login", options: {} }; + if (path === "/" || path === "") return { page: "devices", options: {} }; + if (path === "/devices") return { page: 
"devices", options: {} }; + if (path === "/devices/agent") return { page: "agent_devices", options: {} }; + if (path === "/devices/ssh") return { page: "ssh_devices", options: {} }; + if (path === "/devices/winrm") return { page: "winrm_devices", options: {} }; + if (segments[0] === "device" && segments[1]) { + const id = decodeURIComponent(segments[1]); + return { + page: "device_details", + options: { device: { agent_guid: id, hostname: id } } + }; + } + if (path === "/sites") return { page: "sites", options: {} }; + if (path === "/scheduling") return { page: "jobs", options: {} }; + if (path === "/scheduling/create_job") return { page: "create_job", options: {} }; + if (path === "/workflows") return { page: "workflows", options: {} }; + if (path === "/workflows/editor") return { page: "workflow-editor", options: {} }; + if (path === "/assemblies") return { page: "assemblies", options: {} }; + if (path === "/assemblies/editor") { + const mode = params.get("mode"); + const relPath = params.get("path") || ""; + const state = relPath + ? { path: relPath, mode: mode === "ansible" ? "ansible" : "scripts", nonce: Date.now() } + : null; + return { + page: mode === "ansible" ? "ansible_editor" : "scripts", + options: state ? 
{ assemblyState: state } : {} + }; + } + if (path === "/access_management/users") return { page: "access_users", options: {} }; + if (path === "/access_management/github_token") return { page: "access_github_token", options: {} }; + if (path === "/access_management/credentials") return { page: "access_credentials", options: {} }; + if (path === "/admin/server_info") return { page: "server_info", options: {} }; + if (path === "/admin/enrollment-codes") return { page: "admin_enrollment_codes", options: {} }; + if (path === "/admin/device-approvals") return { page: "admin_device_approvals", options: {} }; + return { page: "devices", options: {} }; + } catch { + return { page: "devices", options: {} }; + } + }, []); + + const updateStateForPage = useCallback( + (page, options = {}) => { + setCurrentPageState(page); + if (page === "device_details") { + if (options.device) { + setSelectedDevice(options.device); + } else if (options.deviceId) { + const fallbackId = options.deviceId; + const fallbackName = options.deviceName || options.deviceId; + setSelectedDevice((prev) => { + const prevId = prev?.agent_guid || prev?.guid || prev?.hostname || ""; + if (prevId === fallbackId || prevId === fallbackName) { + return prev; + } + return { agent_guid: fallbackId, hostname: fallbackName }; + }); + } + } else if (!options.preserveDevice) { + setSelectedDevice(null); + } + + if ((page === "scripts" || page === "ansible_editor") && options.assemblyState) { + setAssemblyEditorState(options.assemblyState); + } + }, + [setAssemblyEditorState, setCurrentPageState, setSelectedDevice] + ); + + const navigateTo = useCallback( + (page, options = {}) => { + const { replace = false, allowUnauthenticated = false, suppressPending = false } = options; + const targetPath = pageToPath(page, options); + + if (!allowUnauthenticated && !user && page !== "login") { + if (!suppressPending && targetPath) { + pendingPathRef.current = targetPath; + } + updateStateForPage("login", {}); + const loginPath = 
"/login"; + const method = replace ? "replaceState" : "pushState"; + const current = window.location.pathname + window.location.search; + if (replace || current !== loginPath) { + window.history[method]({}, "", loginPath); + } + return; + } + + if (page === "login") { + updateStateForPage("login", {}); + const loginPath = "/login"; + const method = replace ? "replaceState" : "pushState"; + const current = window.location.pathname + window.location.search; + if (replace || current !== loginPath) { + window.history[method]({}, "", loginPath); + } + return; + } + + pendingPathRef.current = null; + updateStateForPage(page, options); + + if (targetPath) { + const method = replace ? "replaceState" : "pushState"; + const current = window.location.pathname + window.location.search; + if (replace || current !== targetPath) { + window.history[method]({}, "", targetPath); + } + } + }, + [pageToPath, updateStateForPage, user] + ); + + const navigateByPath = useCallback( + (path, { replace = false, allowUnauthenticated = false } = {}) => { + const { page, options } = interpretPath(path); + navigateTo(page, { ...(options || {}), replace, allowUnauthenticated }); + }, + [interpretPath, navigateTo] + ); + + const navigateToRef = useRef(navigateTo); + const navigateByPathRef = useRef(navigateByPath); + + useEffect(() => { + navigateToRef.current = navigateTo; + navigateByPathRef.current = navigateByPath; + }, [navigateTo, navigateByPath]); + + // Build breadcrumb items for current view + const breadcrumbs = React.useMemo(() => { + const items = []; + switch (currentPage) { + case "sites": + items.push({ label: "Sites", page: "sites" }); + items.push({ label: "Site List", page: "sites" }); + break; + case "devices": + items.push({ label: "Inventory", page: "devices" }); + items.push({ label: "Devices", page: "devices" }); + break; + case "device_details": + items.push({ label: "Devices", page: "devices" }); + items.push({ label: "Device List", page: "devices" }); + items.push({ 
label: "Device Details" }); + break; + case "jobs": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Scheduled Jobs", page: "jobs" }); + break; + case "create_job": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Scheduled Jobs", page: "jobs" }); + items.push({ label: editingJob ? "Edit Job" : "Create Job", page: "create_job" }); + break; + case "workflows": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Workflows", page: "workflows" }); + break; + case "workflow-editor": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Workflows", page: "workflows" }); + items.push({ label: "Flow Editor" }); + break; + case "scripts": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Scripts", page: "scripts" }); + break; + case "ansible_editor": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Ansible Playbooks", page: "assemblies" }); + items.push({ label: "Playbook Editor" }); + break; + case "assemblies": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Assemblies", page: "assemblies" }); + break; + case "community": + items.push({ label: "Automation", page: "jobs" }); + items.push({ label: "Community Content", page: "community" }); + break; + case "agent_devices": + items.push({ label: "Inventory", page: "devices" }); + items.push({ label: "Devices", page: "devices" }); + items.push({ label: "Agent Devices", page: "agent_devices" }); + break; + case "ssh_devices": + items.push({ label: "Inventory", page: "devices" }); + items.push({ label: "Devices", page: "devices" }); + items.push({ label: "SSH Devices", page: "ssh_devices" }); + break; + case "winrm_devices": + items.push({ label: "Inventory", page: "devices" }); + items.push({ label: "Devices", page: "devices" }); + items.push({ label: "WinRM Devices", page: "winrm_devices" }); + break; + case "access_credentials": + items.push({ 
label: "Access Management", page: "access_credentials" }); + items.push({ label: "Credentials", page: "access_credentials" }); + break; + case "access_github_token": + items.push({ label: "Access Management", page: "access_credentials" }); + items.push({ label: "GitHub API Token", page: "access_github_token" }); + break; + case "access_users": + items.push({ label: "Access Management", page: "access_credentials" }); + items.push({ label: "Users", page: "access_users" }); + break; + case "server_info": + items.push({ label: "Admin Settings" }); + items.push({ label: "Server Info", page: "server_info" }); + break; + case "admin_enrollment_codes": + items.push({ label: "Admin Settings", page: "server_info" }); + items.push({ label: "Installer Codes", page: "admin_enrollment_codes" }); + break; + case "admin_device_approvals": + items.push({ label: "Admin Settings", page: "server_info" }); + items.push({ label: "Device Approvals", page: "admin_device_approvals" }); + break; + case "filters": + items.push({ label: "Filters & Groups", page: "filters" }); + items.push({ label: "Filters", page: "filters" }); + break; + case "groups": + items.push({ label: "Filters & Groups", page: "filters" }); + items.push({ label: "Groups", page: "groups" }); + break; + default: + // Fallback to a neutral crumb if unknown + if (currentPage) items.push({ label: String(currentPage) }); + } + return items; + }, [currentPage, selectedDevice, editingJob]); + + useEffect(() => { + let canceled = false; + const hydrateSession = async () => { + const session = localStorage.getItem("borealis_session"); + if (session) { + try { + const data = JSON.parse(session); + if (Date.now() - data.timestamp < 3600 * 1000) { + if (!canceled) { + setUser(data.username); + setUserRole(data.role || null); + setUserDisplayName(data.display_name || data.username); + } + } else { + localStorage.removeItem("borealis_session"); + } + } catch { + localStorage.removeItem("borealis_session"); + } + } + + try { + const 
resp = await fetch('/api/auth/me', { credentials: 'include' }); + if (resp.ok) { + const me = await resp.json(); + if (!canceled) { + setUser(me.username); + setUserRole(me.role || null); + setUserDisplayName(me.display_name || me.username); + } + localStorage.setItem( + "borealis_session", + JSON.stringify({ username: me.username, display_name: me.display_name || me.username, role: me.role, timestamp: Date.now() }) + ); + } + } catch {} + + if (!canceled) { + setSessionResolved(true); + } + }; + + hydrateSession(); + return () => { + canceled = true; + }; + }, []); + + useEffect(() => { + if (!sessionResolved) return; + + const navTo = navigateToRef.current; + const navByPath = navigateByPathRef.current; + + if (user) { + const stored = initialPathRef.current; + const currentLocation = window.location.pathname + window.location.search; + const targetPath = + stored && stored !== "/login" + ? stored + : currentLocation === "/login" || currentLocation === "" + ? "/devices" + : currentLocation; + navByPath(targetPath, { replace: true, allowUnauthenticated: true }); + initialPathRef.current = null; + pendingPathRef.current = null; + } else { + const stored = initialPathRef.current; + const currentLocation = window.location.pathname + window.location.search; + const rememberPath = + stored && !stored.startsWith("/login") + ? stored + : !currentLocation.startsWith("/login") + ? 
currentLocation + : null; + if (rememberPath) { + pendingPathRef.current = rememberPath; + } + navTo("login", { replace: true, allowUnauthenticated: true, suppressPending: true }); + } + }, [sessionResolved, user]); + + useEffect(() => { + if (!sessionResolved) return; + + const handlePopState = () => { + const path = window.location.pathname + window.location.search; + if (!user) { + if (!path.startsWith("/login")) { + pendingPathRef.current = path; + } + navigateToRef.current("login", { replace: true, allowUnauthenticated: true, suppressPending: true }); + return; + } + navigateByPathRef.current(path, { replace: true, allowUnauthenticated: true }); + }; + + window.addEventListener("popstate", handlePopState); + return () => window.removeEventListener("popstate", handlePopState); + }, [sessionResolved, user]); + + // Suggest fetcher with debounce + const fetchSuggestions = useCallback((field, q) => { + const query = String(q || "").trim(); + if (query.length < 3) { + setSuggestions({ devices: [], sites: [], q: query, field }); + return; + } + const params = new URLSearchParams({ field, q: query, limit: "5" }); + fetch(`/api/search/suggest?${params.toString()}`) + .then((r) => (r.ok ? 
r.json() : { devices: [], sites: [], q: query, field })) + .then((data) => setSuggestions(data)) + .catch(() => setSuggestions({ devices: [], sites: [], q: query, field })); + }, []); + + useEffect(() => { + if (!searchOpen) return; + if (searchDebounceRef.current) clearTimeout(searchDebounceRef.current); + searchDebounceRef.current = setTimeout(() => { + fetchSuggestions(searchCategory, searchQuery); + }, 220); + return () => { if (searchDebounceRef.current) clearTimeout(searchDebounceRef.current); }; + }, [searchOpen, searchCategory, searchQuery, fetchSuggestions]); + + const execSearch = useCallback(async (field, q, navigateImmediate = true) => { + const cat = SEARCH_CATEGORIES.find((c) => c.key === field) || SEARCH_CATEGORIES[0]; + if (cat.scope === "site") { + try { + localStorage.setItem('site_list_initial_filters', JSON.stringify( + field === 'site_name' ? { name: q } : { description: q } + )); + } catch {} + if (navigateImmediate) navigateTo("sites"); + } else { + // device field + // Map API field -> Device_List filter key + const fieldMap = { + hostname: 'hostname', + description: 'description', + last_user: 'lastUser', + internal_ip: 'internalIp', + external_ip: 'externalIp', + serial_number: 'serialNumber', // placeholder (ignored by Device_List for now) + }; + const k = fieldMap[field] || 'hostname'; + const qLc = String(q || '').toLowerCase(); + const exact = (suggestions.devices || []).find((d) => String(d.hostname || d.value || '').toLowerCase() === qLc); + if (exact && (exact.hostname || '').trim()) { + const device = { hostname: exact.hostname.trim() }; + if (navigateImmediate) { + navigateTo('device_details', { device }); + } else { + setSelectedDevice(device); + } + } else if (field === 'hostname') { + // Probe device existence and open directly if found + try { + const resp = await fetch(`/api/device/details/${encodeURIComponent(q)}`); + if (resp.ok) { + const data = await resp.json(); + if (data && (data.summary?.hostname || 
Object.keys(data).length > 0)) { + const device = { hostname: q }; + if (navigateImmediate) { + navigateTo('device_details', { device }); + } else { + setSelectedDevice(device); + } + } else { + try { localStorage.setItem('device_list_initial_filters', JSON.stringify({ [k]: q })); } catch {} + if (navigateImmediate) navigateTo('devices'); + } + } else { + try { localStorage.setItem('device_list_initial_filters', JSON.stringify({ [k]: q })); } catch {} + if (navigateImmediate) navigateTo('devices'); + } + } catch { + try { localStorage.setItem('device_list_initial_filters', JSON.stringify({ [k]: q })); } catch {} + if (navigateImmediate) navigateTo('devices'); + } + } else { + try { + const payload = (k === 'serialNumber') ? {} : { [k]: q }; + localStorage.setItem('device_list_initial_filters', JSON.stringify(payload)); + } catch {} + if (navigateImmediate) navigateTo("devices"); + } + } + setSearchOpen(false); + }, [SEARCH_CATEGORIES, navigateTo, suggestions.devices]); + + const handleLoginSuccess = ({ username, role }) => { + setUser(username); + setUserRole(role || null); + setUserDisplayName(username); + localStorage.setItem( + "borealis_session", + JSON.stringify({ username, display_name: username, role: role || null, timestamp: Date.now() }) + ); + // Refresh full profile (to get display_name) in background + (async () => { + try { + const resp = await fetch('/api/auth/me', { credentials: 'include' }); + if (resp.ok) { + const me = await resp.json(); + setUserDisplayName(me.display_name || me.username); + localStorage.setItem( + "borealis_session", + JSON.stringify({ username: me.username, display_name: me.display_name || me.username, role: me.role, timestamp: Date.now() }) + ); + } + } catch {} + })(); + if (pendingPathRef.current) { + navigateByPath(pendingPathRef.current, { replace: true, allowUnauthenticated: true }); + pendingPathRef.current = null; + } else { + navigateTo('devices', { replace: true, allowUnauthenticated: true }); + } + }; + + 
useEffect(() => { + const saved = localStorage.getItem(LOCAL_STORAGE_KEY); + if (saved) { + try { + const parsed = JSON.parse(saved); + if (Array.isArray(parsed.tabs) && parsed.activeTabId) { + setTabs(parsed.tabs); + setActiveTabId(parsed.activeTabId); + } + } catch (err) { + console.warn("Failed to parse saved state:", err); + } + } + }, []); + + useEffect(() => { + const timeout = setTimeout(() => { + const data = JSON.stringify({ tabs, activeTabId }); + localStorage.setItem(LOCAL_STORAGE_KEY, data); + }, 1000); + return () => clearTimeout(timeout); + }, [tabs, activeTabId]); + + const handleSetNodes = useCallback((callbackOrArray, tId) => { + const targetId = tId || activeTabId; + setTabs((old) => + old.map((tab) => + tab.id === targetId + ? { ...tab, nodes: typeof callbackOrArray === "function" ? callbackOrArray(tab.nodes) : callbackOrArray } + : tab + ) + ); + }, [activeTabId]); + + const handleSetEdges = useCallback((callbackOrArray, tId) => { + const targetId = tId || activeTabId; + setTabs((old) => + old.map((tab) => + tab.id === targetId + ? { ...tab, edges: typeof callbackOrArray === "function" ? 
callbackOrArray(tab.edges) : callbackOrArray } + : tab + ) + ); + }, [activeTabId]); + + const handleUserMenuOpen = (event) => setUserMenuAnchorEl(event.currentTarget); + const handleUserMenuClose = () => setUserMenuAnchorEl(null); + const handleLogout = async () => { + try { + await fetch('/api/auth/logout', { method: 'POST', credentials: 'include' }); + } catch {} + try { localStorage.removeItem('borealis_session'); } catch {} + setUser(null); + setUserRole(null); + setUserDisplayName(null); + navigateTo('login', { replace: true, allowUnauthenticated: true, suppressPending: true }); + }; + + const handleTabRightClick = (evt, tabId) => { + evt.preventDefault(); + setTabMenuAnchor({ x: evt.clientX, y: evt.clientY }); + setTabMenuTabId(tabId); + }; + + const handleCloseTab = () => { + setTabs((prev) => { + const filtered = prev.filter((t) => t.id !== tabMenuTabId); + if (filtered.length === 0) { + const newTab = { id: "flow_1", tab_name: "Flow 1", nodes: [], edges: [] }; + setActiveTabId(newTab.id); + return [newTab]; + } + if (activeTabId === tabMenuTabId) { + setActiveTabId(filtered[0].id); + } + return filtered; + }); + setTabMenuAnchor(null); + }; + + const handleRenameTab = () => { + const tab = tabs.find((t) => t.id === tabMenuTabId); + if (tab) { + setRenameTabId(tabMenuTabId); + setRenameValue(tab.tab_name); + setRenameDialogOpen(true); + } + setTabMenuAnchor(null); + }; + + const handleSaveRename = () => { + setTabs((prev) => + prev.map((t) => (t.id === renameTabId ? 
{ ...t, tab_name: renameValue } : t)) + ); + setRenameDialogOpen(false); + }; + + const handleExportFlow = useCallback(() => { + const tab = tabs.find((t) => t.id === activeTabId); + if (!tab) return; + const payload = { + tab_name: tab.tab_name, + nodes: tab.nodes, + edges: tab.edges + }; + const fileName = `${tab.tab_name || "workflow"}.json`; + const blob = new Blob([JSON.stringify(payload, null, 2)], { type: "application/json" }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = fileName; + a.click(); + URL.revokeObjectURL(url); + }, [tabs, activeTabId]); + + const handleImportFlow = useCallback(() => { + if (fileInputRef.current) { + fileInputRef.current.value = null; + fileInputRef.current.click(); + } + }, []); + + const onFileInputChange = useCallback( + (e) => { + const file = e.target.files && e.target.files[0]; + if (!file) return; + const reader = new FileReader(); + reader.onload = () => { + try { + const data = JSON.parse(reader.result); + const newId = "flow_" + Date.now(); + setTabs((prev) => [ + ...prev, + { + id: newId, + tab_name: + data.tab_name || data.name || file.name.replace(/\.json$/i, ""), + nodes: data.nodes || [], + edges: data.edges || [] + } + ]); + setActiveTabId(newId); + navigateTo("workflow-editor"); + } catch (err) { + console.error("Failed to import workflow:", err); + } + }; + reader.readAsText(file); + e.target.value = ""; + }, + [navigateTo, setTabs] + ); + + const handleSaveFlow = useCallback( + async (name) => { + const tab = tabs.find((t) => t.id === activeTabId); + if (!tab || !name) return; + const payload = { + path: tab.folderPath ? 
`${tab.folderPath}/${name}` : name, + workflow: { + tab_name: tab.tab_name, + nodes: tab.nodes, + edges: tab.edges + } + }; + try { + const body = { + island: 'workflows', + kind: 'file', + path: payload.path, + content: payload.workflow + }; + await fetch("/api/assembly/create", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(body) + }); + setTabs((prev) => + prev.map((t) => (t.id === activeTabId ? { ...t, tab_name: name } : t)) + ); + } catch (err) { + console.error("Failed to save workflow:", err); + } + }, + [tabs, activeTabId] + ); + + const isAdmin = (String(userRole || '').toLowerCase() === 'admin'); + + useEffect(() => { + const requiresAdmin = currentPage === 'server_info' + || currentPage === 'admin_enrollment_codes' + || currentPage === 'admin_device_approvals' + || currentPage === 'access_credentials' + || currentPage === 'access_github_token' + || currentPage === 'access_users' + || currentPage === 'ssh_devices' + || currentPage === 'winrm_devices' + || currentPage === 'agent_devices'; + if (!isAdmin && requiresAdmin) { + setNotAuthorizedOpen(true); + navigateTo('devices', { replace: true, suppressPending: true }); + } + }, [currentPage, isAdmin, navigateTo]); + + const renderMainContent = () => { + switch (currentPage) { + case "sites": + return ( + { + try { + localStorage.setItem('device_list_initial_site_filter', String(siteName || '')); + } catch {} + navigateTo("devices"); + }} + /> + ); + case "devices": + return ( + { + navigateTo("device_details", { device: d }); + }} + /> + ); + case "agent_devices": + return ( + { + navigateTo("device_details", { device: d }); + }} + /> + ); + case "ssh_devices": + return ; + case "winrm_devices": + return ; + + case "device_details": + return ( + { + navigateTo("devices"); + setSelectedDevice(null); + }} + /> + ); + + case "jobs": + return ( + { setEditingJob(null); navigateTo("create_job"); }} + onEditJob={(job) => { setEditingJob(job); 
navigateTo("create_job"); }} + refreshToken={jobsRefreshToken} + /> + ); + + case "create_job": + return ( + { navigateTo("jobs"); setEditingJob(null); }} + onCreated={() => { navigateTo("jobs"); setEditingJob(null); setJobsRefreshToken(Date.now()); }} + /> + ); + + case "workflows": + return ( + { + const newId = "flow_" + Date.now(); + if (workflow && workflow.rel_path) { + const folder = workflow.rel_path.split("/").slice(0, -1).join("/"); + try { + const resp = await fetch(`/api/assembly/load?island=workflows&path=${encodeURIComponent(workflow.rel_path)}`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + setTabs([{ id: newId, tab_name: data.tab_name || workflow.name || workflow.file_name || "Workflow", nodes: data.nodes || [], edges: data.edges || [], folderPath: folder }]); + } catch (err) { + console.error("Failed to load workflow:", err); + setTabs([{ id: newId, tab_name: workflow?.name || "Workflow", nodes: [], edges: [], folderPath: folder }]); + } + } else { + setTabs([{ id: newId, tab_name: name || "Flow", nodes: [], edges: [], folderPath: folderPath || "" }]); + } + setActiveTabId(newId); + navigateTo("workflow-editor"); + }} + onOpenScript={(rel, mode, context) => { + const nonce = Date.now(); + setAssemblyEditorState({ + path: rel || '', + mode, + context: context ? { ...context, nonce } : null, + nonce + }); + navigateTo(mode === 'ansible' ? 'ansible_editor' : 'scripts', { + assemblyState: { + path: rel || '', + mode, + context: context ? 
{ ...context, nonce } : null, + nonce + } + }); + }} + /> + ); + + case "assemblies": + return ( + { + const newId = "flow_" + Date.now(); + if (workflow && workflow.rel_path) { + const folder = workflow.rel_path.split("/").slice(0, -1).join("/"); + try { + const resp = await fetch(`/api/assembly/load?island=workflows&path=${encodeURIComponent(workflow.rel_path)}`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + setTabs([{ id: newId, tab_name: data.tab_name || workflow.name || workflow.file_name || "Workflow", nodes: data.nodes || [], edges: data.edges || [], folderPath: folder }]); + } catch (err) { + console.error("Failed to load workflow:", err); + setTabs([{ id: newId, tab_name: workflow?.name || "Workflow", nodes: [], edges: [], folderPath: folder }]); + } + } else { + setTabs([{ id: newId, tab_name: name || "Flow", nodes: [], edges: [], folderPath: folderPath || "" }]); + } + setActiveTabId(newId); + navigateTo("workflow-editor"); + }} + onOpenScript={(rel, mode, context) => { + const nonce = Date.now(); + setAssemblyEditorState({ + path: rel || '', + mode, + context: context ? { ...context, nonce } : null, + nonce + }); + navigateTo(mode === 'ansible' ? 'ansible_editor' : 'scripts', { + assemblyState: { + path: rel || '', + mode, + context: context ? { ...context, nonce } : null, + nonce + } + }); + }} + /> + ); + + case "scripts": + return ( + + setAssemblyEditorState((prev) => (prev && prev.mode === 'scripts' ? null : prev)) + } + onSaved={() => navigateTo('assemblies')} + /> + ); + + case "ansible_editor": + return ( + + setAssemblyEditorState((prev) => (prev && prev.mode === 'ansible' ? 
null : prev)) + } + onSaved={() => navigateTo('assemblies')} + /> + ); + + case "access_credentials": + return ; + + case "access_github_token": + return ; + + case "access_users": + return ; + + case "server_info": + return ; + + case "admin_enrollment_codes": + return ; + + case "admin_device_approvals": + return ; + + case "workflow-editor": + return ( + + + setConfirmCloseOpen(true)} + fileInputRef={fileInputRef} + onFileInputChange={onFileInputChange} + currentTabName={tabs.find((t) => t.id === activeTabId)?.tab_name} + /> + + {}} + onTabRightClick={handleTabRightClick} + /> + + {tabs.map((tab) => ( + + + handleSetNodes(val, tab.id)} + setEdges={(val) => handleSetEdges(val, tab.id)} + nodeTypes={nodeTypes} + categorizedNodes={categorizedNodes} + /> + + + ))} + + + + + + ); + + default: + return ( + + Select a section from navigation. + + ); + } + }; + if (!user) { + return ( + + + + + ); + } + + return ( + + + + + + + {/* Breadcrumbs inline in top bar (transparent), aligned to content area */} + + } + aria-label="breadcrumb" + sx={{ + color: "#9aa0a6", + fontSize: "0.825rem", // 50% larger than previous + '& .MuiBreadcrumbs-separator': { mx: 0.6 }, + pointerEvents: 'auto' + }} + > + {breadcrumbs.map((c, idx) => { + if (c.page) { + return ( + + ); + } + return ( + + {c.label} + + ); + })} + + + {/* Top search: category + input */} + setSearchOpen(false)}> + + + setSearchMenuEl(null)} + PaperProps={{ sx: { bgcolor: '#1e1e1e', color: '#fff', minWidth: 240 } }} + > + {SEARCH_CATEGORIES.map((c) => ( + { setSearchCategory(c.key); setSearchMenuEl(null); setSearchQuery(''); setSuggestions({ devices: [], sites: [], q: '', field: '' }); }}> + {c.label} + + ))} + + + { setSearchQuery(e.target.value); setSearchOpen(true); }} + onFocus={() => setSearchOpen(true)} + onKeyDown={(e) => { + if (e.key === 'Enter') { + execSearch(searchCategory, searchQuery); + } else if (e.key === 'Escape') { + setSearchOpen(false); + } + }} + placeholder={(SEARCH_CATEGORIES.find(c => c.key === 
searchCategory) || {}).placeholder || 'Search'} + style={{ + outline: 'none', border: 'none', background: 'transparent', color: '#e8eaed', paddingLeft: 10, paddingRight: 28, width: 360, height: '100%' + }} + /> + + {searchOpen && (((SEARCH_CATEGORIES.find(c=>c.key===searchCategory)?.scope==='device') && (suggestions.devices||[]).length>0) || ((SEARCH_CATEGORIES.find(c=>c.key===searchCategory)?.scope==='site') && (suggestions.sites||[]).length>0)) && ( + + {/* Devices group */} + {((suggestions.devices || []).length > 0 && (SEARCH_CATEGORIES.find(c=>c.key===searchCategory)?.scope==='device')) && ( + + Devices + {suggestions.devices && suggestions.devices.length > 0 ? ( + suggestions.devices.map((d, idx) => { + const primary = (searchCategory === 'hostname') + ? highlightText(d.hostname || d.value, searchQuery) + : (d.hostname || d.value); + // Choose a secondary value based on category; fallback to best-available info + let secVal = ''; + if (searchCategory === 'internal_ip') secVal = d.internal_ip || ''; + else if (searchCategory === 'external_ip') secVal = d.external_ip || ''; + else if (searchCategory === 'description') secVal = d.description || ''; + else if (searchCategory === 'last_user') secVal = d.last_user || ''; + const secHighlighted = (searchCategory !== 'hostname' && secVal) + ? highlightText(secVal, searchQuery) + : (d.internal_ip || d.external_ip || d.description || d.last_user || ''); + return ( + { navigateTo('device_details', { device: { hostname: d.hostname || d.value } }); setSearchOpen(false); }} sx={{ px: 1.2, py: 0.6, '&:hover': { bgcolor: '#22272e' }, cursor: 'pointer' }}> + {primary} + + {d.site_name || ''}{(d.site_name && (secVal || (d.internal_ip || d.external_ip || d.description || d.last_user))) ? ' • ' : ''}{secHighlighted} + + + ); + }) + ) : ( + + {searchCategory === 'serial_number' ? 'Serial numbers are not tracked yet.' 
: 'No matches'} + + )} + + )} + {/* Sites group */} + {((suggestions.sites || []).length > 0 && (SEARCH_CATEGORIES.find(c=>c.key===searchCategory)?.scope==='site')) && ( + + Sites + {suggestions.sites && suggestions.sites.length > 0 ? ( + suggestions.sites.map((s, idx) => ( + execSearch(searchCategory, s.value)} sx={{ px: 1.2, py: 0.6, '&:hover': { bgcolor: '#22272e' }, cursor: 'pointer' }}> + {searchCategory === 'site_name' ? highlightText(s.site_name, searchQuery) : s.site_name} + {searchCategory === 'site_description' ? highlightText(s.site_description || '', searchQuery) : (s.site_description || '')} + + )) + ) : ( + No matches + )} + + )} + + )} + + + + {/* Spacer to keep user menu aligned right */} + + + + { handleUserMenuClose(); handleLogout(); }}> + Logout + + + + + + + *': { + alignSelf: 'stretch', + minHeight: 'calc(100% - 32px)' // account for typical m:2 top+bottom margins + } + }} + > + {renderMainContent()} + + + + setConfirmCloseOpen(false)} onConfirm={() => {}} /> + setRenameDialogOpen(false)} + onSave={handleSaveRename} + /> + setTabMenuAnchor(null)} + onRename={handleRenameTab} + onCloseTab={handleCloseTab} + /> + setNotAuthorizedOpen(false)} /> + + ); +} diff --git a/Data/Server/WebUI/src/Assemblies/Assembly_Editor.jsx b/Data/Server/WebUI/src/Assemblies/Assembly_Editor.jsx new file mode 100644 index 00000000..f9ef28f3 --- /dev/null +++ b/Data/Server/WebUI/src/Assemblies/Assembly_Editor.jsx @@ -0,0 +1,1269 @@ +import React, { useEffect, useMemo, useRef, useState } from "react"; +import { + Box, + Paper, + Typography, + Button, + TextField, + MenuItem, + Grid, + FormControlLabel, + Checkbox, + IconButton, + Tooltip, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + ListItemText +} from "@mui/material"; +import { Add as AddIcon, Delete as DeleteIcon, UploadFile as UploadFileIcon } from "@mui/icons-material"; +import Prism from "prismjs"; +import "prismjs/components/prism-yaml"; +import "prismjs/components/prism-bash"; +import 
"prismjs/components/prism-powershell"; +import "prismjs/components/prism-batch"; +import "prismjs/themes/prism-okaidia.css"; +import Editor from "react-simple-code-editor"; +import { ConfirmDeleteDialog } from "../Dialogs"; + +const TYPE_OPTIONS_ALL = [ + { key: "ansible", label: "Ansible Playbook", prism: "yaml" }, + { key: "powershell", label: "PowerShell Script", prism: "powershell" }, + { key: "batch", label: "Batch Script", prism: "batch" }, + { key: "bash", label: "Bash Script", prism: "bash" } +]; + +const CATEGORY_OPTIONS = [ + { key: "script", label: "Script" }, + { key: "application", label: "Application" } +]; + +const VARIABLE_TYPE_OPTIONS = [ + { key: "string", label: "String" }, + { key: "number", label: "Number" }, + { key: "boolean", label: "Boolean" }, + { key: "credential", label: "Credential" } +]; + +const BACKGROUND_COLORS = { + field: "#1C1C1C", /* Shared surface color for text fields, dropdown inputs, and script editors */ + sectionCard: "#2E2E2E", /* Background for section container cards */ + menuSelected: "rgba(88,166,255,0.16)", /* Background for selected dropdown items */ + menuSelectedHover: "rgba(88,166,255,0.24)", /* Background for hovered selected dropdown items */ + primaryActionSaving: "rgba(88,166,255,0.12)", /* Background for primary action button while saving */ + primaryActionHover: "rgba(88,166,255,0.18)", /* Background for primary action button hover state */ + dialog: "#1a1f27" /* Background for modal dialogs */ +}; + +const INPUT_BASE_SX = { + "& .MuiOutlinedInput-root": { + bgcolor: BACKGROUND_COLORS.field, + color: "#e6edf3", /* Text Color */ + borderRadius: 1, /* Roundness of UI Elements */ + minHeight: 40, + "& fieldset": { borderColor: "#2b3544" }, + "&:hover fieldset": { borderColor: "#3a4657" }, + "&.Mui-focused fieldset": { borderColor: "#58a6ff" } + }, + + "& .MuiOutlinedInput-input": { + padding: "9px 12px", + fontSize: "0.95rem", + lineHeight: 1.4 + }, + + "& .MuiOutlinedInput-inputMultiline": { + padding: "9px 
12px" + }, + + "& .MuiInputLabel-root": { + color: "#9ba3b4", + transform: "translate(12px, 11px) scale(0.8)" // label at rest (inside field) + }, + "& .MuiInputLabel-root.Mui-focused": { color: "#58a6ff" }, + "& .MuiInputLabel-root.MuiInputLabel-shrink": { + transform: "translate(12px, -6px) scale(0.75)" // floated label position + }, + + "& input[type=number]": { MozAppearance: "textfield" }, + "& input[type=number]::-webkit-outer-spin-button": { WebkitAppearance: "none", margin: 0 }, + "& input[type=number]::-webkit-inner-spin-button": { WebkitAppearance: "none", margin: 0 } +}; + +const SELECT_BASE_SX = { + ...INPUT_BASE_SX, + "& .MuiSelect-select": { + padding: "10px 12px !important", + display: "flex", + alignItems: "center" + } +}; + +const SECTION_TITLE_SX = { + color: "#58a6ff", + fontWeight: 400, + fontSize: "14px", + letterSpacing: 0.2 +}; + +const SECTION_CARD_SX = { + bgcolor: BACKGROUND_COLORS.sectionCard, + borderRadius: 2, + border: "1px solid #262f3d", +}; + +const MENU_PROPS = { + PaperProps: { + sx: { + bgcolor: BACKGROUND_COLORS.field, + color: "#e6edf3", + border: "1px solid #2b3544", + "& .MuiMenuItem-root.Mui-selected": { + bgcolor: BACKGROUND_COLORS.menuSelected + }, + "& .MuiMenuItem-root.Mui-selected:hover": { + bgcolor: BACKGROUND_COLORS.menuSelectedHover + } + } + } +}; + +function keyBy(arr) { + return Object.fromEntries(arr.map((o) => [o.key, o])); +} + +const TYPE_MAP = keyBy(TYPE_OPTIONS_ALL); + +const PAGE_BACKGROUND = "#0d1117"; /* Color of Void Space Between Sidebar and Page */ + +function highlightedHtml(code, prismLang) { + try { + const grammar = Prism.languages[prismLang] || Prism.languages.markup; + return Prism.highlight(code ?? "", grammar, prismLang); + } catch { + return (code ?? "").replace(/[&<>]/g, (c) => ({ "&": "&", "<": "<", ">": ">" }[c])); + } +} + +function sanitizeFileName(name = "") { + const base = name.trim().replace(/[^a-zA-Z0-9._-]+/g, "_") || "assembly"; + return base.endsWith(".json") ? 
base : `${base}.json`; +} + +function normalizeFolderPath(path = "") { + if (!path) return ""; + return path + .replace(/\\/g, "/") + .replace(/^\/+|\/+$/g, "") + .replace(/\/+/g, "/"); +} + +function formatBytes(size) { + if (!size || Number.isNaN(size)) return "0 B"; + if (size < 1024) return `${size} B`; + const units = ["KB", "MB", "GB", "TB"]; + let idx = -1; + let s = size; + while (s >= 1024 && idx < units.length - 1) { + s /= 1024; + idx += 1; + } + return `${s.toFixed(1)} ${units[idx]}`; +} + +function defaultAssembly(defaultType = "powershell") { + return { + name: "", + description: "", + category: defaultType === "ansible" ? "application" : "script", + type: defaultType, + script: "", + timeoutSeconds: 3600, + sites: { mode: "all", values: [] }, + variables: [], + files: [] + }; +} + +function normalizeVariablesFromServer(vars = []) { + return (Array.isArray(vars) ? vars : []).map((v, idx) => ({ + id: `${Date.now()}_${idx}_${Math.random().toString(36).slice(2, 8)}`, + name: v?.name || v?.key || "", + label: v?.label || "", + type: v?.type || "string", + defaultValue: v?.default ?? v?.default_value ?? 
"", + required: Boolean(v?.required), + description: v?.description || "" + })); +} + +function decodeBase64String(data = "") { + if (typeof data !== "string") { + return { success: false, value: "" }; + } + + const trimmed = data.trim(); + if (!trimmed) { + return { success: true, value: "" }; + } + + const sanitized = trimmed.replace(/\s+/g, ""); + + try { + if (typeof window !== "undefined" && typeof window.atob === "function") { + const binary = window.atob(sanitized); + if (typeof TextDecoder !== "undefined") { + try { + const decoder = new TextDecoder("utf-8", { fatal: false }); + return { + success: true, + value: decoder.decode(Uint8Array.from(binary, (c) => c.charCodeAt(0))) + }; + } catch (err) { + // fall through to manual reconstruction + } + } + + let decoded = ""; + for (let i = 0; i < binary.length; i += 1) { + decoded += String.fromCharCode(binary.charCodeAt(i)); + } + try { + return { success: true, value: decodeURIComponent(escape(decoded)) }; + } catch (err) { + return { success: true, value: decoded }; + } + } + } catch (err) { + // fall through to Buffer fallback + } + + try { + if (typeof Buffer !== "undefined") { + return { success: true, value: Buffer.from(sanitized, "base64").toString("utf-8") }; + } + } catch (err) { + // ignore + } + + return { success: false, value: "" }; +} + +function encodeBase64String(text = "") { + if (typeof text !== "string") { + text = text == null ? 
"" : String(text); + } + if (!text) return ""; + try { + if (typeof TextEncoder !== "undefined" && typeof window !== "undefined" && typeof window.btoa === "function") { + const encoder = new TextEncoder(); + const bytes = encoder.encode(text); + let binary = ""; + bytes.forEach((b) => { binary += String.fromCharCode(b); }); + return window.btoa(binary); + } + } catch (err) { + // fall through to Buffer fallback + } + try { + if (typeof Buffer !== "undefined") { + return Buffer.from(text, "utf-8").toString("base64"); + } + } catch (err) { + // ignore + } + return ""; +} + +function normalizeFilesFromServer(files = []) { + return (Array.isArray(files) ? files : []).map((f, idx) => ({ + id: `${Date.now()}_${idx}_${Math.random().toString(36).slice(2, 8)}`, + fileName: f?.file_name || f?.name || "file.bin", + size: f?.size || 0, + mimeType: f?.mime_type || f?.mimeType || "", + data: f?.data || "" + })); +} + +function fromServerDocument(doc = {}, defaultType = "powershell") { + const assembly = defaultAssembly(defaultType); + if (doc && typeof doc === "object") { + assembly.name = doc.name || doc.display_name || assembly.name; + assembly.description = doc.description || ""; + assembly.category = doc.category || assembly.category; + assembly.type = doc.type || assembly.type; + const legacyScript = Array.isArray(doc.script_lines) + ? doc.script_lines.map((line) => (line == null ? "" : String(line))).join("\n") + : ""; + const script = doc.script ?? doc.content ?? legacyScript; + if (typeof script === "string") { + const encoding = (doc.script_encoding || doc.scriptEncoding || "").toLowerCase(); + if (["base64", "b64", "base-64"].includes(encoding)) { + const decoded = decodeBase64String(script); + assembly.script = decoded.success ? decoded.value : script; + } else if (!encoding) { + const decoded = decodeBase64String(script); + assembly.script = decoded.success ? 
decoded.value : script; + } else { + assembly.script = script; + } + } else { + assembly.script = legacyScript; + } + const timeout = doc.timeout_seconds ?? doc.timeout ?? assembly.timeoutSeconds; + assembly.timeoutSeconds = Number.isFinite(Number(timeout)) + ? Number(timeout) + : assembly.timeoutSeconds; + const sites = doc.sites || {}; + assembly.sites = { + mode: sites.mode || (Array.isArray(sites.values) && sites.values.length ? "specific" : "all"), + values: Array.isArray(sites.values) ? sites.values : [] + }; + assembly.variables = normalizeVariablesFromServer(doc.variables); + assembly.files = normalizeFilesFromServer(doc.files); + } + return assembly; +} + +function toServerDocument(assembly) { + const normalizedScript = typeof assembly.script === "string" + ? assembly.script.replace(/\r\n/g, "\n") + : ""; + const timeoutNumeric = Number(assembly.timeoutSeconds); + const timeoutSeconds = Number.isFinite(timeoutNumeric) ? Math.max(0, Math.round(timeoutNumeric)) : 3600; + const encodedScript = encodeBase64String(normalizedScript); + return { + version: 1, + name: assembly.name?.trim() || "", + description: assembly.description || "", + category: assembly.category || "script", + type: assembly.type || "powershell", + script: encodedScript, + script_encoding: "base64", + timeout_seconds: timeoutSeconds, + sites: { + mode: assembly.sites?.mode === "specific" ? "specific" : "all", + values: Array.isArray(assembly.sites?.values) + ? assembly.sites.values.filter((v) => v && v.trim()).map((v) => v.trim()) + : [] + }, + variables: (assembly.variables || []).map((v) => ({ + name: v.name?.trim() || "", + label: v.label || "", + type: v.type || "string", + default: v.defaultValue ?? 
"", + required: Boolean(v.required), + description: v.description || "" + })), + files: (assembly.files || []).map((f) => ({ + file_name: f.fileName || "file.bin", + size: f.size || 0, + mime_type: f.mimeType || "", + data: f.data || "" + })) + }; +} + +function RenameFileDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Rename Assembly File + + onChange(e.target.value)} + sx={INPUT_BASE_SX} + /> + + + + + + + ); +} + +export default function AssemblyEditor({ + mode = "scripts", + initialPath = "", + initialContext = null, + onConsumeInitialData, + onSaved +}) { + const isAnsible = mode === "ansible"; + const defaultType = isAnsible ? "ansible" : "powershell"; + const [assembly, setAssembly] = useState(() => defaultAssembly(defaultType)); + const [currentPath, setCurrentPath] = useState(""); + const [fileName, setFileName] = useState(""); + const [folderPath, setFolderPath] = useState(() => normalizeFolderPath(initialContext?.folder || "")); + const [renameOpen, setRenameOpen] = useState(false); + const [renameValue, setRenameValue] = useState(""); + const [deleteOpen, setDeleteOpen] = useState(false); + const [saving, setSaving] = useState(false); + const [siteOptions, setSiteOptions] = useState([]); + const [siteLoading, setSiteLoading] = useState(false); + const contextNonceRef = useRef(null); + + const TYPE_OPTIONS = useMemo( + () => (isAnsible ? TYPE_OPTIONS_ALL.filter((o) => o.key === "ansible") : TYPE_OPTIONS_ALL.filter((o) => o.key !== "ansible")), + [isAnsible] + ); + + const siteOptionMap = useMemo(() => { + const map = new Map(); + siteOptions.forEach((site) => { + if (!site) return; + const id = site.id != null ? String(site.id) : ""; + if (!id) return; + map.set(id, site); + }); + return map; + }, [siteOptions]); + + const island = isAnsible ? 
"ansible" : "scripts"; + + useEffect(() => { + if (!initialPath) return; + let canceled = false; + (async () => { + try { + const resp = await fetch(`/api/assembly/load?island=${encodeURIComponent(island)}&path=${encodeURIComponent(initialPath)}`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + if (canceled) return; + const rel = data.rel_path || initialPath; + setCurrentPath(rel); + setFolderPath(normalizeFolderPath(rel.split("/").slice(0, -1).join("/"))); + setFileName(data.file_name || rel.split("/").pop() || ""); + const doc = fromServerDocument(data.assembly || data, defaultType); + setAssembly(doc); + } catch (err) { + console.error("Failed to load assembly:", err); + } finally { + if (!canceled && onConsumeInitialData) onConsumeInitialData(); + } + })(); + return () => { + canceled = true; + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [initialPath, island]); + + useEffect(() => { + const ctx = initialContext; + if (!ctx || !ctx.nonce) return; + if (contextNonceRef.current === ctx.nonce) return; + contextNonceRef.current = ctx.nonce; + const doc = defaultAssembly(ctx.defaultType || defaultType); + if (ctx.name) doc.name = ctx.name; + if (ctx.description) doc.description = ctx.description; + if (ctx.category) doc.category = ctx.category; + if (ctx.type) doc.type = ctx.type; + setAssembly(doc); + setCurrentPath(""); + const suggested = ctx.suggestedFileName || ctx.name || ""; + setFileName(suggested ? 
sanitizeFileName(suggested) : ""); + setFolderPath(normalizeFolderPath(ctx.folder || "")); + if (onConsumeInitialData) onConsumeInitialData(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [initialContext?.nonce]); + + useEffect(() => { + let canceled = false; + const loadSites = async () => { + try { + setSiteLoading(true); + const resp = await fetch("/api/sites"); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + if (canceled) return; + const items = Array.isArray(data?.sites) ? data.sites : []; + setSiteOptions(items.map((s) => ({ ...s, id: s?.id != null ? String(s.id) : "" })).filter((s) => s.id)); + } catch (err) { + if (!canceled) { + console.error("Failed to load sites:", err); + setSiteOptions([]); + } + } finally { + if (!canceled) setSiteLoading(false); + } + }; + loadSites(); + return () => { + canceled = true; + }; + }, []); + + const prismLanguage = TYPE_MAP[assembly.type]?.prism || "powershell"; + + const updateAssembly = (partial) => { + setAssembly((prev) => ({ ...prev, ...partial })); + }; + + const updateSitesMode = (modeValue) => { + setAssembly((prev) => ({ + ...prev, + sites: { + mode: modeValue, + values: modeValue === "specific" ? prev.sites.values || [] : [] + } + })); + }; + + const updateSelectedSites = (values) => { + const arr = Array.isArray(values) + ? values + : typeof values === "string" + ? 
values.split(",").map((v) => v.trim()).filter(Boolean) + : []; + setAssembly((prev) => ({ + ...prev, + sites: { + mode: "specific", + values: arr.map((v) => String(v)) + } + })); + }; + + const addVariable = () => { + setAssembly((prev) => ({ + ...prev, + variables: [ + ...prev.variables, + { + id: `${Date.now()}_${Math.random().toString(36).slice(2, 8)}`, + name: "", + label: "", + type: "string", + defaultValue: "", + required: false, + description: "" + } + ] + })); + }; + + const updateVariable = (id, partial) => { + setAssembly((prev) => ({ + ...prev, + variables: prev.variables.map((v) => (v.id === id ? { ...v, ...partial } : v)) + })); + }; + + const removeVariable = (id) => { + setAssembly((prev) => ({ + ...prev, + variables: prev.variables.filter((v) => v.id !== id) + })); + }; + + const handleFileUpload = async (event) => { + const files = Array.from(event.target.files || []); + if (!files.length) return; + const reads = files.map((file) => new Promise((resolve) => { + const reader = new FileReader(); + reader.onload = () => { + const result = reader.result || ""; + const base64 = typeof result === "string" && result.includes(",") ? result.split(",", 2)[1] : result; + resolve({ + id: `${Date.now()}_${Math.random().toString(36).slice(2, 8)}`, + fileName: file.name, + size: file.size, + mimeType: file.type, + data: base64 + }); + }; + reader.onerror = () => resolve(null); + reader.readAsDataURL(file); + })); + const uploaded = (await Promise.all(reads)).filter(Boolean); + if (uploaded.length) { + setAssembly((prev) => ({ ...prev, files: [...prev.files, ...uploaded] })); + } + event.target.value = ""; + }; + + const removeFile = (id) => { + setAssembly((prev) => ({ ...prev, files: prev.files.filter((f) => f.id !== id) })); + }; + + const computeTargetPath = () => { + if (currentPath) return currentPath; + const baseName = sanitizeFileName(fileName || assembly.name || (isAnsible ? 
"playbook" : "assembly")); + const folder = normalizeFolderPath(folderPath); + return folder ? `${folder}/${baseName}` : baseName; + }; + + const saveAssembly = async () => { + if (!assembly.name.trim()) { + alert("Assembly Name is required."); + return; + } + const payload = toServerDocument(assembly); + payload.type = assembly.type; + const targetPath = computeTargetPath(); + if (!targetPath) { + alert("Unable to determine file path."); + return; + } + setSaving(true); + try { + if (currentPath) { + const resp = await fetch(`/api/assembly/edit`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, path: currentPath, content: payload }) + }); + const data = await resp.json().catch(() => ({})); + if (!resp.ok) { + throw new Error(data?.error || `HTTP ${resp.status}`); + } + if (data?.rel_path) { + setCurrentPath(data.rel_path); + setFolderPath(normalizeFolderPath(data.rel_path.split("/").slice(0, -1).join("/"))); + setFileName(data.rel_path.split("/").pop() || fileName); + } + } else { + const resp = await fetch(`/api/assembly/create`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: "file", path: targetPath, content: payload, type: assembly.type }) + }); + const data = await resp.json(); + if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`); + if (data.rel_path) { + setCurrentPath(data.rel_path); + setFolderPath(data.rel_path.split("/").slice(0, -1).join("/")); + setFileName(data.rel_path.split("/").pop() || ""); + } else { + setCurrentPath(targetPath); + setFileName(targetPath.split("/").pop() || ""); + } + } + onSaved && onSaved(); + } catch (err) { + console.error("Failed to save assembly:", err); + alert(err.message || "Failed to save assembly"); + } finally { + setSaving(false); + } + }; + + const saveRename = async () => { + try { + const nextName = sanitizeFileName(renameValue || fileName || assembly.name); + const resp = await 
fetch(`/api/assembly/rename`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: "file", path: currentPath, new_name: nextName, type: assembly.type }) + }); + const data = await resp.json(); + if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`); + const rel = data.rel_path || currentPath; + setCurrentPath(rel); + setFolderPath(rel.split("/").slice(0, -1).join("/")); + setFileName(rel.split("/").pop() || nextName); + setRenameOpen(false); + } catch (err) { + console.error("Failed to rename assembly:", err); + alert(err.message || "Failed to rename"); + setRenameOpen(false); + } + }; + + const deleteAssembly = async () => { + if (!currentPath) { + setDeleteOpen(false); + return; + } + try { + const resp = await fetch(`/api/assembly/delete`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: "file", path: currentPath }) + }); + if (!resp.ok) { + const data = await resp.json().catch(() => ({})); + throw new Error(data?.error || `HTTP ${resp.status}`); + } + setDeleteOpen(false); + setAssembly(defaultAssembly(defaultType)); + setCurrentPath(""); + setFileName(""); + onSaved && onSaved(); + } catch (err) { + console.error("Failed to delete assembly:", err); + alert(err.message || "Failed to delete assembly"); + setDeleteOpen(false); + } + }; + + const siteScopeValue = assembly.sites?.mode === "specific" ? "specific" : "all"; + const selectedSiteValues = Array.isArray(assembly.sites?.values) + ? assembly.sites.values.map((v) => String(v)) + : []; + + return ( + + + + + + {/* Left half */} + + + Assembly Editor + Create and edit variables, scripts, and other fields related to assemblies. + + + + {/* Right half */} + + + {currentPath ? ( + + + + ) : null} + {currentPath ? 
( + + + + ) : null} + + + + + + + + + + Overview + + + + + + + updateAssembly({ name: e.target.value })} + fullWidth + variant="outlined" + sx={{ ...INPUT_BASE_SX, mb: 2 }} + /> + updateAssembly({ description: e.target.value })} + multiline + minRows={2} + maxRows={8} + fullWidth + variant="outlined" + sx={{ + ...INPUT_BASE_SX, + "& .MuiOutlinedInput-inputMultiline": { + padding: "6px 12px", + lineHeight: 1.4 + } + }} + /> + + + updateAssembly({ category: e.target.value })} + sx={{ ...SELECT_BASE_SX, mb: 2 }} + SelectProps={{ MenuProps: MENU_PROPS }} + > + {CATEGORY_OPTIONS.map((o) => ( + {o.label} + ))} + + + updateAssembly({ type: e.target.value })} + sx={SELECT_BASE_SX} + SelectProps={{ MenuProps: MENU_PROPS }} + > + {TYPE_OPTIONS.map((o) => ( + {o.label} + ))} + + + + + + + Script Content + + + updateAssembly({ script: value })} + highlight={(src) => highlightedHtml(src, prismLanguage)} + padding={12} + placeholder={currentPath ? `Editing: ${currentPath}` : "Start typing your script..."} + style={{ + fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace', + fontSize: 14, + color: "#e6edf3", + background: BACKGROUND_COLORS.field, /* Color of Script Box */ + outline: "none", + minHeight: 320, + lineHeight: 1.45, + caretColor: "#58a6ff" + }} + /> + + + + + + { + const nextValue = e.target.value.replace(/[^0-9]/g, ""); + updateAssembly({ timeoutSeconds: nextValue ? Number(nextValue) : 0 }); + }} + fullWidth + variant="outlined" + sx={INPUT_BASE_SX} + helperText="Timeout this script if not completed within X seconds" + /> + + + + updateSitesMode(e.target.value)} + sx={{ + ...SELECT_BASE_SX, + width: { xs: "100%", sm: 320, lg: 360 } + }} + SelectProps={{ MenuProps: MENU_PROPS }} + > + All Sites + Specific Sites + + {siteScopeValue === "specific" ? ( + updateSelectedSites(Array.isArray(e.target.value) ? 
e.target.value : [])} + sx={{ + ...SELECT_BASE_SX, + width: { xs: "100%", sm: 360, lg: 420 } + }} + SelectProps={{ + multiple: true, + renderValue: (selected) => { + if (!selected || selected.length === 0) { + return Select sites; + } + const names = selected.map((val) => siteOptionMap.get(String(val))?.name || String(val)); + return names.join(", "); + }, + MenuProps: MENU_PROPS + }} + > + {siteLoading ? ( + + + + ) : siteOptions.length ? ( + siteOptions.map((site) => { + const value = String(site.id); + const checked = selectedSiteValues.includes(value); + return ( + + + + + ); + }) + ) : ( + + + + )} + + ) : null} + + + + + + + Environment Variables + + + Variables are dynamically passed into the script as environment variables at runtime. They are written like $env:variableName in the script editor. + + {(assembly.variables || []).length ? ( + + {assembly.variables.map((variable) => ( + + + + + + updateVariable(variable.id, { name: e.target.value })} + fullWidth + variant="outlined" + sx={INPUT_BASE_SX} + /> + + + + + updateVariable(variable.id, { label: e.target.value })} + fullWidth + variant="outlined" + sx={INPUT_BASE_SX} + /> + + + + + + updateVariable(variable.id, { type: e.target.value })} + sx={SELECT_BASE_SX} + SelectProps={{ MenuProps: MENU_PROPS }} + > + {VARIABLE_TYPE_OPTIONS.map((opt) => ( + {opt.label} + ))} + + + + + {variable.type === "boolean" ? 
( + + updateVariable(variable.id, { defaultValue: e.target.checked })} + sx={{ color: "#58a6ff" }} + /> + } + label="Default Value" + sx={{ + color: "#9ba3b4", + m: 0, + "& .MuiFormControlLabel-label": { fontSize: "0.95rem" } + }} + /> + + ) : ( + + updateVariable(variable.id, { defaultValue: e.target.value })} + fullWidth + variant="outlined" + sx={INPUT_BASE_SX} + /> + + )} + + + + updateVariable(variable.id, { description: e.target.value })} + fullWidth + variant="outlined" + sx={INPUT_BASE_SX} + /> + + + + + removeVariable(variable.id)} sx={{ color: "#ff6b6b" }}> + + + + + + + + + Required + + + updateVariable(variable.id, { required: e.target.checked }) + } + sx={{ + color: "#58a6ff", + p: 0.5, + }} + inputProps={{ "aria-label": "Required" }} + /> + + + + + ))} + + ) : ( + + No variables have been defined. + + )} + + + + + + Files + + + Upload supporting files. They will be embedded as Base64 and available to the assembly at runtime. + + {(assembly.files || []).length ? ( + + {assembly.files.map((file) => ( + + + {file.fileName} + {formatBytes(file.size)}{file.mimeType ? ` • ${file.mimeType}` : ""} + + removeFile(file.id)} sx={{ color: "#ff6b6b" }}> + + + + ))} + + ) : ( + + No files uploaded yet. 
+ + )} + + + + + + + setRenameOpen(false)} + onSave={saveRename} + /> + setDeleteOpen(false)} + onConfirm={deleteAssembly} + /> + + ); +} \ No newline at end of file diff --git a/Data/Server/WebUI/src/Assemblies/Assembly_List.jsx b/Data/Server/WebUI/src/Assemblies/Assembly_List.jsx new file mode 100644 index 00000000..5f5fc1dd --- /dev/null +++ b/Data/Server/WebUI/src/Assemblies/Assembly_List.jsx @@ -0,0 +1,777 @@ +import React, { useState, useEffect, useCallback } from "react"; +import { Paper, Box, Typography, Menu, MenuItem, Button } from "@mui/material"; +import { Folder as FolderIcon, Description as DescriptionIcon, Polyline as WorkflowsIcon, Code as ScriptIcon, MenuBook as BookIcon } from "@mui/icons-material"; +import { + SimpleTreeView, + TreeItem, + useTreeViewApiRef +} from "@mui/x-tree-view"; +import { + RenameWorkflowDialog, + RenameFolderDialog, + NewWorkflowDialog, + ConfirmDeleteDialog +} from "../Dialogs"; + +// Generic Island wrapper with large icon, stacked title/description, and actions on the right +const Island = ({ title, description, icon, actions, children, sx }) => ( + + + + {icon ? ( + + {icon} + + ) : null} + + + {title} + + {description ? ( + + {description} + + ) : null} + + + {actions ? ( + + {actions} + + ) : null} + + {children} + +); + +// ---------------- Workflows Island ----------------- +const sortTree = (node) => { + if (!node || !Array.isArray(node.children)) return; + node.children.sort((a, b) => { + const aFolder = Boolean(a.isFolder); + const bFolder = Boolean(b.isFolder); + if (aFolder !== bFolder) return aFolder ? 
-1 : 1; + return String(a.label || "").localeCompare(String(b.label || ""), undefined, { + sensitivity: "base" + }); + }); + node.children.forEach(sortTree); +}; + +function buildWorkflowTree(workflows, folders) { + const map = {}; + const rootNode = { id: "root", label: "Workflows", path: "", isFolder: true, children: [] }; + map[rootNode.id] = rootNode; + (folders || []).forEach((f) => { + const parts = (f || "").split("/"); + let children = rootNode.children; + let parentPath = ""; + parts.forEach((part) => { + const path = parentPath ? `${parentPath}/${part}` : part; + let node = children.find((n) => n.id === path); + if (!node) { + node = { id: path, label: part, path, isFolder: true, children: [] }; + children.push(node); + map[path] = node; + } + children = node.children; + parentPath = path; + }); + }); + (workflows || []).forEach((w) => { + const parts = (w.rel_path || "").split("/"); + let children = rootNode.children; + let parentPath = ""; + parts.forEach((part, idx) => { + const path = parentPath ? `${parentPath}/${part}` : part; + const isFile = idx === parts.length - 1; + let node = children.find((n) => n.id === path); + if (!node) { + node = { + id: path, + label: isFile ? ((w.tab_name && w.tab_name.trim()) || w.file_name) : part, + path, + isFolder: !isFile, + fileName: w.file_name, + workflow: isFile ? 
w : null, + children: [] + }; + children.push(node); + map[path] = node; + } + if (!isFile) { + children = node.children; + parentPath = path; + } + }); + }); + sortTree(rootNode); + return { root: [rootNode], map }; +} + +function WorkflowsIsland({ onOpenWorkflow }) { + const [tree, setTree] = useState([]); + const [nodeMap, setNodeMap] = useState({}); + const [contextMenu, setContextMenu] = useState(null); + const [selectedNode, setSelectedNode] = useState(null); + const [renameValue, setRenameValue] = useState(""); + const [renameOpen, setRenameOpen] = useState(false); + const [renameFolderOpen, setRenameFolderOpen] = useState(false); + const [folderDialogMode, setFolderDialogMode] = useState("rename"); + const [newWorkflowOpen, setNewWorkflowOpen] = useState(false); + const [newWorkflowName, setNewWorkflowName] = useState(""); + const [deleteOpen, setDeleteOpen] = useState(false); + const apiRef = useTreeViewApiRef(); + const [dragNode, setDragNode] = useState(null); + + const handleDrop = async (target) => { + if (!dragNode || !target.isFolder) return; + if (dragNode.path === target.path || target.path.startsWith(`${dragNode.path}/`)) { + setDragNode(null); + return; + } + const newPath = target.path ? 
`${target.path}/${dragNode.fileName}` : dragNode.fileName; + try { + await fetch("/api/assembly/move", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island: 'workflows', kind: 'file', path: dragNode.path, new_path: newPath }) + }); + loadTree(); + } catch (err) { + console.error("Failed to move workflow:", err); + } + setDragNode(null); + }; + + const loadTree = useCallback(async () => { + try { + const resp = await fetch(`/api/assembly/list?island=workflows`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + const { root, map } = buildWorkflowTree(data.items || [], data.folders || []); + setTree(root); + setNodeMap(map); + } catch (err) { + console.error("Failed to load workflows:", err); + setTree([]); + setNodeMap({}); + } + }, []); + + useEffect(() => { loadTree(); }, [loadTree]); + + const handleContextMenu = (e, node) => { + e.preventDefault(); + setSelectedNode(node); + setContextMenu( + contextMenu === null ? 
{ mouseX: e.clientX - 2, mouseY: e.clientY - 4 } : null + ); + }; + + const handleRename = () => { + setContextMenu(null); + if (!selectedNode) return; + setRenameValue(selectedNode.label); + if (selectedNode.isFolder) { + setFolderDialogMode("rename"); + setRenameFolderOpen(true); + } else setRenameOpen(true); + }; + + const handleEdit = () => { + setContextMenu(null); + if (selectedNode && !selectedNode.isFolder && onOpenWorkflow) { + onOpenWorkflow(selectedNode.workflow); + } + }; + + const handleDelete = () => { + setContextMenu(null); + if (!selectedNode) return; + setDeleteOpen(true); + }; + + const handleNewFolder = () => { + if (!selectedNode) return; + setContextMenu(null); + setFolderDialogMode("create"); + setRenameValue(""); + setRenameFolderOpen(true); + }; + + const handleNewWorkflow = () => { + if (!selectedNode) return; + setContextMenu(null); + setNewWorkflowName(""); + setNewWorkflowOpen(true); + }; + + const saveRenameWorkflow = async () => { + if (!selectedNode) return; + try { + await fetch("/api/assembly/rename", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island: 'workflows', kind: 'file', path: selectedNode.path, new_name: renameValue }) + }); + loadTree(); + } catch (err) { + console.error("Failed to rename workflow:", err); + } + setRenameOpen(false); + }; + + const saveRenameFolder = async () => { + try { + if (folderDialogMode === "rename" && selectedNode) { + await fetch("/api/assembly/rename", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island: 'workflows', kind: 'folder', path: selectedNode.path, new_name: renameValue }) + }); + } else { + const basePath = selectedNode ? selectedNode.path : ""; + const newPath = basePath ? 
`${basePath}/${renameValue}` : renameValue; + await fetch("/api/assembly/create", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island: 'workflows', kind: 'folder', path: newPath }) + }); + } + loadTree(); + } catch (err) { + console.error("Folder operation failed:", err); + } + setRenameFolderOpen(false); + }; + + const handleNodeSelect = (_event, itemId) => { + const node = nodeMap[itemId]; + if (node && !node.isFolder && onOpenWorkflow) { + onOpenWorkflow(node.workflow); + } + }; + + const confirmDelete = async () => { + if (!selectedNode) return; + try { + if (selectedNode.isFolder) { + await fetch("/api/assembly/delete", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island: 'workflows', kind: 'folder', path: selectedNode.path }) + }); + } else { + await fetch("/api/assembly/delete", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island: 'workflows', kind: 'file', path: selectedNode.path }) + }); + } + loadTree(); + } catch (err) { + console.error("Failed to delete:", err); + } + setDeleteOpen(false); + }; + + const renderItems = (nodes) => + (nodes || []).map((n) => ( + !n.isFolder && setDragNode(n)} + onDragOver={(e) => { if (dragNode && n.isFolder) e.preventDefault(); }} + onDrop={(e) => { e.preventDefault(); handleDrop(n); }} + onContextMenu={(e) => handleContextMenu(e, n)} + > + {n.isFolder ? ( + + ) : ( + + )} + {n.label} +
+ } + > + {n.children && n.children.length > 0 ? renderItems(n.children) : null} + + )); + + const rootChildIds = tree[0]?.children?.map((c) => c.id) || []; + + return ( + } + actions={ + + } + > + { if (dragNode) e.preventDefault(); }} + onDrop={(e) => { e.preventDefault(); handleDrop({ path: "", isFolder: true }); }} + > + + {renderItems(tree)} + + + setContextMenu(null)} + anchorReference="anchorPosition" + anchorPosition={contextMenu ? { top: contextMenu.mouseY, left: contextMenu.mouseX } : undefined} + PaperProps={{ sx: { bgcolor: "#1e1e1e", color: "#fff", fontSize: "13px" } }} + > + {selectedNode?.isFolder && ( + <> + New Workflow + New Subfolder + {selectedNode.id !== "root" && (Rename)} + {selectedNode.id !== "root" && (Delete)} + + )} + {!selectedNode?.isFolder && ( + <> + Edit + Rename + Delete + + )} + + setRenameOpen(false)} onSave={saveRenameWorkflow} /> + setRenameFolderOpen(false)} onSave={saveRenameFolder} title={folderDialogMode === "rename" ? "Rename Folder" : "New Folder"} confirmText={folderDialogMode === "rename" ? "Save" : "Create"} /> + setNewWorkflowOpen(false)} onCreate={() => { setNewWorkflowOpen(false); onOpenWorkflow && onOpenWorkflow(null, selectedNode?.path || "", newWorkflowName); }} /> + setDeleteOpen(false)} onConfirm={confirmDelete} /> + + ); +} + +// ---------------- Generic Scripts-like Islands (used for Scripts and Ansible) ----------------- +function buildFileTree(rootLabel, items, folders) { + // Some backends (e.g. /api/scripts) return paths relative to + // the Assemblies root, which prefixes items with a top-level + // folder like "Scripts". Others (e.g. /api/ansible) already + // return paths relative to their specific root. Normalize by + // stripping a matching top-level segment so the UI shows + // "Scripts/<...>" rather than "Scripts/Scripts/<...>". 
+ const normalize = (p) => { + const candidates = [ + String(rootLabel || "").trim(), + String(rootLabel || "").replace(/\s+/g, "_") + ].filter(Boolean); + const parts = String(p || "").replace(/\\/g, "/").split("/").filter(Boolean); + if (parts.length && candidates.includes(parts[0])) parts.shift(); + return parts; + }; + + const map = {}; + const rootNode = { id: "root", label: rootLabel, path: "", isFolder: true, children: [] }; + map[rootNode.id] = rootNode; + + (folders || []).forEach((f) => { + const parts = normalize(f); + let children = rootNode.children; + let parentPath = ""; + parts.forEach((part) => { + const path = parentPath ? `${parentPath}/${part}` : part; + let node = children.find((n) => n.id === path); + if (!node) { + node = { id: path, label: part, path, isFolder: true, children: [] }; + children.push(node); + map[path] = node; + } + children = node.children; + parentPath = path; + }); + }); + + (items || []).forEach((s) => { + const parts = normalize(s?.rel_path); + let children = rootNode.children; + let parentPath = ""; + parts.forEach((part, idx) => { + const path = parentPath ? `${parentPath}/${part}` : part; + const isFile = idx === parts.length - 1; + let node = children.find((n) => n.id === path); + if (!node) { + node = { + id: path, + label: isFile ? (s.name || s.display_name || s.file_name || part) : part, + path, + isFolder: !isFile, + fileName: s.file_name, + meta: isFile ? s : null, + children: [] + }; + children.push(node); + map[path] = node; + } + if (!isFile) { + children = node.children; + parentPath = path; + } + }); + }); + sortTree(rootNode); + return { root: [rootNode], map }; +} + +function ScriptsLikeIsland({ + title, + description, + rootLabel, + baseApi, // e.g. 
'/api/scripts' or '/api/ansible' + newItemLabel = "New Script", + onEdit // (rel_path) => void +}) { + const [tree, setTree] = useState([]); + const [nodeMap, setNodeMap] = useState({}); + const [contextMenu, setContextMenu] = useState(null); + const [selectedNode, setSelectedNode] = useState(null); + const [renameValue, setRenameValue] = useState(""); + const [renameOpen, setRenameOpen] = useState(false); + const [renameFolderOpen, setRenameFolderOpen] = useState(false); + const [folderDialogMode, setFolderDialogMode] = useState("rename"); + const [newItemOpen, setNewItemOpen] = useState(false); + const [newItemName, setNewItemName] = useState(""); + const [deleteOpen, setDeleteOpen] = useState(false); + const apiRef = useTreeViewApiRef(); + const [dragNode, setDragNode] = useState(null); + + const island = React.useMemo(() => { + const b = String(baseApi || '').toLowerCase(); + return b.endsWith('/api/ansible') ? 'ansible' : 'scripts'; + }, [baseApi]); + + const loadTree = useCallback(async () => { + try { + const resp = await fetch(`/api/assembly/list?island=${encodeURIComponent(island)}`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + const { root, map } = buildFileTree(rootLabel, data.items || [], data.folders || []); + setTree(root); + setNodeMap(map); + } catch (err) { + console.error(`Failed to load ${title}:`, err); + setTree([]); + setNodeMap({}); + } + }, [island, title, rootLabel]); + + useEffect(() => { loadTree(); }, [loadTree]); + + const handleContextMenu = (e, node) => { + e.preventDefault(); + setSelectedNode(node); + setContextMenu( + contextMenu === null ? { mouseX: e.clientX - 2, mouseY: e.clientY - 4 } : null + ); + }; + + const handleDrop = async (target) => { + if (!dragNode || !target.isFolder) return; + if (dragNode.path === target.path || target.path.startsWith(`${dragNode.path}/`)) { + setDragNode(null); + return; + } + const newPath = target.path ? 
`${target.path}/${dragNode.fileName}` : dragNode.fileName; + try { + await fetch(`/api/assembly/move`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: 'file', path: dragNode.path, new_path: newPath }) + }); + loadTree(); + } catch (err) { + console.error("Failed to move:", err); + } + setDragNode(null); + }; + + const handleNodeSelect = async (_e, itemId) => { + const node = nodeMap[itemId]; + if (node && !node.isFolder) { + setContextMenu(null); + onEdit && onEdit(node.path); + } + }; + + const saveRenameFile = async () => { + try { + const payload = { island, kind: 'file', path: selectedNode.path, new_name: renameValue }; + // preserve extension for scripts when no extension provided + if (selectedNode?.meta?.type) payload.type = selectedNode.meta.type; + const res = await fetch(`/api/assembly/rename`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload) + }); + const data = await res.json(); + if (!res.ok) throw new Error(data?.error || `HTTP ${res.status}`); + setRenameOpen(false); + loadTree(); + } catch (err) { + console.error("Failed to rename file:", err); + setRenameOpen(false); + } + }; + + const saveRenameFolder = async () => { + try { + if (folderDialogMode === "rename" && selectedNode) { + await fetch(`/api/assembly/rename`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: 'folder', path: selectedNode.path, new_name: renameValue }) + }); + } else { + const basePath = selectedNode ? selectedNode.path : ""; + const newPath = basePath ? 
`${basePath}/${renameValue}` : renameValue; + await fetch(`/api/assembly/create`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: 'folder', path: newPath }) + }); + } + setRenameFolderOpen(false); + loadTree(); + } catch (err) { + console.error("Folder operation failed:", err); + setRenameFolderOpen(false); + } + }; + + const confirmDelete = async () => { + if (!selectedNode) return; + try { + if (selectedNode.isFolder) { + await fetch(`/api/assembly/delete`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: 'folder', path: selectedNode.path }) + }); + } else { + await fetch(`/api/assembly/delete`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: 'file', path: selectedNode.path }) + }); + } + setDeleteOpen(false); + loadTree(); + } catch (err) { + console.error("Failed to delete:", err); + setDeleteOpen(false); + } + }; + + const createNewItem = () => { + const trimmedName = (newItemName || '').trim(); + const folder = selectedNode?.isFolder + ? selectedNode.path + : (selectedNode?.path?.split("/").slice(0, -1).join("/") || ""); + const context = { + folder, + suggestedFileName: trimmedName, + defaultType: island === 'ansible' ? 'ansible' : 'powershell', + type: island === 'ansible' ? 'ansible' : 'powershell', + category: island === 'ansible' ? 'application' : 'script' + }; + setNewItemOpen(false); + setNewItemName(""); + onEdit && onEdit(null, context); + }; + + const renderItems = (nodes) => + (nodes || []).map((n) => ( + !n.isFolder && setDragNode(n)} + onDragOver={(e) => { if (dragNode && n.isFolder) e.preventDefault(); }} + onDrop={(e) => { e.preventDefault(); handleDrop(n); }} + onContextMenu={(e) => handleContextMenu(e, n)} + onDoubleClick={() => { if (!n.isFolder) onEdit && onEdit(n.path); }} + > + {n.isFolder ? 
( + + ) : ( + + )} + {n.label} + + } + > + {n.children && n.children.length > 0 ? renderItems(n.children) : null} + + )); + + const rootChildIds = tree[0]?.children?.map((c) => c.id) || []; + + return ( + : } + actions={ + + } + > + { if (dragNode) e.preventDefault(); }} + onDrop={(e) => { e.preventDefault(); handleDrop({ path: "", isFolder: true }); }} + > + + {renderItems(tree)} + + + setContextMenu(null)} + anchorReference="anchorPosition" + anchorPosition={contextMenu ? { top: contextMenu.mouseY, left: contextMenu.mouseX } : undefined} + PaperProps={{ sx: { bgcolor: "#1e1e1e", color: "#fff", fontSize: "13px" } }} + > + {selectedNode?.isFolder && ( + <> + { setContextMenu(null); setNewItemOpen(true); }}>{newItemLabel} + { setContextMenu(null); setFolderDialogMode("create"); setRenameValue(""); setRenameFolderOpen(true); }}>New Subfolder + {selectedNode.id !== "root" && ( { setContextMenu(null); setRenameValue(selectedNode.label); setRenameOpen(true); }}>Rename)} + {selectedNode.id !== "root" && ( { setContextMenu(null); setDeleteOpen(true); }}>Delete)} + + )} + {!selectedNode?.isFolder && ( + <> + { setContextMenu(null); onEdit && onEdit(selectedNode.path); }}>Edit + { setContextMenu(null); setRenameValue(selectedNode.label); setRenameOpen(true); }}>Rename + { setContextMenu(null); setDeleteOpen(true); }}>Delete + + )} + + {/* Simple inline dialogs using shared components */} + setRenameFolderOpen(false)} onSave={saveRenameFolder} title={folderDialogMode === "rename" ? "Rename Folder" : "New Folder"} confirmText={folderDialogMode === "rename" ? "Save" : "Create"} /> + {/* File rename */} +
} sx={{ display: renameOpen ? 'block' : 'none' }}> +
+ + Rename + setRenameValue(e.target.value)} style={{ width: '100%', padding: 8, background: '#2a2a2a', color: '#ccc', border: '1px solid #444', borderRadius: 4 }} /> + + + + + +
+ +
} sx={{ display: newItemOpen ? 'block' : 'none' }}> +
+ + {newItemLabel} + setNewItemName(e.target.value)} placeholder="Name" style={{ width: '100%', padding: 8, background: '#2a2a2a', color: '#ccc', border: '1px solid #444', borderRadius: 4 }} /> + + + + + +
+ + setDeleteOpen(false)} onConfirm={confirmDelete} /> + + ); +} + +export default function AssemblyList({ onOpenWorkflow, onOpenScript }) { + return ( + + + Assemblies + Collections of various types of components used to perform various automations upon targeted devices. + + + + {/* Left: Workflows */} + + + {/* Middle: Scripts */} + onOpenScript && onOpenScript(rel, 'scripts', ctx)} + /> + + {/* Right: Ansible Playbooks */} + onOpenScript && onOpenScript(rel, 'ansible', ctx)} + /> + + + + ); +} diff --git a/Data/Server/WebUI/src/Borealis.css b/Data/Server/WebUI/src/Borealis.css new file mode 100644 index 00000000..f2881d9a --- /dev/null +++ b/Data/Server/WebUI/src/Borealis.css @@ -0,0 +1,252 @@ +/* ///////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Borealis.css + +body { + font-family: "IBM Plex Sans", "Helvetica Neue", Arial, sans-serif; + background-color: #0b0f19; + color: #f5f7fa; +} + +/* ======================================= */ +/* FLOW EDITOR */ +/* ======================================= */ + +/* FlowEditor background container */ +.flow-editor-container { + position: relative; + width: 100%; + height: 100%; + overflow: hidden; +} + +/* Blue Gradient Overlay */ +.flow-editor-container::before { + content: ""; + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + pointer-events: none; + background: linear-gradient( + to bottom, + rgba(9, 44, 68, 0.9) 0%, + rgba(30, 30, 30, 0) 45%, + rgba(30, 30, 30, 0) 75%, + rgba(9, 44, 68, 0.7) 100% + ); + z-index: -1; +} + +/* helper lines for snapping */ +.helper-line { + position: absolute; + background: #0074ff; + z-index: 10; + pointer-events: none; + } + + .helper-line-vertical { + width: 1px; + height: 100%; + } + + .helper-line-horizontal { + height: 1px; + width: 100%; + } + +/* ======================================= */ +/* NODE SIDEBAR */ +/* ======================================= */ + +/* Emphasize Drag & Drop Node Functionality */ 
+.sidebar-button:hover { + background-color: #2a2a2a !important; + box-shadow: 0 0 5px rgba(88, 166, 255, 0.3); + cursor: grab; +} + +/* ======================================= */ +/* NODES */ +/* ======================================= */ + +/* Borealis Node Styling */ +.borealis-node { + background: linear-gradient( + to bottom, + #2c2c2c 60%, + #232323 100% + ); + border: 1px solid #3a3a3a; + border-radius: 4px; + color: #ccc; + font-size: 12px; + min-width: 160px; + max-width: 260px; + position: relative; + box-shadow: 0 0 5px rgba(88, 166, 255, 0.15), + 0 0 10px rgba(88, 166, 255, 0.15); + transition: box-shadow 0.3s ease-in-out; +} +.borealis-node::before { + content: ""; + display: block; + position: absolute; + left: 0; + top: 0; + width: 3px; + height: 100%; + background: linear-gradient( + to bottom, + var(--borealis-accent, #58a6ff) 0%, + var(--borealis-accent-dark, #0475c2) 100% + ); + border-top-left-radius: 4px; + border-bottom-left-radius: 4px; +} +.borealis-node-header { + background: #232323; + padding: 6px 10px; + border-top-left-radius: 4px; + border-top-right-radius: 4px; + font-weight: bold; + color: var(--borealis-title, #58a6ff); + font-size: 10px; +} +.borealis-node-content { + padding: 10px; + font-size: 9px; +} +.borealis-handle { + background: #58a6ff; + width: 10px; + height: 10px; +} + +/* Global dark form inputs */ +input, +select, +button { + background-color: #1d1d1d; + color: #ccc; + border: 1px solid #444; + font-size: 12px; +} + +/* Label / Dark Text styling */ +label { + color: #aaa; + font-size: 9px; +} + +/* Node Header - Shows drag handle cursor */ +.borealis-node-header { + cursor: grab; +} + +/* Optional: when actively dragging */ +.borealis-node-header:active { + cursor: grabbing; +} + +/* Node Body - Just pointer, not draggable */ +.borealis-node-content { + cursor: default; +} + +/* ======================================= */ +/* FLOW TABS */ +/* ======================================= */ + +/* Multi-Tab Bar Adjustments */ 
+.MuiTabs-root { + min-height: 32px !important; +} + +.MuiTab-root { + min-height: 32px !important; + padding: 6px 12px !important; + color: #58a6ff !important; + text-transform: none !important; +} + +/* Highlight tab on hover if it's not active */ +.MuiTab-root:hover:not(.Mui-selected) { + background-color: #2C2C2C !important; +} + +/* We rely on the TabIndicatorProps to show the underline highlight for active tabs. */ + +/* ======================================= */ +/* REACT-SIMPLE-KEYBOARD */ +/* ======================================= */ + +/* Make the keyboard max width like the demo */ +.simple-keyboard { + max-width: 950px; + margin: 0 auto; + background: #181c23; + border-radius: 8px; + padding: 24px 24px 30px 24px; + box-shadow: 0 2px 24px 0 #000a; +} + +/* Set dark background and color for the keyboard and its keys */ +.simple-keyboard .hg-button { + background: #23262e; + color: #b0d0ff; + border: 1px solid #333; + font-size: 1.1em; + min-width: 48px; + min-height: 48px; + margin: 5px; + border-radius: 6px; + transition: background 0.1s, color 0.1s; + padding-top: 6px; + padding-left: 8px; +} + +.simple-keyboard .hg-button[data-skbtn="space"] { + min-width: 380px; +} + +.simple-keyboard .hg-button[data-skbtn="tab"], +.simple-keyboard .hg-button[data-skbtn="caps"], +.simple-keyboard .hg-button[data-skbtn="shift"], +.simple-keyboard .hg-button[data-skbtn="enter"], +.simple-keyboard .hg-button[data-skbtn="bksp"] { + min-width: 82px; +} + +.simple-keyboard .hg-button:hover { + background: #58a6ff; + color: #000; + border-color: #58a6ff; +} + +/* Make sure rows aren't squashed */ +.simple-keyboard .hg-row { + display: flex !important; + flex-flow: row wrap; + justify-content: center; + margin-bottom: 10px; +} + +/* Remove any unwanted shrink/stretch */ +.simple-keyboard .hg-button { + flex: 0 0 auto; +} + +/* Optional: on-screen keyboard input field (if you ever show it) */ +input[type="text"].simple-keyboard-input { + width: 100%; + height: 48px; + 
padding: 10px 20px; + font-size: 20px; + border: none; + box-sizing: border-box; + background: #181818; + color: #f5f7fa; + border-radius: 6px; + margin-bottom: 20px; +} \ No newline at end of file diff --git a/Data/Server/WebUI/src/Devices/Add_Device.jsx b/Data/Server/WebUI/src/Devices/Add_Device.jsx new file mode 100644 index 00000000..f44945d8 --- /dev/null +++ b/Data/Server/WebUI/src/Devices/Add_Device.jsx @@ -0,0 +1,219 @@ +import React, { useEffect, useState } from "react"; +import { + Dialog, + DialogTitle, + DialogContent, + DialogActions, + TextField, + Button, + MenuItem, + Typography +} from "@mui/material"; + +const TYPE_OPTIONS = [ + { value: "ssh", label: "SSH" }, + { value: "winrm", label: "WinRM" } +]; + +const initialForm = { + hostname: "", + address: "", + description: "", + operating_system: "" +}; + +export default function AddDevice({ + open, + onClose, + defaultType = null, + onCreated +}) { + const [type, setType] = useState(defaultType || "ssh"); + const [form, setForm] = useState(initialForm); + const [submitting, setSubmitting] = useState(false); + const [error, setError] = useState(""); + + useEffect(() => { + if (open) { + setType(defaultType || "ssh"); + setForm(initialForm); + setError(""); + } + }, [open, defaultType]); + + const handleClose = () => { + if (submitting) return; + onClose && onClose(); + }; + + const handleChange = (field) => (event) => { + const value = event.target.value; + setForm((prev) => ({ ...prev, [field]: value })); + }; + + const handleSubmit = async () => { + if (submitting) return; + const trimmedHostname = form.hostname.trim(); + const trimmedAddress = form.address.trim(); + if (!trimmedHostname) { + setError("Hostname is required."); + return; + } + if (!type) { + setError("Select a device type."); + return; + } + if (!trimmedAddress) { + setError("Address is required."); + return; + } + setSubmitting(true); + setError(""); + const payload = { + hostname: trimmedHostname, + address: trimmedAddress, + 
description: form.description.trim(), + operating_system: form.operating_system.trim() + }; + const apiBase = type === "winrm" ? "/api/winrm_devices" : "/api/ssh_devices"; + try { + const resp = await fetch(apiBase, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload) + }); + const data = await resp.json().catch(() => ({})); + if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`); + onCreated && onCreated(data.device || null); + onClose && onClose(); + } catch (err) { + setError(String(err.message || err)); + } finally { + setSubmitting(false); + } + }; + + const dialogTitle = defaultType + ? `Add ${defaultType.toUpperCase()} Device` + : "Add Device"; + + const typeLabel = (TYPE_OPTIONS.find((opt) => opt.value === type) || TYPE_OPTIONS[0]).label; + + return ( + + {dialogTitle} + + {!defaultType && ( + setType(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#1f1f1f", + color: "#fff", + "& fieldset": { borderColor: "#555" }, + "&:hover fieldset": { borderColor: "#888" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + > + {TYPE_OPTIONS.map((opt) => ( + + {opt.label} + + ))} + + )} + + + + + {error && ( + + {error} + + )} + + + + + + + ); +} diff --git a/Data/Server/WebUI/src/Devices/Agent_Devices.jsx b/Data/Server/WebUI/src/Devices/Agent_Devices.jsx new file mode 100644 index 00000000..9f0f112f --- /dev/null +++ b/Data/Server/WebUI/src/Devices/Agent_Devices.jsx @@ -0,0 +1,13 @@ +import React from "react"; +import DeviceList from "./Device_List.jsx"; + +export default function AgentDevices(props) { + return ( + + ); +} diff --git a/Data/Server/WebUI/src/Devices/Device_Approvals.jsx b/Data/Server/WebUI/src/Devices/Device_Approvals.jsx new file mode 100644 index 00000000..7f4c2c87 --- /dev/null +++ b/Data/Server/WebUI/src/Devices/Device_Approvals.jsx @@ -0,0 +1,505 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: 
/Data/Server/WebUI/src/Admin/Device_Approvals.jsx + +import React, { useCallback, useEffect, useMemo, useState } from "react"; +import { + Alert, + Box, + Button, + Chip, + CircularProgress, + Dialog, + DialogActions, + DialogContent, + DialogContentText, + DialogTitle, + FormControl, + IconButton, + InputLabel, + MenuItem, + Paper, + Select, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + TextField, + Tooltip, + Typography, +} from "@mui/material"; +import { + CheckCircleOutline as ApproveIcon, + HighlightOff as DenyIcon, + Refresh as RefreshIcon, + Security as SecurityIcon, +} from "@mui/icons-material"; + +const STATUS_OPTIONS = [ + { value: "all", label: "All" }, + { value: "pending", label: "Pending" }, + { value: "approved", label: "Approved" }, + { value: "completed", label: "Completed" }, + { value: "denied", label: "Denied" }, + { value: "expired", label: "Expired" }, +]; + +const statusChipColor = { + pending: "warning", + approved: "info", + completed: "success", + denied: "default", + expired: "default", +}; + +const formatDateTime = (value) => { + if (!value) return "—"; + const date = new Date(value); + if (Number.isNaN(date.getTime())) return value; + return date.toLocaleString(); +}; + +const formatFingerprint = (fp) => { + if (!fp) return "—"; + const normalized = fp.replace(/[^a-f0-9]/gi, "").toLowerCase(); + if (!normalized) return fp; + return normalized.match(/.{1,4}/g)?.join(" ") ?? 
normalized; +}; + +const normalizeStatus = (status) => { + if (!status) return "pending"; + if (status === "completed") return "completed"; + return status.toLowerCase(); +}; + +function DeviceApprovals() { + const [approvals, setApprovals] = useState([]); + const [statusFilter, setStatusFilter] = useState("all"); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(""); + const [feedback, setFeedback] = useState(null); + const [guidInputs, setGuidInputs] = useState({}); + const [actioningId, setActioningId] = useState(null); + const [conflictPrompt, setConflictPrompt] = useState(null); + + const loadApprovals = useCallback(async () => { + setLoading(true); + setError(""); + try { + const query = statusFilter === "all" ? "" : `?status=${encodeURIComponent(statusFilter)}`; + const resp = await fetch(`/api/admin/device-approvals${query}`, { credentials: "include" }); + if (!resp.ok) { + const body = await resp.json().catch(() => ({})); + throw new Error(body.error || `Request failed (${resp.status})`); + } + const data = await resp.json(); + setApprovals(Array.isArray(data.approvals) ? 
data.approvals : []); + } catch (err) { + setError(err.message || "Unable to load device approvals"); + } finally { + setLoading(false); + } + }, [statusFilter]); + + useEffect(() => { + loadApprovals(); + }, [loadApprovals]); + + const dedupedApprovals = useMemo(() => { + const normalized = approvals + .map((record) => ({ ...record, status: normalizeStatus(record.status) })) + .sort((a, b) => { + const left = new Date(a.created_at || 0).getTime(); + const right = new Date(b.created_at || 0).getTime(); + return left - right; + }); + if (statusFilter !== "pending") { + return normalized; + } + const seen = new Set(); + const unique = []; + for (const record of normalized) { + const key = record.ssl_key_fingerprint_claimed || record.hostname_claimed || record.id; + if (seen.has(key)) continue; + seen.add(key); + unique.push(record); + } + return unique; + }, [approvals, statusFilter]); + + const handleGuidChange = useCallback((id, value) => { + setGuidInputs((prev) => ({ ...prev, [id]: value })); + }, []); + + const submitApproval = useCallback( + async (record, overrides = {}) => { + if (!record?.id) return; + setActioningId(record.id); + setFeedback(null); + setError(""); + try { + const manualGuid = (guidInputs[record.id] || "").trim(); + const payload = {}; + const overrideGuidRaw = overrides.guid; + let overrideGuid = ""; + if (typeof overrideGuidRaw === "string") { + overrideGuid = overrideGuidRaw.trim(); + } else if (overrideGuidRaw != null) { + overrideGuid = String(overrideGuidRaw).trim(); + } + if (overrideGuid) { + payload.guid = overrideGuid; + } else if (manualGuid) { + payload.guid = manualGuid; + } + const resolutionRaw = overrides.conflictResolution || overrides.resolution; + if (typeof resolutionRaw === "string" && resolutionRaw.trim()) { + payload.conflict_resolution = resolutionRaw.trim().toLowerCase(); + } + const resp = await fetch(`/api/admin/device-approvals/${encodeURIComponent(record.id)}/approve`, { + method: "POST", + credentials: 
"include", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(Object.keys(payload).length ? payload : {}), + }); + const body = await resp.json().catch(() => ({})); + if (!resp.ok) { + if (resp.status === 409 && body.error === "conflict_resolution_required") { + const conflict = record.hostname_conflict; + const fallbackAlternate = + record.alternate_hostname || + (record.hostname_claimed ? `${record.hostname_claimed}-1` : ""); + if (conflict) { + setConflictPrompt({ + record, + conflict, + alternate: fallbackAlternate || "", + }); + } + return; + } + throw new Error(body.error || `Approval failed (${resp.status})`); + } + const appliedResolution = (body.conflict_resolution || payload.conflict_resolution || "").toLowerCase(); + let successMessage = "Enrollment approved"; + if (appliedResolution === "overwrite") { + successMessage = "Enrollment approved; existing device overwritten"; + } else if (appliedResolution === "coexist") { + successMessage = "Enrollment approved; devices will co-exist"; + } else if (appliedResolution === "auto_merge_fingerprint") { + successMessage = "Enrollment approved; device reconnected with its existing identity"; + } + setFeedback({ type: "success", message: successMessage }); + await loadApprovals(); + } catch (err) { + setFeedback({ type: "error", message: err.message || "Unable to approve request" }); + } finally { + setActioningId(null); + } + }, + [guidInputs, loadApprovals] + ); + + const startApprove = useCallback( + (record) => { + if (!record?.id) return; + const status = normalizeStatus(record.status); + if (status !== "pending") return; + const manualGuid = (guidInputs[record.id] || "").trim(); + const conflict = record.hostname_conflict; + const requiresPrompt = Boolean(conflict?.requires_prompt ?? record.conflict_requires_prompt); + if (requiresPrompt && !manualGuid) { + const fallbackAlternate = + record.alternate_hostname || + (record.hostname_claimed ? 
`${record.hostname_claimed}-1` : ""); + setConflictPrompt({ + record, + conflict, + alternate: fallbackAlternate || "", + }); + return; + } + submitApproval(record); + }, + [guidInputs, submitApproval] + ); + + const handleConflictCancel = useCallback(() => { + setConflictPrompt(null); + }, []); + + const handleConflictOverwrite = useCallback(() => { + if (!conflictPrompt?.record) { + setConflictPrompt(null); + return; + } + const { record, conflict } = conflictPrompt; + setConflictPrompt(null); + const conflictGuid = conflict?.guid != null ? String(conflict.guid).trim() : ""; + submitApproval(record, { + guid: conflictGuid, + conflictResolution: "overwrite", + }); + }, [conflictPrompt, submitApproval]); + + const handleConflictCoexist = useCallback(() => { + if (!conflictPrompt?.record) { + setConflictPrompt(null); + return; + } + const { record } = conflictPrompt; + setConflictPrompt(null); + submitApproval(record, { + conflictResolution: "coexist", + }); + }, [conflictPrompt, submitApproval]); + + const conflictRecord = conflictPrompt?.record; + const conflictInfo = conflictPrompt?.conflict; + const conflictHostname = conflictRecord?.hostname_claimed || conflictRecord?.hostname || ""; + const conflictSiteName = conflictInfo?.site_name || ""; + const conflictSiteDescriptor = conflictInfo + ? conflictSiteName + ? `under site ${conflictSiteName}` + : "under site (not assigned)" + : "under site (not assigned)"; + const conflictAlternate = + conflictPrompt?.alternate || + (conflictHostname ? 
`${conflictHostname}-1` : "hostname-1"); + const conflictGuidDisplay = conflictInfo?.guid || ""; + + const handleDeny = useCallback( + async (record) => { + if (!record?.id) return; + const confirmDeny = window.confirm("Deny this enrollment request?"); + if (!confirmDeny) return; + setActioningId(record.id); + setFeedback(null); + setError(""); + try { + const resp = await fetch(`/api/admin/device-approvals/${encodeURIComponent(record.id)}/deny`, { + method: "POST", + credentials: "include", + }); + if (!resp.ok) { + const body = await resp.json().catch(() => ({})); + throw new Error(body.error || `Deny failed (${resp.status})`); + } + setFeedback({ type: "success", message: "Enrollment denied" }); + await loadApprovals(); + } catch (err) { + setFeedback({ type: "error", message: err.message || "Unable to deny request" }); + } finally { + setActioningId(null); + } + }, + [loadApprovals] + ); + + return ( + + + + Device Approval Queue + + + + + + Status + + + + + + + {feedback ? ( + setFeedback(null)}> + {feedback.message} + + ) : null} + + {error ? ( + + {error} + + ) : null} + + + + + + Status + Hostname + Fingerprint + Enrollment Code + Created + Updated + Approved By + Actions + + + + {loading ? ( + + + + + Loading approvals… + + + + ) : dedupedApprovals.length === 0 ? ( + + + + No enrollment requests match this filter. + + + + ) : ( + dedupedApprovals.map((record) => { + const status = normalizeStatus(record.status); + const showActions = status === "pending"; + const guidValue = guidInputs[record.id] || ""; + const approverDisplay = record.approved_by_username || record.approved_by_user_id; + return ( + + + + + {record.hostname_claimed || "—"} + + {formatFingerprint(record.ssl_key_fingerprint_claimed)} + + + {record.enrollment_code_id || "—"} + + {formatDateTime(record.created_at)} + {formatDateTime(record.updated_at)} + {approverDisplay || "—"} + + {showActions ? 
( + + handleGuidChange(record.id, event.target.value)} + sx={{ minWidth: 200 }} + /> + + + + startApprove(record)} + disabled={actioningId === record.id} + > + {actioningId === record.id ? ( + + ) : ( + + )} + + + + + + handleDeny(record)} + disabled={actioningId === record.id} + > + + + + + + + ) : ( + + No actions available + + )} + + + ); + }) + )} + +
+
+
+ + Hostname Conflict + + + + {conflictHostname + ? `Device ${conflictHostname} already exists in the database ${conflictSiteDescriptor}.` + : `A device with this hostname already exists in the database ${conflictSiteDescriptor}.`} + + + Do you want this device to overwrite the existing device, or allow both to co-exist? + + + {`Device will be renamed ${conflictAlternate} if you choose to allow both to co-exist.`} + + {conflictGuidDisplay ? ( + + Existing device GUID: {conflictGuidDisplay} + + ) : null} + + + + + + + + +
+ ); +} + +export default React.memo(DeviceApprovals); diff --git a/Data/Server/WebUI/src/Devices/Device_Details.jsx b/Data/Server/WebUI/src/Devices/Device_Details.jsx new file mode 100644 index 00000000..1190b42d --- /dev/null +++ b/Data/Server/WebUI/src/Devices/Device_Details.jsx @@ -0,0 +1,1383 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Device_Details.js + +import React, { useState, useEffect, useMemo, useCallback } from "react"; +import { + Paper, + Box, + Tabs, + Tab, + Typography, + Table, + TableHead, + TableRow, + TableCell, + TableBody, + Button, + IconButton, + Menu, + MenuItem, + LinearProgress, + TableSortLabel, + TextField, + Dialog, + DialogTitle, + DialogContent, + DialogActions +} from "@mui/material"; +import StorageRoundedIcon from "@mui/icons-material/StorageRounded"; +import MemoryRoundedIcon from "@mui/icons-material/MemoryRounded"; +import SpeedRoundedIcon from "@mui/icons-material/SpeedRounded"; +import DeveloperBoardRoundedIcon from "@mui/icons-material/DeveloperBoardRounded"; +import MoreHorizIcon from "@mui/icons-material/MoreHoriz"; +import { ClearDeviceActivityDialog } from "../Dialogs.jsx"; +import Prism from "prismjs"; +import "prismjs/components/prism-yaml"; +import "prismjs/components/prism-bash"; +import "prismjs/components/prism-powershell"; +import "prismjs/components/prism-batch"; +import "prismjs/themes/prism-okaidia.css"; +import Editor from "react-simple-code-editor"; +import QuickJob from "../Scheduling/Quick_Job.jsx"; + +export default function DeviceDetails({ device, onBack }) { + const [tab, setTab] = useState(0); + const [agent, setAgent] = useState(device || {}); + const [details, setDetails] = useState({}); + const [meta, setMeta] = useState({}); + const [softwareOrderBy, setSoftwareOrderBy] = useState("name"); + const [softwareOrder, setSoftwareOrder] = useState("asc"); + const [softwareSearch, setSoftwareSearch] = useState(""); + const [description, 
setDescription] = useState(""); + const [connectionType, setConnectionType] = useState(""); + const [connectionEndpoint, setConnectionEndpoint] = useState(""); + const [connectionDraft, setConnectionDraft] = useState(""); + const [connectionSaving, setConnectionSaving] = useState(false); + const [connectionMessage, setConnectionMessage] = useState(""); + const [connectionError, setConnectionError] = useState(""); + const [historyRows, setHistoryRows] = useState([]); + const [historyOrderBy, setHistoryOrderBy] = useState("ran_at"); + const [historyOrder, setHistoryOrder] = useState("desc"); + const [outputOpen, setOutputOpen] = useState(false); + const [outputTitle, setOutputTitle] = useState(""); + const [outputContent, setOutputContent] = useState(""); + const [outputLang, setOutputLang] = useState("powershell"); + const [quickJobOpen, setQuickJobOpen] = useState(false); + const [menuAnchor, setMenuAnchor] = useState(null); + const [clearDialogOpen, setClearDialogOpen] = useState(false); + const [assemblyNameMap, setAssemblyNameMap] = useState({}); + // Snapshotted status for the lifetime of this page + const [lockedStatus, setLockedStatus] = useState(() => { + // Prefer status provided by the device list row if available + if (device?.status) return device.status; + // Fallback: compute once from the provided lastSeen timestamp + const tsSec = device?.lastSeen; + if (!tsSec) return "Offline"; + const now = Date.now() / 1000; + return now - tsSec <= 300 ? "Online" : "Offline"; + }); + + useEffect(() => { + setConnectionError(""); + }, [connectionDraft]); + + useEffect(() => { + if (connectionType !== "ssh") { + setConnectionMessage(""); + setConnectionError(""); + } + }, [connectionType]); + + useEffect(() => { + let canceled = false; + const loadAssemblyNames = async () => { + const next = {}; + const storeName = (rawPath, rawName, prefix = "") => { + const name = typeof rawName === "string" ? 
rawName.trim() : ""; + if (!name) return; + const normalizedPath = String(rawPath || "") + .replace(/\\/g, "/") + .replace(/^\/+/, "") + .trim(); + const keys = new Set(); + if (normalizedPath) { + keys.add(normalizedPath); + if (prefix) { + const prefixed = `${prefix}/${normalizedPath}`.replace(/\/+/g, "/"); + keys.add(prefixed); + } + } + const base = normalizedPath ? normalizedPath.split("/").pop() || "" : ""; + if (base) { + keys.add(base); + const dot = base.lastIndexOf("."); + if (dot > 0) { + keys.add(base.slice(0, dot)); + } + } + keys.forEach((key) => { + if (key && !next[key]) { + next[key] = name; + } + }); + }; + const ingest = async (island, prefix = "") => { + try { + const resp = await fetch(`/api/assembly/list?island=${island}`); + if (!resp.ok) return; + const data = await resp.json(); + const items = Array.isArray(data.items) ? data.items : []; + items.forEach((item) => { + if (!item || typeof item !== "object") return; + const rel = item.rel_path || item.path || item.file_name || item.playbook_path || ""; + const label = (item.name || item.tab_name || item.display_name || item.file_name || "").trim(); + storeName(rel, label, prefix); + }); + } catch { + // ignore failures; map remains partial + } + }; + await ingest("scripts", "Scripts"); + await ingest("workflows", "Workflows"); + await ingest("ansible", "Ansible_Playbooks"); + if (!canceled) { + setAssemblyNameMap(next); + } + }; + loadAssemblyNames(); + return () => { + canceled = true; + }; + }, []); + + const statusFromHeartbeat = (tsSec, offlineAfter = 300) => { + if (!tsSec) return "Offline"; + const now = Date.now() / 1000; + return now - tsSec <= offlineAfter ? "Online" : "Offline"; + }; + + const statusColor = (s) => (s === "Online" ? "#00d18c" : "#ff4f4f"); + + const resolveAssemblyName = useCallback((scriptName, scriptPath) => { + const normalized = String(scriptPath || "").replace(/\\/g, "/").trim(); + const base = normalized ? 
normalized.split("/").pop() || "" : "";
    const baseNoExt = base && base.includes(".") ? base.slice(0, base.lastIndexOf(".")) : base;
    // Resolution order: full normalized path -> basename -> basename without
    // extension, then fall back to the raw script name / basename / path so
    // something displayable is always returned.
    return (
      assemblyNameMap[normalized] ||
      (base ? assemblyNameMap[base] : "") ||
      (baseNoExt ? assemblyNameMap[baseNoExt] : "") ||
      scriptName ||
      base ||
      scriptPath ||
      ""
    );
  }, [assemblyNameMap]);

  // Human-friendly "last seen" label: heartbeats newer than `offlineAfter`
  // seconds render as "Currently Online", otherwise a local
  // MM/DD/YYYY @ h:mm timestamp is produced from the epoch seconds.
  const formatLastSeen = (tsSec, offlineAfter = 120) => {
    if (!tsSec) return "unknown";
    const now = Date.now() / 1000;
    if (now - tsSec <= offlineAfter) return "Currently Online";
    const d = new Date(tsSec * 1000);
    const date = d.toLocaleDateString("en-US", {
      month: "2-digit",
      day: "2-digit",
      year: "numeric",
    });
    const time = d.toLocaleTimeString("en-US", {
      hour: "numeric",
      minute: "2-digit",
    });
    return `${date} @ ${time}`;
  };

  // Loads the full device record whenever the `device` prop changes.
  // Lookup strategy: try GUID endpoint first, fall back to hostname endpoint;
  // the agents list is fetched in parallel and merged in best-effort.
  useEffect(() => {
    if (device) {
      // Snapshot the status immediately so the UI has something before fetch.
      setLockedStatus(device.status || statusFromHeartbeat(device.lastSeen));
    }

    const guid = device?.agent_guid || device?.guid || device?.agentGuid || device?.summary?.agent_guid;
    const agentId = device?.agentId || device?.summary?.agent_id || device?.id;
    const hostname = device?.hostname || device?.summary?.hostname;
    if (!device || (!guid && !hostname)) return;

    const load = async () => {
      try {
        // Fired in parallel with the detail lookup; failures resolve to null.
        const agentsPromise = fetch("/api/agents").catch(() => null);
        let detailResponse = null;
        if (guid) {
          try {
            detailResponse = await fetch(`/api/devices/${encodeURIComponent(guid)}`);
          } catch (err) {
            detailResponse = null;
          }
        }
        // Hostname endpoint is the fallback when the GUID lookup failed.
        if ((!detailResponse || !detailResponse.ok) && hostname) {
          try {
            detailResponse = await fetch(`/api/device/details/${encodeURIComponent(hostname)}`);
          } catch (err) {
            detailResponse = null;
          }
        }
        if (!detailResponse || !detailResponse.ok) {
          throw new Error(`Failed to load device record (${detailResponse ? detailResponse.status : 'no response'})`);
        }

        const [agentsData, detailData] = await Promise.all([
          agentsPromise?.then((r) => (r ? r.json() : {})).catch(() => ({})),
          detailResponse.json(),
        ]);

        if (agentsData && agentId && agentsData[agentId]) {
          setAgent({ id: agentId, ...agentsData[agentId] });
        }

        // The summary may live at the top level or nested under `details`.
        const summary =
          detailData?.summary && typeof detailData.summary === "object"
            ? detailData.summary
            : (detailData?.details?.summary || {});
        const normalizedSummary = { ...(summary || {}) };
        if (detailData?.description) {
          normalizedSummary.description = detailData.description;
        }

        const connectionTypeValue =
          (normalizedSummary.connection_type ||
            normalizedSummary.remote_type ||
            "").toLowerCase();
        const connectionEndpointValue =
          normalizedSummary.connection_endpoint ||
          normalizedSummary.connection_address ||
          detailData?.connection_endpoint ||
          "";
        setConnectionType(connectionTypeValue);
        setConnectionEndpoint(connectionEndpointValue);
        setConnectionDraft(connectionEndpointValue);
        setConnectionMessage("");
        setConnectionError("");

        // Normalize each hardware section: accept top-level arrays, then the
        // nested `details.*` shape, else an empty array.
        const normalized = {
          summary: normalizedSummary,
          memory: Array.isArray(detailData?.memory)
            ? detailData.memory
            : Array.isArray(detailData?.details?.memory)
              ? detailData.details.memory
              : [],
          network: Array.isArray(detailData?.network)
            ? detailData.network
            : Array.isArray(detailData?.details?.network)
              ? detailData.details.network
              : [],
          software: Array.isArray(detailData?.software)
            ? detailData.software
            : Array.isArray(detailData?.details?.software)
              ? detailData.details.software
              : [],
          storage: Array.isArray(detailData?.storage)
            ? detailData.storage
            : Array.isArray(detailData?.details?.storage)
              ? detailData.details.storage
              : [],
          cpu: detailData?.cpu || detailData?.details?.cpu || {},
        };
        setDetails(normalized);

        // Formats a Date as "YYYY-MM-DD HH:MM:SS" in UTC; '' for invalid dates.
        const toYmdHms = (dateObj) => {
          if (!dateObj || Number.isNaN(dateObj.getTime())) return '';
          const pad = (v) => String(v).padStart(2, '0');
          return `${dateObj.getUTCFullYear()}-${pad(dateObj.getUTCMonth() + 1)}-${pad(dateObj.getUTCDate())} ${pad(dateObj.getUTCHours())}:${pad(dateObj.getUTCMinutes())}:${pad(dateObj.getUTCSeconds())}`;
        };

        let createdDisplay = normalizedSummary.created || '';
        if (!createdDisplay) {
          if (detailData?.created_at && Number(detailData.created_at)) {
            createdDisplay = toYmdHms(new Date(Number(detailData.created_at) * 1000));
          } else if (detailData?.created_at_iso) {
            createdDisplay = toYmdHms(new Date(detailData.created_at_iso));
          }
        }

        // Flattened metadata for the summary panel; each field prefers the
        // top-level record, then the summary, then values from props.
        const metaPayload = {
          hostname: detailData?.hostname || normalizedSummary.hostname || hostname || "",
          lastUser: detailData?.last_user || normalizedSummary.last_user || "",
          deviceType: detailData?.device_type || normalizedSummary.device_type || "",
          created: createdDisplay,
          createdAtIso: detailData?.created_at_iso || "",
          lastSeen: detailData?.last_seen || normalizedSummary.last_seen || 0,
          lastReboot: detailData?.last_reboot || normalizedSummary.last_reboot || "",
          operatingSystem:
            detailData?.operating_system || normalizedSummary.operating_system || normalizedSummary.agent_operating_system || "",
          agentId: detailData?.agent_id || normalizedSummary.agent_id || agentId || "",
          agentGuid: detailData?.agent_guid || normalizedSummary.agent_guid || guid || "",
          agentHash: detailData?.agent_hash || normalizedSummary.agent_hash || "",
          internalIp: detailData?.internal_ip || normalizedSummary.internal_ip || "",
          externalIp: detailData?.external_ip || normalizedSummary.external_ip || "",
          siteId: detailData?.site_id,
          siteName: detailData?.site_name || "",
          siteDescription: detailData?.site_description || "",
          status: detailData?.status || "",
          connectionType: connectionTypeValue,
          connectionEndpoint: connectionEndpointValue,
        };
        setMeta(metaPayload);
        setDescription(normalizedSummary.description || detailData?.description || "");

        // Merge fresh fields into the agent record without clobbering
        // anything already known.
        setAgent((prev) => ({
          ...(prev || {}),
          id: agentId || prev?.id,
          hostname: metaPayload.hostname || prev?.hostname,
          agent_hash: metaPayload.agentHash || prev?.agent_hash,
          agent_operating_system: metaPayload.operatingSystem || prev?.agent_operating_system,
          device_type: metaPayload.deviceType || prev?.device_type,
          last_seen: metaPayload.lastSeen || prev?.last_seen,
        }));

        if (metaPayload.status) {
          setLockedStatus(metaPayload.status);
        } else if (metaPayload.lastSeen) {
          setLockedStatus(statusFromHeartbeat(metaPayload.lastSeen));
        }
      } catch (e) {
        // Best-effort load: log and fall back to an empty meta object.
        console.warn("Failed to load device info", e);
        setMeta({});
      }
    };
    load();
  }, [device]);

  // Canonical hostname used for activity lookups, preferring fetched meta.
  const activityHostname = useMemo(() => {
    return (meta?.hostname || agent?.hostname || device?.hostname || "").trim();
  }, [meta?.hostname, agent?.hostname, device?.hostname]);

  // Persists an edited SSH endpoint for this device. No-ops for non-SSH
  // devices, missing hostname, or an unchanged address.
  const saveConnectionEndpoint = useCallback(async () => {
    if (connectionType !== "ssh") return;
    const host = activityHostname;
    if (!host) return;
    const trimmed = connectionDraft.trim();
    if (!trimmed) {
      setConnectionError("Address is required.");
      return;
    }
    if (trimmed === connectionEndpoint.trim()) {
      setConnectionMessage("No changes to save.");
      return;
    }
    setConnectionSaving(true);
    setConnectionError("");
    setConnectionMessage("");
    try {
      const resp = await fetch(`/api/ssh_devices/${encodeURIComponent(host)}`, {
        method: "PUT",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ address: trimmed })
      });
      const data = await resp.json().catch(() => ({}));
      if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`);
      // Prefer the server-normalized endpoint if it echoes one back.
      const updated = data?.device?.connection_endpoint || trimmed;
      setConnectionEndpoint(updated);
      setConnectionDraft(updated);
      setMeta((prev) => ({ ...(prev || {}), connectionEndpoint: updated }));
      setConnectionMessage("SSH endpoint updated.");
      // Success toast auto-clears after 3 seconds.
      setTimeout(() => setConnectionMessage(""), 3000);
    } catch (err) {
      setConnectionError(String(err.message || err));
    } finally {
      setConnectionSaving(false);
    }
  }, [connectionType, connectionDraft, connectionEndpoint, activityHostname]);

  // Fetches the activity history rows for this device; clears them on error.
  const loadHistory = useCallback(async () => {
    if (!activityHostname) return;
    try {
      const resp = await fetch(`/api/device/activity/${encodeURIComponent(activityHostname)}`);
      if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
      const data = await resp.json();
      setHistoryRows(data.history || []);
    } catch (e) {
      console.warn("Failed to load activity history", e);
      setHistoryRows([]);
    }
  }, [activityHostname]);

  useEffect(() => { loadHistory(); }, [loadHistory]);

  // Subscribes to live activity-change events over the shared socket and
  // debounces history refreshes for this device's hostname.
  useEffect(() => {
    const socket = typeof window !== "undefined" ? window.BorealisSocket : null;
    if (!socket || !activityHostname) return undefined;

    let refreshTimer = null;
    const normalizedHost = activityHostname.toLowerCase();
    const scheduleRefresh = (delay = 200) => {
      if (refreshTimer) clearTimeout(refreshTimer);
      refreshTimer = setTimeout(() => {
        refreshTimer = null;
        loadHistory();
      }, delay);
    };

    const handleActivityChanged = (payload = {}) => {
      const payloadHost = String(payload?.hostname || "").trim().toLowerCase();
      if (!payloadHost) return;
      if (payloadHost === normalizedHost) {
        // "updated" events get a slightly longer debounce than inserts/deletes.
        const delay = payload?.change === "updated" ?
150 : 0;
        scheduleRefresh(delay);
      }
    };

    socket.on("device_activity_changed", handleActivityChanged);

    // Cleanup: cancel any pending debounce and detach the socket listener.
    return () => {
      if (refreshTimer) clearTimeout(refreshTimer);
      socket.off("device_activity_changed", handleActivityChanged);
    };
  }, [activityHostname, loadHistory]);

  // No explicit live recap tab; recaps are recorded into Activity History

  // Deletes all activity history rows for this device on the server, then
  // empties the local table. Failures are logged and leave state untouched.
  const clearHistory = async () => {
    if (!activityHostname) return;
    try {
      const resp = await fetch(`/api/device/activity/${encodeURIComponent(activityHostname)}`, { method: "DELETE" });
      if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
      setHistoryRows([]);
    } catch (e) {
      console.warn("Failed to clear activity history", e);
    }
  };

  // Persists the free-form device description and mirrors it into local
  // details state so the summary panel updates immediately.
  const saveDescription = async () => {
    const targetHost = meta.hostname || details.summary?.hostname;
    if (!targetHost) return;
    try {
      await fetch(`/api/device/description/${targetHost}`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ description })
      });
      setDetails((d) => ({
        ...d,
        summary: { ...(d.summary || {}), description }
      }));
      setMeta((m) => ({ ...(m || {}), hostname: targetHost }));
    } catch (e) {
      console.warn("Failed to save description", e);
    }
  };

  // Converts a "YYYY-MM-DD HH:MM:SS" string to "MM/DD/YYYY @ h:mm AM/PM".
  // Returns the input unchanged if parsing fails, "unknown" for empty input.
  // NOTE(review): `ss` is destructured but unused — seconds are dropped from
  // the rendered value.
  const formatDateTime = (str) => {
    if (!str) return "unknown";
    try {
      const [datePart, timePart] = str.split(" ");
      const [y, m, d] = datePart.split("-").map(Number);
      let [hh, mm, ss] = timePart.split(":").map(Number);
      const ampm = hh >= 12 ? "PM" : "AM";
      hh = hh % 12 || 12;
      return `${m.toString().padStart(2, "0")}/${d.toString().padStart(2, "0")}/${y} @ ${hh}:${mm
        .toString()
        .padStart(2, "0")} ${ampm}`;
    } catch {
      return str;
    }
  };

  // Normalizes a MAC address to colon-separated upper case.
  const formatMac = (mac) => (mac ? mac.replace(/-/g, ":").toUpperCase() : "unknown");

  // Renders a byte count with a binary-1024 unit suffix (B..TB), one decimal.
  const formatBytes = (val) => {
    if (val === undefined || val === null || val === "unknown") return "unknown";
    let num = Number(val);
    const units = ["B", "KB", "MB", "GB", "TB"];
    let i = 0;
    while (num >= 1024 && i < units.length - 1) {
      num /= 1024;
      i++;
    }
    return `${num.toFixed(1)} ${units[i]}`;
  };

  // Formats epoch seconds as "MM/DD/YYYY @ h:mm AM/PM" in local time.
  const formatTimestamp = (epochSec) => {
    const ts = Number(epochSec || 0);
    if (!ts) return "unknown";
    const d = new Date(ts * 1000);
    const mm = String(d.getMonth() + 1).padStart(2, "0");
    const dd = String(d.getDate()).padStart(2, "0");
    const yyyy = d.getFullYear();
    let hh = d.getHours();
    const ampm = hh >= 12 ? "PM" : "AM";
    hh = hh % 12 || 12;
    const min = String(d.getMinutes()).padStart(2, "0");
    return `${mm}/${dd}/${yyyy} @ ${hh}:${min} ${ampm}`;
  };

  // Toggles sort direction when re-clicking the active column, otherwise
  // switches to the new column ascending.
  const handleSoftwareSort = (col) => {
    if (softwareOrderBy === col) {
      setSoftwareOrder(softwareOrder === "asc" ? "desc" : "asc");
    } else {
      setSoftwareOrderBy(col);
      setSoftwareOrder("asc");
    }
  };

  // Case-insensitive name filter plus string sort over the software list.
  const softwareRows = useMemo(() => {
    const rows = details.software || [];
    const filtered = rows.filter((s) =>
      s.name.toLowerCase().includes(softwareSearch.toLowerCase())
    );
    const dir = softwareOrder === "asc" ?
1 : -1; + return [...filtered].sort((a, b) => { + const A = a[softwareOrderBy] || ""; + const B = b[softwareOrderBy] || ""; + return String(A).localeCompare(String(B)) * dir; + }); + }, [details.software, softwareSearch, softwareOrderBy, softwareOrder]); + + const summary = details.summary || {}; + // Build a best-effort CPU display from summary fields + const cpuInfo = useMemo(() => { + const cpu = details.cpu || summary.cpu || {}; + const cores = cpu.logical_cores || cpu.cores || cpu.physical_cores; + let ghz = cpu.base_clock_ghz; + if (!ghz && typeof (summary.processor || '') === 'string') { + const m = String(summary.processor).match(/\(([^)]*?)ghz\)/i); + if (m && m[1]) { + const n = parseFloat(m[1]); + if (!Number.isNaN(n)) ghz = n; + } + } + const name = (cpu.name || '').trim(); + const fromProcessor = (summary.processor || '').trim(); + const display = fromProcessor || [name, ghz ? `(${Number(ghz).toFixed(1)}GHz)` : null, cores ? `@ ${cores} Cores` : null].filter(Boolean).join(' '); + return { cores, ghz, name, display }; + }, [summary]); + + const summaryItems = [ + { label: "Hostname", value: meta.hostname || summary.hostname || agent.hostname || device?.hostname || "unknown" }, + { + label: "Last User", + value: ( + + + {meta.lastUser || summary.last_user || 'unknown'} + + ) + }, + { label: "Device Type", value: meta.deviceType || summary.device_type || 'unknown' }, + { + label: "Created", + value: meta.created ? formatDateTime(meta.created) : summary.created ? formatDateTime(summary.created) : 'unknown' + }, + { + label: "Last Seen", + value: formatLastSeen(meta.lastSeen || agent.last_seen || device?.lastSeen) + }, + { + label: "Last Reboot", + value: meta.lastReboot ? formatDateTime(meta.lastReboot) : summary.last_reboot ? 
formatDateTime(summary.last_reboot) : 'unknown' + }, + { label: "Operating System", value: meta.operatingSystem || summary.operating_system || agent.agent_operating_system || 'unknown' }, + { label: "Agent ID", value: meta.agentId || summary.agent_id || 'unknown' }, + { label: "Agent GUID", value: meta.agentGuid || summary.agent_guid || 'unknown' }, + { label: "Agent Hash", value: meta.agentHash || summary.agent_hash || 'unknown' }, + ]; + + const MetricCard = ({ icon, title, main, sub, color }) => { + const edgeColor = color || '#232323'; + const parseHex = (hex) => { + const v = String(hex || '').replace('#', ''); + const n = parseInt(v.length === 3 ? v.split('').map(c => c + c).join('') : v, 16); + return { r: (n >> 16) & 255, g: (n >> 8) & 255, b: n & 255 }; + }; + const hexToRgba = (hex, alpha = 1) => { + try { const { r, g, b } = parseHex(hex); return `rgba(${r}, ${g}, ${b}, ${alpha})`; } catch { return `rgba(88,166,255, ${alpha})`; } + }; + const lightenToRgba = (hex, p = 0.5, alpha = 1) => { + try { + const { r, g, b } = parseHex(hex); + const mix = (c) => Math.round(c + (255 - c) * p); + const R = mix(r), G = mix(g), B = mix(b); + return `rgba(${R}, ${G}, ${B}, ${alpha})`; + } catch { return hexToRgba('#58a6ff', alpha); } + }; + return ( + + + {icon} + {title} + + {main} + + + {sub ? {sub} : null} + + ); + }; + + const Island = ({ title, children, sx }) => ( + + {title} + {children} + + ); + + const renderSummary = () => { + // Derive metric values + // CPU tile: model as main, speed as sub (like screenshot) + const cpuMain = (cpuInfo.name || (summary.processor || '') || '').split('\n')[0] || 'Unknown CPU'; + const cpuSub = cpuInfo.ghz || cpuInfo.cores + ? ( + + {cpuInfo.ghz ? `${Number(cpuInfo.ghz).toFixed(2)}GHz ` : ''} + {cpuInfo.cores ? 
({cpuInfo.cores}-Cores) : null} + + ) + : ''; + + // MEMORY: total RAM + let totalRam = summary.total_ram; + if (!totalRam && Array.isArray(details.memory)) { + try { totalRam = details.memory.reduce((a, m) => a + (Number(m.capacity || 0) || 0), 0); } catch {} + } + const memVal = totalRam ? `${formatBytes(totalRam)}` : 'Unknown'; + // RAM speed best-effort: use max speed among modules + let memSpeed = ''; + try { + const speeds = (details.memory || []) + .map(m => parseInt(String(m.speed || '').replace(/[^0-9]/g, ''), 10)) + .filter(v => !Number.isNaN(v) && v > 0); + if (speeds.length) memSpeed = `Speed: ${Math.max(...speeds)} MT/s`; + } catch {} + + // STORAGE: OS drive (Windows C: if available) + let osDrive = null; + if (Array.isArray(details.storage)) { + osDrive = details.storage.find((d) => String(d.drive || '').toUpperCase().startsWith('C:')) || details.storage[0] || null; + } + const storageMain = osDrive && osDrive.total != null ? `${formatBytes(osDrive.total)}` : 'Unknown'; + const storageSub = (osDrive && osDrive.used != null && osDrive.total != null) + ? `${formatBytes(osDrive.used)} of ${formatBytes(osDrive.total)} used` + : (osDrive && osDrive.free != null && osDrive.total != null) + ? 
`${formatBytes(osDrive.total - osDrive.free)} of ${formatBytes(osDrive.total)} used` + : ''; + + // NETWORK: Speed of adapter with internal IP or first + const primaryIp = (summary.internal_ip || '').trim(); + let nic = null; + if (Array.isArray(details.network)) { + nic = details.network.find((n) => (n.ips || []).includes(primaryIp)) || details.network[0] || null; + } + function normalizeSpeed(val) { + const s = String(val || '').trim(); + if (!s) return 'unknown'; + const low = s.toLowerCase(); + if (low.includes('gbps') || low.includes('mbps')) return s; + const m = low.match(/(\d+\.?\d*)\s*([gmk]?)(bps)/); + if (!m) return s; + let num = parseFloat(m[1]); + const unit = m[2]; + if (unit === 'g') return `${num} Gbps`; + if (unit === 'm') return `${num} Mbps`; + if (unit === 'k') return `${(num/1000).toFixed(1)} Mbps`; + // raw bps + if (num >= 1e9) return `${(num/1e9).toFixed(1)} Gbps`; + if (num >= 1e6) return `${(num/1e6).toFixed(0)} Mbps`; + return s; + } + const netVal = nic ? normalizeSpeed(nic.link_speed || nic.speed) : 'Unknown'; + + return ( + + {/* Metrics row at the very top */} + + } + title="Processor" + main={cpuMain} + sub={cpuSub} + color="#132332" + /> + } + title="Installed RAM" + main={memVal} + sub={memSpeed || ' '} + color="#291a2e" + /> + } + title="Storage" + main={storageMain} + sub={storageSub || ' '} + color="#142616" + /> + } + title="Network" + main={netVal} + sub={(nic && nic.adapter) ? 
nic.adapter : ' '} + color="#2b1a18" + /> + + {/* Split pane: three-column layout (Summary | Storage | Memory/Network) */} + + {/* Left column: Summary table */} + + + + + + Description + + setDescription(e.target.value)} + onBlur={saveDescription} + placeholder="Enter description" + sx={{ + input: { color: '#fff' }, + '& .MuiOutlinedInput-root': { + '& fieldset': { borderColor: '#555' }, + '&:hover fieldset': { borderColor: '#888' } + } + }} + /> + + + {connectionType === "ssh" && ( + + SSH Endpoint + + + setConnectionDraft(e.target.value)} + placeholder="user@host or host" + sx={{ + maxWidth: 300, + input: { color: '#fff' }, + '& .MuiOutlinedInput-root': { + '& fieldset': { borderColor: '#555' }, + '&:hover fieldset': { borderColor: '#888' } + } + }} + /> + + + {connectionMessage && ( + {connectionMessage} + )} + {connectionError && ( + {connectionError} + )} + + + )} + {summaryItems.map((item) => ( + + {item.label} + {item.value} + + ))} + +
+
+
+ + {/* Middle column: Storage */} + {renderStorage()} + + {/* Right column: Memory + Network */} + + {renderMemory()} + {renderNetwork()} + +
+
+ ); + }; + + const placeholderTable = (headers) => ( + + + + + {headers.map((h) => ( + {h} + ))} + + + + + + No data available. + + + +
+
+ ); + + const renderSoftware = () => { + if (!softwareRows.length) + return placeholderTable(["Software Name", "Version", "Action"]); + + return ( + + + setSoftwareSearch(e.target.value)} + sx={{ + input: { color: "#fff" }, + "& .MuiOutlinedInput-root": { + "& fieldset": { borderColor: "#555" }, + "&:hover fieldset": { borderColor: "#888" } + } + }} + /> + + {/* Constrain the table height within the page and enable scrolling */} + + + + + + handleSoftwareSort("name")} + > + Software Name + + + + handleSoftwareSort("version")} + > + Version + + + Action + + + + {softwareRows.map((s, i) => ( + + {s.name} + {s.version} + + + ))} + +
+
+
+ ); + }; + + const renderMemory = () => { + const rows = details.memory || []; + if (!rows.length) return placeholderTable(["Slot", "Speed", "Serial Number", "Capacity"]); + return ( + + + + + Slot + Speed + Serial Number + Capacity + + + + {rows.map((m, i) => ( + + {m.slot} + {m.speed} + {m.serial} + {formatBytes(m.capacity)} + + ))} + +
+
+ ); + }; + + const renderStorage = () => { + const toNum = (val) => { + if (val === undefined || val === null) return undefined; + if (typeof val === "number") { + return Number.isNaN(val) ? undefined : val; + } + const n = parseFloat(String(val).replace(/[^0-9.]+/g, "")); + return Number.isNaN(n) ? undefined : n; + }; + + const rows = (details.storage || []).map((d) => { + const total = toNum(d.total); + let usagePct = toNum(d.usage); + let usedBytes = toNum(d.used); + let freeBytes = toNum(d.free); + let freePct; + + if (usagePct !== undefined) { + if (usagePct <= 1) usagePct *= 100; + freePct = 100 - usagePct; + } + + if (usedBytes === undefined && total !== undefined && usagePct !== undefined) { + usedBytes = (usagePct / 100) * total; + } + + if (freeBytes === undefined && total !== undefined && usedBytes !== undefined) { + freeBytes = total - usedBytes; + } + + if (freePct === undefined && total !== undefined && freeBytes !== undefined) { + freePct = (freeBytes / total) * 100; + } + + if (usagePct === undefined && freePct !== undefined) { + usagePct = 100 - freePct; + } + + return { + drive: d.drive, + disk_type: d.disk_type, + used: usedBytes, + freePct, + freeBytes, + total, + usage: usagePct, + }; + }); + + if (!rows.length) { + return placeholderTable(["Drive", "Type", "Capacity"]); + } + + const fmtPct = (v) => (v !== undefined && !Number.isNaN(v) ? `${v.toFixed(0)}%` : "unknown"); + + return ( + + {rows.map((d, i) => { + const usage = d.usage ?? (d.total ? ((d.used || 0) / d.total) * 100 : 0); + const used = d.used; + const free = d.freeBytes; + const total = d.total; + return ( + + + + + {`Drive ${String(d.drive || '').replace('\\', '')}`} + {d.disk_type || 'Fixed Disk'} + + {total !== undefined ? formatBytes(total) : 'unknown'} + + + + + + + {used !== undefined ? `${formatBytes(used)} - ${fmtPct(usage)} in use` : 'unknown'} + + + {free !== undefined && total !== undefined ? 
`${formatBytes(free)} - ${fmtPct(100 - (usage || 0))} remaining` : ''} + + + + ); + })} + + ); + }; + + const renderNetwork = () => { + const rows = details.network || []; + const internalIp = meta.internalIp || summary.internal_ip || "unknown"; + const externalIp = meta.externalIp || summary.external_ip || "unknown"; + const ipHeader = ( + + + Internal IP: {internalIp || 'unknown'} + + + External IP: {externalIp || 'unknown'} + + + ); + if (!rows.length) { + return ( + + {ipHeader} + {placeholderTable(["Adapter", "IP Address", "MAC Address"])} + + ); + } + return ( + + {ipHeader} + + + + Adapter + IP Address + MAC Address + + + + {rows.map((n, i) => ( + + {n.adapter} + {(n.ips || []).join(", ")} + {formatMac(n.mac)} + + ))} + +
+
  );
  };

  // Maps a job status string to its display color (case-insensitive).
  const jobStatusColor = (s) => {
    const val = String(s || "").toLowerCase();
    if (val === "running") return "#58a6ff"; // borealis blue
    if (val === "success") return "#00d18c";
    if (val === "failed") return "#ff4f4f";
    return "#666";
  };

  // Syntax-highlights code via Prism, falling back to markup grammar for an
  // unknown language and to the raw string if highlighting throws.
  const highlightCode = (code, lang) => {
    try {
      return Prism.highlight(code ?? "", Prism.languages[lang] || Prism.languages.markup, lang);
    } catch {
      return String(code || "");
    }
  };

  // Fetches a job record and opens the output dialog showing either its
  // stdout or stderr (`which` selects the stream). Language for highlighting
  // is inferred from the script path extension, defaulting to PowerShell.
  const handleViewOutput = useCallback(async (row, which) => {
    if (!row || !row.id) return;
    try {
      const resp = await fetch(`/api/device/activity/job/${row.id}`);
      if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
      const data = await resp.json();
      const lang = ((data.script_path || "").toLowerCase().endsWith(".ps1")) ? "powershell"
        : ((data.script_path || "").toLowerCase().endsWith(".bat")) ? "batch"
        : ((data.script_path || "").toLowerCase().endsWith(".sh")) ? "bash"
        : ((data.script_path || "").toLowerCase().endsWith(".yml")) ? "yaml" : "powershell";
      setOutputLang(lang);
      const friendly = resolveAssemblyName(data.script_name, data.script_path);
      setOutputTitle(`${which === 'stderr' ? 'StdErr' : 'StdOut'} - ${friendly}`);
      setOutputContent(which === 'stderr' ? (data.stderr || "") : (data.stdout || ""));
      setOutputOpen(true);
    } catch (e) {
      console.warn("Failed to load output", e);
    }
  }, [resolveAssemblyName]);

  // Toggles direction on the active history column, or sorts a new column
  // ascending.
  const handleHistorySort = (col) => {
    if (historyOrderBy === col) setHistoryOrder(historyOrder === "asc" ? "desc" : "asc");
    else {
      setHistoryOrderBy(col);
      setHistoryOrder("asc");
    }
  };

  // History rows augmented with the resolved friendly assembly name.
  const historyDisplayRows = useMemo(() => {
    return (historyRows || []).map((row) => ({
      ...row,
      script_display_name: resolveAssemblyName(row.script_name, row.script_path),
    }));
  }, [historyRows, resolveAssemblyName]);

  // Sorted view of the history: numeric compare for the ran_at timestamp,
  // locale string compare otherwise. Sorting by "script_name" actually keys
  // on the resolved display name.
  const sortedHistory = useMemo(() => {
    const dir = historyOrder === "asc" ? 1 : -1;
    const key = historyOrderBy === "script_name" ?
"script_display_name" : historyOrderBy; + return [...historyDisplayRows].sort((a, b) => { + const A = a[key]; + const B = b[key]; + if (key === "ran_at") return ((A || 0) - (B || 0)) * dir; + return String(A ?? "").localeCompare(String(B ?? "")) * dir; + }); + }, [historyDisplayRows, historyOrderBy, historyOrder]); + + const renderHistory = () => ( + + + + + Assembly + + handleHistorySort("script_name")}> + Task + + + + handleHistorySort("ran_at")} + > + Ran On + + + + handleHistorySort("status")} + > + Job Status + + + + StdOut / StdErr + + + + + {sortedHistory.map((r) => ( + + {(r.script_type || '').toLowerCase() === 'ansible' ? 'Ansible Playbook' : 'Script'} + {r.script_display_name || r.script_name} + {formatTimestamp(r.ran_at)} + + + {r.status} + + + + + {(String(r.script_type || '').toLowerCase() === 'ansible' && String(r.status||'') === 'Running') ? ( + + ) : null} + {r.has_stdout ? ( + + ) : null} + {r.has_stderr ? ( + + ) : null} + + + + ))} + {sortedHistory.length === 0 && ( + No activity yet. + )} + +
+
+ ); + + + + const tabs = [ + { label: "Summary", content: renderSummary() }, + { label: "Installed Software", content: renderSoftware() }, + { label: "Activity History", content: renderHistory() } + ]; + // Use the snapshotted status so it stays static while on this page + const status = lockedStatus || statusFromHeartbeat(agent.last_seen || device?.lastSeen); + + return ( + + + + {onBack && ( + + )} + + + {agent.hostname || "Device Details"} + + + + setMenuAnchor(e.currentTarget)} + sx={{ + color: !(agent?.hostname || device?.hostname) ? "#666" : "#58a6ff", + borderColor: !(agent?.hostname || device?.hostname) ? "#333" : "#58a6ff", + border: "1px solid", + borderRadius: 1, + width: 32, + height: 32 + }} + > + + + setMenuAnchor(null)} + > + { + setMenuAnchor(null); + setQuickJobOpen(true); + }} + > + Quick Job + + { + setMenuAnchor(null); + setClearDialogOpen(true); + }} + > + Clear Device Activity + + + + + setTab(v)} + sx={{ borderBottom: 1, borderColor: "#333" }} + > + {tabs.map((t) => ( + + ))} + + {tabs[tab].content} + + setOutputOpen(false)} fullWidth maxWidth="md" + PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }} + > + {outputTitle} + + + {}} + highlight={(code) => highlightCode(code, outputLang)} + padding={12} + style={{ + fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace', + fontSize: 12, + color: "#e6edf3", + minHeight: 200 + }} + textareaProps={{ readOnly: true }} + /> + + + + + + + + {/* Recap dialog removed; recaps flow into Activity History stdout */} + + setClearDialogOpen(false)} + onConfirm={() => { + clearHistory(); + setClearDialogOpen(false); + }} + /> + + {quickJobOpen && ( + setQuickJobOpen(false)} + hostnames={[agent?.hostname || device?.hostname].filter(Boolean)} + /> + )} + + ); + } diff --git a/Data/Server/WebUI/src/Devices/Device_List.jsx b/Data/Server/WebUI/src/Devices/Device_List.jsx new file mode 100644 index 00000000..e61164cb --- /dev/null +++ 
b/Data/Server/WebUI/src/Devices/Device_List.jsx
@@ -0,0 +1,1832 @@
////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Device_List.jsx

import React, { useState, useEffect, useCallback, useMemo, useRef } from "react";
import {
  Paper,
  Box,
  Typography,
  Button,
  IconButton,
  Menu,
  MenuItem,
  Popover,
  TextField,
  Tooltip,
  Checkbox,
} from "@mui/material";
import MoreVertIcon from "@mui/icons-material/MoreVert";
import ViewColumnIcon from "@mui/icons-material/ViewColumn";
import AddIcon from "@mui/icons-material/Add";
import CachedIcon from "@mui/icons-material/Cached";
import { AgGridReact } from "ag-grid-react";
import { ModuleRegistry, AllCommunityModule, themeQuartz } from "ag-grid-community";
import { DeleteDeviceDialog, CreateCustomViewDialog, RenameCustomViewDialog } from "../Dialogs.jsx";
import QuickJob from "../Scheduling/Quick_Job.jsx";
import AddDevice from "./Add_Device.jsx";

// Register the community feature set once at module load.
ModuleRegistry.registerModules([AllCommunityModule]);

// Dark AG Grid theme shared by every device grid in this module.
const myTheme = themeQuartz.withParams({
  accentColor: "#FFA6FF",
  backgroundColor: "#1f2836",
  browserColorScheme: "dark",
  chromeBackgroundColor: {
    ref: "foregroundColor",
    mix: 0.07,
    onto: "backgroundColor",
  },
  fontFamily: {
    googleFont: "IBM Plex Sans",
  },
  foregroundColor: "#FFF",
  headerFontSize: 14,
});

const themeClassName = myTheme.themeName || "ag-theme-quartz";
const gridFontFamily = '"IBM Plex Sans", "Helvetica Neue", Arial, sans-serif';
const iconFontFamily = '"Quartz Regular"';

// Maps an OS name to a Font Awesome brand icon class, or "" when no brand
// matches. NOTE: the macOS check must run before the Windows check because
// "darwin" contains the substring "win".
const getOsIconClass = (osName) => {
  const value = (osName || "").toString().toLowerCase();
  if (!value) return "";

  if (value.includes("mac") || value.includes("os x") || value.includes("darwin")) {
    return "fa-brands fa-apple";
  }

  if (value.includes("win")) {
    return "fa-brands fa-windows";
  }

  if (
    value.includes("linux") ||
    value.includes("ubuntu") ||
    value.includes("debian") ||
    value.includes("fedora") ||
    value.includes("red hat") ||
    value.includes("centos") ||
    value.includes("suse") ||
    value.includes("rhel")
  ) {
    return "fa-brands fa-linux";
  }

  return "";
};

// Inline-editable description cell for the device grid. Enter commits the
// draft via `onSaveDescription(data, trimmed)` (async, returns truthiness for
// success); Escape or blur reverts to the grid-provided value.
const DescriptionCellRenderer = React.memo(function DescriptionCellRenderer(props) {
  const { value, data, onSaveDescription, fontFamily } = props;
  // Coerce any non-string cell value (null/undefined/number) to a string.
  const safeValue = typeof value === "string" ? value : value == null ? "" : String(value);
  const [draft, setDraft] = useState(safeValue);
  const [editing, setEditing] = useState(false);
  const [saving, setSaving] = useState(false);
  const [error, setError] = useState("");

  // Keep the draft in sync with grid updates unless the user is mid-edit.
  useEffect(() => {
    if (!editing && !saving) {
      setDraft(safeValue);
    }
  }, [safeValue, editing, saving]);

  const handleFocus = useCallback((event) => {
    event.stopPropagation();
    setEditing(true);
    setError("");
  }, []);

  const handleChange = useCallback((event) => {
    setDraft(event.target.value);
  }, []);

  const handleKeyDown = useCallback(
    async (event) => {
      // Keep keystrokes from reaching AG Grid's own keyboard handling.
      event.stopPropagation();
      if (event.key === "Enter") {
        event.preventDefault();
        const trimmed = (draft || "").trim();
        if (trimmed === safeValue.trim()) {
          // No-op edit: close the editor without saving.
          setEditing(false);
          setDraft(safeValue);
          setError("");
          return;
        }
        if (typeof onSaveDescription !== "function" || !data) {
          setEditing(false);
          setError("");
          return;
        }
        setSaving(true);
        setError("");
        const ok = await onSaveDescription(data, trimmed);
        setSaving(false);
        if (ok) {
          setEditing(false);
        } else {
          setError("Failed to save description");
        }
      } else if (event.key === "Escape") {
        event.preventDefault();
        setDraft(safeValue);
        setEditing(false);
        setError("");
      }
    },
    [data, draft, onSaveDescription, safeValue]
  );

  // Blur discards the draft (unless a save is in flight).
  const handleBlur = useCallback(
    (event) => {
      event.stopPropagation();
      if (saving) return;
      setEditing(false);
      setDraft(safeValue);
      setError("");
    },
    [saving, safeValue]
  );

  const stopPropagation = useCallback((event) => {
    event.stopPropagation();
  },
[]); + + const backgroundColor = saving + ? "rgba(255,255,255,0.04)" + : editing + ? "rgba(255,255,255,0.16)" + : "rgba(255,255,255,0.02)"; + + return ( + + ); +}); + +function formatLastSeen(tsSec, offlineAfter = 300) { + if (!tsSec) return "unknown"; + const now = Date.now() / 1000; + if (now - tsSec <= offlineAfter) return "Currently Online"; + const d = new Date(tsSec * 1000); + const date = d.toLocaleDateString("en-US", { + month: "2-digit", + day: "2-digit", + year: "numeric", + }); + const time = d.toLocaleTimeString("en-US", { + hour: "numeric", + minute: "2-digit", + }); + return `${date} @ ${time}`; +} + +function statusFromHeartbeat(tsSec, offlineAfter = 300) { + if (!tsSec) return "Offline"; + const now = Date.now() / 1000; + return now - tsSec <= offlineAfter ? "Online" : "Offline"; +} + +function formatUptime(seconds) { + const total = Number(seconds); + if (!Number.isFinite(total) || total <= 0) return ""; + const parts = []; + const days = Math.floor(total / 86400); + if (days) parts.push(`${days}d`); + const hours = Math.floor((total % 86400) / 3600); + if (hours) parts.push(`${hours}h`); + const minutes = Math.floor((total % 3600) / 60); + if (minutes) parts.push(`${minutes}m`); + const secondsPart = Math.floor(total % 60); + if (!parts.length && secondsPart) parts.push(`${secondsPart}s`); + return parts.join(' '); +} + +export default function DeviceList({ + onSelectDevice, + filterMode = "all", + title, + showAddButton, + addButtonLabel, + defaultAddType, +}) { + const [rows, setRows] = useState([]); + const [menuAnchor, setMenuAnchor] = useState(null); + const [selected, setSelected] = useState(null); + const [confirmOpen, setConfirmOpen] = useState(false); + // Track selection by agent id to avoid duplicate hostname collisions + const [selectedIds, setSelectedIds] = useState(() => new Set()); + const [quickJobOpen, setQuickJobOpen] = useState(false); + const [addDeviceOpen, setAddDeviceOpen] = useState(false); + const [addDeviceType, 
setAddDeviceType] = useState(null); + const computedTitle = useMemo(() => { + if (title) return title; + switch (filterMode) { + case "agent": + return "Agent Devices"; + case "ssh": + return "SSH Devices"; + case "winrm": + return "WinRM Devices"; + default: + return "Device Inventory"; + } + }, [filterMode, title]); + const derivedDefaultType = useMemo(() => { + if (defaultAddType !== undefined) return defaultAddType; + if (filterMode === "ssh" || filterMode === "winrm") return filterMode; + return null; + }, [defaultAddType, filterMode]); + const derivedAddLabel = useMemo(() => { + if (addButtonLabel) return addButtonLabel; + if (filterMode === "ssh") return "Add SSH Device"; + if (filterMode === "winrm") return "Add WinRM Device"; + return "Add Device"; + }, [addButtonLabel, filterMode]); + const derivedShowAddButton = useMemo(() => { + if (typeof showAddButton === "boolean") return showAddButton; + return filterMode !== "agent"; + }, [showAddButton, filterMode]); + + // Saved custom views (from server) + const [views, setViews] = useState([]); // [{id, name, columns:[id], filters:{}}] + const [selectedViewId, setSelectedViewId] = useState("default"); + const [createDialogOpen, setCreateDialogOpen] = useState(false); + const [newViewName, setNewViewName] = useState(""); + const [renameDialogOpen, setRenameDialogOpen] = useState(false); + const [renameViewName, setRenameViewName] = useState(""); + const [renameTarget, setRenameTarget] = useState(null); // {id, name} + const [viewActionAnchor, setViewActionAnchor] = useState(null); // anchor for per-item actions + const [viewActionTarget, setViewActionTarget] = useState(null); // view object for actions + + // Column configuration and rearranging state + const COL_LABELS = useMemo( + () => ({ + status: "Status", + agentVersion: "Agent Version", + site: "Site", + hostname: "Hostname", + description: "Description", + lastUser: "Last User", + type: "Type", + os: "OS", + internalIp: "Internal IP", + externalIp: 
"External IP", + lastReboot: "Last Reboot", + created: "Created", + lastSeen: "Last Seen", + agentId: "Agent ID", + agentHash: "Agent Hash", + agentGuid: "Agent GUID", + domain: "Domain", + uptime: "Uptime", + memory: "Memory", + network: "Network", + software: "Software", + storage: "Storage", + cpu: "CPU", + siteDescription: "Site Description", + }), + [] + ); + + const defaultColumns = useMemo( + () => [ + { id: "status", label: COL_LABELS.status }, + { id: "agentVersion", label: COL_LABELS.agentVersion }, + { id: "site", label: COL_LABELS.site }, + { id: "hostname", label: COL_LABELS.hostname }, + { id: "description", label: COL_LABELS.description }, + { id: "lastUser", label: COL_LABELS.lastUser }, + { id: "type", label: COL_LABELS.type }, + { id: "os", label: COL_LABELS.os }, + ], + [COL_LABELS] + ); + const [columns, setColumns] = useState(defaultColumns); + const [colChooserAnchor, setColChooserAnchor] = useState(null); + const gridRef = useRef(null); + + // Per-column filters + const [filtersState, setFiltersState] = useState({}); + + const sanitizeFilterModel = useCallback((raw) => { + if (!raw || typeof raw !== "object") return {}; + const sanitized = {}; + Object.entries(raw).forEach(([key, value]) => { + if (typeof value === "string") { + const trimmed = value.trim(); + if (trimmed) { + sanitized[key] = { + filterType: "text", + type: "contains", + filter: trimmed, + }; + } + return; + } + if (!value || typeof value !== "object") return; + const clone = JSON.parse(JSON.stringify(value)); + if (!clone.filterType) clone.filterType = "text"; + if (clone.filterType === "text") { + if (typeof clone.filter === "string") { + clone.filter = clone.filter.trim(); + } + if (Array.isArray(clone.conditions)) { + clone.conditions = clone.conditions + .map((condition) => { + if (!condition || typeof condition !== "object") return null; + const condClone = { ...condition }; + if (typeof condClone.filter === "string") { + condClone.filter = condClone.filter.trim(); + } 
+ if ( + !condClone.filter && + !["blank", "notBlank"].includes(condClone.type ?? "") + ) { + return null; + } + return condClone; + }) + .filter(Boolean); + if (!clone.conditions.length) { + delete clone.conditions; + } + } + if ( + !clone.filter && + !clone.conditions && + !["blank", "notBlank"].includes(clone.type ?? "") + ) { + return; + } + } + sanitized[key] = clone; + }); + return sanitized; + }, []); + + const filterModelsEqual = useCallback( + (a, b) => JSON.stringify(a ?? {}) === JSON.stringify(b ?? {}), + [] + ); + + const replaceFilters = useCallback( + (raw) => { + const sanitized = + raw && typeof raw === "object" ? sanitizeFilterModel(raw) : {}; + setFiltersState((prev) => + filterModelsEqual(prev, sanitized) ? prev : sanitized + ); + }, + [filterModelsEqual, sanitizeFilterModel] + ); + + const mergeFilters = useCallback( + (raw) => { + if (!raw || typeof raw !== "object") return; + const sanitized = sanitizeFilterModel(raw); + if (!Object.keys(sanitized).length) return; + setFiltersState((prev) => { + const base = prev || {}; + const next = { ...base }; + let changed = false; + Object.entries(sanitized).forEach(([key, value]) => { + if (!value) return; + if (!next[key] || !filterModelsEqual(next[key], value)) { + next[key] = value; + changed = true; + } + }); + return changed ? 
next : base; + }); + }, + [filterModelsEqual, sanitizeFilterModel] + ); + + const filters = filtersState; + + const [sites, setSites] = useState([]); // sites list for assignment + const [assignDialogOpen, setAssignDialogOpen] = useState(false); + const [assignSiteId, setAssignSiteId] = useState(null); + const [assignTargets, setAssignTargets] = useState([]); // hostnames + + const [repoHash, setRepoHash] = useState(null); + const lastRepoFetchRef = useRef(0); + + const gridWrapperClass = themeClassName; + + const fetchLatestRepoHash = useCallback(async (options = {}) => { + const { force = false } = options || {}; + const now = Date.now(); + const elapsed = now - lastRepoFetchRef.current; + if (!force && repoHash && elapsed >= 0 && elapsed < 60_000) { + return repoHash; + } + try { + const params = new URLSearchParams({ repo: "bunny-lab-io/Borealis", branch: "main" }); + if (force) { + params.set("refresh", "1"); + } + const resp = await fetch(`/api/repo/current_hash?${params.toString()}`); + const json = await resp.json(); + const sha = (json?.sha || "").trim(); + if (!resp.ok || !sha) { + const err = new Error(`Latest hash status ${resp.status}${json?.error ? ` - ${json.error}` : ""}`); + err.response = json; + throw err; + } + lastRepoFetchRef.current = now; + setRepoHash((prev) => (sha ? sha : prev || null)); + return sha || null; + } catch (err) { + console.warn("Failed to fetch repository hash", err); + if (!force && repoHash) { + return repoHash; + } + lastRepoFetchRef.current = now; + setRepoHash((prev) => prev || null); + return null; + } + }, [repoHash]); + + const computeAgentVersion = useCallback((agentHashValue, repoHashValue) => { + const agentHash = (agentHashValue || "").trim(); + const repo = (repoHashValue || "").trim(); + if (!repo) return agentHash ? "Unknown" : "Unknown"; + if (!agentHash) return "Needs Updated"; + return agentHash === repo ? 
"Up-to-Date" : "Needs Updated"; + }, []); + + const fetchDevices = useCallback(async (options = {}) => { + const { refreshRepo = false } = options || {}; + let repoSha = repoHash; + if (refreshRepo || !repoSha) { + const fetched = await fetchLatestRepoHash({ force: refreshRepo }); + if (fetched) repoSha = fetched; + } + + const hashById = new Map(); + const hashByGuid = new Map(); + const hashByHost = new Map(); + try { + const hashResp = await fetch('/api/agent/hash_list'); + if (hashResp.ok) { + const hashJson = await hashResp.json(); + const list = Array.isArray(hashJson?.agents) ? hashJson.agents : []; + list.forEach((rec) => { + if (!rec || typeof rec !== 'object') return; + const hash = (rec.agent_hash || '').trim(); + if (!hash) return; + const agentId = (rec.agent_id || '').trim(); + const guidRaw = (rec.agent_guid || '').trim().toLowerCase(); + const hostKey = (rec.hostname || '').trim().toLowerCase(); + const isMemory = (rec.source || '').trim() === 'memory'; + if (agentId && (!hashById.has(agentId) || isMemory)) { + hashById.set(agentId, hash); + } + if (guidRaw && (!hashByGuid.has(guidRaw) || isMemory)) { + hashByGuid.set(guidRaw, hash); + } + if (hostKey && (!hashByHost.has(hostKey) || isMemory)) { + hashByHost.set(hostKey, hash); + } + }); + } + } catch (err) { + console.warn('Failed to fetch agent hash list', err); + } + + try { + const res = await fetch('/api/devices'); + if (!res.ok) { + const err = new Error(`Failed to fetch devices (${res.status})`); + try { + err.response = await res.json(); + } catch {} + throw err; + } + const payload = await res.json(); + const list = Array.isArray(payload?.devices) ? payload.devices : []; + + const normalizeJson = (value) => { + if (!value) return ''; + try { + return JSON.stringify(value); + } catch { + return ''; + } + }; + + const normalized = list.map((device, index) => { + const summary = device && typeof device.summary === 'object' ? 
{ ...device.summary } : {}; + const rawHostname = (device.hostname || summary.hostname || '').trim(); + const hostname = rawHostname || `device-${index + 1}`; + const agentId = (device.agent_id || summary.agent_id || '').trim(); + const guidRaw = (device.agent_guid || summary.agent_guid || '').trim(); + const guidLookupKey = guidRaw.toLowerCase(); + const rowKey = guidRaw || agentId || hostname || `device-${index + 1}`; + let agentHash = (device.agent_hash || summary.agent_hash || '').trim(); + if (agentId && hashById.has(agentId)) agentHash = hashById.get(agentId) || agentHash; + if (!agentHash && guidLookupKey && hashByGuid.has(guidLookupKey)) { + agentHash = hashByGuid.get(guidLookupKey) || agentHash; + } + const hostKey = hostname.trim().toLowerCase(); + if (!agentHash && hostKey && hashByHost.has(hostKey)) { + agentHash = hashByHost.get(hostKey) || agentHash; + } + const lastSeen = Number(device.last_seen || summary.last_seen || 0) || 0; + const status = device.status || statusFromHeartbeat(lastSeen); + + if (guidRaw && !summary.agent_guid) { + summary.agent_guid = guidRaw; + } + + let createdTs = Number(device.created_at || 0) || 0; + let createdDisplay = summary.created || ''; + if (!createdTs && createdDisplay) { + const parsed = Date.parse(createdDisplay.replace(' ', 'T')); + if (!Number.isNaN(parsed)) createdTs = Math.floor(parsed / 1000); + } + if (!createdDisplay && device.created_at_iso) { + try { + createdDisplay = new Date(device.created_at_iso).toLocaleString(); + } catch {} + } + + const osName = + device.operating_system || + summary.operating_system || + summary.agent_operating_system || + "-"; + const type = (device.device_type || summary.device_type || '').trim(); + const lastUser = (device.last_user || summary.last_user || '').trim(); + const domain = (device.domain || summary.domain || '').trim(); + const internalIp = (device.internal_ip || summary.internal_ip || '').trim(); + const externalIp = (device.external_ip || summary.external_ip || 
'').trim(); + const lastReboot = (device.last_reboot || summary.last_reboot || '').trim(); + const uptimeSeconds = Number( + device.uptime || + summary.uptime_sec || + summary.uptime_seconds || + summary.uptime || + 0 + ) || 0; + const connectionType = (device.connection_type || summary.connection_type || '').trim().toLowerCase(); + const connectionLabel = connectionType === 'ssh' ? 'SSH' : connectionType === 'winrm' ? 'WinRM' : ''; + const connectionEndpoint = (device.connection_endpoint || summary.connection_endpoint || '').trim(); + + const memoryList = Array.isArray(device.memory) ? device.memory : []; + const networkList = Array.isArray(device.network) ? device.network : []; + const softwareList = Array.isArray(device.software) ? device.software : []; + const storageList = Array.isArray(device.storage) ? device.storage : []; + const cpuObj = + (device.cpu && typeof device.cpu === 'object' && device.cpu) || + (summary.cpu && typeof summary.cpu === 'object' ? summary.cpu : {}); + + const memoryDisplay = memoryList.length ? `${memoryList.length} module(s)` : ''; + const networkDisplay = networkList.length ? networkList.map((n) => n.adapter || n.name || '').filter(Boolean).join(', ') : ''; + const softwareDisplay = softwareList.length ? `${softwareList.length} item(s)` : ''; + const storageDisplay = storageList.length ? 
`${storageList.length} volume(s)` : ''; + const cpuDisplay = cpuObj.name || summary.processor || ''; + + return { + id: rowKey, + hostname, + status, + lastSeen, + lastSeenDisplay: formatLastSeen(lastSeen), + os: osName, + lastUser, + type: type || connectionLabel || '', + site: device.site_name || 'Not Configured', + siteId: device.site_id || null, + siteDescription: device.site_description || '', + description: (device.description || summary.description || '').trim(), + created: createdDisplay, + createdTs, + createdIso: device.created_at_iso || '', + agentGuid: guidRaw, + agentHash, + agentVersion: computeAgentVersion(agentHash, repoSha), + agentId, + domain, + internalIp, + externalIp, + lastReboot, + uptime: uptimeSeconds, + uptimeDisplay: formatUptime(uptimeSeconds), + memory: memoryDisplay, + memoryRaw: normalizeJson(memoryList), + network: networkDisplay, + networkRaw: normalizeJson(networkList), + software: softwareDisplay, + softwareRaw: normalizeJson(softwareList), + storage: storageDisplay, + storageRaw: normalizeJson(storageList), + cpu: cpuDisplay, + cpuRaw: normalizeJson(cpuObj), + summary, + details: device.details || {}, + connectionType, + connectionLabel, + connectionEndpoint, + isRemote: Boolean(connectionLabel), + }; + }); + + let filtered = normalized; + if (filterMode === "agent") { + filtered = normalized.filter((row) => !row.connectionType); + } else if (filterMode === "ssh") { + filtered = normalized.filter((row) => row.connectionType === "ssh"); + } else if (filterMode === "winrm") { + filtered = normalized.filter((row) => row.connectionType === "winrm"); + } + + setRows(filtered); + } catch (e) { + console.warn('Failed to load devices:', e); + setRows([]); + } + }, [repoHash, fetchLatestRepoHash, computeAgentVersion, filterMode]); + + const fetchViews = useCallback(async () => { + try { + const res = await fetch("/api/device_list_views"); + const data = await res.json(); + if (data && Array.isArray(data.views)) setViews(data.views); + 
else setViews([]); + } catch { + setViews([]); + } + }, []); + + useEffect(() => { + // Initial load only; removed auto-refresh interval + fetchDevices({ refreshRepo: true }); + }, [fetchDevices]); + + useEffect(() => { + fetchViews(); + }, [fetchViews]); + + // Sites helper fetch + const fetchSites = useCallback(async () => { + try { + const res = await fetch('/api/sites'); + const data = await res.json(); + setSites(Array.isArray(data?.sites) ? data.sites : []); + } catch { setSites([]); } + }, []); + + // Apply initial site filter from Sites page + useEffect(() => { + try { + // General initial filters (set by global search) + const json = localStorage.getItem('device_list_initial_filters'); + if (json) { + const obj = JSON.parse(json); + if (obj && typeof obj === 'object') { + mergeFilters(obj); + // Optionally ensure Site column exists when site filter is present + if (obj.site) { + setColumns((prev) => { + if (prev.some((c) => c.id === 'site')) return prev; + const hasAgentVersion = prev.some((c) => c.id === 'agentVersion'); + const remainder = prev.filter((c) => !['status', 'agentVersion'].includes(c.id)); + const base = [ + { id: 'status', label: COL_LABELS.status }, + ...(hasAgentVersion ? [{ id: 'agentVersion', label: COL_LABELS.agentVersion }] : []), + { id: 'site', label: COL_LABELS.site }, + ]; + if (!hasAgentVersion) { + return base.concat(prev.filter((c) => c.id !== 'status')); + } + return [...base, ...remainder]; + }); + } + } + localStorage.removeItem('device_list_initial_filters'); + } + + const site = localStorage.getItem('device_list_initial_site_filter'); + if (site && site.trim()) { + setColumns((prev) => { + const hasSite = prev.some((c) => c.id === 'site'); + if (hasSite) return prev; + const next = [...prev]; + const agentIndex = next.findIndex((c) => c.id === 'agentVersion'); + const insertAt = agentIndex >= 0 ? 
agentIndex + 1 : 1; + next.splice(insertAt, 0, { id: 'site', label: COL_LABELS.site }); + return next; + }); + mergeFilters({ site }); + localStorage.removeItem('device_list_initial_site_filter'); + } + } catch {} + }, [COL_LABELS.site, mergeFilters]); + + const applyView = useCallback((view) => { + if (!view || view.id === "default") { + setColumns(defaultColumns); + replaceFilters({}); + return; + } + try { + const ids = Array.isArray(view.columns) ? view.columns : []; + // Ensure status is present and first + const finalIds = ["status", ...ids.filter((x) => x !== "status")]; + const mapped = finalIds + .filter((id) => COL_LABELS[id]) + .map((id) => ({ id, label: COL_LABELS[id] })); + setColumns(mapped.length ? mapped : defaultColumns); + replaceFilters( + view.filters && typeof view.filters === "object" ? view.filters : {} + ); + } catch { + setColumns(defaultColumns); + replaceFilters({}); + } + }, [COL_LABELS, defaultColumns, replaceFilters]); + + const statusTokenTheme = useMemo( + () => ({ + Online: { + text: "#00d18c", + background: "rgba(0, 209, 140, 0.16)", + border: "1px solid rgba(0, 209, 140, 0.45)", + dot: "#00d18c", + }, + Offline: { + text: "#b0b8c8", + background: "rgba(176, 184, 200, 0.14)", + border: "1px solid rgba(176, 184, 200, 0.35)", + dot: "#c3cada", + }, + default: { + text: "#e2e6f0", + background: "rgba(226, 230, 240, 0.12)", + border: "1px solid rgba(226, 230, 240, 0.25)", + dot: "#e2e6f0", + }, + }), + [] + ); + + const formatCreated = useCallback((created, createdTs) => { + if (createdTs) { + const d = new Date(createdTs * 1000); + const mm = String(d.getMonth() + 1).padStart(2, "0"); + const dd = String(d.getDate()).padStart(2, "0"); + const yyyy = d.getFullYear(); + const hh = d.getHours() % 12 || 12; + const min = String(d.getMinutes()).padStart(2, "0"); + const ampm = d.getHours() >= 12 ? 
"PM" : "AM"; + return `${mm}/${dd}/${yyyy} @ ${hh}:${min} ${ampm}`; + } + return created || ""; + }, []); + + const filterModel = useMemo( + () => JSON.parse(JSON.stringify(filters || {})), + [filters] + ); + + useEffect(() => { + if (gridRef.current?.api) { + gridRef.current.api.setFilterModel(filterModel); + } + }, [filterModel]); + + const handleFilterChanged = useCallback( + (event) => { + const model = event.api.getFilterModel() || {}; + replaceFilters(model); + }, + [replaceFilters] + ); + + const handleSelectionChanged = useCallback(() => { + const api = gridRef.current?.api; + if (!api) return; + const selectedNodes = api.getSelectedNodes(); + const ids = selectedNodes + .map((node) => node.data?.id) + .filter((id) => id !== undefined && id !== null); + setSelectedIds(new Set(ids)); + }, []); + + const openMenu = useCallback((event, row) => { + setMenuAnchor(event.currentTarget); + setSelected(row); + }, []); + + const closeMenu = useCallback(() => setMenuAnchor(null), []); + + const confirmDelete = useCallback(() => { + closeMenu(); + setConfirmOpen(true); + }, [closeMenu]); + + const handleDelete = useCallback(async () => { + if (!selected) return; + const targetAgentId = selected.agentId || selected.summary?.agent_id || selected.id; + try { + if (targetAgentId) { + await fetch(`/api/agent/${encodeURIComponent(targetAgentId)}`, { method: "DELETE" }); + } + } catch (e) { + console.warn("Failed to remove agent", e); + } + setRows((r) => r.filter((x) => x.id !== selected.id)); + setSelectedIds((prev) => { + if (!prev.has(selected.id)) return prev; + const next = new Set(prev); + next.delete(selected.id); + return next; + }); + setConfirmOpen(false); + setSelected(null); + }, [selected]); + + const hostnameCellRenderer = useCallback( + (params) => { + const row = params.data; + if (!row) return null; + const handleClick = (event) => { + event.preventDefault(); + event.stopPropagation(); + if (onSelectDevice) onSelectDevice(row); + }; + const label = 
row.connectionLabel || ""; + let badgeBg = "#2d3042"; + let badgeColor = "#a4c7ff"; + if (label === "SSH") { + badgeBg = "#2a3b28"; + badgeColor = "#7cffc4"; + } else if (label === "WinRM") { + badgeBg = "#352e3b"; + badgeColor = "#ffb6ff"; + } + return ( + + {label ? ( + + {label} + + ) : null} + + {row.hostname || ""} + + + ); + }, + [onSelectDevice] + ); + + const statusCellRenderer = useCallback( + (params) => { + const status = params.value || ""; + if (!status) return null; + const theme = statusTokenTheme[status] || statusTokenTheme.default; + return ( + + + {status} + + ); + }, + [statusTokenTheme, gridFontFamily] + ); + + const osCellRenderer = useCallback((params) => { + const rawValue = params.value; + const label = typeof rawValue === "string" ? rawValue : rawValue == null ? "" : String(rawValue); + const display = label.trim() || "-"; + const iconClass = getOsIconClass(label); + + return ( + + {iconClass ? ( + + ); + }, []); + + const actionCellRenderer = useCallback( + (params) => { + const row = params.data; + if (!row) return null; + const handleClick = (event) => { + event.stopPropagation(); + openMenu(event, row); + }; + return ( + + + + ); + }, + [openMenu] + ); + + const handleDescriptionSave = useCallback( + async (row, nextDescription) => { + if (!row) return false; + const trimmed = (nextDescription || "").trim(); + const targetHost = (row.hostname || row.summary?.hostname || "").trim(); + if (!targetHost) return false; + try { + const resp = await fetch(`/api/device/description/${targetHost}`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ description: trimmed }), + }); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const matchValue = row.id || row.agentGuid || row.hostname || targetHost; + setRows((prev) => + prev.map((item) => { + const itemMatch = item.id || item.agentGuid || item.hostname || ""; + if (itemMatch !== matchValue) return item; + const updated = { + ...item, + 
description: trimmed, + summary: { ...(item.summary || {}), description: trimmed }, + }; + if (item.details) { + updated.details = { ...item.details, description: trimmed }; + } + return updated; + }) + ); + setSelected((prev) => { + if (!prev) return prev; + const prevMatch = prev.id || prev.agentGuid || prev.hostname || ""; + if (prevMatch !== matchValue) return prev; + const updated = { + ...prev, + description: trimmed, + summary: { ...(prev.summary || {}), description: trimmed }, + }; + if (prev.details) { + updated.details = { ...prev.details, description: trimmed }; + } + return updated; + }); + return true; + } catch (e) { + console.warn("Failed to save description", e); + return false; + } + }, + [setRows, setSelected] + ); + + const columnDefs = useMemo(() => { + const defs = columns.map((col) => { + switch (col.id) { + case "status": + return { + field: "status", + headerName: col.label, + cellRenderer: statusCellRenderer, + cellClass: "status-pill-cell", + width: 112, + minWidth: 112, + flex: 0, + }; + case "agentVersion": + return { + field: "agentVersion", + headerName: col.label, + width: 140, + minWidth: 150, + flex: 0, + }; + case "site": + return { + field: "site", + headerName: col.label, + valueGetter: (params) => params.data?.site || "Not Configured", + width: 140, + minWidth: 140, + flex: 0, + }; + case "hostname": + return { + field: "hostname", + headerName: col.label, + cellRenderer: hostnameCellRenderer, + width: 210, + minWidth: 210, + flex: 0, + }; + case "description": + return { + field: "description", + headerName: col.label, + width: 280, + minWidth: 280, + flex: 0, + cellRenderer: DescriptionCellRenderer, + cellRendererParams: { + onSaveDescription: handleDescriptionSave, + fontFamily: gridFontFamily, + }, + }; + case "lastUser": + return { + field: "lastUser", + headerName: col.label, + width: 220, + minWidth: 220, + flex: 0, + }; + case "type": + return { + field: "type", + headerName: col.label, + width: 170, + minWidth: 170, + 
flex: 0, + }; + case "os": + return { + field: "os", + headerName: col.label, + width: 410, + minWidth: 410, + flex: 1, + cellRenderer: osCellRenderer, + }; + case "internalIp": + return { + field: "internalIp", + headerName: col.label, + width: 140, + minWidth: 140, + flex: 0, + }; + case "externalIp": + return { + field: "externalIp", + headerName: col.label, + width: 140, + minWidth: 140, + flex: 0, + }; + case "lastReboot": + return { + field: "lastReboot", + headerName: col.label, + width: 180, + minWidth: 180, + flex: 0, + }; + case "created": + return { + field: "created", + headerName: col.label, + valueGetter: (params) => + formatCreated(params.data?.created, params.data?.createdTs), + comparator: (a, b, nodeA, nodeB) => + (nodeA?.data?.createdTs || 0) - (nodeB?.data?.createdTs || 0), + width: 200, + minWidth: 200, + flex: 0, + }; + case "lastSeen": + return { + field: "lastSeen", + headerName: col.label, + valueGetter: (params) => formatLastSeen(params.data?.lastSeen), + comparator: (a, b, nodeA, nodeB) => + (nodeA?.data?.lastSeen || 0) - (nodeB?.data?.lastSeen || 0), + width: 200, + minWidth: 200, + flex: 0, + }; + case "agentId": + return { + field: "agentId", + headerName: col.label, + width: 290, + minWidth: 290, + flex: 0, + }; + case "agentHash": + return { + field: "agentHash", + headerName: col.label, + width: 365, + minWidth: 365, + flex: 0, + }; + case "agentGuid": + return { + field: "agentGuid", + headerName: col.label, + width: 345, + minWidth: 345, + flex: 0, + }; + case "domain": + return { + field: "domain", + headerName: col.label, + width: 160, + minWidth: 160, + flex: 0, + }; + case "uptime": + return { + field: "uptime", + headerName: col.label, + valueGetter: (params) => + params.data?.uptimeDisplay || + formatUptime(params.data?.uptime || 0), + comparator: (a, b, nodeA, nodeB) => + (nodeA?.data?.uptime || 0) - (nodeB?.data?.uptime || 0), + width: 140, + minWidth: 140, + flex: 0, + }; + case "memory": + case "network": + case 
"software": + case "storage": + case "cpu": + case "siteDescription": + return { + field: col.id, + headerName: col.label, + minWidth: 200, + }; + default: + return { + field: col.id, + headerName: col.label, + }; + } + }); + return [ + { + headerName: "", + field: "__select__", + width: 52, + maxWidth: 52, + checkboxSelection: true, + headerCheckboxSelection: true, + resizable: false, + sortable: false, + suppressMenu: true, + filter: false, + pinned: "left", + lockPosition: true, + }, + ...defs, + { + headerName: "", + field: "__actions__", + width: 64, + maxWidth: 64, + resizable: false, + sortable: false, + suppressMenu: true, + filter: false, + cellRenderer: actionCellRenderer, + pinned: "right", + }, + ]; + }, [ + columns, + actionCellRenderer, + formatCreated, + handleDescriptionSave, + hostnameCellRenderer, + statusCellRenderer, + ]); + + const defaultColDef = useMemo( + () => ({ + sortable: true, + filter: "agTextColumnFilter", + resizable: true, + flex: 1, + minWidth: 160, + }), + [] + ); + + const handleGridReady = useCallback( + (params) => { + params.api.setFilterModel(filterModel); + }, + [filterModel] + ); + + const getRowId = useCallback( + (params) => + params.data?.id || + params.data?.agentGuid || + params.data?.hostname || + String(params.rowIndex ?? 
""), + [] + ); + + return ( + + {/* Header area with title on left and controls on right */} + + + + {computedTitle} + + + {/* Views dropdown + add button */} + + { + const val = e.target.value; + setSelectedViewId(val); + if (val === "default") applyView({ id: "default" }); + else { + const v = views.find((x) => String(x.id) === String(val)); + if (v) applyView(v); + } + }} + sx={{ + minWidth: 220, + mr: 0, + '& .MuiOutlinedInput-root': { + height: 32, + pr: 0, + borderTopRightRadius: 0, + borderBottomRightRadius: 0, + '& fieldset': { borderColor: '#555', borderRight: '1px solid #555' }, + '&:hover fieldset': { borderColor: '#888' }, + }, + '& .MuiSelect-select': { + display: 'flex', + alignItems: 'center', + py: 0, + }, + }} + SelectProps={{ + MenuProps: { + PaperProps: { sx: { bgcolor: '#1e1e1e', color: '#fff' } }, + }, + renderValue: (val) => { + if (val === "default") return "Default View"; + const v = views.find((x) => String(x.id) === String(val)); + return v ? v.name : "Default View"; + } + }} + > + Default View + {views.map((v) => ( + + + {v.name} + { + e.stopPropagation(); + setViewActionAnchor(e.currentTarget); + setViewActionTarget(v); + }} + sx={{ color: '#ccc' }} + > + + + + + ))} + + { setNewViewName(""); setCreateDialogOpen(true); }} + sx={{ + ml: '-1px', + border: '1px solid #555', + borderLeft: '1px solid #555', + borderRadius: '0 4px 4px 0', + color: '#bbb', + height: 32, + width: 32, + }} + > + + + + + fetchDevices({ refreshRepo: true })} + sx={{ color: "#bbb", mr: 1 }} + > + + + + + setColChooserAnchor(e.currentTarget)} + sx={{ color: "#bbb", mr: 1 }} + > + + + + {derivedShowAddButton && ( + + )} + + + {/* Second row: Quick Job button aligned under header title */} + + + + + {/* The Size of the Grid itself and its margins relative to the overall page */} + + span": { + margin: 0, + }, + }} + > + + + + {/* View actions menu (rename/delete for custom views) */} + { setViewActionAnchor(null); setViewActionTarget(null); }} + PaperProps={{ sx: { 
bgcolor: '#1e1e1e', color: '#fff', fontSize: '13px' } }} + > + { + const v = viewActionTarget; + setViewActionAnchor(null); + if (!v) return; + setRenameTarget(v); + setRenameViewName(v.name || ""); + setRenameDialogOpen(true); + }}>Rename + { + const v = viewActionTarget; + setViewActionAnchor(null); + if (!v) return; + try { + await fetch(`/api/device_list_views/${encodeURIComponent(v.id)}`, { method: 'DELETE' }); + } catch {} + setViews((prev) => prev.filter((x) => String(x.id) !== String(v.id))); + if (String(selectedViewId) === String(v.id)) { + setSelectedViewId('default'); + applyView({ id: 'default' }); + } + }}>Delete + + + {/* Create new custom view dialog */} + setCreateDialogOpen(false)} + onSave={async () => { + const name = (newViewName || '').trim(); + if (!name) return; + // Build current config + const cols = (columns || []).map((c) => c.id); + const cfg = { name, columns: cols, filters }; + try { + const res = await fetch('/api/device_list_views', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(cfg) + }); + if (res.ok) { + const created = await res.json(); + setViews((prev) => [...prev, created].sort((a, b) => String(a.name).localeCompare(String(b.name)))); + setSelectedViewId(String(created.id)); + // Already applied in UI; we keep current state + setCreateDialogOpen(false); + setNewViewName(''); + } + } catch {} + }} + /> + + {/* Rename custom view dialog */} + setRenameDialogOpen(false)} + onSave={async () => { + const v = renameTarget; + const newName = (renameViewName || '').trim(); + if (!v || !newName) return; + try { + const res = await fetch(`/api/device_list_views/${encodeURIComponent(v.id)}`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ name: newName }) + }); + if (res.ok) { + const updated = await res.json(); + setViews((prev) => prev.map((x) => String(x.id) === String(v.id) ? 
updated : x)); + setRenameDialogOpen(false); + setRenameViewName(''); + setRenameTarget(null); + } + } catch {} + }} + /> + {/* Column chooser popover */} + setColChooserAnchor(null)} + anchorOrigin={{ vertical: "bottom", horizontal: "right" }} + PaperProps={{ sx: { bgcolor: "#1e1e1e", color: '#fff', p: 1 } }} + > + + {Object.entries(COL_LABELS) + .filter(([id]) => id !== 'status') + .map(([id, label]) => ( + e.stopPropagation()} sx={{ gap: 1 }}> + c.id === id)} + onChange={(e) => { + const checked = e.target.checked; + setColumns((prev) => { + const exists = prev.some((c) => c.id === id); + if (checked) { + if (exists) return prev; + const nextLabel = COL_LABELS[id] || label || id; + return [...prev, { id, label: nextLabel }]; + } + return prev.filter((c) => c.id !== id); + }); + }} + sx={{ p: 0.3, color: '#bbb' }} + /> + {label || id} + + ))} + + + + + + + { + closeMenu(); + await fetchSites(); + const targets = new Set(selectedIds); + if (selected && !targets.has(selected.id)) targets.add(selected.id); + const idToHost = new Map(rows.map((r) => [r.id, r.hostname])); + const hostnames = Array.from(targets).map((id) => idToHost.get(id)).filter(Boolean); + setAssignTargets(hostnames); + setAssignSiteId(null); + setAssignDialogOpen(true); + }}>Add to Site + { + closeMenu(); + await fetchSites(); + const targets = new Set(selectedIds); + if (selected && !targets.has(selected.id)) targets.add(selected.id); + const idToHost = new Map(rows.map((r) => [r.id, r.hostname])); + const hostnames = Array.from(targets).map((id) => idToHost.get(id)).filter(Boolean); + setAssignTargets(hostnames); + setAssignSiteId(null); + setAssignDialogOpen(true); + }}>Move to Another Site + Delete + + setConfirmOpen(false)} + onConfirm={handleDelete} + /> + + {quickJobOpen && ( + setQuickJobOpen(false)} + hostnames={rows.filter((r) => selectedIds.has(r.id)).map((r) => r.hostname)} + /> + )} + {assignDialogOpen && ( + setAssignDialogOpen(false)} + anchorReference="anchorPosition" + 
anchorPosition={{ top: Math.max(Math.floor(window.innerHeight*0.5), 200), left: Math.max(Math.floor(window.innerWidth*0.5), 300) }} + PaperProps={{ sx: { bgcolor: '#1e1e1e', color: '#fff', p: 2, minWidth: 360 } }} + > + + Assign {assignTargets.length} device(s) to a site + setAssignSiteId(Number(e.target.value))} + sx={{ '& .MuiOutlinedInput-root': { '& fieldset': { borderColor: '#444' }, '&:hover fieldset': { borderColor: '#666' } }, label: { color: '#aaa' } }} + > + {sites.map((s) => ( + {s.name} + ))} + + + + + + + + )} + { + setAddDeviceOpen(false); + setAddDeviceType(derivedDefaultType ?? null); + }} + onCreated={() => { + setAddDeviceOpen(false); + setAddDeviceType(derivedDefaultType ?? null); + fetchDevices({ refreshRepo: true }); + }} + /> + + ); +} diff --git a/Data/Server/WebUI/src/Devices/Enrollment_Codes.jsx b/Data/Server/WebUI/src/Devices/Enrollment_Codes.jsx new file mode 100644 index 00000000..db3e387e --- /dev/null +++ b/Data/Server/WebUI/src/Devices/Enrollment_Codes.jsx @@ -0,0 +1,371 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/Server/WebUI/src/Admin/Enrollment_Codes.jsx + +import React, { useCallback, useEffect, useMemo, useState } from "react"; +import { + Alert, + Box, + Button, + Chip, + CircularProgress, + FormControl, + IconButton, + InputLabel, + MenuItem, + Paper, + Select, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Tooltip, + Typography, +} from "@mui/material"; +import { + ContentCopy as CopyIcon, + DeleteOutline as DeleteIcon, + Refresh as RefreshIcon, + Key as KeyIcon, +} from "@mui/icons-material"; + +const TTL_PRESETS = [ + { value: 1, label: "1 hour" }, + { value: 3, label: "3 hours" }, + { value: 6, label: "6 hours" }, + { value: 12, label: "12 hours" }, + { value: 24, label: "24 hours" }, +]; + +const statusColor = { + active: "success", + used: "default", + expired: "warning", +}; + +const maskCode = (code) => { + if (!code) return "—"; + 
const parts = code.split("-"); + if (parts.length <= 1) { + const prefix = code.slice(0, 4); + return `${prefix}${"•".repeat(Math.max(0, code.length - prefix.length))}`; + } + return parts + .map((part, idx) => (idx === 0 || idx === parts.length - 1 ? part : "•".repeat(part.length))) + .join("-"); +}; + +const formatDateTime = (value) => { + if (!value) return "—"; + const date = new Date(value); + if (Number.isNaN(date.getTime())) return value; + return date.toLocaleString(); +}; + +const determineStatus = (record) => { + if (!record) return "expired"; + const maxUses = Number.isFinite(record?.max_uses) ? record.max_uses : 1; + const useCount = Number.isFinite(record?.use_count) ? record.use_count : 0; + if (useCount >= Math.max(1, maxUses || 1)) return "used"; + if (!record.expires_at) return "expired"; + const expires = new Date(record.expires_at); + if (Number.isNaN(expires.getTime())) return "expired"; + return expires.getTime() > Date.now() ? "active" : "expired"; +}; + +function EnrollmentCodes() { + const [codes, setCodes] = useState([]); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(""); + const [feedback, setFeedback] = useState(null); + const [statusFilter, setStatusFilter] = useState("all"); + const [ttlHours, setTtlHours] = useState(6); + const [generating, setGenerating] = useState(false); + const [maxUses, setMaxUses] = useState(2); + + const filteredCodes = useMemo(() => { + if (statusFilter === "all") return codes; + return codes.filter((code) => determineStatus(code) === statusFilter); + }, [codes, statusFilter]); + + const fetchCodes = useCallback(async () => { + setLoading(true); + setError(""); + try { + const query = statusFilter === "all" ? 
"" : `?status=${encodeURIComponent(statusFilter)}`; + const resp = await fetch(`/api/admin/enrollment-codes${query}`, { + credentials: "include", + }); + if (!resp.ok) { + const body = await resp.json().catch(() => ({})); + throw new Error(body.error || `Request failed (${resp.status})`); + } + const data = await resp.json(); + setCodes(Array.isArray(data.codes) ? data.codes : []); + } catch (err) { + setError(err.message || "Unable to load enrollment codes"); + } finally { + setLoading(false); + } + }, [statusFilter]); + + useEffect(() => { + fetchCodes(); + }, [fetchCodes]); + + const handleGenerate = useCallback(async () => { + setGenerating(true); + setError(""); + try { + const resp = await fetch("/api/admin/enrollment-codes", { + method: "POST", + credentials: "include", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ ttl_hours: ttlHours, max_uses: maxUses }), + }); + if (!resp.ok) { + const body = await resp.json().catch(() => ({})); + throw new Error(body.error || `Request failed (${resp.status})`); + } + const created = await resp.json(); + setFeedback({ type: "success", message: `Installer code ${created.code} created` }); + await fetchCodes(); + } catch (err) { + setFeedback({ type: "error", message: err.message || "Failed to create code" }); + } finally { + setGenerating(false); + } + }, [fetchCodes, ttlHours, maxUses]); + + const handleDelete = useCallback( + async (id) => { + if (!id) return; + const confirmDelete = window.confirm("Delete this unused installer code?"); + if (!confirmDelete) return; + setError(""); + try { + const resp = await fetch(`/api/admin/enrollment-codes/${encodeURIComponent(id)}`, { + method: "DELETE", + credentials: "include", + }); + if (!resp.ok) { + const body = await resp.json().catch(() => ({})); + throw new Error(body.error || `Request failed (${resp.status})`); + } + setFeedback({ type: "success", message: "Installer code deleted" }); + await fetchCodes(); + } catch (err) { + setFeedback({ 
type: "error", message: err.message || "Failed to delete code" }); + } + }, + [fetchCodes] + ); + + const handleCopy = useCallback((code) => { + if (!code) return; + try { + if (navigator.clipboard?.writeText) { + navigator.clipboard.writeText(code); + setFeedback({ type: "success", message: "Code copied to clipboard" }); + } else { + const textArea = document.createElement("textarea"); + textArea.value = code; + textArea.style.position = "fixed"; + textArea.style.opacity = "0"; + document.body.appendChild(textArea); + textArea.select(); + document.execCommand("copy"); + document.body.removeChild(textArea); + setFeedback({ type: "success", message: "Code copied to clipboard" }); + } + } catch (err) { + setFeedback({ type: "error", message: err.message || "Unable to copy code" }); + } + }, []); + + const renderStatusChip = (record) => { + const status = determineStatus(record); + return ; + }; + + return ( + + + + Enrollment Installer Codes + + + + + + Filter + + + + + Duration + + + + + Allowed Uses + + + + + + + + + {feedback ? ( + setFeedback(null)} + variant="outlined" + > + {feedback.message} + + ) : null} + + {error ? ( + + {error} + + ) : null} + + + + + + Status + Installer Code + Expires At + Created By + Usage + Last Used + Consumed At + Used By GUID + Actions + + + + {loading ? ( + + + + + Loading installer codes… + + + + ) : filteredCodes.length === 0 ? ( + + + + No installer codes match this filter. + + + + ) : ( + filteredCodes.map((record) => { + const status = determineStatus(record); + const maxAllowed = Math.max(1, Number.isFinite(record?.max_uses) ? record.max_uses : 1); + const usageCount = Math.max(0, Number.isFinite(record?.use_count) ? 
record.use_count : 0); + const disableDelete = usageCount !== 0; + return ( + + {renderStatusChip(record)} + {maskCode(record.code)} + {formatDateTime(record.expires_at)} + {record.created_by_user_id || "—"} + {`${usageCount} / ${maxAllowed}`} + {formatDateTime(record.last_used_at)} + {formatDateTime(record.used_at)} + + {record.used_by_guid || "—"} + + + + + handleCopy(record.code)} + disabled={!record.code} + > + + + + + + + handleDelete(record.id)} + disabled={disableDelete} + > + + + + + + + ); + }) + )} + +
+
+
+
+ ); +} + +export default React.memo(EnrollmentCodes); diff --git a/Data/Server/WebUI/src/Devices/SSH_Devices.jsx b/Data/Server/WebUI/src/Devices/SSH_Devices.jsx new file mode 100644 index 00000000..e993985b --- /dev/null +++ b/Data/Server/WebUI/src/Devices/SSH_Devices.jsx @@ -0,0 +1,480 @@ +import React, { useCallback, useEffect, useMemo, useState } from "react"; +import { + Paper, + Box, + Typography, + Button, + IconButton, + Table, + TableHead, + TableBody, + TableRow, + TableCell, + TableSortLabel, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + TextField, + CircularProgress +} from "@mui/material"; +import AddIcon from "@mui/icons-material/Add"; +import EditIcon from "@mui/icons-material/Edit"; +import DeleteIcon from "@mui/icons-material/Delete"; +import RefreshIcon from "@mui/icons-material/Refresh"; +import { ConfirmDeleteDialog } from "../Dialogs.jsx"; +import AddDevice from "./Add_Device.jsx"; + +const tableStyles = { + "& th, & td": { + color: "#ddd", + borderColor: "#2a2a2a", + fontSize: 13, + py: 0.75 + }, + "& th": { + fontWeight: 600 + }, + "& th .MuiTableSortLabel-root": { color: "#ddd" }, + "& th .MuiTableSortLabel-root.Mui-active": { color: "#ddd" } +}; + +const defaultForm = { + hostname: "", + address: "", + description: "", + operating_system: "" +}; + +export default function SSHDevices({ type = "ssh" }) { + const typeLabel = type === "winrm" ? "WinRM" : "SSH"; + const apiBase = type === "winrm" ? "/api/winrm_devices" : "/api/ssh_devices"; + const pageTitle = `${typeLabel} Devices`; + const addButtonLabel = `Add ${typeLabel} Device`; + const addressLabel = `${typeLabel} Address`; + const loadingLabel = `Loading ${typeLabel} devices…`; + const emptyLabel = `No ${typeLabel} devices have been added yet.`; + const descriptionText = type === "winrm" + ? "Manage remote endpoints reachable via WinRM for playbook execution." 
+ : "Manage remote endpoints reachable via SSH for playbook execution."; + const editDialogTitle = `Edit ${typeLabel} Device`; + const newDialogTitle = `New ${typeLabel} Device`; + const [rows, setRows] = useState([]); + const [orderBy, setOrderBy] = useState("hostname"); + const [order, setOrder] = useState("asc"); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(""); + const [dialogOpen, setDialogOpen] = useState(false); + const [form, setForm] = useState(defaultForm); + const [formError, setFormError] = useState(""); + const [submitting, setSubmitting] = useState(false); + const [editTarget, setEditTarget] = useState(null); + const [deleteTarget, setDeleteTarget] = useState(null); + const [deleteBusy, setDeleteBusy] = useState(false); + const [addDeviceOpen, setAddDeviceOpen] = useState(false); + + const isEdit = Boolean(editTarget); + + const loadDevices = useCallback(async () => { + setLoading(true); + setError(""); + try { + const resp = await fetch(apiBase); + if (!resp.ok) { + const data = await resp.json().catch(() => ({})); + throw new Error(data?.error || `HTTP ${resp.status}`); + } + const data = await resp.json(); + const list = Array.isArray(data?.devices) ? data.devices : []; + setRows(list); + } catch (err) { + setError(String(err.message || err)); + setRows([]); + } finally { + setLoading(false); + } + }, [apiBase]); + + useEffect(() => { + loadDevices(); + }, [loadDevices]); + + const sortedRows = useMemo(() => { + const list = [...rows]; + list.sort((a, b) => { + const getKey = (row) => { + switch (orderBy) { + case "created_at": + return Number(row.created_at || 0); + case "address": + return (row.connection_endpoint || "").toLowerCase(); + case "description": + return (row.description || "").toLowerCase(); + default: + return (row.hostname || "").toLowerCase(); + } + }; + const aKey = getKey(a); + const bKey = getKey(b); + if (aKey < bKey) return order === "asc" ? 
-1 : 1; + if (aKey > bKey) return order === "asc" ? 1 : -1; + return 0; + }); + return list; + }, [rows, order, orderBy]); + + const handleSort = (column) => () => { + if (orderBy === column) { + setOrder((prev) => (prev === "asc" ? "desc" : "asc")); + } else { + setOrderBy(column); + setOrder("asc"); + } + }; + + const openCreate = () => { + setAddDeviceOpen(true); + setFormError(""); + }; + + const openEdit = (row) => { + setEditTarget(row); + setForm({ + hostname: row.hostname || "", + address: row.connection_endpoint || "", + description: row.description || "", + operating_system: row.summary?.operating_system || "" + }); + setDialogOpen(true); + setFormError(""); + }; + + const handleDialogClose = () => { + if (submitting) return; + setDialogOpen(false); + setForm(defaultForm); + setEditTarget(null); + setFormError(""); + }; + + const handleSubmit = async () => { + if (submitting) return; + const payload = { + hostname: form.hostname.trim(), + address: form.address.trim(), + description: form.description.trim(), + operating_system: form.operating_system.trim() + }; + if (!payload.hostname) { + setFormError("Hostname is required."); + return; + } + if (!payload.address) { + setFormError("Address is required."); + return; + } + setSubmitting(true); + setFormError(""); + try { + const endpoint = isEdit + ? `${apiBase}/${encodeURIComponent(editTarget.hostname)}` + : apiBase; + const resp = await fetch(endpoint, { + method: isEdit ? 
"PUT" : "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload) + }); + const data = await resp.json().catch(() => ({})); + if (!resp.ok) { + throw new Error(data?.error || `HTTP ${resp.status}`); + } + setDialogOpen(false); + setForm(defaultForm); + setEditTarget(null); + setFormError(""); + setRows((prev) => { + const next = [...prev]; + if (data?.device) { + const idx = next.findIndex((row) => row.hostname === data.device.hostname); + if (idx >= 0) next[idx] = data.device; + else next.push(data.device); + return next; + } + return prev; + }); + // Ensure latest ordering by triggering refresh + loadDevices(); + } catch (err) { + setFormError(String(err.message || err)); + } finally { + setSubmitting(false); + } + }; + + const handleDelete = async () => { + if (!deleteTarget) return; + setDeleteBusy(true); + try { + const resp = await fetch(`${apiBase}/${encodeURIComponent(deleteTarget.hostname)}`, { + method: "DELETE" + }); + const data = await resp.json().catch(() => ({})); + if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`); + setRows((prev) => prev.filter((row) => row.hostname !== deleteTarget.hostname)); + setDeleteTarget(null); + } catch (err) { + setError(String(err.message || err)); + } finally { + setDeleteBusy(false); + } + }; + + return ( + + + + + {pageTitle} + + + {descriptionText} + + + + + + + + + {error && ( + + {error} + + )} + {loading && ( + + + {loadingLabel} + + )} + + + + + + + Hostname + + + + + {addressLabel} + + + + + Description + + + + + Added + + + Actions + + + + {sortedRows.map((row) => { + const createdTs = Number(row.created_at || 0) * 1000; + const createdDisplay = createdTs + ? 
new Date(createdTs).toLocaleString() + : (row.summary?.created || ""); + return ( + + {row.hostname} + {row.connection_endpoint || ""} + {row.description || ""} + {createdDisplay} + + openEdit(row)}> + + + setDeleteTarget(row)}> + + + + + ); + })} + {!sortedRows.length && !loading && ( + + + {emptyLabel} + + + )} + +
+ + + {isEdit ? editDialogTitle : newDialogTitle} + + setForm((prev) => ({ ...prev, hostname: e.target.value }))} + fullWidth + size="small" + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#1f1f1f", + color: "#fff", + "& fieldset": { borderColor: "#555" }, + "&:hover fieldset": { borderColor: "#888" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + helperText="Hostname used within Borealis (unique)." + /> + setForm((prev) => ({ ...prev, address: e.target.value }))} + fullWidth + size="small" + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#1f1f1f", + color: "#fff", + "& fieldset": { borderColor: "#555" }, + "&:hover fieldset": { borderColor: "#888" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + helperText={`IP or FQDN Borealis can reach over ${typeLabel}.`} + /> + setForm((prev) => ({ ...prev, description: e.target.value }))} + fullWidth + size="small" + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#1f1f1f", + color: "#fff", + "& fieldset": { borderColor: "#555" }, + "&:hover fieldset": { borderColor: "#888" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + setForm((prev) => ({ ...prev, operating_system: e.target.value }))} + fullWidth + size="small" + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#1f1f1f", + color: "#fff", + "& fieldset": { borderColor: "#555" }, + "&:hover fieldset": { borderColor: "#888" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + {error && ( + + {error} + + )} + + + + + + + + setDeleteTarget(null)} + onConfirm={handleDelete} + confirmDisabled={deleteBusy} + /> + setAddDeviceOpen(false)} + onCreated={() => { + setAddDeviceOpen(false); + loadDevices(); + }} + /> +
+ ); +} diff --git a/Data/Server/WebUI/src/Devices/WinRM_Devices.jsx b/Data/Server/WebUI/src/Devices/WinRM_Devices.jsx new file mode 100644 index 00000000..eb4a161a --- /dev/null +++ b/Data/Server/WebUI/src/Devices/WinRM_Devices.jsx @@ -0,0 +1,6 @@ +import React from "react"; +import SSHDevices from "./SSH_Devices.jsx"; + +export default function WinRMDevices(props) { + return ; +} diff --git a/Data/Server/WebUI/src/Dialogs.jsx b/Data/Server/WebUI/src/Dialogs.jsx new file mode 100644 index 00000000..68e3cd20 --- /dev/null +++ b/Data/Server/WebUI/src/Dialogs.jsx @@ -0,0 +1,514 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Dialogs.jsx + +import React from "react"; +import { + Dialog, + DialogTitle, + DialogContent, + DialogContentText, + DialogActions, + Button, + Menu, + MenuItem, + TextField +} from "@mui/material"; + +export function CloseAllDialog({ open, onClose, onConfirm }) { + return ( + + Close All Flow Tabs? + + + This will remove all existing flow tabs and create a fresh tab named Flow 1. + + + + + + + + ); +} + +export function NotAuthorizedDialog({ open, onClose }) { + return ( + + Not Authorized + + + You are not authorized to access this section. 
+ + + + + + + ); +} + +export function CreditsDialog({ open, onClose }) { + return ( + + + Borealis Logo + Borealis - Automation Platform + + Designed by Nicole Rappe @{" "} + + Bunny Lab + + + + + + + + ); +} + +export function RenameTabDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Rename Tab + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { + borderColor: "#444" + }, + "&:hover fieldset": { + borderColor: "#666" + } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} + +export function RenameWorkflowDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Rename Workflow + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { + borderColor: "#444" + }, + "&:hover fieldset": { + borderColor: "#666" + } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} + +export function RenameFolderDialog({ + open, + value, + onChange, + onCancel, + onSave, + title = "Folder Name", + confirmText = "Save" +}) { + return ( + + {title} + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} + +export function NewWorkflowDialog({ open, value, onChange, onCancel, onCreate }) { + return ( + + New Workflow + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} + +export function ClearDeviceActivityDialog({ open, onCancel, onConfirm }) { + return ( + + Clear Device Activity + + + All device activity history will be cleared, 
are you sure? + + + + + + + + ); +} + +export function SaveWorkflowDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Save Workflow + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} + +export function ConfirmDeleteDialog({ open, message, onCancel, onConfirm }) { + return ( + + Confirm Delete + + {message} + + + + + + + ); +} + +export function DeleteDeviceDialog({ open, onCancel, onConfirm }) { + return ( + + Remove Device + + + Are you sure you want to remove this device? If the agent is still running, it will automatically re-enroll the device. + + + + + + + + ); +} + +export function TabContextMenu({ anchor, onClose, onRename, onCloseTab }) { + return ( + + Rename + Close Workflow + + ); +} + +export function CreateCustomViewDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Create a New Custom View + + + Saving a view will save column order, visibility, and filters. 
+ + onChange(e.target.value)} + placeholder="Add a name for this custom view" + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} + +export function RenameCustomViewDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Rename Custom View + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} + +export function CreateSiteDialog({ open, onCancel, onCreate }) { + const [name, setName] = React.useState(""); + const [description, setDescription] = React.useState(""); + + React.useEffect(() => { + if (open) { + setName(""); + setDescription(""); + } + }, [open]); + + return ( + + Create Site + + + Create a new site and optionally add a description. 
+ + setName(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + setDescription(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 2 + }} + /> + + + + + + + ); +} + +export function RenameSiteDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Rename Site + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#2a2a2a", + color: "#ccc", + "& fieldset": { borderColor: "#444" }, + "&:hover fieldset": { borderColor: "#666" } + }, + label: { color: "#aaa" }, + mt: 1 + }} + /> + + + + + + + ); +} diff --git a/Data/Server/WebUI/src/Flow_Editor/Context_Menu_Sidebar.jsx b/Data/Server/WebUI/src/Flow_Editor/Context_Menu_Sidebar.jsx new file mode 100644 index 00000000..3f5ca090 --- /dev/null +++ b/Data/Server/WebUI/src/Flow_Editor/Context_Menu_Sidebar.jsx @@ -0,0 +1,415 @@ +import React, { useState, useEffect } from "react"; +import { Box, Typography, Tabs, Tab, TextField, MenuItem, Button, Slider, IconButton, Tooltip } from "@mui/material"; +import ContentCopyIcon from "@mui/icons-material/ContentCopy"; +import ContentPasteIcon from "@mui/icons-material/ContentPaste"; +import RestoreIcon from "@mui/icons-material/Restore"; +import { SketchPicker } from "react-color"; + +const SIDEBAR_WIDTH = 400; + +const DEFAULT_EDGE_STYLE = { + type: "bezier", + animated: true, + style: { strokeDasharray: "6 3", stroke: "#58a6ff", strokeWidth: 1 }, + label: "", + labelStyle: { fill: "#fff", fontWeight: "bold" }, + labelBgStyle: { fill: "#2c2c2c", fillOpacity: 0.85, rx: 16, ry: 16 }, + labelBgPadding: [8, 4], +}; + +let globalEdgeClipboard = null; + +function clone(obj) { + 
return JSON.parse(JSON.stringify(obj)); +} + +export default function Context_Menu_Sidebar({ + open, + onClose, + edge, + updateEdge, +}) { + const [activeTab, setActiveTab] = useState(0); + const [editState, setEditState] = useState(() => (edge ? clone(edge) : {})); + const [colorPicker, setColorPicker] = useState({ field: null, anchor: null }); + + useEffect(() => { + if (edge && edge.id !== editState.id) setEditState(clone(edge)); + // eslint-disable-next-line + }, [edge]); + + const handleChange = (field, value) => { + setEditState((prev) => { + const updated = { ...prev }; + if (field === "label") updated.label = value; + else if (field === "labelStyle.fill") updated.labelStyle = { ...updated.labelStyle, fill: value }; + else if (field === "labelBgStyle.fill") updated.labelBgStyle = { ...updated.labelBgStyle, fill: value }; + else if (field === "labelBgStyle.rx") updated.labelBgStyle = { ...updated.labelBgStyle, rx: value, ry: value }; + else if (field === "labelBgPadding") updated.labelBgPadding = value; + else if (field === "labelBgStyle.fillOpacity") updated.labelBgStyle = { ...updated.labelBgStyle, fillOpacity: value }; + else if (field === "type") updated.type = value; + else if (field === "animated") updated.animated = value; + else if (field === "style.stroke") updated.style = { ...updated.style, stroke: value }; + else if (field === "style.strokeDasharray") updated.style = { ...updated.style, strokeDasharray: value }; + else if (field === "style.strokeWidth") updated.style = { ...updated.style, strokeWidth: value }; + else if (field === "labelStyle.fontWeight") updated.labelStyle = { ...updated.labelStyle, fontWeight: value }; + else updated[field] = value; + + if (field === "style.strokeDasharray") { + if (value === "") { + updated.animated = false; + updated.style = { ...updated.style, strokeDasharray: "" }; + } else { + updated.animated = true; + updated.style = { ...updated.style, strokeDasharray: value }; + } + } + updateEdge({ ...updated, id: 
prev.id }); + return updated; + }); + }; + + // Color Picker with right alignment + const openColorPicker = (field, event) => { + setColorPicker({ field, anchor: event.currentTarget }); + }; + + const closeColorPicker = () => { + setColorPicker({ field: null, anchor: null }); + }; + + const handleColorChange = (color) => { + handleChange(colorPicker.field, color.hex); + closeColorPicker(); + }; + + // Reset, Copy, Paste logic + const handleReset = () => { + setEditState(clone({ ...DEFAULT_EDGE_STYLE, id: edge.id })); + updateEdge({ ...DEFAULT_EDGE_STYLE, id: edge.id }); + }; + const handleCopy = () => { globalEdgeClipboard = clone(editState); }; + const handlePaste = () => { + if (globalEdgeClipboard) { + setEditState(clone({ ...globalEdgeClipboard, id: edge.id })); + updateEdge({ ...globalEdgeClipboard, id: edge.id }); + } + }; + + const renderColorButton = (label, field, value) => ( + + + {colorPicker.field === field && ( + + + + )} + + ); + + // Label tab + const renderLabelTab = () => ( + + + Label + + handleChange("label", e.target.value)} + sx={{ + mb: 2, + input: { color: "#fff", bgcolor: "#1e1e1e", fontSize: "0.95rem" }, + "& fieldset": { borderColor: "#444" }, + }} + /> + + + Text Color + {renderColorButton("Label Text Color", "labelStyle.fill", editState.labelStyle?.fill || "#fff")} + + + + Background + {renderColorButton("Label Background Color", "labelBgStyle.fill", editState.labelBgStyle?.fill || "#2c2c2c")} + + + + Padding + { + const val = e.target.value.split(",").map(x => parseInt(x.trim())).filter(x => !isNaN(x)); + if (val.length === 2) handleChange("labelBgPadding", val); + }} + sx={{ width: 80, input: { color: "#fff", bgcolor: "#1e1e1e", fontSize: "0.95rem" } }} + /> + + + + Background Style + = 11 ? "rounded" : "square"} + onChange={e => { + handleChange("labelBgStyle.rx", e.target.value === "rounded" ? 
11 : 0); + }} + sx={{ + width: 150, + bgcolor: "#1e1e1e", + "& .MuiSelect-select": { color: "#fff" } + }} + > + Rounded + Square + + + + + Background Opacity + handleChange("labelBgStyle.fillOpacity", v)} + sx={{ width: 100, ml: 2 }} + /> + handleChange("labelBgStyle.fillOpacity", parseFloat(e.target.value) || 0)} + sx={{ width: 60, ml: 2, input: { color: "#fff", bgcolor: "#1e1e1e", fontSize: "0.95rem" } }} + /> + + + ); + + const renderStyleTab = () => ( + + + Edge Style + handleChange("type", e.target.value)} + sx={{ + width: 200, + bgcolor: "#1e1e1e", + "& .MuiSelect-select": { color: "#fff" } + }} + > + Step + Curved (Bezier) + Straight + Smoothstep + + + + + Edge Animation + { + const val = e.target.value; + handleChange("style.strokeDasharray", + val === "dashes" ? "6 3" : + val === "dots" ? "2 4" : "" + ); + }} + sx={{ + width: 200, + bgcolor: "#1e1e1e", + "& .MuiSelect-select": { color: "#fff" } + }} + > + Dashes + Dots + Solid + + + + + Color + {renderColorButton("Edge Color", "style.stroke", editState.style?.stroke || "#58a6ff")} + + + Edge Width + handleChange("style.strokeWidth", v)} + sx={{ width: 100, ml: 2 }} + /> + handleChange("style.strokeWidth", parseInt(e.target.value) || 1)} + sx={{ width: 60, ml: 2, input: { color: "#fff", bgcolor: "#1e1e1e", fontSize: "0.95rem" } }} + /> + + + ); + + // Always render the sidebar for animation! 
+ if (!edge) return null; + + return ( + <> + {/* Overlay */} + + + {/* Sidebar */} + e.stopPropagation()} + > + + + + Edit Edge Properties + + + setActiveTab(v)} + variant="fullWidth" + textColor="inherit" + TabIndicatorProps={{ style: { backgroundColor: "#ccc" } }} + sx={{ + borderTop: "1px solid #333", + borderBottom: "1px solid #333", + minHeight: "36px", + height: "36px" + }} + > + + + + + + {/* Main fields scrollable */} + + {activeTab === 0 && renderLabelTab()} + {activeTab === 1 && renderStyleTab()} + + + {/* Sticky footer bar */} + + + + + + + + + + + + ); +} diff --git a/Data/Server/WebUI/src/Flow_Editor/Flow_Editor.jsx b/Data/Server/WebUI/src/Flow_Editor/Flow_Editor.jsx new file mode 100644 index 00000000..0c66aab6 --- /dev/null +++ b/Data/Server/WebUI/src/Flow_Editor/Flow_Editor.jsx @@ -0,0 +1,374 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Flow_Editor.jsx +// Import Node Configuration Sidebar and new Context Menu Sidebar +import NodeConfigurationSidebar from "./Node_Configuration_Sidebar"; +import ContextMenuSidebar from "./Context_Menu_Sidebar"; + +import React, { useState, useEffect, useCallback, useRef } from "react"; +import ReactFlow, { + Background, + addEdge, + applyNodeChanges, + applyEdgeChanges, + useReactFlow +} from "reactflow"; + +import { Menu, MenuItem, Box } from "@mui/material"; +import { + Polyline as PolylineIcon, + DeleteForever as DeleteForeverIcon, + Edit as EditIcon +} from "@mui/icons-material"; + +import "reactflow/dist/style.css"; + +export default function FlowEditor({ + flowId, + nodes, + edges, + setNodes, + setEdges, + nodeTypes, + categorizedNodes +}) { + // Node Configuration Sidebar State + const [drawerOpen, setDrawerOpen] = useState(false); + const [selectedNodeId, setSelectedNodeId] = useState(null); + + // Edge Properties Sidebar State + const [edgeSidebarOpen, setEdgeSidebarOpen] = useState(false); + const [edgeSidebarEdgeId, setEdgeSidebarEdgeId] = 
useState(null); + + // Context Menus + const [nodeContextMenu, setNodeContextMenu] = useState(null); // { mouseX, mouseY, nodeId } + const [edgeContextMenu, setEdgeContextMenu] = useState(null); // { mouseX, mouseY, edgeId } + + // Drag/snap helpers (untouched) + const wrapperRef = useRef(null); + const { project } = useReactFlow(); + const [guides, setGuides] = useState([]); + const [activeGuides, setActiveGuides] = useState([]); + const movingFlowSize = useRef({ width: 0, height: 0 }); + + // ----- Node/Edge Definitions ----- + const selectedNode = nodes.find((n) => n.id === selectedNodeId); + const selectedEdge = edges.find((e) => e.id === edgeSidebarEdgeId); + + // --------- Context Menu Handlers ---------- + const handleRightClick = (e, node) => { + e.preventDefault(); + setNodeContextMenu({ mouseX: e.clientX + 2, mouseY: e.clientY - 6, nodeId: node.id }); + }; + + const handleEdgeRightClick = (e, edge) => { + e.preventDefault(); + setEdgeContextMenu({ mouseX: e.clientX + 2, mouseY: e.clientY - 6, edgeId: edge.id }); + }; + + // --------- Node Context Menu Actions --------- + const handleDisconnectAllEdges = (nodeId) => { + setEdges((eds) => eds.filter((e) => e.source !== nodeId && e.target !== nodeId)); + setNodeContextMenu(null); + }; + + const handleRemoveNode = (nodeId) => { + setNodes((nds) => nds.filter((n) => n.id !== nodeId)); + setEdges((eds) => eds.filter((e) => e.source !== nodeId && e.target !== nodeId)); + setNodeContextMenu(null); + }; + + const handleEditNodeProps = (nodeId) => { + setSelectedNodeId(nodeId); + setDrawerOpen(true); + setNodeContextMenu(null); + }; + + // --------- Edge Context Menu Actions --------- + const handleUnlinkEdge = (edgeId) => { + setEdges((eds) => eds.filter((e) => e.id !== edgeId)); + setEdgeContextMenu(null); + }; + + const handleEditEdgeProps = (edgeId) => { + setEdgeSidebarEdgeId(edgeId); + setEdgeSidebarOpen(true); + setEdgeContextMenu(null); + }; + + // ----- Sidebar Closing ----- + const handleCloseNodeSidebar 
= () => { + setDrawerOpen(false); + setSelectedNodeId(null); + }; + + const handleCloseEdgeSidebar = () => { + setEdgeSidebarOpen(false); + setEdgeSidebarEdgeId(null); + }; + + // ----- Update Edge Callback for Sidebar ----- + const updateEdge = (updatedEdgeObj) => { + setEdges((eds) => + eds.map((e) => (e.id === updatedEdgeObj.id ? { ...e, ...updatedEdgeObj } : e)) + ); + }; + + // ----- Drag/Drop, Guides, Node Snap Logic (unchanged) ----- + const computeGuides = useCallback((dragNode) => { + if (!wrapperRef.current) return; + const parentRect = wrapperRef.current.getBoundingClientRect(); + const dragEl = wrapperRef.current.querySelector( + `.react-flow__node[data-id="${dragNode.id}"]` + ); + if (dragEl) { + const dr = dragEl.getBoundingClientRect(); + const relLeft = dr.left - parentRect.left; + const relTop = dr.top - parentRect.top; + const relRight = relLeft + dr.width; + const relBottom = relTop + dr.height; + const pTL = project({ x: relLeft, y: relTop }); + const pTR = project({ x: relRight, y: relTop }); + const pBL = project({ x: relLeft, y: relBottom }); + movingFlowSize.current = { width: pTR.x - pTL.x, height: pBL.y - pTL.y }; + } + const lines = []; + nodes.forEach((n) => { + if (n.id === dragNode.id) return; + const el = wrapperRef.current.querySelector( + `.react-flow__node[data-id="${n.id}"]` + ); + if (!el) return; + const r = el.getBoundingClientRect(); + const relLeft = r.left - parentRect.left; + const relTop = r.top - parentRect.top; + const relRight = relLeft + r.width; + const relBottom = relTop + r.height; + const pTL = project({ x: relLeft, y: relTop }); + const pTR = project({ x: relRight, y: relTop }); + const pBL = project({ x: relLeft, y: relBottom }); + lines.push({ xFlow: pTL.x, xPx: relLeft }); + lines.push({ xFlow: pTR.x, xPx: relRight }); + lines.push({ yFlow: pTL.y, yPx: relTop }); + lines.push({ yFlow: pBL.y, yPx: relBottom }); + }); + setGuides(lines); + }, [nodes, project]); + + const onNodeDrag = useCallback((_, node) => { + 
const threshold = 5; + let snapX = null, snapY = null; + const show = []; + const { width: fw, height: fh } = movingFlowSize.current; + guides.forEach((ln) => { + if (ln.xFlow != null) { + if (Math.abs(node.position.x - ln.xFlow) < threshold) { snapX = ln.xFlow; show.push({ xPx: ln.xPx }); } + else if (Math.abs(node.position.x + fw - ln.xFlow) < threshold) { snapX = ln.xFlow - fw; show.push({ xPx: ln.xPx }); } + } + if (ln.yFlow != null) { + if (Math.abs(node.position.y - ln.yFlow) < threshold) { snapY = ln.yFlow; show.push({ yPx: ln.yPx }); } + else if (Math.abs(node.position.y + fh - ln.yFlow) < threshold) { snapY = ln.yFlow - fh; show.push({ yPx: ln.yPx }); } + } + }); + if (snapX !== null || snapY !== null) { + setNodes((nds) => + applyNodeChanges( + [{ + id: node.id, + type: "position", + position: { + x: snapX !== null ? snapX : node.position.x, + y: snapY !== null ? snapY : node.position.y + } + }], + nds + ) + ); + setActiveGuides(show); + } else { + setActiveGuides([]); + } + }, [guides, setNodes]); + + const onDrop = useCallback((event) => { + event.preventDefault(); + const type = event.dataTransfer.getData("application/reactflow"); + if (!type) return; + const bounds = wrapperRef.current.getBoundingClientRect(); + const position = project({ + x: event.clientX - bounds.left, + y: event.clientY - bounds.top + }); + const id = "node-" + Date.now(); + const nodeMeta = Object.values(categorizedNodes).flat().find((n) => n.type === type); + // Seed config defaults: + const configDefaults = {}; + (nodeMeta?.config || []).forEach(cfg => { + if (cfg.defaultValue !== undefined) { + configDefaults[cfg.key] = cfg.defaultValue; + } + }); + const newNode = { + id, + type, + position, + data: { + label: nodeMeta?.label || type, + content: nodeMeta?.content, + ...configDefaults + }, + dragHandle: ".borealis-node-header" + }; + setNodes((nds) => [...nds, newNode]); + + }, [project, setNodes, categorizedNodes]); + + const onDragOver = useCallback((event) => { + 
event.preventDefault(); + event.dataTransfer.dropEffect = "move"; + }, []); + + const onConnect = useCallback((params) => { + setEdges((eds) => + addEdge({ + ...params, + type: "bezier", + animated: true, + style: { strokeDasharray: "6 3", stroke: "#58a6ff" } + }, eds) + ); + }, [setEdges]); + + const onNodesChange = useCallback((changes) => { + setNodes((nds) => applyNodeChanges(changes, nds)); + }, [setNodes]); + + const onEdgesChange = useCallback((changes) => { + setEdges((eds) => applyEdgeChanges(changes, eds)); + }, [setEdges]); + + useEffect(() => { + const nodeCountEl = document.getElementById("nodeCount"); + if (nodeCountEl) nodeCountEl.innerText = nodes.length; + }, [nodes]); + + const nodeDef = selectedNode + ? Object.values(categorizedNodes).flat().find((def) => def.type === selectedNode.type) + : null; + + // --------- MAIN RENDER ---------- + return ( +
+ {/* Node Config Sidebar */} + + + {/* Edge Properties Sidebar */} + { + // Provide id if missing + if (!edge.id && edgeSidebarEdgeId) edge.id = edgeSidebarEdgeId; + updateEdge(edge); + }} + /> + + computeGuides(node)} + onNodeDrag={onNodeDrag} + onNodeDragStop={() => { setGuides([]); setActiveGuides([]); }} + > + + + + {/* Helper lines for snapping */} + {activeGuides.map((ln, i) => + ln.xPx != null ? ( +
+ ) : ( +
+ ) + )} + + {/* Node Context Menu */} + setNodeContextMenu(null)} + anchorReference="anchorPosition" + anchorPosition={nodeContextMenu ? { top: nodeContextMenu.mouseY, left: nodeContextMenu.mouseX } : undefined} + PaperProps={{ sx: { bgcolor: "#1e1e1e", color: "#fff", fontSize: "13px" } }} + > + handleEditNodeProps(nodeContextMenu.nodeId)}> + + Edit Properties + + handleDisconnectAllEdges(nodeContextMenu.nodeId)}> + + Disconnect All Edges + + handleRemoveNode(nodeContextMenu.nodeId)}> + + Remove Node + + + + {/* Edge Context Menu */} + setEdgeContextMenu(null)} + anchorReference="anchorPosition" + anchorPosition={edgeContextMenu ? { top: edgeContextMenu.mouseY, left: edgeContextMenu.mouseX } : undefined} + PaperProps={{ sx: { bgcolor: "#1e1e1e", color: "#fff", fontSize: "13px" } }} + > + handleEditEdgeProps(edgeContextMenu.edgeId)}> + + Edit Properties + + handleUnlinkEdge(edgeContextMenu.edgeId)}> + + Unlink Edge + + +
+ ); +} diff --git a/Data/Server/WebUI/src/Flow_Editor/Flow_Tabs.jsx b/Data/Server/WebUI/src/Flow_Editor/Flow_Tabs.jsx new file mode 100644 index 00000000..5ff89cdc --- /dev/null +++ b/Data/Server/WebUI/src/Flow_Editor/Flow_Tabs.jsx @@ -0,0 +1,100 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Flow_Tabs.jsx + +import React from "react"; +import { Box, Tabs, Tab, Tooltip } from "@mui/material"; +import { Add as AddIcon } from "@mui/icons-material"; + +/** + * Renders the tab bar (including the "add tab" button). + * + * Props: + * - tabs (array of {id, tab_name, nodes, edges}) + * - activeTabId (string) + * - onTabChange(newActiveTabId: string) + * - onAddTab() + * - onTabRightClick(evt: MouseEvent, tabId: string) + */ +export default function FlowTabs({ + tabs, + activeTabId, + onTabChange, + onAddTab, + onTabRightClick +}) { + // Determine the currently active tab index + const activeIndex = (() => { + const idx = tabs.findIndex((t) => t.id === activeTabId); + return idx >= 0 ? idx : 0; + })(); + + // Handle tab clicks + const handleChange = (event, newValue) => { + if (newValue === "__addtab__") { + // The "plus" tab + onAddTab(); + } else { + // normal tab index + const newTab = tabs[newValue]; + if (newTab) { + onTabChange(newTab.id); + } + } + }; + + return ( + + + {tabs.map((tab, index) => ( + onTabRightClick(evt, tab.id)} + sx={{ + minHeight: "36px", + height: "36px", + textTransform: "none", + backgroundColor: tab.id === activeTabId ? 
"#2C2C2C" : "transparent", + color: "#58a6ff" + }} + /> + ))} + {/* The "plus" tab has a special value */} + + } + value="__addtab__" + sx={{ + minHeight: "36px", + height: "36px", + color: "#58a6ff", + textTransform: "none" + }} + /> + + + + ); +} diff --git a/Data/Server/WebUI/src/Flow_Editor/Node_Configuration_Sidebar.jsx b/Data/Server/WebUI/src/Flow_Editor/Node_Configuration_Sidebar.jsx new file mode 100644 index 00000000..bf8ea641 --- /dev/null +++ b/Data/Server/WebUI/src/Flow_Editor/Node_Configuration_Sidebar.jsx @@ -0,0 +1,485 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Node_Configuration_Sidebar.jsx +import { Box, Typography, Tabs, Tab, TextField, MenuItem, IconButton, Dialog, DialogTitle, DialogContent, DialogActions, Button, Tooltip } from "@mui/material"; +import React, { useState } from "react"; +import { useReactFlow } from "reactflow"; +import ReactMarkdown from "react-markdown"; // Used for Node Usage Documentation +import EditIcon from "@mui/icons-material/Edit"; +import PaletteIcon from "@mui/icons-material/Palette"; +import { SketchPicker } from "react-color"; + +// ---- NEW: Brightness utility for gradient ---- +function darkenColor(hex, percent = 0.7) { + if (!/^#[0-9A-Fa-f]{6}$/.test(hex)) return hex; + let r = parseInt(hex.slice(1, 3), 16); + let g = parseInt(hex.slice(3, 5), 16); + let b = parseInt(hex.slice(5, 7), 16); + r = Math.round(r * percent); + g = Math.round(g * percent); + b = Math.round(b * percent); + return `#${r.toString(16).padStart(2,"0")}${g.toString(16).padStart(2,"0")}${b.toString(16).padStart(2,"0")}`; +} +// -------------------------------------------- + +export default function NodeConfigurationSidebar({ drawerOpen, setDrawerOpen, title, nodeData, setNodes, selectedNode }) { + const [activeTab, setActiveTab] = useState(0); + const contextSetNodes = useReactFlow().setNodes; + // Use setNodes from props if provided, else fallback to context (for backward 
compatibility) + const effectiveSetNodes = setNodes || contextSetNodes; + const handleTabChange = (_, newValue) => setActiveTab(newValue); + + // Rename dialog state + const [renameOpen, setRenameOpen] = useState(false); + const [renameValue, setRenameValue] = useState(title || ""); + + // ---- NEW: Accent Color Picker ---- + const [colorDialogOpen, setColorDialogOpen] = useState(false); + const accentColor = selectedNode?.data?.accentColor || "#58a6ff"; + // ---------------------------------- + + const renderConfigFields = () => { + const config = nodeData?.config || []; + const nodeId = nodeData?.nodeId; + + const normalizeOptions = (opts = []) => + opts.map((opt) => { + if (typeof opt === "string") { + return { value: opt, label: opt, disabled: false }; + } + if (opt && typeof opt === "object") { + const val = + opt.value ?? + opt.id ?? + opt.handle ?? + (typeof opt.label === "string" ? opt.label : ""); + const label = + opt.label ?? + opt.name ?? + opt.title ?? + (typeof val !== "undefined" ? String(val) : ""); + return { + value: typeof val === "undefined" ? "" : String(val), + label: typeof label === "undefined" ? "" : String(label), + disabled: Boolean(opt.disabled) + }; + } + return { value: String(opt ?? ""), label: String(opt ?? ""), disabled: false }; + }); + + return config.map((field, index) => { + const value = nodeData?.[field.key] ?? 
""; + const isReadOnly = Boolean(field.readOnly); + + // ---- DYNAMIC DROPDOWN SUPPORT ---- + if (field.type === "select") { + let options = field.options || []; + + if (field.optionsKey && Array.isArray(nodeData?.[field.optionsKey])) { + options = nodeData[field.optionsKey]; + } else if (field.dynamicOptions && nodeData?.windowList && Array.isArray(nodeData.windowList)) { + options = nodeData.windowList + .map((win) => ({ + value: String(win.handle), + label: `${win.title} (${win.handle})` + })) + .sort((a, b) => a.label.localeCompare(b.label, undefined, { sensitivity: "base" })); + } + + options = normalizeOptions(options); + + // Handle dynamic options for things like Target Window + if (field.dynamicOptions && (!nodeData?.windowList || !Array.isArray(nodeData.windowList))) { + options = []; + } + + return ( + + + {field.label || field.key} + + { + if (isReadOnly) return; + const newValue = e.target.value; + if (!nodeId) return; + effectiveSetNodes((nds) => + nds.map((n) => + n.id === nodeId + ? { ...n, data: { ...n.data, [field.key]: newValue } } + : n + ) + ); + window.BorealisValueBus[nodeId] = newValue; + }} + SelectProps={{ + MenuProps: { + PaperProps: { + sx: { + bgcolor: "#1e1e1e", + color: "#ccc", + border: "1px solid #58a6ff", + "& .MuiMenuItem-root": { + color: "#ccc", + fontSize: "0.85rem", + "&:hover": { + backgroundColor: "#2a2a2a" + }, + "&.Mui-selected": { + backgroundColor: "#2c2c2c !important", + color: "#58a6ff" + }, + "&.Mui-selected:hover": { + backgroundColor: "#2a2a2a !important" + } + } + } + } + } + }} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#1e1e1e", + color: "#ccc", + fontSize: "0.85rem", + "& fieldset": { + borderColor: "#444" + }, + "&:hover fieldset": { + borderColor: "#58a6ff" + }, + "&.Mui-focused fieldset": { + borderColor: "#58a6ff" + } + }, + "& .MuiSelect-select": { + backgroundColor: "#1e1e1e" + } + }} + > + {options.length === 0 ? ( + + {field.label === "Target Window" + ? 
"No windows detected" + : "No options"} + + ) : ( + options.map((opt, idx) => ( + + {opt.label} + + )) + )} + + + ); + } + // ---- END DYNAMIC DROPDOWN SUPPORT ---- + + return ( + + + {field.label || field.key} + + { + if (isReadOnly) return; + const newValue = e.target.value; + if (!nodeId) return; + effectiveSetNodes((nds) => + nds.map((n) => + n.id === nodeId + ? { ...n, data: { ...n.data, [field.key]: newValue } } + : n + ) + ); + window.BorealisValueBus[nodeId] = newValue; + }} + /> + + ); + }); + }; + + // ---- NEW: Accent Color Button ---- + const renderAccentColorButton = () => ( + + setColorDialogOpen(true)} + sx={{ + ml: 1, + border: "1px solid #58a6ff", + background: accentColor, + color: "#222", + width: 28, height: 28, p: 0 + }} + > + + + + ); + // ---------------------------------- + + return ( + <> + setDrawerOpen(false)} + sx={{ + position: "absolute", + top: 0, + left: 0, + right: 0, + bottom: 0, + backgroundColor: "rgba(0, 0, 0, 0.3)", + opacity: drawerOpen ? 1 : 0, + pointerEvents: drawerOpen ? "auto" : "none", + transition: "opacity 0.6s ease", + zIndex: 10 + }} + /> + + e.stopPropagation()} + > + + + + + {"Edit " + (title || "Node")} + + + { + setRenameValue(title || ""); + setRenameOpen(true); + }} + sx={{ ml: 1, color: "#58a6ff" }} + > + + + {/* ---- NEW: Accent Color Picker button next to pencil ---- */} + {renderAccentColorButton()} + {/* ------------------------------------------------------ */} + + + + + + + + + + + + {activeTab === 0 && renderConfigFields()} + {activeTab === 1 && ( + + ( + + ), + p: ({ node, ...props }) => ( + + ), + ul: ({ node, ...props }) => ( +
    + ), + li: ({ node, ...props }) => ( +
  • + ) + }} + /> + + )} + + + + {/* Rename Node Dialog */} + setRenameOpen(false)} + PaperProps={{ sx: { bgcolor: "#232323" } }} + > + Rename Node + + setRenameValue(e.target.value)} + sx={{ + mt: 1, + bgcolor: "#1e1e1e", + "& .MuiOutlinedInput-root": { + color: "#ccc", + backgroundColor: "#1e1e1e", + "& fieldset": { borderColor: "#444" } + }, + label: { color: "#aaa" } + }} + /> + + + + + + + + {/* ---- Accent Color Picker Dialog ---- */} + setColorDialogOpen(false)} + PaperProps={{ sx: { bgcolor: "#232323" } }} + > + Pick Node Header/Accent Color + + { + const nodeId = selectedNode?.id || nodeData?.nodeId; + if (!nodeId) return; + const accent = color.hex; + const accentDark = darkenColor(accent, 0.7); + effectiveSetNodes((nds) => + nds.map((n) => + n.id === nodeId + ? { + ...n, + data: { ...n.data, accentColor: accent }, + style: { + ...n.style, + "--borealis-accent": accent, + "--borealis-accent-dark": accentDark, + "--borealis-title": accent, + }, + } + : n + ) + ); + }} + disableAlpha + presetColors={[ + "#58a6ff", "#0475c2", "#00d18c", "#ff4f4f", "#ff8c00", + "#6b21a8", "#0e7490", "#888", "#fff", "#000" + ]} + /> + + + The node's header text and accent gradient will use your selected color.
    + The accent gradient fades to a slightly darker version. +
    + + + + {accentColor} + + +
    +
    + + + +
    + {/* ---- END ACCENT COLOR PICKER DIALOG ---- */} + + ); +} diff --git a/Data/Server/WebUI/src/Flow_Editor/Node_Sidebar.jsx b/Data/Server/WebUI/src/Flow_Editor/Node_Sidebar.jsx new file mode 100644 index 00000000..9b408ffb --- /dev/null +++ b/Data/Server/WebUI/src/Flow_Editor/Node_Sidebar.jsx @@ -0,0 +1,260 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Node_Sidebar.jsx + +import React, { useState } from "react"; +import { + Accordion, + AccordionSummary, + AccordionDetails, + Button, + Tooltip, + Typography, + Box +} from "@mui/material"; +import { + ExpandMore as ExpandMoreIcon, + SaveAlt as SaveAltIcon, + Save as SaveIcon, + FileOpen as FileOpenIcon, + DeleteForever as DeleteForeverIcon, + DragIndicator as DragIndicatorIcon, + Polyline as PolylineIcon, + ChevronLeft as ChevronLeftIcon, + ChevronRight as ChevronRightIcon +} from "@mui/icons-material"; +import { SaveWorkflowDialog } from "../Dialogs"; + +export default function NodeSidebar({ + categorizedNodes, + handleExportFlow, + handleImportFlow, + handleSaveFlow, + handleOpenCloseAllDialog, + fileInputRef, + onFileInputChange, + currentTabName +}) { + const [expandedCategory, setExpandedCategory] = useState(null); + const [collapsed, setCollapsed] = useState(false); + const [saveOpen, setSaveOpen] = useState(false); + const [saveName, setSaveName] = useState(""); + + const handleAccordionChange = (category) => (_, isExpanded) => { + setExpandedCategory(isExpanded ? category : null); + }; + + return ( +
    +
    + {!collapsed && ( + <> + {/* Workflows Section */} + + } + sx={{ + backgroundColor: "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 } + }} + > + + Workflows + + + + + + + + + + + + + + + + {/* Nodes Section */} + + } + sx={{ + backgroundColor: "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 } + }} + > + + Nodes + + + + {Object.entries(categorizedNodes).map(([category, items]) => ( + + } + sx={{ + bgcolor: "#1e1e1e", + px: 2, + minHeight: "32px", + "& .MuiAccordionSummary-content": { margin: 0 } + }} + > + + {category} + + + + {items.map((nodeDef) => ( + + {nodeDef.description || "Drag & Drop into Editor"} + + } + placement="right" + arrow + > + + + ))} + + + ))} + + + + {/* Hidden file input */} + + + )} +
    + + {/* Bottom toggle button */} + + setCollapsed(!collapsed)} + sx={{ + height: "36px", + borderTop: "1px solid #333", + cursor: "pointer", + display: "flex", + alignItems: "center", + justifyContent: "center", + color: "#888", + backgroundColor: "#121212", + transition: "background-color 0.2s ease", + "&:hover": { + backgroundColor: "#1e1e1e" + }, + "&:active": { + backgroundColor: "#2a2a2a" + } + }} + > + {collapsed ? : } + + + setSaveOpen(false)} + onSave={() => { + setSaveOpen(false); + handleSaveFlow(saveName); + }} + /> +
    + ); +} + +const buttonStyle = { + color: "#ccc", + backgroundColor: "#232323", + justifyContent: "flex-start", + pl: 2, + fontSize: "0.9rem", + textTransform: "none", + "&:hover": { + backgroundColor: "#2a2a2a" + } +}; + +const nodeButtonStyle = { + color: "#ccc", + backgroundColor: "#232323", + justifyContent: "space-between", + pl: 2, + pr: 1, + fontSize: "0.9rem", + textTransform: "none", + "&:hover": { + backgroundColor: "#2a2a2a" + } +}; diff --git a/Data/Server/WebUI/src/Login.jsx b/Data/Server/WebUI/src/Login.jsx new file mode 100644 index 00000000..375bc61b --- /dev/null +++ b/Data/Server/WebUI/src/Login.jsx @@ -0,0 +1,332 @@ +import React, { useMemo, useState } from "react"; +import { Box, TextField, Button, Typography } from "@mui/material"; + +export default function Login({ onLogin }) { + const [username, setUsername] = useState("admin"); + const [password, setPassword] = useState(""); + const [error, setError] = useState(""); + const [isSubmitting, setIsSubmitting] = useState(false); + const [step, setStep] = useState("credentials"); // 'credentials' | 'mfa' + const [pendingToken, setPendingToken] = useState(""); + const [mfaStage, setMfaStage] = useState(null); + const [mfaCode, setMfaCode] = useState(""); + const [setupSecret, setSetupSecret] = useState(""); + const [setupQr, setSetupQr] = useState(""); + const [setupUri, setSetupUri] = useState(""); + + const formattedSecret = useMemo(() => { + if (!setupSecret) return ""; + return setupSecret.replace(/(.{4})/g, "$1 ").trim(); + }, [setupSecret]); + + const sha512 = async (text) => { + try { + if (window.crypto && window.crypto.subtle && window.isSecureContext) { + const encoder = new TextEncoder(); + const data = encoder.encode(text); + const hashBuffer = await window.crypto.subtle.digest("SHA-512", data); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + return hashArray.map((b) => b.toString(16).padStart(2, "0")).join(""); + } + } catch (_) { + // fall through to return null + } 
+ // Not a secure context or subtle crypto unavailable + return null; + }; + + const resetMfaState = () => { + setStep("credentials"); + setPendingToken(""); + setMfaStage(null); + setMfaCode(""); + setSetupSecret(""); + setSetupQr(""); + setSetupUri(""); + }; + + const handleCredentialsSubmit = async (e) => { + e.preventDefault(); + setIsSubmitting(true); + setError(""); + try { + const hash = await sha512(password); + const body = hash + ? { username, password_sha512: hash } + : { username, password }; + const resp = await fetch("/api/auth/login", { + method: "POST", + headers: { "Content-Type": "application/json" }, + credentials: "include", + body: JSON.stringify(body) + }); + const data = await resp.json(); + if (!resp.ok) { + throw new Error(data?.error || "Invalid username or password"); + } + if (data?.status === "mfa_required") { + setPendingToken(data.pending_token || ""); + setMfaStage(data.stage || "verify"); + setStep("mfa"); + setMfaCode(""); + setSetupSecret(data.secret || ""); + setSetupQr(data.qr_image || ""); + setSetupUri(data.otpauth_url || ""); + setError(""); + setPassword(""); + return; + } + if (data?.token) { + try { + document.cookie = `borealis_auth=${data.token}; Path=/; SameSite=Lax`; + } catch (_) {} + } + onLogin({ username: data.username, role: data.role }); + } catch (err) { + const msg = err?.message || "Unable to log in"; + setError(msg); + resetMfaState(); + } finally { + setIsSubmitting(false); + } + }; + + const handleMfaSubmit = async (e) => { + e.preventDefault(); + if (!pendingToken) { + setError("Your MFA session expired. 
Please log in again."); + resetMfaState(); + return; + } + if (!mfaCode || mfaCode.trim().length < 6) { + setError("Enter the 6-digit code from your authenticator app."); + return; + } + setIsSubmitting(true); + setError(""); + try { + const resp = await fetch("/api/auth/mfa/verify", { + method: "POST", + headers: { "Content-Type": "application/json" }, + credentials: "include", + body: JSON.stringify({ pending_token: pendingToken, code: mfaCode }) + }); + const data = await resp.json(); + if (!resp.ok) { + const errKey = data?.error; + if (errKey === "expired" || errKey === "invalid_session" || errKey === "mfa_pending") { + setError("Your MFA session expired. Please log in again."); + resetMfaState(); + return; + } + const msgMap = { + invalid_code: "Incorrect code. Please try again.", + mfa_not_configured: "MFA is not configured for this account." + }; + setError(msgMap[errKey] || data?.error || "Failed to verify code."); + return; + } + if (data?.token) { + try { + document.cookie = `borealis_auth=${data.token}; Path=/; SameSite=Lax`; + } catch (_) {} + } + setError(""); + onLogin({ username: data.username, role: data.role }); + } catch (err) { + setError("Failed to verify code."); + } finally { + setIsSubmitting(false); + } + }; + + const handleBackToLogin = () => { + resetMfaState(); + setPassword(""); + setError(""); + }; + + const onCodeChange = (event) => { + const raw = event.target.value || ""; + const digits = raw.replace(/\D/g, "").slice(0, 6); + setMfaCode(digits); + }; + + const formTitle = step === "mfa" + ? "Multi-Factor Authentication" + : "Borealis - Automation Platform"; + + return ( + + + Borealis Logo + + {formTitle} + + + {step === "credentials" ? ( + <> + setUsername(e.target.value)} + margin="normal" + /> + setPassword(e.target.value)} + margin="normal" + /> + {error && ( + + {error} + + )} + + + ) : ( + <> + {mfaStage === "setup" ? 
( + <> + + Scan the QR code with your authenticator app, then enter the 6-digit code to complete setup for {username}. + + {setupQr ? ( + MFA enrollment QR code + ) : null} + {formattedSecret ? ( + + + Manual code + + + {formattedSecret} + + + ) : null} + {setupUri ? ( + + {setupUri} + + ) : null} + + ) : ( + + Enter the 6-digit code from your authenticator app for {username}. + + )} + + + {error && ( + + {error} + + )} + + + + )} + + + ); +} diff --git a/Data/Server/WebUI/src/Navigation_Sidebar.jsx b/Data/Server/WebUI/src/Navigation_Sidebar.jsx new file mode 100644 index 00000000..c2a7d712 --- /dev/null +++ b/Data/Server/WebUI/src/Navigation_Sidebar.jsx @@ -0,0 +1,409 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Navigation_Sidebar.jsx + +import React, { useState } from "react"; +import { + Accordion, + AccordionSummary, + AccordionDetails, + Typography, + Box, + ListItemButton, + ListItemText +} from "@mui/material"; +import { + ExpandMore as ExpandMoreIcon, + Devices as DevicesIcon, + FilterAlt as FilterIcon, + Groups as GroupsIcon, + Work as JobsIcon, + Polyline as WorkflowsIcon, + Code as ScriptIcon, + PeopleOutline as CommunityIcon, + Apps as AssembliesIcon +} from "@mui/icons-material"; +import { LocationCity as SitesIcon } from "@mui/icons-material"; +import { + Dns as ServerInfoIcon, + VpnKey as CredentialIcon, + PersonOutline as UserIcon, + GitHub as GitHubIcon, + Key as KeyIcon, + AdminPanelSettings as AdminPanelSettingsIcon +} from "@mui/icons-material"; + +function NavigationSidebar({ currentPage, onNavigate, isAdmin = false }) { + const [expandedNav, setExpandedNav] = useState({ + sites: true, + devices: true, + automation: true, + filters: true, + access: true, + admin: true + }); + + const NavItem = ({ icon, label, pageKey, indent = 0 }) => { + const active = currentPage === pageKey; + return ( + onNavigate(pageKey)} + sx={{ + pl: indent ? 4 : 2, + py: 1, + color: active ? 
"#e6f2ff" : "#ccc", + position: "relative", + background: active + ? "linear-gradient(90deg, rgba(88,166,255,0.10) 0%, rgba(88,166,255,0.03) 60%, rgba(88,166,255,0.00) 100%)" + : "transparent", + borderTopRightRadius: 0, + borderBottomRightRadius: 0, + boxShadow: active + ? "inset 0 0 0 1px rgba(88,166,255,0.25)" + : "none", + transition: "background 160ms ease, box-shadow 160ms ease, color 160ms ease", + "&:hover": { + background: active + ? "linear-gradient(90deg, rgba(88,166,255,0.14) 0%, rgba(88,166,255,0.06) 60%, rgba(88,166,255,0.00) 100%)" + : "#2c2c2c" + } + }} + selected={active} + > + + {icon && ( + + {icon} + + )} + + + ); + }; + + return ( + + + {/* Sites */} + {(() => { + const groupActive = currentPage === "sites"; + return ( + setExpandedNav((s) => ({ ...s, sites: e }))} + square + disableGutters + sx={{ "&:before": { display: "none" }, margin: 0, border: 0 }} + > + } + sx={{ + position: "relative", + background: groupActive + ? "linear-gradient(90deg, rgba(88,166,255,0.08) 0%, rgba(88,166,255,0.00) 100%)" + : "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 }, + "&::before": { + content: '""', + position: "absolute", + left: 0, + top: 0, + bottom: 0, + width: groupActive ? 3 : 0, + bgcolor: "#58a6ff", + borderTopRightRadius: 2, + borderBottomRightRadius: 2, + transition: "width 160ms ease" + } + }} + > + + Sites + + + + } label="All Sites" pageKey="sites" /> + + + ); + })()} + {/* Inventory */} + {(() => { + const groupActive = ["devices", "ssh_devices", "winrm_devices", "agent_devices"].includes(currentPage); + return ( + setExpandedNav((s) => ({ ...s, devices: e }))} + square + disableGutters + sx={{ "&:before": { display: "none" }, margin: 0, border: 0 }} + > + } + sx={{ + position: "relative", + background: groupActive + ? 
"linear-gradient(90deg, rgba(88,166,255,0.08) 0%, rgba(88,166,255,0.00) 100%)" + : "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 }, + "&::before": { + content: '""', + position: "absolute", + left: 0, + top: 0, + bottom: 0, + width: groupActive ? 3 : 0, + bgcolor: "#58a6ff", + borderTopRightRadius: 2, + borderBottomRightRadius: 2, + transition: "width 160ms ease" + } + }} + > + + Inventory + + + + } label="Device Approvals" pageKey="admin_device_approvals" /> + } label="Enrollment Codes" pageKey="admin_enrollment_codes" indent /> + } label="Devices" pageKey="devices" /> + } label="Agent Devices" pageKey="agent_devices" indent /> + } label="SSH Devices" pageKey="ssh_devices" indent /> + } label="WinRM Devices" pageKey="winrm_devices" indent /> + + + ); + })()} + + {/* Automation */} + {(() => { + const groupActive = ["jobs", "assemblies", "community"].includes(currentPage); + return ( + setExpandedNav((s) => ({ ...s, automation: e }))} + square + disableGutters + sx={{ "&:before": { display: "none" }, margin: 0, border: 0 }} + > + } + sx={{ + position: "relative", + background: groupActive + ? "linear-gradient(90deg, rgba(88,166,255,0.08) 0%, rgba(88,166,255,0.00) 100%)" + : "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 }, + "&::before": { + content: '""', + position: "absolute", + left: 0, + top: 0, + bottom: 0, + width: groupActive ? 
3 : 0, + bgcolor: "#58a6ff", + borderTopRightRadius: 2, + borderBottomRightRadius: 2, + transition: "width 160ms ease" + } + }} + > + + Automation + + + + } label="Assemblies" pageKey="assemblies" /> + } label="Scheduled Jobs" pageKey="jobs" /> + } label="Community Content" pageKey="community" /> + + + ); + })()} + + {/* Filters & Groups */} + {(() => { + const groupActive = currentPage === "filters" || currentPage === "groups"; + return ( + setExpandedNav((s) => ({ ...s, filters: e }))} + square + disableGutters + sx={{ "&:before": { display: "none" }, margin: 0, border: 0 }} + > + } + sx={{ + position: "relative", + background: groupActive + ? "linear-gradient(90deg, rgba(88,166,255,0.08) 0%, rgba(88,166,255,0.00) 100%)" + : "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 }, + "&::before": { + content: '""', + position: "absolute", + left: 0, + top: 0, + bottom: 0, + width: groupActive ? 3 : 0, + bgcolor: "#58a6ff", + borderTopRightRadius: 2, + borderBottomRightRadius: 2, + transition: "width 160ms ease" + } + }} + > + + Filters & Groups + + + + } label="Filters" pageKey="filters" /> + } label="Groups" pageKey="groups" /> + + + ); + })()} + + {/* Access Management */} + {(() => { + if (!isAdmin) return null; + const groupActive = + currentPage === "access_credentials" || + currentPage === "access_users" || + currentPage === "access_github_token"; + return ( + setExpandedNav((s) => ({ ...s, access: e }))} + square + disableGutters + sx={{ "&:before": { display: "none" }, margin: 0, border: 0 }} + > + } + sx={{ + position: "relative", + background: groupActive + ? "linear-gradient(90deg, rgba(88,166,255,0.08) 0%, rgba(88,166,255,0.00) 100%)" + : "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 }, + "&::before": { + content: '""', + position: "absolute", + left: 0, + top: 0, + bottom: 0, + width: groupActive ? 
3 : 0, + bgcolor: "#58a6ff", + borderTopRightRadius: 2, + borderBottomRightRadius: 2, + transition: "width 160ms ease" + } + }} + > + + Access Management + + + + } label="Credentials" pageKey="access_credentials" /> + } label="GitHub API Token" pageKey="access_github_token" /> + } label="Users" pageKey="access_users" /> + + + ); + })()} + + {/* Admin */} + {(() => { + if (!isAdmin) return null; + const groupActive = + currentPage === "server_info" || + currentPage === "admin_enrollment_codes" || + currentPage === "admin_device_approvals"; + return ( + setExpandedNav((s) => ({ ...s, admin: e }))} + square + disableGutters + sx={{ "&:before": { display: "none" }, margin: 0, border: 0 }} + > + } + sx={{ + position: "relative", + background: groupActive + ? "linear-gradient(90deg, rgba(88,166,255,0.08) 0%, rgba(88,166,255,0.00) 100%)" + : "#2c2c2c", + minHeight: "36px", + "& .MuiAccordionSummary-content": { margin: 0 }, + "&::before": { + content: '""', + position: "absolute", + left: 0, + top: 0, + bottom: 0, + width: groupActive ? 
3 : 0, + bgcolor: "#58a6ff", + borderTopRightRadius: 2, + borderBottomRightRadius: 2, + transition: "width 160ms ease" + } + }} + > + + Admin Settings + + + + } label="Server Info" pageKey="server_info" /> + + + ); + })()} + + + ); +} + +export default React.memo(NavigationSidebar); diff --git a/Data/Server/WebUI/src/Scheduling/Create_Job.jsx b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx new file mode 100644 index 00000000..89b09962 --- /dev/null +++ b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx @@ -0,0 +1,2141 @@ +import React, { useEffect, useMemo, useState, useCallback, useRef } from "react"; +import { + Paper, + Box, + Typography, + Tabs, + Tab, + TextField, + Button, + IconButton, + Checkbox, + FormControl, + FormControlLabel, + Select, + InputLabel, + Menu, + MenuItem, + Divider, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + Table, + TableHead, + TableRow, + TableCell, + TableBody, + TableSortLabel, + GlobalStyles, + CircularProgress +} from "@mui/material"; +import { + Add as AddIcon, + Delete as DeleteIcon, + FilterList as FilterListIcon, + PendingActions as PendingActionsIcon, + Sync as SyncIcon, + Timer as TimerIcon, + Check as CheckIcon, + Error as ErrorIcon, + Refresh as RefreshIcon +} from "@mui/icons-material"; +import { SimpleTreeView, TreeItem } from "@mui/x-tree-view"; +import { LocalizationProvider } from "@mui/x-date-pickers/LocalizationProvider"; +import { DateTimePicker } from "@mui/x-date-pickers/DateTimePicker"; +import { AdapterDayjs } from "@mui/x-date-pickers/AdapterDayjs"; +import dayjs from "dayjs"; +import Prism from "prismjs"; +import "prismjs/components/prism-yaml"; +import "prismjs/components/prism-bash"; +import "prismjs/components/prism-powershell"; +import "prismjs/components/prism-batch"; +import "prismjs/themes/prism-okaidia.css"; +import Editor from "react-simple-code-editor"; +import ReactFlow, { Handle, Position } from "reactflow"; +import "reactflow/dist/style.css"; + +const hiddenHandleStyle = { + 
width: 12, + height: 12, + border: "none", + background: "transparent", + opacity: 0, + pointerEvents: "none" +}; + +const STATUS_META = { + pending: { label: "Pending", color: "#aab2bf", Icon: PendingActionsIcon }, + running: { label: "Running", color: "#58a6ff", Icon: SyncIcon }, + expired: { label: "Expired", color: "#aab2bf", Icon: TimerIcon }, + success: { label: "Success", color: "#00d18c", Icon: CheckIcon }, + failed: { label: "Failed", color: "#ff4f4f", Icon: ErrorIcon } +}; + +const DEVICE_COLUMNS = [ + { key: "hostname", label: "Hostname" }, + { key: "online", label: "Status" }, + { key: "site", label: "Site" }, + { key: "ran_on", label: "Ran On" }, + { key: "job_status", label: "Job Status" }, + { key: "output", label: "StdOut / StdErr" } +]; + +function StatusNode({ data }) { + const { label, color, count, onClick, isActive, Icon } = data || {}; + const displayCount = Number.isFinite(count) ? count : Number(count) || 0; + const borderColor = color || "#333"; + const activeGlow = color ? `${color}55` : "rgba(88,166,255,0.35)"; + const handleClick = useCallback((event) => { + event?.preventDefault(); + event?.stopPropagation(); + onClick && onClick(); + }, [onClick]); + return ( + + + + + + + {Icon ? : null} + + {`${displayCount} ${label || ""}`} + + + + ); +} + +function SectionHeader({ title, action }) { + return ( + + {title} + {action || null} + + ); +} + +// Recursive renderer for both Scripts and Workflows trees +function renderTreeNodes(nodes = [], map = {}) { + return nodes.map((n) => ( + + {n.children && n.children.length ? 
renderTreeNodes(n.children, map) : null} + + )); +} + +// --- Scripts tree helpers (reuse approach from Quick_Job) --- +function buildScriptTree(scripts, folders) { + const map = {}; + const rootNode = { id: "root_s", label: "Scripts", path: "", isFolder: true, children: [] }; + map[rootNode.id] = rootNode; + (folders || []).forEach((f) => { + const parts = (f || "").split("/"); + let children = rootNode.children; let parentPath = ""; + parts.forEach((part) => { + const path = parentPath ? `${parentPath}/${part}` : part; + let node = children.find((n) => n.id === path); + if (!node) { node = { id: path, label: part, path, isFolder: true, children: [] }; children.push(node); map[path] = node; } + children = node.children; parentPath = path; + }); + }); + (scripts || []).forEach((s) => { + const parts = (s.rel_path || "").split("/"); + let children = rootNode.children; let parentPath = ""; + parts.forEach((part, idx) => { + const path = parentPath ? `${parentPath}/${part}` : part; + const isFile = idx === parts.length - 1; + let node = children.find((n) => n.id === path); + if (!node) { + node = { id: path, label: isFile ? (s.name || s.file_name || part) : part, path, isFolder: !isFile, fileName: s.file_name, script: isFile ? 
s : null, children: [] }; + children.push(node); map[path] = node; + } + if (!isFile) { children = node.children; parentPath = path; } + }); + }); + return { root: [rootNode], map }; +} + +// --- Ansible tree helpers (reuse scripts tree builder) --- +function buildAnsibleTree(playbooks, folders) { + return buildScriptTree(playbooks, folders); +} + +// --- Workflows tree helpers (reuse approach from Workflow_List) --- +function buildWorkflowTree(workflows, folders) { + const map = {}; + const rootNode = { id: "root_w", label: "Workflows", path: "", isFolder: true, children: [] }; + map[rootNode.id] = rootNode; + (folders || []).forEach((f) => { + const parts = (f || "").split("/"); + let children = rootNode.children; let parentPath = ""; + parts.forEach((part) => { + const path = parentPath ? `${parentPath}/${part}` : part; + let node = children.find((n) => n.id === path); + if (!node) { node = { id: path, label: part, path, isFolder: true, children: [] }; children.push(node); map[path] = node; } + children = node.children; parentPath = path; + }); + }); + (workflows || []).forEach((w) => { + const parts = (w.rel_path || "").split("/"); + let children = rootNode.children; let parentPath = ""; + parts.forEach((part, idx) => { + const path = parentPath ? `${parentPath}/${part}` : part; + const isFile = idx === parts.length - 1; + let node = children.find((n) => n.id === path); + if (!node) { + node = { id: path, label: isFile ? (w.tab_name?.trim() || w.file_name) : part, path, isFolder: !isFile, fileName: w.file_name, workflow: isFile ? w : null, children: [] }; + children.push(node); map[path] = node; + } + if (!isFile) { children = node.children; parentPath = path; } + }); + }); + return { root: [rootNode], map }; +} + +function normalizeVariableDefinitions(vars = []) { + return (Array.isArray(vars) ? vars : []) + .map((raw) => { + if (!raw || typeof raw !== "object") return null; + const name = typeof raw.name === "string" ? 
raw.name.trim() : typeof raw.key === "string" ? raw.key.trim() : ""; + if (!name) return null; + const label = typeof raw.label === "string" && raw.label.trim() ? raw.label.trim() : name; + const type = typeof raw.type === "string" ? raw.type.toLowerCase() : "string"; + const required = Boolean(raw.required); + const description = typeof raw.description === "string" ? raw.description : ""; + let defaultValue = ""; + if (Object.prototype.hasOwnProperty.call(raw, "default")) defaultValue = raw.default; + else if (Object.prototype.hasOwnProperty.call(raw, "defaultValue")) defaultValue = raw.defaultValue; + else if (Object.prototype.hasOwnProperty.call(raw, "default_value")) defaultValue = raw.default_value; + return { name, label, type, required, description, default: defaultValue }; + }) + .filter(Boolean); +} + +function coerceVariableValue(type, value) { + if (type === "boolean") { + if (typeof value === "boolean") return value; + if (typeof value === "number") return value !== 0; + if (value == null) return false; + const str = String(value).trim().toLowerCase(); + if (!str) return false; + return ["true", "1", "yes", "on"].includes(str); + } + if (type === "number") { + if (value == null || value === "") return ""; + if (typeof value === "number" && Number.isFinite(value)) return String(value); + const parsed = Number(value); + return Number.isFinite(parsed) ? String(parsed) : ""; + } + return value == null ? "" : String(value); +} + +function mergeComponentVariables(docVars = [], storedVars = [], storedValueMap = {}) { + const definitions = normalizeVariableDefinitions(docVars); + const overrides = {}; + const storedMeta = {}; + (Array.isArray(storedVars) ? storedVars : []).forEach((raw) => { + if (!raw || typeof raw !== "object") return; + const name = typeof raw.name === "string" ? 
raw.name.trim() : ""; + if (!name) return; + if (Object.prototype.hasOwnProperty.call(raw, "value")) overrides[name] = raw.value; + else if (Object.prototype.hasOwnProperty.call(raw, "default")) overrides[name] = raw.default; + storedMeta[name] = { + label: typeof raw.label === "string" && raw.label.trim() ? raw.label.trim() : name, + type: typeof raw.type === "string" ? raw.type.toLowerCase() : undefined, + required: Boolean(raw.required), + description: typeof raw.description === "string" ? raw.description : "", + default: Object.prototype.hasOwnProperty.call(raw, "default") ? raw.default : "" + }; + }); + if (storedValueMap && typeof storedValueMap === "object") { + Object.entries(storedValueMap).forEach(([key, val]) => { + const name = typeof key === "string" ? key.trim() : ""; + if (name) overrides[name] = val; + }); + } + + const used = new Set(); + const merged = definitions.map((def) => { + const override = Object.prototype.hasOwnProperty.call(overrides, def.name) ? overrides[def.name] : undefined; + used.add(def.name); + return { + ...def, + value: override !== undefined ? coerceVariableValue(def.type, override) : coerceVariableValue(def.type, def.default) + }; + }); + + (Array.isArray(storedVars) ? storedVars : []).forEach((raw) => { + if (!raw || typeof raw !== "object") return; + const name = typeof raw.name === "string" ? raw.name.trim() : ""; + if (!name || used.has(name)) return; + const meta = storedMeta[name] || {}; + const type = meta.type || (typeof overrides[name] === "boolean" ? "boolean" : typeof overrides[name] === "number" ? "number" : "string"); + const defaultValue = Object.prototype.hasOwnProperty.call(meta, "default") ? meta.default : ""; + const override = Object.prototype.hasOwnProperty.call(overrides, name) + ? overrides[name] + : Object.prototype.hasOwnProperty.call(raw, "value") + ? 
raw.value + : defaultValue; + merged.push({ + name, + label: meta.label || name, + type, + required: Boolean(meta.required), + description: meta.description || "", + default: defaultValue, + value: coerceVariableValue(type, override) + }); + used.add(name); + }); + + Object.entries(overrides).forEach(([nameRaw, val]) => { + const name = typeof nameRaw === "string" ? nameRaw.trim() : ""; + if (!name || used.has(name)) return; + const type = typeof val === "boolean" ? "boolean" : typeof val === "number" ? "number" : "string"; + merged.push({ + name, + label: name, + type, + required: false, + description: "", + default: "", + value: coerceVariableValue(type, val) + }); + used.add(name); + }); + + return merged; +} + +function ComponentCard({ comp, onRemove, onVariableChange, errors = {} }) { + const variables = Array.isArray(comp.variables) + ? comp.variables.filter((v) => v && typeof v.name === "string" && v.name) + : []; + const description = comp.description || comp.path || ""; + return ( + + + + + {comp.type === "script" ? comp.name : comp.name} + + + {description} + + + + + Variables + {variables.length ? ( + + {variables.map((variable) => ( + + {variable.type === "boolean" ? ( + <> + onVariableChange(comp.localId, variable.name, e.target.checked)} + /> + )} + label={ + + {variable.label} + {variable.required ? " *" : ""} + + } + /> + {variable.description ? ( + + {variable.description} + + ) : null} + + ) : ( + onVariableChange(comp.localId, variable.name, e.target.value)} + InputLabelProps={{ shrink: true }} + sx={{ + "& .MuiOutlinedInput-root": { bgcolor: "#1b1b1b", color: "#e6edf3" }, + "& .MuiInputBase-input": { color: "#e6edf3" } + }} + error={Boolean(errors[variable.name])} + helperText={errors[variable.name] || variable.description || ""} + /> + )} + + ))} + + ) : ( + No variables defined for this assembly. 
+ )} + + + onRemove(comp.localId)} size="small" sx={{ color: "#ff6666" }}> + + + + + + ); +} + +export default function CreateJob({ onCancel, onCreated, initialJob = null }) { + const [tab, setTab] = useState(0); + const [jobName, setJobName] = useState(""); + const [pageTitleJobName, setPageTitleJobName] = useState(""); + // Components the job will run: {type:'script'|'workflow', path, name, description} + const [components, setComponents] = useState([]); + const [targets, setTargets] = useState([]); // array of hostnames + const [scheduleType, setScheduleType] = useState("immediately"); + const [startDateTime, setStartDateTime] = useState(() => dayjs().add(5, "minute").second(0)); + const [stopAfterEnabled, setStopAfterEnabled] = useState(false); + const [expiration, setExpiration] = useState("no_expire"); + const [execContext, setExecContext] = useState("system"); + const [credentials, setCredentials] = useState([]); + const [credentialLoading, setCredentialLoading] = useState(false); + const [credentialError, setCredentialError] = useState(""); + const [selectedCredentialId, setSelectedCredentialId] = useState(""); + const [useSvcAccount, setUseSvcAccount] = useState(true); + + const loadCredentials = useCallback(async () => { + setCredentialLoading(true); + setCredentialError(""); + try { + const resp = await fetch("/api/credentials"); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + const list = Array.isArray(data?.credentials) ? 
data.credentials : []; + list.sort((a, b) => String(a?.name || "").localeCompare(String(b?.name || ""))); + setCredentials(list); + } catch (err) { + setCredentials([]); + setCredentialError(String(err.message || err)); + } finally { + setCredentialLoading(false); + } + }, []); + + useEffect(() => { + loadCredentials(); + }, [loadCredentials]); + + const remoteExec = useMemo(() => execContext === "ssh" || execContext === "winrm", [execContext]); + const handleExecContextChange = useCallback((value) => { + const normalized = String(value || "system").toLowerCase(); + setExecContext(normalized); + if (normalized === "winrm") { + setUseSvcAccount(true); + setSelectedCredentialId(""); + } else { + setUseSvcAccount(false); + } + }, []); + const filteredCredentials = useMemo(() => { + if (!remoteExec) return credentials; + const target = execContext === "winrm" ? "winrm" : "ssh"; + return credentials.filter((cred) => String(cred.connection_type || "").toLowerCase() === target); + }, [credentials, remoteExec, execContext]); + + useEffect(() => { + if (!remoteExec) { + return; + } + if (execContext === "winrm" && useSvcAccount) { + setSelectedCredentialId(""); + return; + } + if (!filteredCredentials.length) { + setSelectedCredentialId(""); + return; + } + if (!selectedCredentialId || !filteredCredentials.some((cred) => String(cred.id) === String(selectedCredentialId))) { + setSelectedCredentialId(String(filteredCredentials[0].id)); + } + }, [remoteExec, filteredCredentials, selectedCredentialId, execContext, useSvcAccount]); + + // dialogs state + const [addCompOpen, setAddCompOpen] = useState(false); + const [compTab, setCompTab] = useState("scripts"); + const [scriptTree, setScriptTree] = useState([]); const [scriptMap, setScriptMap] = useState({}); + const [workflowTree, setWorkflowTree] = useState([]); const [workflowMap, setWorkflowMap] = useState({}); + const [ansibleTree, setAnsibleTree] = useState([]); const [ansibleMap, setAnsibleMap] = useState({}); + const 
[selectedNodeId, setSelectedNodeId] = useState(""); + + const [addTargetOpen, setAddTargetOpen] = useState(false); + const [availableDevices, setAvailableDevices] = useState([]); // [{hostname, display, online}] + const [selectedTargets, setSelectedTargets] = useState({}); // map hostname->bool + const [deviceSearch, setDeviceSearch] = useState(""); + const [componentVarErrors, setComponentVarErrors] = useState({}); + const [deviceRows, setDeviceRows] = useState([]); + const [deviceStatusFilter, setDeviceStatusFilter] = useState(null); + const [deviceOrderBy, setDeviceOrderBy] = useState("hostname"); + const [deviceOrder, setDeviceOrder] = useState("asc"); + const [deviceFilters, setDeviceFilters] = useState({}); + const [filterAnchorEl, setFilterAnchorEl] = useState(null); + const [activeFilterColumn, setActiveFilterColumn] = useState(null); + const [pendingFilterValue, setPendingFilterValue] = useState(""); + + const generateLocalId = useCallback( + () => `${Date.now()}_${Math.random().toString(36).slice(2, 8)}`, + [] + ); + + const getDefaultFilterValue = useCallback((key) => (["online", "job_status", "output"].includes(key) ? "all" : ""), []); + + const isColumnFiltered = useCallback((key) => { + if (!deviceFilters || typeof deviceFilters !== "object") return false; + const value = deviceFilters[key]; + if (value == null) return false; + if (typeof value === "string") { + const trimmed = value.trim(); + if (!trimmed || trimmed === "all") return false; + return true; + } + return true; + }, [deviceFilters]); + + const openFilterMenu = useCallback((event, columnKey) => { + setActiveFilterColumn(columnKey); + setPendingFilterValue(deviceFilters[columnKey] ?? 
getDefaultFilterValue(columnKey)); + setFilterAnchorEl(event.currentTarget); + }, [deviceFilters, getDefaultFilterValue]); + + const closeFilterMenu = useCallback(() => { + setFilterAnchorEl(null); + setActiveFilterColumn(null); + }, []); + + const applyFilter = useCallback(() => { + if (!activeFilterColumn) { + closeFilterMenu(); + return; + } + const value = pendingFilterValue; + setDeviceFilters((prev) => { + const next = { ...(prev || {}) }; + if (!value || value === "all" || (typeof value === "string" && !value.trim())) { + delete next[activeFilterColumn]; + } else { + next[activeFilterColumn] = value; + } + return next; + }); + closeFilterMenu(); + }, [activeFilterColumn, pendingFilterValue, closeFilterMenu]); + + const clearFilter = useCallback(() => { + if (!activeFilterColumn) { + closeFilterMenu(); + return; + } + setDeviceFilters((prev) => { + const next = { ...(prev || {}) }; + delete next[activeFilterColumn]; + return next; + }); + setPendingFilterValue(getDefaultFilterValue(activeFilterColumn)); + closeFilterMenu(); + }, [activeFilterColumn, closeFilterMenu, getDefaultFilterValue]); + + const renderFilterControl = () => { + const columnKey = activeFilterColumn; + if (!columnKey) return null; + if (columnKey === "online") { + return ( + + ); + } + if (columnKey === "job_status") { + const options = ["success", "failed", "running", "pending", "expired", "timed out"]; + return ( + + ); + } + if (columnKey === "output") { + return ( + + ); + } + const placeholders = { + hostname: "Filter hostname", + site: "Filter site", + ran_on: "Filter date/time" + }; + const value = typeof pendingFilterValue === "string" ? pendingFilterValue : ""; + return ( + setPendingFilterValue(e.target.value)} + onKeyDown={(e) => { + if (e.key === "Enter") { + e.preventDefault(); + applyFilter(); + } + }} + /> + ); + }; + + const handleDeviceSort = useCallback((key) => { + setDeviceOrderBy((prevKey) => { + if (prevKey === key) { + setDeviceOrder((prevDir) => (prevDir === "asc" ? 
"desc" : "asc")); + return prevKey; + } + setDeviceOrder(key === "ran_on" ? "desc" : "asc"); + return key; + }); + }, []); + + const fmtTs = useCallback((ts) => { + if (!ts) return ""; + try { + const d = new Date(Number(ts) * 1000); + return d.toLocaleString(undefined, { + year: "numeric", + month: "2-digit", + day: "2-digit", + hour: "numeric", + minute: "2-digit" + }); + } catch { + return ""; + } + }, []); + + const deviceFiltered = useMemo(() => { + const matchStatusFilter = (status, filterKey) => { + if (filterKey === "pending") return status === "pending" || status === "scheduled" || status === "queued" || status === ""; + if (filterKey === "running") return status === "running"; + if (filterKey === "success") return status === "success"; + if (filterKey === "failed") return status === "failed" || status === "failure" || status === "timed out" || status === "timed_out" || status === "warning"; + if (filterKey === "expired") return status === "expired"; + return true; + }; + + return deviceRows.filter((row) => { + const normalizedStatus = String(row?.job_status || "").trim().toLowerCase(); + if (deviceStatusFilter && !matchStatusFilter(normalizedStatus, deviceStatusFilter)) { + return false; + } + if (deviceFilters && typeof deviceFilters === "object") { + for (const [key, rawValue] of Object.entries(deviceFilters)) { + if (rawValue == null) continue; + if (typeof rawValue === "string") { + const trimmed = rawValue.trim(); + if (!trimmed || trimmed === "all") continue; + } + if (key === "hostname") { + const expected = String(rawValue || "").toLowerCase(); + if (!String(row?.hostname || "").toLowerCase().includes(expected)) return false; + } else if (key === "online") { + if (rawValue === "online" && !row?.online) return false; + if (rawValue === "offline" && row?.online) return false; + } else if (key === "site") { + const expected = String(rawValue || "").toLowerCase(); + if (!String(row?.site || "").toLowerCase().includes(expected)) return false; + } else 
if (key === "ran_on") { + const expected = String(rawValue || "").toLowerCase(); + const formatted = fmtTs(row?.ran_on).toLowerCase(); + if (!formatted.includes(expected)) return false; + } else if (key === "job_status") { + const expected = String(rawValue || "").toLowerCase(); + if (!normalizedStatus.includes(expected)) return false; + } else if (key === "output") { + if (rawValue === "stdout" && !row?.has_stdout) return false; + if (rawValue === "stderr" && !row?.has_stderr) return false; + if (rawValue === "both" && (!row?.has_stdout || !row?.has_stderr)) return false; + if (rawValue === "none" && (row?.has_stdout || row?.has_stderr)) return false; + } + } + } + return true; + }); + }, [deviceRows, deviceStatusFilter, deviceFilters, fmtTs]); + + const deviceSorted = useMemo(() => { + const arr = [...deviceFiltered]; + const dir = deviceOrder === "asc" ? 1 : -1; + arr.sort((a, b) => { + let delta = 0; + switch (deviceOrderBy) { + case "hostname": + delta = String(a?.hostname || "").localeCompare(String(b?.hostname || "")); + break; + case "online": + delta = Number(a?.online ? 1 : 0) - Number(b?.online ? 1 : 0); + break; + case "site": + delta = String(a?.site || "").localeCompare(String(b?.site || "")); + break; + case "ran_on": + delta = Number(a?.ran_on || 0) - Number(b?.ran_on || 0); + break; + case "job_status": + delta = String(a?.job_status || "").localeCompare(String(b?.job_status || "")); + break; + case "output": { + const score = (row) => (row?.has_stdout ? 2 : 0) + (row?.has_stderr ? 
1 : 0); + delta = score(a) - score(b); + break; + } + default: + delta = 0; + } + if (delta === 0) { + delta = String(a?.hostname || "").localeCompare(String(b?.hostname || "")); + } + return delta * dir; + }); + return arr; + }, [deviceFiltered, deviceOrder, deviceOrderBy]); + + const normalizeComponentPath = useCallback((type, rawPath) => { + const trimmed = (rawPath || "").replace(/\\/g, "/").replace(/^\/+/, "").trim(); + if (!trimmed) return ""; + if (type === "script") { + return trimmed.startsWith("Scripts/") ? trimmed : `Scripts/${trimmed}`; + } + return trimmed; + }, []); + + const fetchAssemblyDoc = useCallback(async (type, rawPath) => { + const normalizedPath = normalizeComponentPath(type, rawPath); + if (!normalizedPath) return { doc: null, normalizedPath: "" }; + const trimmed = normalizedPath.replace(/\\/g, "/").replace(/^\/+/, "").trim(); + if (!trimmed) return { doc: null, normalizedPath: "" }; + let requestPath = trimmed; + if (type === "script" && requestPath.toLowerCase().startsWith("scripts/")) { + requestPath = requestPath.slice("Scripts/".length); + } else if (type === "ansible" && requestPath.toLowerCase().startsWith("ansible_playbooks/")) { + requestPath = requestPath.slice("Ansible_Playbooks/".length); + } + if (!requestPath) return { doc: null, normalizedPath }; + try { + const island = type === "ansible" ? 
"ansible" : "scripts"; + const resp = await fetch(`/api/assembly/load?island=${island}&path=${encodeURIComponent(requestPath)}`); + if (!resp.ok) { + return { doc: null, normalizedPath }; + } + const data = await resp.json(); + return { doc: data, normalizedPath }; + } catch { + return { doc: null, normalizedPath }; + } + }, [normalizeComponentPath]); + + const hydrateExistingComponents = useCallback(async (rawComponents = []) => { + const results = []; + for (const raw of rawComponents) { + if (!raw || typeof raw !== "object") continue; + const typeRaw = raw.type || raw.component_type || "script"; + if (typeRaw === "workflow") { + results.push({ + ...raw, + type: "workflow", + variables: Array.isArray(raw.variables) ? raw.variables : [], + localId: generateLocalId() + }); + continue; + } + const type = typeRaw === "ansible" ? "ansible" : "script"; + const basePath = raw.path || raw.script_path || raw.rel_path || ""; + const { doc, normalizedPath } = await fetchAssemblyDoc(type, basePath); + const assembly = doc?.assembly || {}; + const docVars = assembly?.variables || doc?.variables || []; + const mergedVariables = mergeComponentVariables(docVars, raw.variables, raw.variable_values); + results.push({ + ...raw, + type, + path: normalizedPath || basePath, + name: raw.name || assembly?.name || raw.file_name || raw.tab_name || normalizedPath || basePath, + description: raw.description || assembly?.description || normalizedPath || basePath, + variables: mergedVariables, + localId: generateLocalId() + }); + } + return results; + }, [fetchAssemblyDoc, generateLocalId]); + + const sanitizeComponentsForSave = useCallback((items) => { + return (Array.isArray(items) ? 
items : []).map((comp) => { + if (!comp || typeof comp !== "object") return comp; + const { localId, ...rest } = comp; + const sanitized = { ...rest }; + if (Array.isArray(comp.variables)) { + const valuesMap = {}; + sanitized.variables = comp.variables + .filter((v) => v && typeof v.name === "string" && v.name) + .map((v) => { + const entry = { + name: v.name, + label: v.label || v.name, + type: v.type || "string", + required: Boolean(v.required), + description: v.description || "" + }; + if (Object.prototype.hasOwnProperty.call(v, "default")) entry.default = v.default; + if (Object.prototype.hasOwnProperty.call(v, "value")) { + entry.value = v.value; + valuesMap[v.name] = v.value; + } + return entry; + }); + if (!sanitized.variables.length) sanitized.variables = []; + if (Object.keys(valuesMap).length) sanitized.variable_values = valuesMap; + else delete sanitized.variable_values; + } + return sanitized; + }); + }, []); + + const updateComponentVariable = useCallback((localId, name, value) => { + if (!localId || !name) return; + setComponents((prev) => prev.map((comp) => { + if (!comp || comp.localId !== localId) return comp; + const vars = Array.isArray(comp.variables) ? 
comp.variables : []; + const nextVars = vars.map((variable) => { + if (!variable || variable.name !== name) return variable; + return { ...variable, value: coerceVariableValue(variable.type || "string", value) }; + }); + return { ...comp, variables: nextVars }; + })); + setComponentVarErrors((prev) => { + if (!prev[localId] || !prev[localId][name]) return prev; + const next = { ...prev }; + const compErrors = { ...next[localId] }; + delete compErrors[name]; + if (Object.keys(compErrors).length) next[localId] = compErrors; + else delete next[localId]; + return next; + }); + }, []); + + const removeComponent = useCallback((localId) => { + setComponents((prev) => prev.filter((comp) => comp.localId !== localId)); + setComponentVarErrors((prev) => { + if (!prev[localId]) return prev; + const next = { ...prev }; + delete next[localId]; + return next; + }); + }, []); + + const isValid = useMemo(() => { + const base = jobName.trim().length > 0 && components.length > 0 && targets.length > 0; + if (!base) return false; + const needsCredential = remoteExec && !(execContext === "winrm" && useSvcAccount); + if (needsCredential && !selectedCredentialId) return false; + if (scheduleType !== "immediately") { + return !!startDateTime; + } + return true; + }, [jobName, components.length, targets.length, scheduleType, startDateTime, remoteExec, selectedCredentialId, execContext, useSvcAccount]); + + const [confirmOpen, setConfirmOpen] = useState(false); + const editing = !!(initialJob && initialJob.id); + + // --- Job History (only when editing) --- + const [historyRows, setHistoryRows] = useState([]); + const [historyOrderBy, setHistoryOrderBy] = useState("started_ts"); + const [historyOrder, setHistoryOrder] = useState("desc"); + const activityCacheRef = useRef(new Map()); + const [outputOpen, setOutputOpen] = useState(false); + const [outputTitle, setOutputTitle] = useState(""); + const [outputSections, setOutputSections] = useState([]); + const [outputLoading, setOutputLoading] = 
useState(false); + const [outputError, setOutputError] = useState(""); + + const loadHistory = useCallback(async () => { + if (!editing) return; + try { + const [runsResp, jobResp, devResp] = await Promise.all([ + fetch(`/api/scheduled_jobs/${initialJob.id}/runs?days=30`), + fetch(`/api/scheduled_jobs/${initialJob.id}`), + fetch(`/api/scheduled_jobs/${initialJob.id}/devices`) + ]); + const runs = await runsResp.json(); + const job = await jobResp.json(); + const dev = await devResp.json(); + if (!runsResp.ok) throw new Error(runs.error || `HTTP ${runsResp.status}`); + if (!jobResp.ok) throw new Error(job.error || `HTTP ${jobResp.status}`); + if (!devResp.ok) throw new Error(dev.error || `HTTP ${devResp.status}`); + setHistoryRows(Array.isArray(runs.runs) ? runs.runs : []); + setJobSummary(job.job || {}); + const devices = Array.isArray(dev.devices) ? dev.devices.map((device) => ({ + ...device, + activities: Array.isArray(device.activities) ? device.activities : [], + })) : []; + setDeviceRows(devices); + } catch { + setHistoryRows([]); + setJobSummary({}); + setDeviceRows([]); + } + }, [editing, initialJob?.id]); + + useEffect(() => { + if (!editing) return; + let t; + (async () => { try { await loadHistory(); } catch {} })(); + t = setInterval(loadHistory, 10000); + return () => { if (t) clearInterval(t); }; + }, [editing, loadHistory]); + + const resultChip = (status) => { + const map = { + Success: { bg: '#00d18c', fg: '#000' }, + Running: { bg: '#58a6ff', fg: '#000' }, + Scheduled: { bg: '#999999', fg: '#fff' }, + Expired: { bg: '#777777', fg: '#fff' }, + Failed: { bg: '#ff4f4f', fg: '#fff' }, + Warning: { bg: '#ff8c00', fg: '#000' } + }; + const c = map[status] || { bg: '#aaa', fg: '#000' }; + return ( + + {status || ''} + + ); + }; + + const aggregatedHistory = useMemo(() => { + if (!Array.isArray(historyRows) || historyRows.length === 0) return []; + const map = new Map(); + historyRows.forEach((row) => { + const key = row?.scheduled_ts || row?.started_ts || 
row?.finished_ts || row?.id; + if (!key) return; + const strKey = String(key); + const existing = map.get(strKey) || { + key: strKey, + scheduled_ts: row?.scheduled_ts || null, + started_ts: null, + finished_ts: null, + statuses: new Set() + }; + if (!existing.scheduled_ts && row?.scheduled_ts) existing.scheduled_ts = row.scheduled_ts; + if (row?.started_ts) { + existing.started_ts = existing.started_ts == null ? row.started_ts : Math.min(existing.started_ts, row.started_ts); + } + if (row?.finished_ts) { + existing.finished_ts = existing.finished_ts == null ? row.finished_ts : Math.max(existing.finished_ts, row.finished_ts); + } + if (row?.status) existing.statuses.add(String(row.status)); + map.set(strKey, existing); + }); + const summaries = []; + map.forEach((entry) => { + const statuses = Array.from(entry.statuses).map((s) => String(s || "").trim().toLowerCase()).filter(Boolean); + if (!statuses.length) return; + const hasInFlight = statuses.some((s) => s === "running" || s === "pending" || s === "scheduled"); + if (hasInFlight) return; + const hasFailure = statuses.some((s) => ["failed", "failure", "expired", "timed out", "timed_out", "warning"].includes(s)); + const allSuccess = statuses.every((s) => s === "success"); + const statusLabel = hasFailure ? "Failed" : (allSuccess ? "Success" : "Failed"); + summaries.push({ + key: entry.key, + scheduled_ts: entry.scheduled_ts, + started_ts: entry.started_ts, + finished_ts: entry.finished_ts, + status: statusLabel + }); + }); + return summaries; + }, [historyRows]); + + const sortedHistory = useMemo(() => { + const dir = historyOrder === 'asc' ? 
1 : -1; + const key = historyOrderBy; + return [...aggregatedHistory].sort((a, b) => { + const getVal = (row) => { + if (key === 'scheduled_ts' || key === 'started_ts' || key === 'finished_ts') { + return Number(row?.[key] || 0); + } + return String(row?.[key] || ''); + }; + const A = getVal(a); + const B = getVal(b); + if (typeof A === 'number' && typeof B === 'number') { + return (A - B) * dir; + } + return String(A).localeCompare(String(B)) * dir; + }); + }, [aggregatedHistory, historyOrderBy, historyOrder]); + + const handleHistorySort = (col) => { + if (historyOrderBy === col) setHistoryOrder(historyOrder === 'asc' ? 'desc' : 'asc'); + else { setHistoryOrderBy(col); setHistoryOrder('asc'); } + }; + + const renderHistory = () => ( + + + + + + handleHistorySort('scheduled_ts')}> + Scheduled + + + + handleHistorySort('started_ts')}> + Started + + + + handleHistorySort('finished_ts')}> + Finished + + + Status + + + + {sortedHistory.map((r) => ( + + {fmtTs(r.scheduled_ts)} + {fmtTs(r.started_ts)} + {fmtTs(r.finished_ts)} + {resultChip(r.status)} + + ))} + {sortedHistory.length === 0 && ( + + No runs in the last 30 days. + + )} + +
    +
    + ); + + // --- Job Progress (summary) --- + const [jobSummary, setJobSummary] = useState({}); + const counts = jobSummary?.result_counts || {}; + + const deviceStatusCounts = useMemo(() => { + const base = { pending: 0, running: 0, success: 0, failed: 0, expired: 0 }; + deviceRows.forEach((row) => { + const normalized = String(row?.job_status || "").trim().toLowerCase(); + if (!normalized || normalized === "pending" || normalized === "scheduled" || normalized === "queued") { + base.pending += 1; + } else if (normalized === "running") { + base.running += 1; + } else if (normalized === "success") { + base.success += 1; + } else if (normalized === "expired") { + base.expired += 1; + } else if (normalized === "failed" || normalized === "failure" || normalized === "timed out" || normalized === "timed_out" || normalized === "warning") { + base.failed += 1; + } else { + base.pending += 1; + } + }); + return base; + }, [deviceRows]); + + const statusCounts = useMemo(() => { + const merged = { pending: 0, running: 0, success: 0, failed: 0, expired: 0 }; + Object.keys(merged).forEach((key) => { + const summaryVal = Number((counts || {})[key] ?? 0); + const fallback = deviceStatusCounts[key] ?? 0; + merged[key] = summaryVal > 0 ? summaryVal : fallback; + }); + return merged; + }, [counts, deviceStatusCounts]); + + const statusNodeTypes = useMemo(() => ({ statusNode: StatusNode }), []); + + const handleStatusNodeClick = useCallback((key) => { + setDeviceStatusFilter((prev) => (prev === key ? 
null : key)); + }, []); + + const statusNodes = useMemo(() => [ + { + id: "pending", + type: "statusNode", + position: { x: -420, y: 170 }, + data: { + label: STATUS_META.pending.label, + color: STATUS_META.pending.color, + count: statusCounts.pending, + Icon: STATUS_META.pending.Icon, + onClick: () => handleStatusNodeClick("pending"), + isActive: deviceStatusFilter === "pending" + }, + draggable: false, + selectable: false + }, + { + id: "running", + type: "statusNode", + position: { x: 0, y: 0 }, + data: { + label: STATUS_META.running.label, + color: STATUS_META.running.color, + count: statusCounts.running, + Icon: STATUS_META.running.Icon, + onClick: () => handleStatusNodeClick("running"), + isActive: deviceStatusFilter === "running" + }, + draggable: false, + selectable: false + }, + { + id: "expired", + type: "statusNode", + position: { x: 0, y: 340 }, + data: { + label: STATUS_META.expired.label, + color: STATUS_META.expired.color, + count: statusCounts.expired, + Icon: STATUS_META.expired.Icon, + onClick: () => handleStatusNodeClick("expired"), + isActive: deviceStatusFilter === "expired" + }, + draggable: false, + selectable: false + }, + { + id: "success", + type: "statusNode", + position: { x: 420, y: 0 }, + data: { + label: STATUS_META.success.label, + color: STATUS_META.success.color, + count: statusCounts.success, + Icon: STATUS_META.success.Icon, + onClick: () => handleStatusNodeClick("success"), + isActive: deviceStatusFilter === "success" + }, + draggable: false, + selectable: false + }, + { + id: "failed", + type: "statusNode", + position: { x: 420, y: 340 }, + data: { + label: STATUS_META.failed.label, + color: STATUS_META.failed.color, + count: statusCounts.failed, + Icon: STATUS_META.failed.Icon, + onClick: () => handleStatusNodeClick("failed"), + isActive: deviceStatusFilter === "failed" + }, + draggable: false, + selectable: false + } + ], [statusCounts, handleStatusNodeClick, deviceStatusFilter]); + + const statusEdges = useMemo(() => [ + { + 
id: "pending-running", + source: "pending", + target: "running", + sourceHandle: "right-top", + targetHandle: "left-top", + type: "smoothstep", + animated: true, + className: "status-flow-edge" + }, + { + id: "pending-expired", + source: "pending", + target: "expired", + sourceHandle: "right-bottom", + targetHandle: "left-bottom", + type: "smoothstep", + animated: true, + className: "status-flow-edge" + }, + { + id: "running-success", + source: "running", + target: "success", + sourceHandle: "right-top", + targetHandle: "left-top", + type: "smoothstep", + animated: true, + className: "status-flow-edge" + }, + { + id: "running-failed", + source: "running", + target: "failed", + sourceHandle: "right-bottom", + targetHandle: "left-bottom", + type: "smoothstep", + animated: true, + className: "status-flow-edge" + } + ], []); + + const JobStatusFlow = () => ( + + + + { + if (node?.id && STATUS_META[node.id]) handleStatusNodeClick(node.id); + }} + selectionOnDrag={false} + proOptions={{ hideAttribution: true }} + style={{ background: "transparent" }} + /> + + {deviceStatusFilter ? ( + + + Showing devices with {STATUS_META[deviceStatusFilter]?.label || deviceStatusFilter} results + + + + ) : null} + + ); + const inferLanguage = useCallback((path = "") => { + const lower = String(path || "").toLowerCase(); + if (lower.endsWith(".ps1")) return "powershell"; + if (lower.endsWith(".bat")) return "batch"; + if (lower.endsWith(".sh")) return "bash"; + if (lower.endsWith(".yml") || lower.endsWith(".yaml")) return "yaml"; + return "powershell"; + }, []); + + const highlightCode = useCallback((code, lang) => { + try { + return Prism.highlight(code ?? 
"", Prism.languages[lang] || Prism.languages.markup, lang); + } catch { + return String(code || ""); + } + }, []); + + const loadActivity = useCallback(async (activityId) => { + const idNum = Number(activityId || 0); + if (!idNum) return null; + if (activityCacheRef.current.has(idNum)) { + return activityCacheRef.current.get(idNum); + } + try { + const resp = await fetch(`/api/device/activity/job/${idNum}`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + activityCacheRef.current.set(idNum, data); + return data; + } catch { + return null; + } + }, []); + + const handleViewDeviceOutput = useCallback(async (row, mode = "stdout") => { + if (!row) return; + const label = mode === "stderr" ? "StdErr" : "StdOut"; + const activities = Array.isArray(row.activities) ? row.activities : []; + const relevant = activities.filter((act) => (mode === "stderr" ? act.has_stderr : act.has_stdout)); + setOutputTitle(`${label} - ${row.hostname || ""}`); + setOutputSections([]); + setOutputError(""); + setOutputLoading(true); + setOutputOpen(true); + if (!relevant.length) { + setOutputError(`No ${label} available for this device.`); + setOutputLoading(false); + return; + } + const sections = []; + for (const act of relevant) { + const activityId = Number(act.activity_id || act.id || 0); + if (!activityId) continue; + const data = await loadActivity(activityId); + if (!data) continue; + const content = mode === "stderr" ? 
(data.stderr || "") : (data.stdout || ""); + const sectionTitle = act.component_name || data.script_name || data.script_path || `Activity ${activityId}`; + sections.push({ + key: `${activityId}-${mode}`, + title: sectionTitle, + path: data.script_path || "", + lang: inferLanguage(data.script_path || ""), + content, + }); + } + if (!sections.length) { + setOutputError(`No ${label} available for this device.`); + } + setOutputSections(sections); + setOutputLoading(false); + }, [inferLanguage, loadActivity]); + + useEffect(() => { + let canceled = false; + const hydrate = async () => { + if (initialJob && initialJob.id) { + setJobName(initialJob.name || ""); + setPageTitleJobName(typeof initialJob.name === "string" ? initialJob.name.trim() : ""); + setTargets(Array.isArray(initialJob.targets) ? initialJob.targets : []); + setScheduleType(initialJob.schedule_type || initialJob.schedule?.type || "immediately"); + setStartDateTime(initialJob.start_ts ? dayjs(Number(initialJob.start_ts) * 1000).second(0) : (initialJob.schedule?.start ? dayjs(initialJob.schedule.start).second(0) : dayjs().add(5, "minute").second(0))); + setStopAfterEnabled(Boolean(initialJob.duration_stop_enabled)); + setExpiration(initialJob.expiration || "no_expire"); + setExecContext(initialJob.execution_context || "system"); + setSelectedCredentialId(initialJob.credential_id ? String(initialJob.credential_id) : ""); + if ((initialJob.execution_context || "").toLowerCase() === "winrm") { + setUseSvcAccount(initialJob.use_service_account !== false); + } else { + setUseSvcAccount(false); + } + const comps = Array.isArray(initialJob.components) ? 
initialJob.components : []; + const hydrated = await hydrateExistingComponents(comps); + if (!canceled) { + setComponents(hydrated); + setComponentVarErrors({}); + } + } else if (!initialJob) { + setPageTitleJobName(""); + setComponents([]); + setComponentVarErrors({}); + setSelectedCredentialId(""); + setUseSvcAccount(true); + } + }; + hydrate(); + return () => { + canceled = true; + }; + }, [initialJob, hydrateExistingComponents]); + + const openAddComponent = async () => { + setAddCompOpen(true); + try { + // scripts + const sResp = await fetch("/api/assembly/list?island=scripts"); + if (sResp.ok) { + const sData = await sResp.json(); + const { root, map } = buildScriptTree(sData.items || [], sData.folders || []); + setScriptTree(root); setScriptMap(map); + } else { setScriptTree([]); setScriptMap({}); } + } catch { setScriptTree([]); setScriptMap({}); } + try { + // workflows + const wResp = await fetch("/api/assembly/list?island=workflows"); + if (wResp.ok) { + const wData = await wResp.json(); + const { root, map } = buildWorkflowTree(wData.items || [], wData.folders || []); + setWorkflowTree(root); setWorkflowMap(map); + } else { setWorkflowTree([]); setWorkflowMap({}); } + } catch { setWorkflowTree([]); setWorkflowMap({}); } + try { + // ansible playbooks + const aResp = await fetch("/api/assembly/list?island=ansible"); + if (aResp.ok) { + const aData = await aResp.json(); + const { root, map } = buildAnsibleTree(aData.items || [], aData.folders || []); + setAnsibleTree(root); setAnsibleMap(map); + } else { setAnsibleTree([]); setAnsibleMap({}); } + } catch { setAnsibleTree([]); setAnsibleMap({}); } + }; + + const addSelectedComponent = useCallback(async () => { + const map = compTab === "scripts" ? scriptMap : (compTab === "ansible" ? 
ansibleMap : workflowMap); + const node = map[selectedNodeId]; + if (!node || node.isFolder) return false; + if (compTab === "workflows" && node.workflow) { + alert("Workflows within Scheduled Jobs are not supported yet"); + return false; + } + if (compTab === "scripts" || compTab === "ansible") { + const type = compTab === "scripts" ? "script" : "ansible"; + const rawPath = node.path || node.id || ""; + const { doc, normalizedPath } = await fetchAssemblyDoc(type, rawPath); + const assembly = doc?.assembly || {}; + const docVars = assembly?.variables || doc?.variables || []; + const mergedVars = mergeComponentVariables(docVars, [], {}); + setComponents((prev) => [ + ...prev, + { + type, + path: normalizedPath || rawPath, + name: assembly?.name || node.fileName || node.label, + description: assembly?.description || normalizedPath || rawPath, + variables: mergedVars, + localId: generateLocalId() + } + ]); + setSelectedNodeId(""); + return true; + } + setSelectedNodeId(""); + return false; + }, [compTab, scriptMap, ansibleMap, workflowMap, selectedNodeId, fetchAssemblyDoc, generateLocalId]); + + const openAddTargets = async () => { + setAddTargetOpen(true); + setSelectedTargets({}); + try { + const resp = await fetch("/api/agents"); + if (resp.ok) { + const data = await resp.json(); + const list = Object.values(data || {}).map((a) => ({ + hostname: a.hostname || a.agent_hostname || a.id || "unknown", + display: a.hostname || a.agent_hostname || a.id || "unknown", + online: !!a.collector_active + })); + list.sort((a, b) => a.display.localeCompare(b.display)); + setAvailableDevices(list); + } else { + setAvailableDevices([]); + } + } catch { + setAvailableDevices([]); + } + }; + + const handleCreate = async () => { + if (remoteExec && !(execContext === "winrm" && useSvcAccount) && !selectedCredentialId) { + alert("Please select a credential for this execution context."); + return; + } + const requiredErrors = {}; + components.forEach((comp) => { + if (!comp || 
!comp.localId) return; + (Array.isArray(comp.variables) ? comp.variables : []).forEach((variable) => { + if (!variable || !variable.name || !variable.required) return; + if ((variable.type || "string") === "boolean") return; + const value = variable.value; + if (value == null || value === "") { + if (!requiredErrors[comp.localId]) requiredErrors[comp.localId] = {}; + requiredErrors[comp.localId][variable.name] = "Required"; + } + }); + }); + if (Object.keys(requiredErrors).length) { + setComponentVarErrors(requiredErrors); + setTab(1); + alert("Please fill in all required variable values."); + return; + } + setComponentVarErrors({}); + const payloadComponents = sanitizeComponentsForSave(components); + const payload = { + name: jobName, + components: payloadComponents, + targets, + schedule: { type: scheduleType, start: scheduleType !== "immediately" ? (() => { try { const d = startDateTime?.toDate?.() || new Date(startDateTime); d.setSeconds(0,0); return d.toISOString(); } catch { return startDateTime; } })() : null }, + duration: { stopAfterEnabled, expiration }, + execution_context: execContext, + credential_id: remoteExec && !useSvcAccount && selectedCredentialId ? Number(selectedCredentialId) : null, + use_service_account: execContext === "winrm" ? Boolean(useSvcAccount) : false + }; + try { + const resp = await fetch(initialJob && initialJob.id ? `/api/scheduled_jobs/${initialJob.id}` : "/api/scheduled_jobs", { + method: initialJob && initialJob.id ? 
"PUT" : "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload) + }); + const data = await resp.json(); + if (!resp.ok) throw new Error(data.error || `HTTP ${resp.status}`); + onCreated && onCreated(data.job || payload); + onCancel && onCancel(); + } catch (err) { + alert(String(err.message || err)); + } + }; + + const tabDefs = useMemo(() => { + const base = [ + { key: "name", label: "Job Name" }, + { key: "components", label: "Assemblies" }, + { key: "targets", label: "Targets" }, + { key: "schedule", label: "Schedule" }, + { key: "context", label: "Execution Context" } + ]; + if (editing) base.push({ key: 'history', label: 'Job History' }); + return base; + }, [editing]); + + return ( + + + + Create a Scheduled Job + {pageTitleJobName && ( + + {`: "${pageTitleJobName}"`} + + )} + + + Configure advanced schedulable automation jobs for one or more devices. + + + + + setTab(v)} sx={{ minHeight: 36 }}> + {tabDefs.map((t, i) => ( + + ))} + + + + + + + + + {tab === 0 && ( + + + setJobName(e.target.value)} + onBlur={(e) => setPageTitleJobName(e.target.value.trim())} + InputLabelProps={{ shrink: true }} + error={jobName.trim().length === 0} + helperText={jobName.trim().length === 0 ? "Job name is required" : ""} + /> + + )} + + {tab === 1 && ( + + } onClick={openAddComponent} + sx={{ color: "#58a6ff", borderColor: "#58a6ff" }} variant="outlined"> + Add Assembly + + )} + /> + {components.length === 0 && ( + No assemblies added yet. + )} + {components.map((c) => ( + + ))} + {components.length === 0 && ( + At least one assembly is required. + )} + + )} + + {tab === 2 && ( + + } onClick={openAddTargets} + sx={{ color: "#58a6ff", borderColor: "#58a6ff" }} variant="outlined"> + Add Target + + )} + /> + + + + Name + Status + Actions + + + + {targets.map((h) => ( + + {h} + + + setTargets((prev) => prev.filter((x) => x !== h))} sx={{ color: "#ff6666" }}> + + + + + ))} + {targets.length === 0 && ( + + No targets selected. + + )} + +
    + {targets.length === 0 && ( + At least one target is required. + )} +
    + )} + + {tab === 3 && ( + + + + + Recurrence + + + {(scheduleType !== "immediately") && ( + + Start date and execution time + + setStartDateTime(val?.second ? val.second(0) : val)} + views={['year','month','day','hours','minutes']} + format="YYYY-MM-DD hh:mm A" + slotProps={{ textField: { size: "small" } }} + /> + + + )} + + + + + setStopAfterEnabled(e.target.checked)} />} + label={Stop running this job after} + /> + + Expiration + + + + )} + + {tab === 4 && ( + + + + {remoteExec && ( + + {execContext === "winrm" && ( + { + const checked = e.target.checked; + setUseSvcAccount(checked); + if (checked) { + setSelectedCredentialId(""); + } else if (!selectedCredentialId && filteredCredentials.length) { + setSelectedCredentialId(String(filteredCredentials[0].id)); + } + }} + /> + } + label="Use Configured svcBorealis Account" + /> + )} + + Credential + + + + {credentialLoading && } + {!credentialLoading && credentialError && ( + + {credentialError} + + )} + {execContext === "winrm" && useSvcAccount && ( + + Runs with the agent's svcBorealis account. + + )} + {!credentialLoading && !credentialError && !filteredCredentials.length && (!(execContext === "winrm" && useSvcAccount)) && ( + + No {execContext === "winrm" ? "WinRM" : "SSH"} credentials available. Create one under Access Management > Credentials. + + )} + + )} + + )} + + {/* Job History tab (only when editing) */} + {editing && tab === tabDefs.findIndex(t => t.key === 'history') && ( + + + Job History + + + Showing the last 30 days of runs. + + + + + + + Devices + Devices targeted by this scheduled job. Individual job history is listed here. + + + + {DEVICE_COLUMNS.map((col) => ( + + + handleDeviceSort(col.key)} + > + {col.label} + + openFilterMenu(event, col.key)} + sx={{ color: isColumnFiltered(col.key) ? "#58a6ff" : "#666" }} + > + + + + + ))} + + + + {deviceSorted.map((d, i) => ( + + {d.hostname} + + + {d.online ? 
'Online' : 'Offline'} + + {d.site || ''} + {fmtTs(d.ran_on)} + {resultChip(d.job_status)} + + + {d.has_stdout ? ( + + ) : null} + {d.has_stderr ? ( + + ) : null} + + + + ))} + {deviceSorted.length === 0 && ( + + No targets found for this job. + + )} + +
    + + + {renderFilterControl()} + + + + + + +
    + + + Past Job History + Historical job history summaries. Detailed job history is not recorded. + + {renderHistory()} + + +
    + )} +
    + + setOutputOpen(false)} fullWidth maxWidth="md" + PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }} + > + {outputTitle} + + {outputLoading ? ( + Loading output… + ) : null} + {!outputLoading && outputError ? ( + {outputError} + ) : null} + {!outputLoading && !outputError ? ( + outputSections.map((section) => ( + + {section.title} + {section.path ? ( + {section.path} + ) : null} + + {}} + highlight={(code) => highlightCode(code, section.lang)} + padding={12} + style={{ + fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace', + fontSize: 12, + color: "#e6edf3", + minHeight: 160 + }} + textareaProps={{ readOnly: true }} + /> + + + )) + ) : null} + + + + + + + {/* Bottom actions removed per design; actions live next to tabs. */} + + {/* Add Component Dialog */} + setAddCompOpen(false)} fullWidth maxWidth="md" + PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }} + > + Select an Assembly + + + + + + + {compTab === "scripts" && ( + + { + const n = scriptMap[id]; + if (n && !n.isFolder) setSelectedNodeId(id); + }}> + {scriptTree.length ? (scriptTree.map((n) => ( + + {n.children && n.children.length ? renderTreeNodes(n.children, scriptMap) : null} + + ))) : ( + No scripts found. + )} + + + )} + {compTab === "workflows" && ( + + { + const n = workflowMap[id]; + if (n && !n.isFolder) setSelectedNodeId(id); + }}> + {workflowTree.length ? (workflowTree.map((n) => ( + + {n.children && n.children.length ? renderTreeNodes(n.children, workflowMap) : null} + + ))) : ( + No workflows found. + )} + + + )} + {compTab === "ansible" && ( + + { + const n = ansibleMap[id]; + if (n && !n.isFolder) setSelectedNodeId(id); + }}> + {ansibleTree.length ? (ansibleTree.map((n) => ( + + {n.children && n.children.length ? renderTreeNodes(n.children, ansibleMap) : null} + + ))) : ( + No playbooks found. 
+ )} + + + )} + + + + + + + + {/* Add Targets Dialog */} + setAddTargetOpen(false)} fullWidth maxWidth="md" + PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }} + > + Select Targets + + + setDeviceSearch(e.target.value)} + sx={{ flex: 1, "& .MuiOutlinedInput-root": { bgcolor: "#1b1b1b" }, "& .MuiInputBase-input": { color: "#e6edf3" } }} + /> + + + + + + Name + Status + + + + {availableDevices + .filter((d) => d.display.toLowerCase().includes(deviceSearch.toLowerCase())) + .map((d) => ( + setSelectedTargets((prev) => ({ ...prev, [d.hostname]: !prev[d.hostname] }))}> + + setSelectedTargets((prev) => ({ ...prev, [d.hostname]: e.target.checked }))} + /> + + {d.display} + + + {d.online ? "Online" : "Offline"} + + + ))} + {availableDevices.length === 0 && ( + No devices available. + )} + +
    +
    + + + + +
    + + {/* Confirm Create Dialog */} + setConfirmOpen(false)} + PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }}> + {initialJob && initialJob.id ? "Are you sure you wish to save changes?" : "Are you sure you wish to create this Job?"} + + + + + +
    + ); +} diff --git a/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx b/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx new file mode 100644 index 00000000..5ffb1a4f --- /dev/null +++ b/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx @@ -0,0 +1,593 @@ +import React, { useEffect, useState, useCallback } from "react"; +import { + Dialog, + DialogTitle, + DialogContent, + DialogActions, + Button, + Box, + Typography, + Paper, + FormControlLabel, + Checkbox, + TextField, + FormControl, + InputLabel, + Select, + MenuItem, + CircularProgress +} from "@mui/material"; +import { Folder as FolderIcon, Description as DescriptionIcon } from "@mui/icons-material"; +import { SimpleTreeView, TreeItem } from "@mui/x-tree-view"; + +function buildTree(items, folders, rootLabel = "Scripts") { + const map = {}; + const rootNode = { + id: "root", + label: rootLabel, + path: "", + isFolder: true, + children: [] + }; + map[rootNode.id] = rootNode; + + (folders || []).forEach((f) => { + const parts = (f || "").split("/"); + let children = rootNode.children; + let parentPath = ""; + parts.forEach((part) => { + const path = parentPath ? `${parentPath}/${part}` : part; + let node = children.find((n) => n.id === path); + if (!node) { + node = { id: path, label: part, path, isFolder: true, children: [] }; + children.push(node); + map[path] = node; + } + children = node.children; + parentPath = path; + }); + }); + + (items || []).forEach((s) => { + const parts = (s.rel_path || "").split("/"); + let children = rootNode.children; + let parentPath = ""; + parts.forEach((part, idx) => { + const path = parentPath ? `${parentPath}/${part}` : part; + const isFile = idx === parts.length - 1; + let node = children.find((n) => n.id === path); + if (!node) { + node = { + id: path, + label: isFile ? (s.name || s.file_name || part) : part, + path, + isFolder: !isFile, + fileName: s.file_name, + script: isFile ? 
s : null, + children: [] + }; + children.push(node); + map[path] = node; + } + if (!isFile) { + children = node.children; + parentPath = path; + } + }); + }); + + return { root: [rootNode], map }; +} + +export default function QuickJob({ open, onClose, hostnames = [] }) { + const [tree, setTree] = useState([]); + const [nodeMap, setNodeMap] = useState({}); + const [selectedPath, setSelectedPath] = useState(""); + const [running, setRunning] = useState(false); + const [error, setError] = useState(""); + const [runAsCurrentUser, setRunAsCurrentUser] = useState(false); + const [mode, setMode] = useState("scripts"); // 'scripts' | 'ansible' + const [credentials, setCredentials] = useState([]); + const [credentialsLoading, setCredentialsLoading] = useState(false); + const [credentialsError, setCredentialsError] = useState(""); + const [selectedCredentialId, setSelectedCredentialId] = useState(""); + const [useSvcAccount, setUseSvcAccount] = useState(true); + const [variables, setVariables] = useState([]); + const [variableValues, setVariableValues] = useState({}); + const [variableErrors, setVariableErrors] = useState({}); + const [variableStatus, setVariableStatus] = useState({ loading: false, error: "" }); + + const loadTree = useCallback(async () => { + try { + const island = mode === 'ansible' ? 'ansible' : 'scripts'; + const resp = await fetch(`/api/assembly/list?island=${island}`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + const { root, map } = buildTree(data.items || [], data.folders || [], mode === 'ansible' ? 
'Ansible Playbooks' : 'Scripts'); + setTree(root); + setNodeMap(map); + } catch (err) { + console.error("Failed to load scripts:", err); + setTree([]); + setNodeMap({}); + } + }, [mode]); + + useEffect(() => { + if (open) { + setSelectedPath(""); + setError(""); + setVariables([]); + setVariableValues({}); + setVariableErrors({}); + setVariableStatus({ loading: false, error: "" }); + setUseSvcAccount(true); + setSelectedCredentialId(""); + loadTree(); + } + }, [open, loadTree]); + + useEffect(() => { + if (!open || mode !== "ansible") return; + let canceled = false; + setCredentialsLoading(true); + setCredentialsError(""); + (async () => { + try { + const resp = await fetch("/api/credentials"); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + if (canceled) return; + const list = Array.isArray(data?.credentials) + ? data.credentials.filter((cred) => { + const conn = String(cred.connection_type || "").toLowerCase(); + return conn === "ssh" || conn === "winrm"; + }) + : []; + list.sort((a, b) => String(a?.name || "").localeCompare(String(b?.name || ""))); + setCredentials(list); + } catch (err) { + if (!canceled) { + setCredentials([]); + setCredentialsError(String(err.message || err)); + } + } finally { + if (!canceled) setCredentialsLoading(false); + } + })(); + return () => { + canceled = true; + }; + }, [open, mode]); + + useEffect(() => { + if (!open) { + setSelectedCredentialId(""); + } + }, [open]); + + useEffect(() => { + if (mode !== "ansible" || useSvcAccount) return; + if (!credentials.length) { + setSelectedCredentialId(""); + return; + } + if (!selectedCredentialId || !credentials.some((cred) => String(cred.id) === String(selectedCredentialId))) { + setSelectedCredentialId(String(credentials[0].id)); + } + }, [mode, credentials, selectedCredentialId, useSvcAccount]); + + const renderNodes = (nodes = []) => + nodes.map((n) => ( + + {n.isFolder ? 
( + + ) : ( + + )} + {n.label} + + } + > + {n.children && n.children.length ? renderNodes(n.children) : null} + + )); + + const onItemSelect = (_e, itemId) => { + const node = nodeMap[itemId]; + if (node && !node.isFolder) { + setSelectedPath(node.path); + setError(""); + setVariableErrors({}); + } + }; + + const normalizeVariables = (list) => { + if (!Array.isArray(list)) return []; + return list + .map((raw) => { + if (!raw || typeof raw !== "object") return null; + const name = typeof raw.name === "string" ? raw.name.trim() : typeof raw.key === "string" ? raw.key.trim() : ""; + if (!name) return null; + const type = typeof raw.type === "string" ? raw.type.toLowerCase() : "string"; + const label = typeof raw.label === "string" && raw.label.trim() ? raw.label.trim() : name; + const description = typeof raw.description === "string" ? raw.description : ""; + const required = Boolean(raw.required); + const defaultValue = raw.hasOwnProperty("default") + ? raw.default + : raw.hasOwnProperty("defaultValue") + ? raw.defaultValue + : raw.hasOwnProperty("default_value") + ? raw.default_value + : ""; + return { name, label, type, description, required, default: defaultValue }; + }) + .filter(Boolean); + }; + + const deriveInitialValue = (variable) => { + const { type, default: defaultValue } = variable; + if (type === "boolean") { + if (typeof defaultValue === "boolean") return defaultValue; + if (defaultValue == null) return false; + const str = String(defaultValue).trim().toLowerCase(); + if (!str) return false; + return ["true", "1", "yes", "on"].includes(str); + } + if (type === "number") { + if (defaultValue == null || defaultValue === "") return ""; + if (typeof defaultValue === "number" && Number.isFinite(defaultValue)) { + return String(defaultValue); + } + const parsed = Number(defaultValue); + return Number.isFinite(parsed) ? String(parsed) : ""; + } + return defaultValue == null ? 
"" : String(defaultValue); + }; + + useEffect(() => { + if (!selectedPath) { + setVariables([]); + setVariableValues({}); + setVariableErrors({}); + setVariableStatus({ loading: false, error: "" }); + return; + } + let canceled = false; + const loadAssembly = async () => { + setVariableStatus({ loading: true, error: "" }); + try { + const island = mode === "ansible" ? "ansible" : "scripts"; + const trimmed = (selectedPath || "").replace(/\\/g, "/").replace(/^\/+/, "").trim(); + if (!trimmed) { + setVariables([]); + setVariableValues({}); + setVariableErrors({}); + setVariableStatus({ loading: false, error: "" }); + return; + } + let relPath = trimmed; + if (island === "scripts" && relPath.toLowerCase().startsWith("scripts/")) { + relPath = relPath.slice("Scripts/".length); + } else if (island === "ansible" && relPath.toLowerCase().startsWith("ansible_playbooks/")) { + relPath = relPath.slice("Ansible_Playbooks/".length); + } + const resp = await fetch(`/api/assembly/load?island=${island}&path=${encodeURIComponent(relPath)}`); + if (!resp.ok) throw new Error(`Failed to load assembly (HTTP ${resp.status})`); + const data = await resp.json(); + const defs = normalizeVariables(data?.assembly?.variables || []); + if (!canceled) { + setVariables(defs); + const initialValues = {}; + defs.forEach((v) => { + initialValues[v.name] = deriveInitialValue(v); + }); + setVariableValues(initialValues); + setVariableErrors({}); + setVariableStatus({ loading: false, error: "" }); + } + } catch (err) { + if (!canceled) { + setVariables([]); + setVariableValues({}); + setVariableErrors({}); + setVariableStatus({ loading: false, error: err?.message || String(err) }); + } + } + }; + loadAssembly(); + return () => { + canceled = true; + }; + }, [selectedPath, mode]); + + const handleVariableChange = (variable, rawValue) => { + const { name, type } = variable; + if (!name) return; + setVariableValues((prev) => ({ + ...prev, + [name]: type === "boolean" ? 
Boolean(rawValue) : rawValue + })); + setVariableErrors((prev) => { + if (!prev[name]) return prev; + const next = { ...prev }; + delete next[name]; + return next; + }); + }; + + const buildVariablePayload = () => { + const payload = {}; + variables.forEach((variable) => { + if (!variable?.name) return; + const { name, type } = variable; + const hasOverride = Object.prototype.hasOwnProperty.call(variableValues, name); + const raw = hasOverride ? variableValues[name] : deriveInitialValue(variable); + if (type === "boolean") { + payload[name] = Boolean(raw); + } else if (type === "number") { + if (raw === "" || raw === null || raw === undefined) { + payload[name] = ""; + } else { + const num = Number(raw); + payload[name] = Number.isFinite(num) ? num : ""; + } + } else { + payload[name] = raw == null ? "" : String(raw); + } + }); + return payload; + }; + + const onRun = async () => { + if (!selectedPath) { + setError(mode === 'ansible' ? "Please choose a playbook to run." : "Please choose a script to run."); + return; + } + if (mode === 'ansible' && !useSvcAccount && !selectedCredentialId) { + setError("Select a credential to run this playbook."); + return; + } + if (variables.length) { + const errors = {}; + variables.forEach((variable) => { + if (!variable) return; + if (!variable.required) return; + if (variable.type === "boolean") return; + const hasOverride = Object.prototype.hasOwnProperty.call(variableValues, variable.name); + const raw = hasOverride ? 
variableValues[variable.name] : deriveInitialValue(variable); + if (raw == null || raw === "") { + errors[variable.name] = "Required"; + } + }); + if (Object.keys(errors).length) { + setVariableErrors(errors); + setError("Please fill in all required variable values."); + return; + } + } + setRunning(true); + setError(""); + try { + let resp; + const variableOverrides = buildVariablePayload(); + if (mode === 'ansible') { + const playbook_path = selectedPath; // relative to ansible island + resp = await fetch("/api/ansible/quick_run", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + playbook_path, + hostnames, + variable_values: variableOverrides, + credential_id: !useSvcAccount && selectedCredentialId ? Number(selectedCredentialId) : null, + use_service_account: Boolean(useSvcAccount) + }) + }); + } else { + // quick_run expects a path relative to Assemblies root with 'Scripts/' prefix + const script_path = selectedPath.startsWith('Scripts/') ? selectedPath : `Scripts/${selectedPath}`; + resp = await fetch("/api/scripts/quick_run", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + script_path, + hostnames, + run_mode: runAsCurrentUser ? "current_user" : "system", + variable_values: variableOverrides + }) + }); + } + const data = await resp.json(); + if (!resp.ok) throw new Error(data.error || `HTTP ${resp.status}`); + onClose && onClose(); + } catch (err) { + setError(String(err.message || err)); + } finally { + setRunning(false); + } + }; + + const credentialRequired = mode === "ansible" && !useSvcAccount; + const disableRun = + running || + !selectedPath || + (credentialRequired && (!selectedCredentialId || !credentials.length)); + + return ( + + Quick Job + + + + + + + Select a {mode === 'ansible' ? 'playbook' : 'script'} to run on {hostnames.length} device{hostnames.length !== 1 ? "s" : ""}. 
+ + {mode === 'ansible' && ( + + { + const checked = e.target.checked; + setUseSvcAccount(checked); + if (checked) { + setSelectedCredentialId(""); + } else if (!selectedCredentialId && credentials.length) { + setSelectedCredentialId(String(credentials[0].id)); + } + }} + size="small" + /> + } + label="Use Configured svcBorealis Account" + sx={{ mr: 2 }} + /> + + Credential + + + {useSvcAccount && ( + + Runs with the agent's svcBorealis account. + + )} + {credentialsLoading && } + {!credentialsLoading && credentialsError && ( + {credentialsError} + )} + {!useSvcAccount && !credentialsLoading && !credentialsError && !credentials.length && ( + + No SSH or WinRM credentials available. Create one under Access Management. + + )} + + )} + + + + {tree.length ? renderNodes(tree) : ( + + {mode === 'ansible' ? 'No playbooks found.' : 'No scripts found.'} + + )} + + + + Selection + + {selectedPath || (mode === 'ansible' ? 'No playbook selected' : 'No script selected')} + + + {mode !== 'ansible' && ( + <> + setRunAsCurrentUser(e.target.checked)} />} + label={Run as currently logged-in user} + /> + + Unchecked = Run-As BUILTIN\SYSTEM + + + )} + + + Variables + {variableStatus.loading ? ( + Loading variables… + ) : variableStatus.error ? ( + {variableStatus.error} + ) : variables.length ? ( + + {variables.map((variable) => ( + + {variable.type === "boolean" ? ( + handleVariableChange(variable, e.target.checked)} + /> + )} + label={ + + {variable.label} + {variable.required ? " *" : ""} + + } + /> + ) : ( + handleVariableChange(variable, e.target.value)} + InputLabelProps={{ shrink: true }} + sx={{ + "& .MuiOutlinedInput-root": { bgcolor: "#1b1b1b", color: "#e6edf3" }, + "& .MuiInputBase-input": { color: "#e6edf3" } + }} + error={Boolean(variableErrors[variable.name])} + helperText={variableErrors[variable.name] || variable.description || ""} + /> + )} + {variable.type === "boolean" && variable.description ? 
( + + {variable.description} + + ) : null} + + ))} + + ) : ( + No variables defined for this assembly. + )} + + {error && ( + {error} + )} + + + + + + + + + ); +} diff --git a/Data/Server/WebUI/src/Scheduling/Scheduled_Jobs_List.jsx b/Data/Server/WebUI/src/Scheduling/Scheduled_Jobs_List.jsx new file mode 100644 index 00000000..7e22a10e --- /dev/null +++ b/Data/Server/WebUI/src/Scheduling/Scheduled_Jobs_List.jsx @@ -0,0 +1,685 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Scheduled_Jobs_List.jsx + +import React, { + useCallback, + useEffect, + useMemo, + useRef, + useState +} from "react"; +import { + Paper, + Box, + Typography, + Button, + Switch, + Dialog, + DialogTitle, + DialogActions, + CircularProgress +} from "@mui/material"; +import { AgGridReact } from "ag-grid-react"; +import { ModuleRegistry, AllCommunityModule, themeQuartz } from "ag-grid-community"; + +ModuleRegistry.registerModules([AllCommunityModule]); + +const myTheme = themeQuartz.withParams({ + accentColor: "#FFA6FF", + backgroundColor: "#1f2836", + browserColorScheme: "dark", + chromeBackgroundColor: { + ref: "foregroundColor", + mix: 0.07, + onto: "backgroundColor" + }, + fontFamily: { + googleFont: "IBM Plex Sans" + }, + foregroundColor: "#FFF", + headerFontSize: 14 +}); + +const themeClassName = myTheme.themeName || "ag-theme-quartz"; +const gridFontFamily = '"IBM Plex Sans", "Helvetica Neue", Arial, sans-serif'; +const iconFontFamily = '"Quartz Regular"'; + +function ResultsBar({ counts }) { + const total = Math.max(1, Number(counts?.total_targets || 0)); + const sections = [ + { key: "success", color: "#00d18c" }, + { key: "running", color: "#58a6ff" }, + { key: "failed", color: "#ff4f4f" }, + { key: "timed_out", color: "#b36ae2" }, + { key: "expired", color: "#777777" }, + { key: "pending", color: "#999999" } + ]; + const labelFor = (key) => + key === "pending" + ? 
"Scheduled" + : key + .replace(/_/g, " ") + .replace(/^./, (c) => c.toUpperCase()); + + const hasNonPending = sections + .filter((section) => section.key !== "pending") + .some((section) => Number(counts?.[section.key] || 0) > 0); + + return ( + + + {sections.map((section) => { + const value = Number(counts?.[section.key] || 0); + if (!value) return null; + const width = `${Math.round((value / total) * 100)}%`; + return ( + + ); + })} + + + {(() => { + if (!hasNonPending && Number(counts?.pending || 0) > 0) { + return Scheduled; + } + return sections + .filter((section) => Number(counts?.[section.key] || 0) > 0) + .map((section) => ( + + + {counts?.[section.key]} {labelFor(section.key)} + + )); + })()} + + + ); +} + +export default function ScheduledJobsList({ onCreateJob, onEditJob, refreshToken }) { + const [rows, setRows] = useState([]); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(""); + const [bulkDeleteOpen, setBulkDeleteOpen] = useState(false); + const [selectedIds, setSelectedIds] = useState(() => new Set()); + const gridApiRef = useRef(null); + + const loadJobs = useCallback( + async ({ showLoading = false } = {}) => { + if (showLoading) { + setLoading(true); + setError(""); + } + try { + const resp = await fetch("/api/scheduled_jobs"); + const data = await resp.json().catch(() => ({})); + if (!resp.ok) { + throw new Error(data?.error || `HTTP ${resp.status}`); + } + const pretty = (st) => { + const s = String(st || "").toLowerCase(); + const map = { + immediately: "Immediately", + once: "Once", + every_5_minutes: "Every 5 Minutes", + every_10_minutes: "Every 10 Minutes", + every_15_minutes: "Every 15 Minutes", + every_30_minutes: "Every 30 Minutes", + every_hour: "Every Hour", + daily: "Daily", + weekly: "Weekly", + monthly: "Monthly", + yearly: "Yearly" + }; + if (map[s]) return map[s]; + try { + return s.replace(/_/g, " ").replace(/^./, (c) => c.toUpperCase()); + } catch { + return String(st || ""); + } + }; + 
const fmt = (ts) => { + if (!ts) return ""; + try { + const d = new Date(Number(ts) * 1000); + if (Number.isNaN(d?.getTime())) return ""; + return d.toLocaleString(undefined, { + year: "numeric", + month: "2-digit", + day: "2-digit", + hour: "numeric", + minute: "2-digit" + }); + } catch { + return ""; + } + }; + const mappedRows = (data?.jobs || []).map((j) => { + const compName = (Array.isArray(j.components) && j.components[0]?.name) || "Demonstration Component"; + const targetText = Array.isArray(j.targets) + ? `${j.targets.length} device${j.targets.length !== 1 ? "s" : ""}` + : ""; + const occurrence = pretty(j.schedule_type || "immediately"); + const resultsCounts = { + total_targets: Array.isArray(j.targets) ? j.targets.length : 0, + pending: Array.isArray(j.targets) ? j.targets.length : 0, + ...(j.result_counts || {}) + }; + if (resultsCounts && resultsCounts.total_targets == null) { + resultsCounts.total_targets = Array.isArray(j.targets) ? j.targets.length : 0; + } + return { + id: j.id, + name: j.name, + scriptWorkflow: compName, + target: targetText, + occurrence, + lastRun: fmt(j.last_run_ts), + nextRun: fmt(j.next_run_ts || j.start_ts), + result: j.last_status || (j.next_run_ts ? "Scheduled" : ""), + resultsCounts, + enabled: Boolean(j.enabled), + raw: j + }; + }); + setRows(mappedRows); + setError(""); + setSelectedIds((prev) => { + if (!prev.size) return prev; + const valid = new Set( + mappedRows.map((row, index) => row.id ?? row.name ?? String(index)) + ); + let changed = false; + const next = new Set(); + prev.forEach((value) => { + if (valid.has(value)) { + next.add(value); + } else { + changed = true; + } + }); + return changed ? 
next : prev; + }); + } catch (err) { + setRows([]); + setSelectedIds(() => new Set()); + setError(String(err?.message || err || "Failed to load scheduled jobs")); + } finally { + if (showLoading) { + setLoading(false); + } + } + }, + [] + ); + + useEffect(() => { + let timer; + let isMounted = true; + (async () => { + if (!isMounted) return; + await loadJobs({ showLoading: true }); + })(); + timer = setInterval(() => { + loadJobs(); + }, 5000); + return () => { + isMounted = false; + if (timer) clearInterval(timer); + }; + }, [loadJobs, refreshToken]); + + const handleGridReady = useCallback((params) => { + gridApiRef.current = params.api; + }, []); + + useEffect(() => { + const api = gridApiRef.current; + if (!api) return; + if (loading) { + api.showLoadingOverlay(); + } else if (!rows.length) { + api.showNoRowsOverlay(); + } else { + api.hideOverlay(); + } + }, [loading, rows]); + + useEffect(() => { + const api = gridApiRef.current; + if (!api) return; + api.forEachNode((node) => { + const shouldSelect = selectedIds.has(node.id); + if (node.isSelected() !== shouldSelect) { + node.setSelected(shouldSelect); + } + }); + }, [rows, selectedIds]); + + const anySelected = selectedIds.size > 0; + + const handleSelectionChanged = useCallback(() => { + const api = gridApiRef.current; + if (!api) return; + const selectedNodes = api.getSelectedNodes(); + const next = new Set(); + selectedNodes.forEach((node) => { + if (node?.id != null) { + next.add(String(node.id)); + } + }); + setSelectedIds(next); + }, []); + + const getRowId = useCallback((params) => { + return ( + params?.data?.id ?? + params?.data?.name ?? + String(params?.rowIndex ?? 
"") + ); + }, []); + + const nameCellRenderer = useCallback( + (params) => { + const row = params.data; + if (!row) return null; + const handleClick = (event) => { + event.preventDefault(); + event.stopPropagation(); + if (typeof onEditJob === "function") { + onEditJob(row.raw); + } + }; + return ( + + ); + }, + [onEditJob] + ); + + const resultsCellRenderer = useCallback((params) => { + return ; + }, []); + + const enabledCellRenderer = useCallback( + (params) => { + const row = params.data; + if (!row) return null; + const handleToggle = async (event) => { + event.stopPropagation(); + const nextEnabled = event.target.checked; + try { + await fetch(`/api/scheduled_jobs/${row.id}/toggle`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ enabled: nextEnabled }) + }); + } catch { + // ignore network errors for toggle + } + setRows((prev) => + prev.map((job) => { + if ((job.id ?? job.name) === (row.id ?? row.name)) { + const updatedRaw = { ...(job.raw || {}), enabled: nextEnabled }; + return { ...job, enabled: nextEnabled, raw: updatedRaw }; + } + return job; + }) + ); + }; + return ( + event.stopPropagation()} + sx={{ + "& .MuiSwitch-switchBase.Mui-checked": { + color: "#58a6ff" + }, + "& .MuiSwitch-switchBase.Mui-checked + .MuiSwitch-track": { + bgcolor: "#58a6ff" + } + }} + /> + ); + }, + [] + ); + + const columnDefs = useMemo( + () => [ + { + headerName: "", + field: "__checkbox__", + checkboxSelection: true, + headerCheckboxSelection: true, + maxWidth: 60, + minWidth: 60, + sortable: false, + filter: false, + resizable: false, + suppressMenu: true, + pinned: false + }, + { + headerName: "Name", + field: "name", + cellRenderer: nameCellRenderer, + sort: "asc" + }, + { + headerName: "Assembly(s)", + field: "scriptWorkflow", + valueGetter: (params) => params.data?.scriptWorkflow || "Demonstration Component" + }, + { + headerName: "Target", + field: "target" + }, + { + headerName: "Recurrence", + field: "occurrence" + }, 
+ { + headerName: "Last Run", + field: "lastRun" + }, + { + headerName: "Next Run", + field: "nextRun" + }, + { + headerName: "Results", + field: "resultsCounts", + minWidth: 280, + cellRenderer: resultsCellRenderer, + sortable: false, + filter: false + }, + { + headerName: "Enabled", + field: "enabled", + minWidth: 140, + maxWidth: 160, + cellRenderer: enabledCellRenderer, + sortable: false, + filter: false, + resizable: false, + suppressMenu: true + } + ], + [enabledCellRenderer, nameCellRenderer, resultsCellRenderer] + ); + + const defaultColDef = useMemo( + () => ({ + sortable: true, + filter: "agTextColumnFilter", + resizable: true, + flex: 1, + minWidth: 140, + cellStyle: { + display: "flex", + alignItems: "center", + color: "#f5f7fa", + fontFamily: gridFontFamily, + fontSize: "13px" + }, + headerClass: "scheduled-jobs-grid-header" + }), + [] + ); + + return ( + + + + + Scheduled Jobs + + + List of automation jobs with schedules, results, and actions. + + + + + + + + + {loading && ( + + + Loading scheduled jobs… + + )} + + {error && ( + + {error} + + )} + + + + + + + + setBulkDeleteOpen(false)} + PaperProps={{ sx: { bgcolor: "#121212", color: "#fff" } }} + > + Are you sure you want to delete this job(s)? 
+ + + + + + + ); +} diff --git a/Data/Server/WebUI/src/Sites/Site_List.jsx b/Data/Server/WebUI/src/Sites/Site_List.jsx new file mode 100644 index 00000000..478a7b2e --- /dev/null +++ b/Data/Server/WebUI/src/Sites/Site_List.jsx @@ -0,0 +1,385 @@ +import React, { useEffect, useMemo, useState, useCallback, useRef } from "react"; +import { + Paper, + Box, + Typography, + Table, + TableBody, + TableCell, + TableHead, + TableRow, + TableSortLabel, + Checkbox, + Button, + IconButton, + Popover, + TextField, + MenuItem +} from "@mui/material"; +import AddIcon from "@mui/icons-material/Add"; +import DeleteIcon from "@mui/icons-material/DeleteOutline"; +import EditIcon from "@mui/icons-material/Edit"; +import FilterListIcon from "@mui/icons-material/FilterList"; +import ViewColumnIcon from "@mui/icons-material/ViewColumn"; +import { CreateSiteDialog, ConfirmDeleteDialog, RenameSiteDialog } from "../Dialogs.jsx"; + +export default function SiteList({ onOpenDevicesForSite }) { + const [rows, setRows] = useState([]); // {id, name, description, device_count} + const [orderBy, setOrderBy] = useState("name"); + const [order, setOrder] = useState("asc"); + const [selectedIds, setSelectedIds] = useState(() => new Set()); + + // Columns configuration (similar style to Device_List) + const COL_LABELS = useMemo(() => ({ + name: "Name", + description: "Description", + device_count: "Devices", + }), []); + const defaultColumns = useMemo( + () => [ + { id: "name", label: COL_LABELS.name }, + { id: "description", label: COL_LABELS.description }, + { id: "device_count", label: COL_LABELS.device_count }, + ], + [COL_LABELS] + ); + const [columns, setColumns] = useState(defaultColumns); + const dragColId = useRef(null); + const [colChooserAnchor, setColChooserAnchor] = useState(null); + + const [filters, setFilters] = useState({}); + const [filterAnchor, setFilterAnchor] = useState(null); // { id, anchorEl } + + const [createOpen, setCreateOpen] = useState(false); + const [deleteOpen, 
setDeleteOpen] = useState(false); + const [renameOpen, setRenameOpen] = useState(false); + const [renameValue, setRenameValue] = useState(""); + + const fetchSites = useCallback(async () => { + try { + const res = await fetch("/api/sites"); + const data = await res.json(); + setRows(Array.isArray(data?.sites) ? data.sites : []); + } catch { + setRows([]); + } + }, []); + + useEffect(() => { fetchSites(); }, [fetchSites]); + + // Apply initial filters from global search + useEffect(() => { + try { + const json = localStorage.getItem('site_list_initial_filters'); + if (json) { + const obj = JSON.parse(json); + if (obj && typeof obj === 'object') setFilters((prev) => ({ ...prev, ...obj })); + localStorage.removeItem('site_list_initial_filters'); + } + } catch {} + }, []); + + const handleSort = (col) => { + if (orderBy === col) setOrder(order === "asc" ? "desc" : "asc"); + else { setOrderBy(col); setOrder("asc"); } + }; + + const filtered = useMemo(() => { + if (!filters || Object.keys(filters).length === 0) return rows; + return rows.filter((r) => + Object.entries(filters).every(([k, v]) => { + const val = String(v || "").toLowerCase(); + if (!val) return true; + return String(r[k] ?? "").toLowerCase().includes(val); + }) + ); + }, [rows, filters]); + + const sorted = useMemo(() => { + const dir = order === "asc" ? 1 : -1; + const arr = [...filtered]; + arr.sort((a, b) => { + if (orderBy === "device_count") return ((a.device_count||0) - (b.device_count||0)) * dir; + return String(a[orderBy] ?? "").localeCompare(String(b[orderBy] ?? 
"")) * dir; + }); + return arr; + }, [filtered, orderBy, order]); + + const onHeaderDragStart = (colId) => (e) => { dragColId.current = colId; try { e.dataTransfer.setData("text/plain", colId); } catch {} }; + const onHeaderDragOver = (e) => { e.preventDefault(); }; + const onHeaderDrop = (targetColId) => (e) => { + e.preventDefault(); + const fromId = dragColId.current; if (!fromId || fromId === targetColId) return; + setColumns((prev) => { + const cur = [...prev]; + const fromIdx = cur.findIndex((c) => c.id === fromId); + const toIdx = cur.findIndex((c) => c.id === targetColId); + if (fromIdx < 0 || toIdx < 0) return prev; + const [moved] = cur.splice(fromIdx, 1); + cur.splice(toIdx, 0, moved); + return cur; + }); + dragColId.current = null; + }; + + const openFilter = (id) => (e) => setFilterAnchor({ id, anchorEl: e.currentTarget }); + const closeFilter = () => setFilterAnchor(null); + const onFilterChange = (id) => (e) => setFilters((prev) => ({ ...prev, [id]: e.target.value })); + + const isAllChecked = sorted.length > 0 && sorted.every((r) => selectedIds.has(r.id)); + const isIndeterminate = selectedIds.size > 0 && !isAllChecked; + const toggleAll = (e) => { + const checked = e.target.checked; + setSelectedIds((prev) => { + const next = new Set(prev); + if (checked) sorted.forEach((r) => next.add(r.id)); + else next.clear(); + return next; + }); + }; + const toggleOne = (id) => (e) => { + const checked = e.target.checked; + setSelectedIds((prev) => { + const next = new Set(prev); + if (checked) next.add(id); else next.delete(id); + return next; + }); + }; + + return ( + + + Sites + + + + + + + + + + + + + + {columns.map((col) => ( + + + handleSort(col.id)}> + {col.label} + + + + + + + ))} + + + + {sorted.map((r) => ( + + e.stopPropagation()}> + + + {columns.map((col) => { + switch (col.id) { + case 'name': + return ( + { + if (onOpenDevicesForSite) onOpenDevicesForSite(r.name); + }} + sx={{ color: '#58a6ff', '&:hover': { cursor: 'pointer', textDecoration: 
'underline' } }} + > + {r.name} + + ); + case 'description': + return {r.description || ''}; + case 'device_count': + return {r.device_count ?? 0}; + default: + return ; + } + })} + + ))} + {sorted.length === 0 && ( + + No sites defined. + + )} + +
    + + {/* Column chooser */} + setColChooserAnchor(null)} + anchorOrigin={{ vertical: 'bottom', horizontal: 'right' }} + PaperProps={{ sx: { bgcolor: '#1e1e1e', color: '#fff', p: 1 } }} + > + + {[ + { id: 'name', label: 'Name' }, + { id: 'description', label: 'Description' }, + { id: 'device_count', label: 'Devices' }, + ].map((opt) => ( + e.stopPropagation()} sx={{ gap: 1 }}> + c.id === opt.id)} + onChange={(e) => { + const checked = e.target.checked; + setColumns((prev) => { + const exists = prev.some((c) => c.id === opt.id); + if (checked) { + if (exists) return prev; + return [...prev, { id: opt.id, label: opt.label }]; + } + return prev.filter((c) => c.id !== opt.id); + }); + }} + sx={{ p: 0.3, color: '#bbb' }} + /> + {opt.label} + + ))} + + + + + + + {/* Filter popover */} + + {filterAnchor && ( + + c.id === filterAnchor.id)?.label || ''}`} + value={filters[filterAnchor.id] || ''} + onChange={onFilterChange(filterAnchor.id)} + onKeyDown={(e) => { if (e.key === 'Escape') closeFilter(); }} + sx={{ + input: { color: '#fff' }, + minWidth: 220, + '& .MuiOutlinedInput-root': { '& fieldset': { borderColor: '#555' }, '&:hover fieldset': { borderColor: '#888' } }, + }} + /> + + + )} + + + {/* Create site dialog */} + setCreateOpen(false)} + onCreate={async (name, description) => { + try { + const res = await fetch('/api/sites', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ name, description }) }); + if (!res.ok) return; + setCreateOpen(false); + await fetchSites(); + } catch {} + }} + /> + + {/* Delete confirmation */} + setDeleteOpen(false)} + onConfirm={async () => { + try { + const ids = Array.from(selectedIds); + await fetch('/api/sites/delete', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ ids }) }); + } catch {} + setDeleteOpen(false); + setSelectedIds(new Set()); + await fetchSites(); + }} + /> + + {/* Rename site dialog */} + setRenameOpen(false)} + onSave={async () => { 
+ const newName = (renameValue || '').trim(); + if (!newName) return; + const selId = selectedIds.size === 1 ? Array.from(selectedIds)[0] : null; + if (selId == null) return; + try { + const res = await fetch('/api/sites/rename', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ id: selId, new_name: newName }) + }); + if (!res.ok) { + // Keep dialog open on error; optionally log + try { const err = await res.json(); console.warn('Rename failed', err); } catch {} + return; + } + setRenameOpen(false); + await fetchSites(); + } catch (e) { + console.warn('Rename error', e); + } + }} + /> +
    + ); +} diff --git a/Data/Server/WebUI/src/Status_Bar.jsx b/Data/Server/WebUI/src/Status_Bar.jsx new file mode 100644 index 00000000..e2ec75da --- /dev/null +++ b/Data/Server/WebUI/src/Status_Bar.jsx @@ -0,0 +1,93 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/Status_Bar.jsx + +import React, { useEffect, useState } from "react"; +import { Box, Button, Divider } from "@mui/material"; + +export default function StatusBar() { + const [apiStatus, setApiStatus] = useState("checking"); + + useEffect(() => { + fetch("/health") + .then((res) => (res.ok ? setApiStatus("online") : setApiStatus("offline"))) + .catch(() => setApiStatus("offline")); + }, []); + + const applyRate = () => { + const val = parseInt( + document.getElementById("updateRateInput")?.value + ); + if (!isNaN(val) && val >= 50) { + window.BorealisUpdateRate = val; + console.log("Global update rate set to", val + "ms"); + } else { + alert("Please enter a valid number (min 50)."); + } + }; + + return ( + + + Nodes: 0 + + Update Rate (ms): + + + + + + Backend API Server: + + {apiStatus === "checking" ? "..." 
: apiStatus.charAt(0).toUpperCase() + apiStatus.slice(1)} + + + + ); +} diff --git a/Data/Server/WebUI/src/index.jsx b/Data/Server/WebUI/src/index.jsx new file mode 100644 index 00000000..a64e173b --- /dev/null +++ b/Data/Server/WebUI/src/index.jsx @@ -0,0 +1,21 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/index.js + +import React from 'react'; +import ReactDOM from 'react-dom/client'; + +// Global Styles +import "normalize.css/normalize.css"; +import "@fontsource/ibm-plex-sans/400.css"; +import "@fontsource/ibm-plex-sans/500.css"; +import "@fontsource/ibm-plex-sans/600.css"; +import "@fortawesome/fontawesome-free/css/all.min.css"; +import './Borealis.css'; // Global Theming for All of Borealis + +import App from './App.jsx'; + +const root = ReactDOM.createRoot(document.getElementById('root')); +root.render( + + + +); \ No newline at end of file diff --git a/Data/Server/WebUI/src/nodes/Agent/Node_Agent.jsx b/Data/Server/WebUI/src/nodes/Agent/Node_Agent.jsx new file mode 100644 index 00000000..b892aceb --- /dev/null +++ b/Data/Server/WebUI/src/nodes/Agent/Node_Agent.jsx @@ -0,0 +1,554 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/nodes/Agent/Node_Agent.jsx +import React, { useEffect, useState, useCallback, useMemo, useRef } from "react"; +import { Handle, Position, useReactFlow, useStore } from "reactflow"; + +// Modern Node: Borealis Agent (Sidebar Config Enabled) +const BorealisAgentNode = ({ id, data }) => { + const { getNodes, setNodes } = useReactFlow(); + const edges = useStore((state) => state.edges); + const [agents, setAgents] = useState({}); + const [sites, setSites] = useState([]); + const [isConnected, setIsConnected] = useState(false); + const [siteMapping, setSiteMapping] = useState({}); + const prevRolesRef = useRef([]); + const selectionRef = useRef({ host: "", mode: "", agentId: "", siteId: "" }); + + const selectedSiteId = 
data?.agent_site_id ? String(data.agent_site_id) : ""; + const selectedHost = data?.agent_host || ""; + const selectedMode = + (data?.agent_mode || "currentuser").toString().toLowerCase() === "system" + ? "system" + : "currentuser"; + const selectedAgent = data?.agent_id || ""; + + // Group agents by hostname and execution context + const agentsByHostname = useMemo(() => { + if (!agents || typeof agents !== "object") return {}; + const grouped = {}; + Object.entries(agents).forEach(([aid, info]) => { + if (!info || typeof info !== "object") return; + const status = (info.status || "").toString().toLowerCase(); + if (status === "offline") return; + const host = (info.hostname || info.agent_hostname || "").trim() || "unknown"; + const modeRaw = (info.service_mode || "").toString().toLowerCase(); + const mode = modeRaw === "system" ? "system" : "currentuser"; + if (!grouped[host]) { + grouped[host] = { currentuser: null, system: null }; + } + grouped[host][mode] = { + agent_id: aid, + status: info.status || "offline", + last_seen: info.last_seen || 0, + info, + }; + }); + return grouped; + }, [agents]); + +// Locale-aware, case-insensitive, numeric-friendly sorter (e.g., "host2" < "host10") +const hostCollator = useMemo( + () => new Intl.Collator(undefined, { sensitivity: "base", numeric: true }), + [] +); + +const hostOptions = useMemo(() => { + const entries = Object.entries(agentsByHostname) + .map(([host, contexts]) => { + const candidates = [contexts.currentuser, contexts.system].filter(Boolean); + if (!candidates.length) return null; + + // Label is just the hostname (you already simplified this earlier) + const label = host; + + // Keep latest around if you use it elsewhere, but it no longer affects ordering + const latest = Math.max(...candidates.map((r) => r.last_seen || 0)); + + return { host, label, contexts, latest }; + }) + .filter(Boolean) + // Always alphabetical, case-insensitive, numeric-aware + .sort((a, b) => hostCollator.compare(a.host, b.host)); + 
+ return entries; +}, [agentsByHostname, hostCollator]); + + // Fetch Agents Periodically + useEffect(() => { + const fetchAgents = () => { + fetch("/api/agents") + .then((res) => res.json()) + .then(setAgents) + .catch(() => {}); + }; + fetchAgents(); + const interval = setInterval(fetchAgents, 10000); // Update Agent List Every 10 Seconds + return () => clearInterval(interval); + }, []); + + // Fetch sites list + useEffect(() => { + const fetchSites = () => { + fetch("/api/sites") + .then((res) => res.json()) + .then((data) => { + const siteEntries = Array.isArray(data?.sites) ? data.sites : []; + setSites(siteEntries); + }) + .catch(() => setSites([])); + }; + fetchSites(); + }, []); + + // Fetch site mapping for current host options + useEffect(() => { + const hostnames = hostOptions.map(({ host }) => host).filter(Boolean); + if (!hostnames.length) { + setSiteMapping({}); + return; + } + const query = hostnames.map(encodeURIComponent).join(","); + fetch(`/api/sites/device_map?hostnames=${query}`) + .then((res) => res.json()) + .then((data) => { + const mapping = data?.mapping && typeof data.mapping === "object" ? 
data.mapping : {}; + setSiteMapping(mapping); + }) + .catch(() => setSiteMapping({})); + }, [hostOptions]); + + const filteredHostOptions = useMemo(() => { + if (!selectedSiteId) return hostOptions; + return hostOptions.filter(({ host }) => { + const mapping = siteMapping[host]; + if (!mapping || typeof mapping.site_id === "undefined" || mapping.site_id === null) { + return false; + } + return String(mapping.site_id) === selectedSiteId; + }); + }, [hostOptions, selectedSiteId, siteMapping]); + + // Align selected site with known host mapping when available + useEffect(() => { + if (selectedSiteId || !selectedHost) return; + const mapping = siteMapping[selectedHost]; + if (!mapping || typeof mapping.site_id === "undefined" || mapping.site_id === null) return; + const mappedId = String(mapping.site_id); + setNodes((nds) => + nds.map((n) => + n.id === id + ? { + ...n, + data: { + ...n.data, + agent_site_id: mappedId, + }, + } + : n + ) + ); + }, [selectedHost, selectedSiteId, siteMapping, id, setNodes]); + + // Ensure host selection stays aligned with available agents + useEffect(() => { + if (!selectedHost) return; + + const hostExists = filteredHostOptions.some((opt) => opt.host === selectedHost); + if (hostExists) return; + + if (selectedAgent && agents[selectedAgent]) { + const info = agents[selectedAgent]; + const inferredHost = (info?.hostname || info?.agent_hostname || "").trim() || "unknown"; + const allowed = filteredHostOptions.some((opt) => opt.host === inferredHost); + if (allowed && inferredHost && inferredHost !== selectedHost) { + setNodes((nds) => + nds.map((n) => + n.id === id + ? { + ...n, + data: { + ...n.data, + agent_host: inferredHost, + }, + } + : n + ) + ); + return; + } + } + + setNodes((nds) => + nds.map((n) => + n.id === id + ? 
{ + ...n, + data: { + ...n.data, + agent_host: "", + agent_id: "", + agent_mode: "currentuser", + }, + } + : n + ) + ); + }, [filteredHostOptions, selectedHost, selectedAgent, agents, id, setNodes]); + + const siteSelectOptions = useMemo(() => { + const entries = Array.isArray(sites) ? [...sites] : []; + entries.sort((a, b) => + (a?.name || "").localeCompare(b?.name || "", undefined, { sensitivity: "base" }) + ); + const mapped = entries.map((site) => ({ + value: String(site.id), + label: site.name || `Site ${site.id}`, + })); + return [{ value: "", label: "All Sites" }, ...mapped]; + }, [sites]); + + const hostSelectOptions = useMemo(() => { + const mapped = filteredHostOptions.map(({ host, label }) => ({ + value: host, + label, + })); + return [{ value: "", label: "-- Select --" }, ...mapped]; + }, [filteredHostOptions]); + + const activeHostContexts = selectedHost ? agentsByHostname[selectedHost] : null; + + const modeSelectOptions = useMemo( + () => [ + { + value: "currentuser", + label: "CURRENTUSER (Screen Capture / Macros)", + disabled: !activeHostContexts?.currentuser, + }, + { + value: "system", + label: "SYSTEM (Scripts)", + disabled: !activeHostContexts?.system, + }, + ], + [activeHostContexts] + ); + + useEffect(() => { + setNodes((nds) => + nds.map((n) => + n.id === id + ? { + ...n, + data: { + ...n.data, + siteOptions: siteSelectOptions, + hostOptions: hostSelectOptions, + modeOptions: modeSelectOptions, + }, + } + : n + ) + ); + }, [id, setNodes, siteSelectOptions, hostSelectOptions, modeSelectOptions]); + + useEffect(() => { + if (!selectedHost) { + if (selectedAgent || selectedMode !== "currentuser") { + setNodes((nds) => + nds.map((n) => + n.id === id + ? 
{ + ...n, + data: { + ...n.data, + agent_id: "", + agent_mode: "currentuser", + }, + } + : n + ) + ); + } + return; + } + + const contexts = agentsByHostname[selectedHost]; + if (!contexts) { + if (selectedAgent || selectedMode !== "currentuser") { + setNodes((nds) => + nds.map((n) => + n.id === id + ? { + ...n, + data: { + ...n.data, + agent_id: "", + agent_mode: "currentuser", + }, + } + : n + ) + ); + } + return; + } + + if (!contexts[selectedMode]) { + const fallbackMode = contexts.currentuser + ? "currentuser" + : contexts.system + ? "system" + : "currentuser"; + const fallbackAgentId = contexts[fallbackMode]?.agent_id || ""; + if (fallbackMode !== selectedMode || fallbackAgentId !== selectedAgent) { + setNodes((nds) => + nds.map((n) => + n.id === id + ? { + ...n, + data: { + ...n.data, + agent_mode: fallbackMode, + agent_id: fallbackAgentId, + }, + } + : n + ) + ); + } + return; + } + + const targetAgentId = contexts[selectedMode]?.agent_id || ""; + if (targetAgentId !== selectedAgent) { + setNodes((nds) => + nds.map((n) => + n.id === id + ? 
{ + ...n, + data: { + ...n.data, + agent_id: targetAgentId, + }, + } + : n + ) + ); + } + }, [selectedHost, selectedMode, agentsByHostname, selectedAgent, id, setNodes]); + + useEffect(() => { + const prev = selectionRef.current; + const changed = + prev.host !== selectedHost || + prev.mode !== selectedMode || + prev.agentId !== selectedAgent || + prev.siteId !== selectedSiteId; + if (!changed) return; + + const selectionChangedAgent = + prev.agentId && + (prev.agentId !== selectedAgent || prev.host !== selectedHost || prev.mode !== selectedMode); + if (selectionChangedAgent) { + setIsConnected(false); + prevRolesRef.current = []; + } + + selectionRef.current = { + host: selectedHost, + mode: selectedMode, + agentId: selectedAgent, + siteId: selectedSiteId, + }; + }, [selectedHost, selectedMode, selectedAgent, selectedSiteId]); + + // Attached Roles logic + const attachedRoleIds = useMemo( + () => + edges + .filter((e) => e.source === id && e.sourceHandle === "provisioner") + .map((e) => e.target), + [edges, id] + ); + const getAttachedRoles = useCallback(() => { + const allNodes = getNodes(); + return attachedRoleIds + .map((nid) => { + const fn = window.__BorealisInstructionNodes?.[nid]; + return typeof fn === "function" ? 
fn() : null; + }) + .filter((r) => r); + }, [attachedRoleIds, getNodes]); + + // Provision Roles to Agent + const provisionRoles = useCallback((roles) => { + if (!selectedAgent) return; + fetch("/api/agent/provision", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ agent_id: selectedAgent, roles }) + }) + .then(() => { + setIsConnected(true); + prevRolesRef.current = roles; + }) + .catch(() => {}); + }, [selectedAgent]); + const handleConnect = useCallback(() => { + const roles = getAttachedRoles(); + provisionRoles(roles); + }, [getAttachedRoles, provisionRoles]); + const handleDisconnect = useCallback(() => { + if (!selectedAgent) return; + fetch("/api/agent/provision", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ agent_id: selectedAgent, roles: [] }) + }) + .then(() => { + setIsConnected(false); + prevRolesRef.current = []; + }) + .catch(() => {}); + }, [selectedAgent]); + + // Auto-provision on role change + useEffect(() => { + const newRoles = getAttachedRoles(); + const prevSerialized = JSON.stringify(prevRolesRef.current || []); + const newSerialized = JSON.stringify(newRoles); + if (isConnected && newSerialized !== prevSerialized) { + provisionRoles(newRoles); + } + }, [attachedRoleIds, isConnected, getAttachedRoles, provisionRoles]); + + // Status Label + const selectedAgentStatus = useMemo(() => { + if (!selectedHost) return "Unassigned"; + const contexts = agentsByHostname[selectedHost]; + if (!contexts) return "Offline"; + const activeContext = contexts[selectedMode]; + if (!selectedAgent || !activeContext) return "Unavailable"; + const status = (activeContext.status || "").toString().toLowerCase(); + if (status === "provisioned") return "Connected"; + if (status === "orphaned") return "Available"; + if (!status) return "Available"; + return status.charAt(0).toUpperCase() + status.slice(1); + }, [agentsByHostname, selectedHost, selectedMode, 
selectedAgent]); + + // Render (Sidebar handles config) + return ( +
    + + +
    Device Agent
    +
    +
    Right-Click to Configure Agent
    + +
    + {selectedHost ? `${selectedHost} · ${selectedMode.toUpperCase()}` : "No device selected"} +
    +
    +
    + ); +}; + +// Node Registration Object with sidebar config and docs +export default { + type: "Borealis_Agent", + label: "Device Agent", + description: ` +Select and connect to a remote Borealis Agent. +- Assign roles to agent dynamically by connecting "Agent Role" nodes. +- Auto-provisions agent as role assignments change. +- See live agent status and re-connect/disconnect easily. +- Choose between CURRENTUSER and SYSTEM contexts for each device. +`.trim(), + content: "Select and manage an Agent with dynamic roles", + component: BorealisAgentNode, + config: [ + { + key: "agent_site_id", + label: "Site", + type: "select", + optionsKey: "siteOptions", + defaultValue: "" + }, + { + key: "agent_host", + label: "Device", + type: "select", + optionsKey: "hostOptions", + defaultValue: "" + }, + { + key: "agent_mode", + label: "Agent Context", + type: "select", + optionsKey: "modeOptions", + defaultValue: "currentuser" + }, + { + key: "agent_id", + label: "Agent ID", + type: "text", + readOnly: true, + defaultValue: "" + } + ], + usage_documentation: ` +### Borealis Agent Node + +This node allows you to establish a connection with a device running a Borealis "Agent", so you can instruct the agent to do things from your workflow. + +#### Features +- **Select** a site, then a device, then finally an agent context (CURRENTUSER vs SYSTEM). +- **Connect/Disconnect** from the agent at any time. +- **Attach roles** (by connecting "Agent Role" nodes to this node's output handle) to assign behaviors dynamically. + +#### How to Use +1. **Drag and drop in a Borealis Agent node.** +2. **Pick an agent** from the dropdown list (auto-populates from API backend). +3. **Click "Connect to Agent"**. +4. **Attach Agent Role Nodes** (e.g., Screenshot, Macro Keypress) to the "provisioner" output handle to define what the agent should do. +5. Agent will automatically update its roles as you change connected Role Nodes. 
+ +#### Good to Know +- If an agent disconnects or goes offline, its status will show "Reconnecting..." until it returns. +- **Roles update LIVE**: Any time you change attached roles, the agent gets updated instantly. + +`.trim() +}; diff --git a/Data/Server/WebUI/src/nodes/Agent/Node_Agent_Role_Macro.jsx b/Data/Server/WebUI/src/nodes/Agent/Node_Agent_Role_Macro.jsx new file mode 100644 index 00000000..78101a5f --- /dev/null +++ b/Data/Server/WebUI/src/nodes/Agent/Node_Agent_Role_Macro.jsx @@ -0,0 +1,310 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/nodes/Agent Roles/Node_Agent_Role_Macro.jsx +import React, { useState, useEffect, useRef } from "react"; +import { Handle, Position, useReactFlow, useStore } from "reactflow"; +import "react-simple-keyboard/build/css/index.css"; + +// Default update interval for window list refresh (in ms) +const WINDOW_LIST_REFRESH_MS = 4000; + +if (!window.BorealisValueBus) window.BorealisValueBus = {}; +if (!window.BorealisUpdateRate) window.BorealisUpdateRate = 100; + +const DEFAULT_OPERATION_MODE = "Continuous"; +const OPERATION_MODES = [ + "Run Once", + "Continuous", + "Trigger-Once", + "Trigger-Continuous" +]; + +const MACRO_TYPES = [ + "keypress", + "typed_text" +]; + +const statusColors = { + idle: "#333", + running: "#00d18c", + error: "#ff4f4f", + success: "#00d18c" +}; + +const MacroKeyPressNode = ({ id, data }) => { + const { setNodes, getNodes } = useReactFlow(); + const edges = useStore((state) => state.edges); + const [windowList, setWindowList] = useState([]); + const [status, setStatus] = useState({ state: "idle", message: "" }); + const socketRef = useRef(null); + + // Determine if agent is connected + const agentEdge = edges.find((e) => e.target === id && e.targetHandle === "agent"); + const agentNode = agentEdge && getNodes().find((n) => n.id === agentEdge.source); + const agentConnection = !!(agentNode && agentNode.data && agentNode.data.agent_id); + const 
agent_id = agentNode && agentNode.data && agentNode.data.agent_id; + + // Macro run/trigger state (sidebar sets this via config, but node UI just shows status) + const running = data?.active === true || data?.active === "true"; + + // Store for last macro error/status + const [lastMacroStatus, setLastMacroStatus] = useState({ success: true, message: "", timestamp: null }); + + // Setup WebSocket for agent macro status updates + useEffect(() => { + if (!window.BorealisSocket) return; + const socket = window.BorealisSocket; + socketRef.current = socket; + + function handleMacroStatus(payload) { + if ( + payload && + payload.agent_id === agent_id && + payload.node_id === id + ) { + setLastMacroStatus({ + success: !!payload.success, + message: payload.message || "", + timestamp: payload.timestamp || Date.now() + }); + setStatus({ + state: payload.success ? "success" : "error", + message: payload.message || (payload.success ? "Success" : "Error") + }); + } + } + + socket.on("macro_status", handleMacroStatus); + return () => { + socket.off("macro_status", handleMacroStatus); + }; + }, [agent_id, id]); + + // Auto-refresh window list from agent + useEffect(() => { + let intervalId = null; + async function fetchWindows() { + if (window.BorealisSocket && agentConnection) { + window.BorealisSocket.emit("list_agent_windows", { + agent_id + }); + } + } + fetchWindows(); + intervalId = setInterval(fetchWindows, WINDOW_LIST_REFRESH_MS); + + // Listen for agent_window_list updates + function handleAgentWindowList(payload) { + if (payload?.agent_id === agent_id && Array.isArray(payload.windows)) { + setWindowList(payload.windows); + + // Store windowList in node data for sidebar dynamic dropdowns + setNodes(nds => + nds.map(n => + n.id === id + ? 
{ ...n, data: { ...n.data, windowList: payload.windows } } + : n + ) + ); + } + } + if (window.BorealisSocket) { + window.BorealisSocket.on("agent_window_list", handleAgentWindowList); + } + + return () => { + clearInterval(intervalId); + if (window.BorealisSocket) { + window.BorealisSocket.off("agent_window_list", handleAgentWindowList); + } + }; + }, [agent_id, agentConnection, setNodes, id]); + + // UI: Start/Pause Button + const handleToggleMacro = () => { + setNodes(nds => + nds.map(n => + n.id === id + ? { + ...n, + data: { + ...n.data, + active: n.data?.active === true || n.data?.active === "true" ? "false" : "true" + } + } + : n + ) + ); + }; + + // Optional: Show which window is targeted by name + const selectedWindow = (windowList || []).find(w => String(w.handle) === String(data?.window_handle)); + + // Node UI (no config fields, only status + window list) + return ( +
    + {/* --- INPUT LABELS & HANDLES --- */} +
    + Agent +
    + +
    + Trigger +
    + + +
    + Agent Role: Macro +
    +
    + +
    + Status:{" "} + {status.state === "error" + ? ( + + Error{lastMacroStatus.message ? `: ${lastMacroStatus.message}` : ""} + + ) + : running + ? ( + + Running{lastMacroStatus.message ? ` (${lastMacroStatus.message})` : ""} + + ) + : "Idle"} +
    + Agent Connection: {agentConnection ? "Connected" : "Not Connected"} +
    + Target Window:{" "} + {selectedWindow + ? `${selectedWindow.title} (${selectedWindow.handle})` + : data?.window_handle + ? `Handle: ${data.window_handle}` + : Not set} +
    + Mode: {data?.operation_mode || DEFAULT_OPERATION_MODE} +
    + Macro Type: {data?.macro_type || "keypress"} +
    + +
    + + {lastMacroStatus.timestamp + ? `Last event: ${new Date(lastMacroStatus.timestamp).toLocaleTimeString()}` + : ""} + +
    +
    + ); +}; + +// ----- Node Catalog Export ----- +export default { + type: "Macro_KeyPress", + label: "Agent Role: Macro", + description: ` +Send automated key presses or typed text to any open application window on the connected agent. +Supports manual, continuous, trigger, and one-shot modes for automation and event-driven workflows. +`, + content: "Send Key Press or Typed Text to Window via Agent", + component: MacroKeyPressNode, + config: [ + { key: "window_handle", label: "Target Window", type: "select", dynamicOptions: true, defaultValue: "" }, + { key: "macro_type", label: "Macro Type", type: "select", options: ["keypress", "typed_text"], defaultValue: "keypress" }, + { key: "key", label: "Key", type: "text", defaultValue: "" }, + { key: "text", label: "Typed Text", type: "text", defaultValue: "" }, + { key: "interval_ms", label: "Interval (ms)", type: "text", defaultValue: "1000" }, + { key: "randomize_interval", label: "Randomize Interval", type: "select", options: ["true", "false"], defaultValue: "false" }, + { key: "random_min", label: "Random Min (ms)", type: "text", defaultValue: "750" }, + { key: "random_max", label: "Random Max (ms)", type: "text", defaultValue: "950" }, + { key: "operation_mode", label: "Operation Mode", type: "select", options: OPERATION_MODES, defaultValue: "Continuous" }, + { key: "active", label: "Macro Enabled", type: "select", options: ["true", "false"], defaultValue: "false" }, + { key: "trigger", label: "Trigger Value", type: "text", defaultValue: "0" } + ], + usage_documentation: ` +### Agent Role: Macro + +**Modes:** +- **Continuous**: Macro sends input non-stop when started by button. +- **Trigger-Continuous**: Macro sends input as long as upstream trigger is "1". +- **Trigger-Once**: Macro fires once per upstream "1" (one-shot edge). +- **Run Once**: Macro runs only once when started by button. + +**Macro Types:** +- **Single Keypress**: Press a single key. +- **Typed Text**: Types out a string. 
+ +**Window Target:** +- Dropdown of live windows from agent, stays updated. + +**Event-Driven Support:** +- Chain with other Borealis nodes (text recognition, event triggers, etc). + +**Live Status:** +- Displays last agent macro event and error feedback in node. + +--- + `.trim() +}; diff --git a/Data/Server/WebUI/src/nodes/Agent/Node_Agent_Role_Screenshot.jsx b/Data/Server/WebUI/src/nodes/Agent/Node_Agent_Role_Screenshot.jsx new file mode 100644 index 00000000..fa184781 --- /dev/null +++ b/Data/Server/WebUI/src/nodes/Agent/Node_Agent_Role_Screenshot.jsx @@ -0,0 +1,271 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/nodes/Agent/Node_Agent_Role_Screenshot.jsx +import React, { useCallback, useEffect, useRef, useState } from "react"; +import { Handle, Position, useReactFlow, useStore } from "reactflow"; +import ShareIcon from "@mui/icons-material/Share"; +import IconButton from "@mui/material/IconButton"; + +/* + Agent Role: Screenshot Node (Modern, Sidebar Config Enabled) + + - Defines a screenshot region to be captured by a remote Borealis Agent. + - Pushes live base64 PNG data to downstream nodes. + - Region coordinates (x, y, w, h), visibility, overlay label, and interval are all persisted and synchronized. + - All configuration is moved to the right sidebar (Node Properties). + - Maintains full bi-directional write-back of coordinates and overlay settings. 
+*/ + +if (!window.BorealisValueBus) window.BorealisValueBus = {}; +if (!window.BorealisUpdateRate) window.BorealisUpdateRate = 100; + +const AgentScreenshotNode = ({ id, data }) => { + const { setNodes, getNodes } = useReactFlow(); + const edges = useStore(state => state.edges); + + const resolveAgentData = useCallback(() => { + try { + const agentEdge = edges.find(e => e.target === id && e.sourceHandle === "provisioner"); + const agentNode = getNodes().find(n => n.id === agentEdge?.source); + return agentNode?.data || null; + } catch (err) { + return null; + } + }, [edges, getNodes, id]); + + + // Core config values pulled from sidebar config (with defaults) + const interval = parseInt(data?.interval || 1000, 10) || 1000; + const region = { + x: parseInt(data?.x ?? 250, 10), + y: parseInt(data?.y ?? 100, 10), + w: parseInt(data?.w ?? 300, 10), + h: parseInt(data?.h ?? 200, 10) + }; + const visible = (data?.visible ?? "true") === "true"; + const alias = data?.alias || ""; + const [imageBase64, setImageBase64] = useState(data?.value || ""); + const agentData = resolveAgentData(); + const targetModeLabel = ((agentData?.agent_mode || "").toString().toLowerCase() === "system") + ? "SYSTEM Agent" + : "CURRENTUSER Agent"; + const targetHostLabel = (agentData?.agent_host || "").toString(); + + // Always push current imageBase64 into BorealisValueBus at the global update rate + useEffect(() => { + const intervalId = setInterval(() => { + if (imageBase64) { + window.BorealisValueBus[id] = imageBase64; + setNodes(nds => + nds.map(n => + n.id === id ? 
{ ...n, data: { ...n.data, value: imageBase64 } } : n + ) + ); + } + }, window.BorealisUpdateRate || 100); + return () => clearInterval(intervalId); + }, [id, imageBase64, setNodes]); + + // Listen for agent screenshot and overlay region updates + useEffect(() => { + const socket = window.BorealisSocket; + if (!socket) return; + + const handleScreenshot = (payload) => { + if (payload?.node_id !== id) return; + // Additionally ensure payload is from the agent connected upstream of this node + const agentData = resolveAgentData(); + const selectedAgentId = agentData?.agent_id; + if (!selectedAgentId || payload?.agent_id !== selectedAgentId) return; + + if (payload.image_base64) { + setImageBase64(payload.image_base64); + window.BorealisValueBus[id] = payload.image_base64; + } + const { x, y, w, h } = payload; + if ( + x !== undefined && + y !== undefined && + w !== undefined && + h !== undefined + ) { + setNodes(nds => + nds.map(n => + n.id === id ? { ...n, data: { ...n.data, x, y, w, h } } : n + ) + ); + } + }; + + socket.on("agent_screenshot_task", handleScreenshot); + return () => socket.off("agent_screenshot_task", handleScreenshot); + }, [id, setNodes, resolveAgentData]); + + // Register this node for the agent provisioning sync + window.__BorealisInstructionNodes = window.__BorealisInstructionNodes || {}; + window.__BorealisInstructionNodes[id] = () => { + const agentData = resolveAgentData() || {}; + const modeRaw = (agentData.agent_mode || "").toString().toLowerCase(); + const targetMode = modeRaw === "system" ? 
"system" : "currentuser"; + return { + node_id: id, + role: "screenshot", + interval, + visible, + alias, + target_agent_mode: targetMode, + target_agent_host: agentData.agent_host || "", + ...region + }; + }; + + // Manual live view copy button + const handleCopyLiveViewLink = () => { + const agentData = resolveAgentData(); + const selectedAgentId = agentData?.agent_id; + + if (!selectedAgentId) { + alert("No valid agent connection found."); + return; + } + + const liveUrl = `${window.location.origin}/api/agent/${selectedAgentId}/node/${id}/screenshot/live`; + navigator.clipboard.writeText(liveUrl) + .then(() => console.log(`[Clipboard] Live View URL copied: ${liveUrl}`)) + .catch(err => console.error("Clipboard copy failed:", err)); + }; + + // Node card UI - config handled in sidebar + return ( +
    + + + +
    + {data?.label || "Agent Role: Screenshot"} +
    +
    +
    + Region: X:{region.x} Y:{region.y} W:{region.w} H:{region.h} +
    +
    + Interval: {interval} ms +
    +
    + Agent Context: {targetModeLabel} +
    +
    + Target Host:{" "} + {targetHostLabel ? ( + targetHostLabel + ) : ( + unknown + )} +
    +
    + Overlay: {visible ? "Yes" : "No"} +
    +
    + Label: {alias || none} +
    +
    + {imageBase64 + ? `Last image: ${Math.round(imageBase64.length / 1024)} KB` + : "Awaiting Screenshot Data..."} +
    +
    +
    + + + +
    +
    + ); +}; + +// Node registration for Borealis catalog (sidebar config enabled) +export default { + type: "Agent_Role_Screenshot", + label: "Agent Role: Screenshot", + description: ` +Capture a live screenshot of a defined region from a remote Borealis Agent. + +- Define region (X, Y, Width, Height) +- Select update interval (ms) +- Optionally show a visual overlay with a label +- Pushes base64 PNG stream to downstream nodes +- Use copy button to share live view URL +- Targets the CURRENTUSER or SYSTEM agent context selected upstream +`.trim(), + content: "Capture screenshot region via agent", + component: AgentScreenshotNode, + config: [ + { + key: "interval", + label: "Update Interval (ms)", + type: "text", + defaultValue: "1000" + }, + { + key: "x", + label: "Region X", + type: "text", + defaultValue: "250" + }, + { + key: "y", + label: "Region Y", + type: "text", + defaultValue: "100" + }, + { + key: "w", + label: "Region Width", + type: "text", + defaultValue: "300" + }, + { + key: "h", + label: "Region Height", + type: "text", + defaultValue: "200" + }, + { + key: "visible", + label: "Show Overlay on Agent", + type: "select", + options: ["true", "false"], + defaultValue: "true" + }, + { + key: "alias", + label: "Overlay Label", + type: "text", + defaultValue: "" + } + ], + usage_documentation: ` +### Agent Role: Screenshot Node + +This node defines a screenshot-capture role for a Borealis Agent. + +**How It Works** +- The region (X, Y, W, H) is sent to the Agent for real-time screenshot capture. +- The interval determines how often the Agent captures and pushes new images. +- Optionally, an overlay with a label can be displayed on the Agent's screen for visual feedback. +- The captured screenshot (as a base64 PNG) is available to downstream nodes. +- Use the share button to copy a live viewing URL for the screenshot stream. + +**Configuration** +- All fields are edited via the right sidebar. +- Coordinates update live if region is changed from the Agent. 
+ +**Warning** +- Changing region from the Agent UI will update this node's coordinates. +- Do not remove the bi-directional region write-back: if the region moves, this node updates immediately. + +**Example Use Cases** +- Automated visual QA (comparing regions of apps) +- OCR on live application windows +- Remote monitoring dashboards + + `.trim() +}; diff --git a/Data/Server/WebUI/src/nodes/Alerting/Node_Alert_Sound.jsx b/Data/Server/WebUI/src/nodes/Alerting/Node_Alert_Sound.jsx new file mode 100644 index 00000000..ecd10521 --- /dev/null +++ b/Data/Server/WebUI/src/nodes/Alerting/Node_Alert_Sound.jsx @@ -0,0 +1,326 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: Node_Alert_Sound.jsx + +/** + * ================================================== + * Borealis - Alert Sound Node (with Base64 Restore) + * ================================================== + * + * COMPONENT ROLE: + * Plays a sound when input = "1". Provides a visual indicator: + * - Green dot: input is 0 + * - Red dot: input is 1 + * + * Modes: + * - "Once": Triggers once when going 0 -> 1 + * - "Constant": Triggers repeatedly every X ms while input = 1 + * + * Supports embedding base64 audio directly into the workflow. 
+ */ + +import React, { useEffect, useRef, useState } from "react"; +import { Handle, Position, useReactFlow, useStore } from "reactflow"; + +if (!window.BorealisValueBus) window.BorealisValueBus = {}; +if (!window.BorealisUpdateRate) window.BorealisUpdateRate = 100; + +const AlertSoundNode = ({ id, data }) => { + const edges = useStore(state => state.edges); + const { setNodes } = useReactFlow(); + + const [alertType, setAlertType] = useState(data?.alertType || "Once"); + const [intervalMs, setIntervalMs] = useState(data?.interval || 1000); + const [prevInput, setPrevInput] = useState("0"); + const [customAudioBase64, setCustomAudioBase64] = useState(data?.audio || null); + const [currentInput, setCurrentInput] = useState("0"); + + const audioRef = useRef(null); + + const playSound = () => { + if (audioRef.current) { + console.log(`[Alert Node ${id}] Attempting to play sound`); + try { + audioRef.current.pause(); + audioRef.current.currentTime = 0; + audioRef.current.load(); + audioRef.current.play().then(() => { + console.log(`[Alert Node ${id}] Sound played successfully`); + }).catch((err) => { + console.warn(`[Alert Node ${id}] Audio play blocked or errored:`, err); + }); + } catch (err) { + console.error(`[Alert Node ${id}] Failed to play sound:`, err); + } + } else { + console.warn(`[Alert Node ${id}] No audioRef loaded`); + } + }; + + const handleFileUpload = (event) => { + const file = event.target.files[0]; + if (!file) return; + + console.log(`[Alert Node ${id}] File selected:`, file.name, file.type); + + const supportedTypes = ["audio/wav", "audio/mp3", "audio/mpeg", "audio/ogg"]; + if (!supportedTypes.includes(file.type)) { + console.warn(`[Alert Node ${id}] Unsupported audio type: ${file.type}`); + return; + } + + const reader = new FileReader(); + reader.onload = (e) => { + const base64 = e.target.result; + const mimeType = file.type || "audio/mpeg"; + const safeURL = base64.startsWith("data:") + ? 
base64 + : `data:${mimeType};base64,${base64}`; + + if (audioRef.current) { + audioRef.current.pause(); + audioRef.current.src = ""; + audioRef.current.load(); + audioRef.current = null; + } + + const newAudio = new Audio(); + newAudio.src = safeURL; + + let readyFired = false; + + newAudio.addEventListener("canplaythrough", () => { + if (readyFired) return; + readyFired = true; + console.log(`[Alert Node ${id}] Audio is decodable and ready: ${file.name}`); + + setCustomAudioBase64(safeURL); + audioRef.current = newAudio; + newAudio.load(); + + setNodes(nds => + nds.map(n => + n.id === id + ? { ...n, data: { ...n.data, audio: safeURL } } + : n + ) + ); + }); + + setTimeout(() => { + if (!readyFired) { + console.warn(`[Alert Node ${id}] WARNING: Audio not marked ready in time. May fail silently.`); + } + }, 2000); + }; + + reader.onerror = (e) => { + console.error(`[Alert Node ${id}] File read error:`, e); + }; + + reader.readAsDataURL(file); + }; + + // Restore embedded audio from saved workflow + useEffect(() => { + if (customAudioBase64) { + console.log(`[Alert Node ${id}] Loading embedded audio from workflow`); + + if (audioRef.current) { + audioRef.current.pause(); + audioRef.current.src = ""; + audioRef.current.load(); + audioRef.current = null; + } + + const loadedAudio = new Audio(customAudioBase64); + loadedAudio.addEventListener("canplaythrough", () => { + console.log(`[Alert Node ${id}] Embedded audio ready`); + }); + + audioRef.current = loadedAudio; + loadedAudio.load(); + } else { + console.log(`[Alert Node ${id}] No custom audio, using fallback silent wav`); + audioRef.current = new Audio("data:audio/wav;base64,UklGRiQAAABXQVZFZm10IBAAAAABAAEAESsAACJWAAACABAAZGF0YRAAAAAA"); + audioRef.current.load(); + } + }, [customAudioBase64]); + + useEffect(() => { + let currentRate = window.BorealisUpdateRate; + let intervalId = null; + + const runLogic = () => { + const inputEdge = edges.find(e => e.target === id); + const sourceId = inputEdge?.source || null; + 
const val = sourceId ? (window.BorealisValueBus[sourceId] || "0") : "0"; + + setCurrentInput(val); + + if (alertType === "Once") { + if (val === "1" && prevInput !== "1") { + console.log(`[Alert Node ${id}] Triggered ONCE playback`); + playSound(); + } + } + + setPrevInput(val); + }; + + const start = () => { + if (alertType === "Constant") { + intervalId = setInterval(() => { + const inputEdge = edges.find(e => e.target === id); + const sourceId = inputEdge?.source || null; + const val = sourceId ? (window.BorealisValueBus[sourceId] || "0") : "0"; + setCurrentInput(val); + if (String(val) === "1") { + console.log(`[Alert Node ${id}] Triggered CONSTANT playback`); + playSound(); + } + }, intervalMs); + } else { + intervalId = setInterval(runLogic, currentRate); + } + }; + + start(); + + const monitor = setInterval(() => { + const newRate = window.BorealisUpdateRate; + if (newRate !== currentRate && alertType === "Once") { + currentRate = newRate; + clearInterval(intervalId); + start(); + } + }, 250); + + return () => { + clearInterval(intervalId); + clearInterval(monitor); + }; + }, [edges, alertType, intervalMs, prevInput]); + + const indicatorColor = currentInput === "1" ? "#ff4444" : "#44ff44"; + + return ( +
    + + + {/* Header with indicator dot */} +
    + {data?.label || "Alert Sound"} +
    +
    + +
    +
    + Play a sound alert when input is "1" +
    + + + + + + setIntervalMs(parseInt(e.target.value))} + disabled={alertType === "Once"} + style={{ + ...inputStyle, + background: alertType === "Once" ? "#2a2a2a" : "#1e1e1e" + }} + /> + + + +
    + + +
    +
    +
    + ); +}; + +const dropdownStyle = { + fontSize: "9px", + padding: "4px", + background: "#1e1e1e", + color: "#ccc", + border: "1px solid #444", + borderRadius: "2px", + width: "100%", + marginBottom: "8px" +}; + +const inputStyle = { + fontSize: "9px", + padding: "4px", + color: "#ccc", + border: "1px solid #444", + borderRadius: "2px", + width: "100%", + marginBottom: "8px" +}; + +export default { + type: "AlertSoundNode", + label: "Alert Sound", + description: ` +Plays a sound alert when input = "1" + +- "Once" = Only when 0 -> 1 transition +- "Constant" = Repeats every X ms while input stays 1 +- Custom audio supported (MP3/WAV/OGG) +- Base64 audio embedded in workflow and restored +- Visual status indicator (green = 0, red = 1) +- Manual "Test" button for validation +`.trim(), + content: "Sound alert when input value = 1", + component: AlertSoundNode +}; diff --git a/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_Array_Index_Extractor.jsx b/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_Array_Index_Extractor.jsx new file mode 100644 index 00000000..0ff675db --- /dev/null +++ b/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_Array_Index_Extractor.jsx @@ -0,0 +1,142 @@ +import React, { useEffect, useRef, useState } from "react"; +import { Handle, Position, useReactFlow, useStore } from "reactflow"; + +// Ensure Borealis shared memory exists +if (!window.BorealisValueBus) window.BorealisValueBus = {}; +if (!window.BorealisUpdateRate) window.BorealisUpdateRate = 100; + +const ArrayIndexExtractorNode = ({ id, data }) => { + const edges = useStore((state) => state.edges); + const { setNodes } = useReactFlow(); + const [result, setResult] = useState("Line Does Not Exist"); + const valueRef = useRef(result); + + // Use config field, always 1-based for UX, fallback to 1 + const lineNumber = parseInt(data?.lineNumber, 10) || 1; + + useEffect(() => { + let intervalId = null; + let currentRate = window.BorealisUpdateRate; + + 
const runNodeLogic = () => { + const inputEdge = edges.find((e) => e.target === id); + if (!inputEdge) { + valueRef.current = "Line Does Not Exist"; + setResult("Line Does Not Exist"); + window.BorealisValueBus[id] = "Line Does Not Exist"; + return; + } + + const upstreamValue = window.BorealisValueBus[inputEdge.source]; + if (!Array.isArray(upstreamValue)) { + valueRef.current = "Line Does Not Exist"; + setResult("Line Does Not Exist"); + window.BorealisValueBus[id] = "Line Does Not Exist"; + return; + } + + const index = Math.max(0, lineNumber - 1); // 1-based to 0-based + const selected = upstreamValue[index] ?? "Line Does Not Exist"; + + if (selected !== valueRef.current) { + valueRef.current = selected; + setResult(selected); + window.BorealisValueBus[id] = selected; + } + }; + + intervalId = setInterval(runNodeLogic, currentRate); + + // Monitor update rate live + const monitor = setInterval(() => { + const newRate = window.BorealisUpdateRate; + if (newRate !== currentRate) { + clearInterval(intervalId); + currentRate = newRate; + intervalId = setInterval(runNodeLogic, currentRate); + } + }, 300); + + return () => { + clearInterval(intervalId); + clearInterval(monitor); + }; + }, [id, edges, lineNumber]); + + return ( +
    + +
    + {data?.label || "Array Index Extractor"} +
    +
    +
    + Output a specific line from an upstream array. +
    +
    + Line Number: {lineNumber} +
    + + +
    + +
    + ); +}; + +// ---- Node Registration Object with Sidebar Config & Markdown Docs ---- +export default { + type: "ArrayIndexExtractor", + label: "Array Index Extractor", + description: ` +Outputs a specific line from an upstream array, such as the result of OCR multi-line extraction. + +- Specify the **line number** (1 = first line) +- Outputs the value at that index if present +- If index is out of bounds, outputs "Line Does Not Exist" +`.trim(), + content: "Output a Specific Array Index's Value", + component: ArrayIndexExtractorNode, + config: [ + { + key: "lineNumber", + label: "Line Number (1 = First Line)", + type: "text", + defaultValue: "1" + } + ], + usage_documentation: ` +### Array Index Extractor Node + +This node allows you to extract a specific line or item from an upstream array value. + +**Typical Use:** +- Used after OCR or any node that outputs an array of lines or items. +- Set the **Line Number** (1-based, so "1" = first line). + +**Behavior:** +- If the line exists, outputs the value at that position. +- If not, outputs: \`Line Does Not Exist\`. + +**Input:** +- Connect an upstream node that outputs an array (such as OCR Text Extraction). + +**Sidebar Config:** +- Set the desired line number from the configuration sidebar for live updates. 
+ +--- +`.trim() +}; diff --git a/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Display.jsx b/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Display.jsx new file mode 100644 index 00000000..a6c5548c --- /dev/null +++ b/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Display.jsx @@ -0,0 +1,179 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Display.jsx + +import React, { useEffect, useState, useRef, useCallback } from "react"; +import { Handle, Position, useReactFlow, useStore } from "reactflow"; +// For syntax highlighting, ensure prismjs is installed: npm install prismjs +import Prism from "prismjs"; +import "prismjs/components/prism-json"; +import "prismjs/themes/prism-okaidia.css"; + +const JSONPrettyDisplayNode = ({ id, data }) => { + const { setNodes } = useReactFlow(); + const edges = useStore((state) => state.edges); + const containerRef = useRef(null); + const resizingRef = useRef(false); + const startPosRef = useRef({ x: 0, y: 0 }); + const startDimRef = useRef({ width: 0, height: 0 }); + + const [jsonData, setJsonData] = useState(data?.jsonData || {}); + const initW = parseInt(data?.width || "300", 10); + const initH = parseInt(data?.height || "150", 10); + const [dimensions, setDimensions] = useState({ width: initW, height: initH }); + const jsonRef = useRef(jsonData); + + const persistDimensions = useCallback(() => { + const w = `${Math.round(dimensions.width)}px`; + const h = `${Math.round(dimensions.height)}px`; + setNodes((nds) => + nds.map((n) => + n.id === id + ? 
{ ...n, data: { ...n.data, width: w, height: h } } + : n + ) + ); + }, [dimensions, id, setNodes]); + + useEffect(() => { + const onMouseMove = (e) => { + if (!resizingRef.current) return; + const dx = e.clientX - startPosRef.current.x; + const dy = e.clientY - startPosRef.current.y; + setDimensions({ + width: Math.max(100, startDimRef.current.width + dx), + height: Math.max(60, startDimRef.current.height + dy) + }); + }; + const onMouseUp = () => { + if (resizingRef.current) { + resizingRef.current = false; + persistDimensions(); + } + }; + window.addEventListener("mousemove", onMouseMove); + window.addEventListener("mouseup", onMouseUp); + return () => { + window.removeEventListener("mousemove", onMouseMove); + window.removeEventListener("mouseup", onMouseUp); + }; + }, [persistDimensions]); + + const onResizeMouseDown = (e) => { + e.stopPropagation(); + resizingRef.current = true; + startPosRef.current = { x: e.clientX, y: e.clientY }; + startDimRef.current = { ...dimensions }; + }; + + useEffect(() => { + let rate = window.BorealisUpdateRate; + const tick = () => { + const edge = edges.find((e) => e.target === id); + if (edge && edge.source) { + const upstream = window.BorealisValueBus[edge.source]; + if (typeof upstream === "object") { + if (JSON.stringify(upstream) !== JSON.stringify(jsonRef.current)) { + jsonRef.current = upstream; + setJsonData(upstream); + window.BorealisValueBus[id] = upstream; + setNodes((nds) => + nds.map((n) => + n.id === id ? 
{ ...n, data: { ...n.data, jsonData: upstream } } : n + ) + ); + } + } + } else { + window.BorealisValueBus[id] = jsonRef.current; + } + }; + const iv = setInterval(tick, rate); + const monitor = setInterval(() => { + if (window.BorealisUpdateRate !== rate) { + clearInterval(iv); + clearInterval(monitor); + } + }, 200); + return () => { clearInterval(iv); clearInterval(monitor); }; + }, [id, edges, setNodes]); + + // Generate highlighted HTML + const pretty = JSON.stringify(jsonData, null, 2); + const highlighted = Prism.highlight(pretty, Prism.languages.json, "json"); + + return ( +
    + + + +
    Display JSON Data
    +
    +
    + Display prettified JSON from upstream. +
    +
    +
    +        
    +
    + +
    +
    + ); +}; + +export default { + type: "Node_JSON_Pretty_Display", + label: "Display JSON Data", + description: "Display upstream JSON object as prettified JSON with syntax highlighting.", + content: "Display prettified multi-line JSON from upstream node.", + component: JSONPrettyDisplayNode +}; diff --git a/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Value_Extractor.jsx b/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Value_Extractor.jsx new file mode 100644 index 00000000..7e581f95 --- /dev/null +++ b/Data/Server/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Value_Extractor.jsx @@ -0,0 +1,132 @@ +////////// PROJECT FILE SEPARATION LINE ////////// CODE AFTER THIS LINE ARE FROM: /Data/WebUI/src/nodes/Data Analysis & Manipulation/Node_JSON_Value_Extractor.jsx + +import React, { useState, useEffect } from "react"; +import { Handle, Position, useReactFlow } from "reactflow"; + +const JSONValueExtractorNode = ({ id, data }) => { + const { setNodes, getEdges } = useReactFlow(); + const [keyName, setKeyName] = useState(data?.keyName || ""); + const [value, setValue] = useState(data?.result || ""); + + const handleKeyChange = (e) => { + const newKey = e.target.value; + setKeyName(newKey); + setNodes((nds) => + nds.map((n) => + n.id === id + ? 
{ ...n, data: { ...n.data, keyName: newKey } } + : n + ) + ); + }; + + useEffect(() => { + let currentRate = window.BorealisUpdateRate; + let intervalId; + + const runNodeLogic = () => { + const edges = getEdges(); + const incoming = edges.filter((e) => e.target === id); + const sourceId = incoming[0]?.source; + let newValue = "Key Not Found"; + + if (sourceId && window.BorealisValueBus[sourceId] !== undefined) { + let upstream = window.BorealisValueBus[sourceId]; + if (upstream && typeof upstream === "object" && keyName) { + const pathSegments = keyName.split("."); + let nodeVal = upstream; + for (let segment of pathSegments) { + if ( + nodeVal != null && + (typeof nodeVal === "object" || Array.isArray(nodeVal)) && + segment in nodeVal + ) { + nodeVal = nodeVal[segment]; + } else { + nodeVal = undefined; + break; + } + } + if (nodeVal !== undefined) { + newValue = String(nodeVal); + } + } + } + + if (newValue !== value) { + setValue(newValue); + window.BorealisValueBus[id] = newValue; + setNodes((nds) => + nds.map((n) => + n.id === id + ? { ...n, data: { ...n.data, result: newValue } } + : n + ) + ); + } + }; + + runNodeLogic(); + intervalId = setInterval(runNodeLogic, currentRate); + + const monitor = setInterval(() => { + const newRate = window.BorealisUpdateRate; + if (newRate !== currentRate) { + clearInterval(intervalId); + currentRate = newRate; + intervalId = setInterval(runNodeLogic, currentRate); + } + }, 250); + + return () => { + clearInterval(intervalId); + clearInterval(monitor); + }; + }, [keyName, id, setNodes, getEdges, value]); + + return ( +
    +
    JSON Value Extractor
    +
    + + + +