Add Engine SQLite infrastructure
@@ -15,7 +15,7 @@
 - 3.2 Stub out blueprint/Socket.IO registration hooks that mirror names from legacy code (no logic yet).
 - 3.3 Smoke-test app startup via `python Data/Engine/bootstrapper.py` (or Flask CLI) to ensure no regressions.

-4. Establish SQLite infrastructure
+[COMPLETED] 4. Establish SQLite infrastructure
 - 4.1 Copy `_db_conn` logic into `repositories/sqlite/connection.py`, parameterized by database path (`<root>/database.db`).
 - 4.2 Port migration helpers into `repositories/sqlite/migrations.py`; expose an `apply_all()` callable.
 - 4.3 Wire migrations to run during Engine bootstrap (behind a flag) and confirm tables initialize in a sandbox DB.
@@ -10,6 +10,7 @@ The Engine mirrors the legacy defaults so it can boot without additional configu
 | --- | --- | --- |
 | `BOREALIS_ROOT` | Overrides automatic project root detection. Useful when running from a packaged location. | Directory two levels above `Data/Engine/` |
 | `BOREALIS_DATABASE_PATH` | Path to the SQLite database. | `<project_root>/database.db` |
+| `BOREALIS_ENGINE_AUTO_MIGRATE` | Run Engine-managed schema migrations during bootstrap (`true`/`false`). | `true` |
 | `BOREALIS_STATIC_ROOT` | Directory that serves static assets for the SPA. | First existing path among `Data/Server/web-interface/build`, `Data/Server/WebUI/build`, `Data/WebUI/build` |
 | `BOREALIS_CORS_ALLOWED_ORIGINS` | Comma-delimited list of origins granted CORS access. Use `*` for all origins. | `*` |
 | `BOREALIS_FLASK_SECRET_KEY` | Secret key for Flask session signing. | `change-me` |
@@ -23,9 +24,9 @@ The Engine mirrors the legacy defaults so it can boot without additional configu

 ## Bootstrapping flow

-1. `Data/Engine/bootstrapper.py` loads the environment, configures logging, and builds the Flask application via `Data/Engine/server.py`.
+1. `Data/Engine/bootstrapper.py` loads the environment, configures logging, prepares the SQLite connection factory, optionally applies schema migrations, and builds the Flask application via `Data/Engine/server.py`.
 2. Placeholder HTTP and Socket.IO registration hooks run so the Engine can start without any migrated routes yet.
-3. The resulting runtime object exposes the Flask app, resolved settings, and optional Socket.IO server. `bootstrapper.main()` runs the appropriate server based on whether Socket.IO is present.
+3. The resulting runtime object exposes the Flask app, resolved settings, optional Socket.IO server, and the configured database connection factory. `bootstrapper.main()` runs the appropriate server based on whether Socket.IO is present.

 As migration continues, services, repositories, interfaces, and integrations will live under their respective subpackages while maintaining isolation from the legacy server.

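For reference, a minimal sketch of driving this flow from Python. It is not part of the diff; the `Data.Engine` import path and the database path are assumptions based on the layout described above, and the environment variables are read when `bootstrap()` runs.

```python
# Sketch only: set the documented overrides, then hand off to the Engine bootstrapper.
import os

os.environ["BOREALIS_DATABASE_PATH"] = "/tmp/borealis-sandbox.db"   # example path
os.environ["BOREALIS_ENGINE_AUTO_MIGRATE"] = "true"                 # run schema migrations at bootstrap
os.environ["BOREALIS_CORS_ALLOWED_ORIGINS"] = "*"

from Data.Engine import bootstrapper  # assumed package path

# Builds the Flask app (and optional Socket.IO server) and runs the appropriate server.
bootstrapper.main()
```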
@@ -13,6 +13,8 @@ from .interfaces import (
     register_http_interfaces,
     register_ws_interfaces,
 )
+from .repositories.sqlite import connection as sqlite_connection
+from .repositories.sqlite import migrations as sqlite_migrations
 from .server import create_app


@@ -23,6 +25,7 @@ class EngineRuntime:
     app: Flask
     settings: EngineSettings
     socketio: Optional[object]
+    db_factory: sqlite_connection.SQLiteConnectionFactory


 def bootstrap() -> EngineRuntime:
@@ -31,12 +34,22 @@ def bootstrap() -> EngineRuntime:
     settings = load_environment()
     logger = configure_logging(settings)
     logger.info("bootstrap-started")
-    app = create_app(settings)
+
+    db_factory = sqlite_connection.connection_factory(settings.database_path)
+    if settings.apply_migrations:
+        logger.info("migrations-start")
+        with sqlite_connection.connection_scope(settings.database_path) as conn:
+            sqlite_migrations.apply_all(conn)
+        logger.info("migrations-complete")
+    else:
+        logger.info("migrations-skipped")
+
+    app = create_app(settings, db_factory=db_factory)
     register_http_interfaces(app)
     socketio = create_socket_server(app, settings.socketio)
     register_ws_interfaces(socketio)
     logger.info("bootstrap-complete")
-    return EngineRuntime(app=app, settings=settings, socketio=socketio)
+    return EngineRuntime(app=app, settings=settings, socketio=socketio, db_factory=db_factory)


 def main() -> None:
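A hedged sketch of consuming the runtime returned above (not part of this commit; the import path is an assumption):

```python
from Data.Engine.bootstrapper import bootstrap  # assumed package path

runtime = bootstrap()            # migrations have already run if BOREALIS_ENGINE_AUTO_MIGRATE was truthy
conn = runtime.db_factory()      # open a configured sqlite3.Connection on demand
try:
    tables = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
    ).fetchall()
    print([name for (name,) in tables])   # e.g. devices, device_keys, refresh_tokens, ...
finally:
    conn.close()
```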
@@ -13,6 +13,7 @@ class DatabaseSettings:
     """SQLite database configuration for the Engine."""

     path: Path
+    apply_migrations: bool


 @dataclass(frozen=True, slots=True)
@@ -62,6 +63,12 @@ class EngineSettings:

         return self.database.path

+    @property
+    def apply_migrations(self) -> bool:
+        """Return whether schema migrations should run at bootstrap."""
+
+        return self.database.apply_migrations
+

 def _resolve_project_root() -> Path:
     candidate = os.getenv("BOREALIS_ROOT")
@@ -77,6 +84,11 @@ def _resolve_database_path(project_root: Path) -> Path:
     return (project_root / "database.db").resolve()


+def _should_apply_migrations() -> bool:
+    raw = os.getenv("BOREALIS_ENGINE_AUTO_MIGRATE", "true")
+    return raw.lower() in {"1", "true", "yes", "on"}
+
+
 def _resolve_static_root(project_root: Path) -> Path:
     candidate = os.getenv("BOREALIS_STATIC_ROOT")
     if candidate:
@@ -110,7 +122,10 @@ def load_environment() -> EngineSettings:
     """Load Engine settings from environment variables and filesystem hints."""

     project_root = _resolve_project_root()
-    database = DatabaseSettings(path=_resolve_database_path(project_root))
+    database = DatabaseSettings(
+        path=_resolve_database_path(project_root),
+        apply_migrations=_should_apply_migrations(),
+    )
     cors_allowed_origins = _parse_origins(os.getenv("BOREALIS_CORS_ALLOWED_ORIGINS"))
     flask_settings = FlaskSettings(
         secret_key=os.getenv("BOREALIS_FLASK_SECRET_KEY", "change-me"),
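A standalone illustration of the truthy parsing used by `_should_apply_migrations()` above (illustrative only, not part of the diff):

```python
import os

# Values outside the accepted set disable migrations at bootstrap.
for value in ("1", "true", "YES", "on", "false", "0", "off"):
    os.environ["BOREALIS_ENGINE_AUTO_MIGRATE"] = value
    raw = os.getenv("BOREALIS_ENGINE_AUTO_MIGRATE", "true")
    enabled = raw.lower() in {"1", "true", "yes", "on"}
    print(f"{value!r:8} -> apply_migrations={enabled}")
```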
@@ -2,4 +2,6 @@

 from __future__ import annotations

-__all__: list[str] = []
+from . import sqlite
+
+__all__ = ["sqlite"]
Data/Engine/repositories/sqlite/__init__.py (new file, 21 lines)
@@ -0,0 +1,21 @@
"""SQLite persistence helpers for the Borealis Engine."""

from __future__ import annotations

from .connection import (
    SQLiteConnectionFactory,
    configure_connection,
    connect,
    connection_factory,
    connection_scope,
)
from .migrations import apply_all

__all__ = [
    "SQLiteConnectionFactory",
    "configure_connection",
    "connect",
    "connection_factory",
    "connection_scope",
    "apply_all",
]
Data/Engine/repositories/sqlite/connection.py (new file, 67 lines)
@@ -0,0 +1,67 @@
"""SQLite connection utilities for the Borealis Engine."""

from __future__ import annotations

import sqlite3
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, Protocol

__all__ = [
    "SQLiteConnectionFactory",
    "configure_connection",
    "connect",
    "connection_factory",
    "connection_scope",
]


class SQLiteConnectionFactory(Protocol):
    """Callable protocol for obtaining configured SQLite connections."""

    def __call__(self) -> sqlite3.Connection:
        """Return a new :class:`sqlite3.Connection`."""


def configure_connection(conn: sqlite3.Connection) -> None:
    """Apply the Borealis-standard pragmas to *conn*."""

    cur = conn.cursor()
    try:
        cur.execute("PRAGMA journal_mode=WAL")
        cur.execute("PRAGMA busy_timeout=5000")
        cur.execute("PRAGMA synchronous=NORMAL")
        conn.commit()
    except Exception:
        # Pragmas are best-effort; failing to apply them should not block startup.
        conn.rollback()
    finally:
        cur.close()


def connect(path: Path, *, timeout: float = 15.0) -> sqlite3.Connection:
    """Create a new SQLite connection to *path* with Engine pragmas applied."""

    conn = sqlite3.connect(str(path), timeout=timeout)
    configure_connection(conn)
    return conn


def connection_factory(path: Path, *, timeout: float = 15.0) -> SQLiteConnectionFactory:
    """Return a factory that opens connections to *path* when invoked."""

    def factory() -> sqlite3.Connection:
        return connect(path, timeout=timeout)

    return factory


@contextmanager
def connection_scope(path: Path, *, timeout: float = 15.0) -> Iterator[sqlite3.Connection]:
    """Context manager yielding a configured connection to *path*."""

    conn = connect(path, timeout=timeout)
    try:
        yield conn
    finally:
        conn.close()
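Illustrative usage of these helpers (not part of the diff); the database path and the `Data.Engine` import path are placeholders:

```python
from pathlib import Path

from Data.Engine.repositories.sqlite import connection_factory, connection_scope  # assumed path

db_path = Path("/tmp/borealis-example.db")

factory = connection_factory(db_path)      # the callable handed to create_app()/interfaces
with connection_scope(db_path) as conn:    # short-lived connection with WAL/busy_timeout pragmas
    conn.execute("CREATE TABLE IF NOT EXISTS example (id INTEGER PRIMARY KEY)")
    conn.commit()

conn2 = factory()                          # each call opens a fresh, configured connection
print(conn2.execute("PRAGMA journal_mode").fetchone())   # ('wal',) once the pragma applied
conn2.close()
```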
Data/Engine/repositories/sqlite/migrations.py (new file, 402 lines)
@@ -0,0 +1,402 @@
"""SQLite schema migrations for the Borealis Engine.

This module centralises schema evolution so the Engine and its interfaces can stay
focused on request handling. The migration functions are intentionally
idempotent — they can run repeatedly without changing state once the schema
matches the desired shape.
"""

from __future__ import annotations

import sqlite3
import uuid
from datetime import datetime, timezone
from typing import List, Optional, Sequence, Tuple


DEVICE_TABLE = "devices"


def apply_all(conn: sqlite3.Connection) -> None:
    """
    Run all known schema migrations against the provided sqlite3 connection.
    """

    _ensure_devices_table(conn)
    _ensure_device_aux_tables(conn)
    _ensure_refresh_token_table(conn)
    _ensure_install_code_table(conn)
    _ensure_device_approval_table(conn)

    conn.commit()


def _ensure_devices_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    if not _table_exists(cur, DEVICE_TABLE):
        _create_devices_table(cur)
        return

    column_info = _table_info(cur, DEVICE_TABLE)
    col_names = [c[1] for c in column_info]
    pk_cols = [c[1] for c in column_info if c[5]]

    needs_rebuild = pk_cols != ["guid"]
    required_columns = {
        "guid": "TEXT",
        "hostname": "TEXT",
        "description": "TEXT",
        "created_at": "INTEGER",
        "agent_hash": "TEXT",
        "memory": "TEXT",
        "network": "TEXT",
        "software": "TEXT",
        "storage": "TEXT",
        "cpu": "TEXT",
        "device_type": "TEXT",
        "domain": "TEXT",
        "external_ip": "TEXT",
        "internal_ip": "TEXT",
        "last_reboot": "TEXT",
        "last_seen": "INTEGER",
        "last_user": "TEXT",
        "operating_system": "TEXT",
        "uptime": "INTEGER",
        "agent_id": "TEXT",
        "ansible_ee_ver": "TEXT",
        "connection_type": "TEXT",
        "connection_endpoint": "TEXT",
        "ssl_key_fingerprint": "TEXT",
        "token_version": "INTEGER",
        "status": "TEXT",
        "key_added_at": "TEXT",
    }

    missing_columns = [col for col in required_columns if col not in col_names]
    if missing_columns:
        needs_rebuild = True

    if needs_rebuild:
        _rebuild_devices_table(conn, column_info)
    else:
        _ensure_column_defaults(cur)

    _ensure_device_indexes(cur)


def _ensure_device_aux_tables(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS device_keys (
            id TEXT PRIMARY KEY,
            guid TEXT NOT NULL,
            ssl_key_fingerprint TEXT NOT NULL,
            added_at TEXT NOT NULL,
            retired_at TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE UNIQUE INDEX IF NOT EXISTS uq_device_keys_guid_fingerprint
        ON device_keys(guid, ssl_key_fingerprint)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_device_keys_guid
        ON device_keys(guid)
        """
    )


def _ensure_refresh_token_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS refresh_tokens (
            id TEXT PRIMARY KEY,
            guid TEXT NOT NULL,
            token_hash TEXT NOT NULL,
            dpop_jkt TEXT,
            created_at TEXT NOT NULL,
            expires_at TEXT NOT NULL,
            revoked_at TEXT,
            last_used_at TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_refresh_tokens_guid
        ON refresh_tokens(guid)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_refresh_tokens_expires_at
        ON refresh_tokens(expires_at)
        """
    )


def _ensure_install_code_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS enrollment_install_codes (
            id TEXT PRIMARY KEY,
            code TEXT NOT NULL UNIQUE,
            expires_at TEXT NOT NULL,
            created_by_user_id TEXT,
            used_at TEXT,
            used_by_guid TEXT,
            max_uses INTEGER NOT NULL DEFAULT 1,
            use_count INTEGER NOT NULL DEFAULT 0,
            last_used_at TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_eic_expires_at
        ON enrollment_install_codes(expires_at)
        """
    )

    columns = {row[1] for row in _table_info(cur, "enrollment_install_codes")}
    if "max_uses" not in columns:
        cur.execute(
            """
            ALTER TABLE enrollment_install_codes
            ADD COLUMN max_uses INTEGER NOT NULL DEFAULT 1
            """
        )
    if "use_count" not in columns:
        cur.execute(
            """
            ALTER TABLE enrollment_install_codes
            ADD COLUMN use_count INTEGER NOT NULL DEFAULT 0
            """
        )
    if "last_used_at" not in columns:
        cur.execute(
            """
            ALTER TABLE enrollment_install_codes
            ADD COLUMN last_used_at TEXT
            """
        )


def _ensure_device_approval_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS device_approvals (
            id TEXT PRIMARY KEY,
            approval_reference TEXT NOT NULL UNIQUE,
            guid TEXT,
            hostname_claimed TEXT NOT NULL,
            ssl_key_fingerprint_claimed TEXT NOT NULL,
            enrollment_code_id TEXT NOT NULL,
            status TEXT NOT NULL,
            client_nonce TEXT NOT NULL,
            server_nonce TEXT NOT NULL,
            agent_pubkey_der BLOB NOT NULL,
            created_at TEXT NOT NULL,
            updated_at TEXT NOT NULL,
            approved_by_user_id TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_da_status
        ON device_approvals(status)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_da_fp_status
        ON device_approvals(ssl_key_fingerprint_claimed, status)
        """
    )


def _create_devices_table(cur: sqlite3.Cursor) -> None:
    cur.execute(
        """
        CREATE TABLE devices (
            guid TEXT PRIMARY KEY,
            hostname TEXT,
            description TEXT,
            created_at INTEGER,
            agent_hash TEXT,
            memory TEXT,
            network TEXT,
            software TEXT,
            storage TEXT,
            cpu TEXT,
            device_type TEXT,
            domain TEXT,
            external_ip TEXT,
            internal_ip TEXT,
            last_reboot TEXT,
            last_seen INTEGER,
            last_user TEXT,
            operating_system TEXT,
            uptime INTEGER,
            agent_id TEXT,
            ansible_ee_ver TEXT,
            connection_type TEXT,
            connection_endpoint TEXT,
            ssl_key_fingerprint TEXT,
            token_version INTEGER DEFAULT 1,
            status TEXT DEFAULT 'active',
            key_added_at TEXT
        )
        """
    )
    _ensure_device_indexes(cur)


def _ensure_device_indexes(cur: sqlite3.Cursor) -> None:
    cur.execute(
        """
        CREATE UNIQUE INDEX IF NOT EXISTS uq_devices_hostname
        ON devices(hostname)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_devices_ssl_key
        ON devices(ssl_key_fingerprint)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_devices_status
        ON devices(status)
        """
    )


def _ensure_column_defaults(cur: sqlite3.Cursor) -> None:
    cur.execute(
        """
        UPDATE devices
        SET token_version = COALESCE(token_version, 1)
        WHERE token_version IS NULL
        """
    )
    cur.execute(
        """
        UPDATE devices
        SET status = COALESCE(status, 'active')
        WHERE status IS NULL OR status = ''
        """
    )


def _rebuild_devices_table(conn: sqlite3.Connection, column_info: Sequence[Tuple]) -> None:
    cur = conn.cursor()
    cur.execute("PRAGMA foreign_keys=OFF")
    cur.execute("BEGIN IMMEDIATE")

    cur.execute("ALTER TABLE devices RENAME TO devices_legacy")
    _create_devices_table(cur)

    legacy_columns = [c[1] for c in column_info]
    cur.execute(f"SELECT {', '.join(legacy_columns)} FROM devices_legacy")
    rows = cur.fetchall()

    insert_sql = (
        """
        INSERT OR REPLACE INTO devices (
            guid, hostname, description, created_at, agent_hash, memory,
            network, software, storage, cpu, device_type, domain, external_ip,
            internal_ip, last_reboot, last_seen, last_user, operating_system,
            uptime, agent_id, ansible_ee_ver, connection_type, connection_endpoint,
            ssl_key_fingerprint, token_version, status, key_added_at
        )
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """
    )

    for row in rows:
        record = dict(zip(legacy_columns, row))
        guid = _normalized_guid(record.get("guid"))
        if not guid:
            guid = str(uuid.uuid4())
        hostname = record.get("hostname")
        created_at = record.get("created_at")
        key_added_at = record.get("key_added_at")
        if key_added_at is None:
            key_added_at = _default_key_added_at(created_at)

        params: Tuple = (
            guid,
            hostname,
            record.get("description"),
            created_at,
            record.get("agent_hash"),
            record.get("memory"),
            record.get("network"),
            record.get("software"),
            record.get("storage"),
            record.get("cpu"),
            record.get("device_type"),
            record.get("domain"),
            record.get("external_ip"),
            record.get("internal_ip"),
            record.get("last_reboot"),
            record.get("last_seen"),
            record.get("last_user"),
            record.get("operating_system"),
            record.get("uptime"),
            record.get("agent_id"),
            record.get("ansible_ee_ver"),
            record.get("connection_type"),
            record.get("connection_endpoint"),
            record.get("ssl_key_fingerprint"),
            record.get("token_version") or 1,
            record.get("status") or "active",
            key_added_at,
        )
        cur.execute(insert_sql, params)

    cur.execute("DROP TABLE devices_legacy")
    cur.execute("COMMIT")
    cur.execute("PRAGMA foreign_keys=ON")


def _default_key_added_at(created_at: Optional[int]) -> Optional[str]:
    if created_at:
        try:
            dt = datetime.fromtimestamp(int(created_at), tz=timezone.utc)
            return dt.isoformat()
        except Exception:
            pass
    return datetime.now(tz=timezone.utc).isoformat()


def _table_exists(cur: sqlite3.Cursor, name: str) -> bool:
    cur.execute(
        "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?",
        (name,),
    )
    return cur.fetchone() is not None


def _table_info(cur: sqlite3.Cursor, name: str) -> List[Tuple]:
    cur.execute(f"PRAGMA table_info({name})")
    return cur.fetchall()


def _normalized_guid(value: Optional[str]) -> str:
    if not value:
        return ""
    return str(value).strip()


__all__ = ["apply_all"]
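A sandbox check in the spirit of task 4.3 above, illustrative only; the `Data.Engine` import path is an assumption based on the new package layout:

```python
import tempfile
from pathlib import Path

from Data.Engine.repositories.sqlite import apply_all, connection_scope  # assumed path

with tempfile.TemporaryDirectory() as tmp:
    db_path = Path(tmp) / "database.db"
    with connection_scope(db_path) as conn:
        apply_all(conn)
        names = {
            row[0]
            for row in conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
        }
    expected = {
        "devices",
        "device_keys",
        "refresh_tokens",
        "enrollment_install_codes",
        "device_approvals",
    }
    print(expected <= names)   # True when every migrated table was created
```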
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from pathlib import Path
+from typing import Optional

 from flask import Flask, request, send_from_directory
 from flask_cors import CORS
@@ -12,6 +13,12 @@ from werkzeug.middleware.proxy_fix import ProxyFix
 from .config import EngineSettings


+from .repositories.sqlite.connection import (
+    SQLiteConnectionFactory,
+    connection_factory as create_sqlite_connection_factory,
+)
+
+
 def _resolve_static_folder(static_root: Path) -> tuple[str, str]:
     return str(static_root), ""

@@ -51,9 +58,16 @@ def _register_spa_routes(app: Flask, assets_root: Path) -> None:
         return error


-def create_app(settings: EngineSettings) -> Flask:
+def create_app(
+    settings: EngineSettings,
+    *,
+    db_factory: Optional[SQLiteConnectionFactory] = None,
+) -> Flask:
     """Create the Flask application instance for the Engine."""

+    if db_factory is None:
+        db_factory = create_sqlite_connection_factory(settings.database_path)
+
     static_folder, static_url_path = _resolve_static_folder(settings.flask.static_root)
     app = Flask(
         __name__,
@@ -68,6 +82,7 @@ def create_app(settings: EngineSettings) -> Flask:
         SESSION_COOKIE_SECURE=not settings.debug,
         SESSION_COOKIE_SAMESITE="Lax",
         ENGINE_DATABASE_PATH=str(settings.database_path),
+        ENGINE_DB_CONN_FACTORY=db_factory,
     )
     app.config.setdefault("PREFERRED_URL_SCHEME", "https")

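A sketch of how a future Engine interface might consume the `ENGINE_DB_CONN_FACTORY` entry set above; the blueprint name and route are hypothetical, only the config keys come from `create_app()`:

```python
from flask import Blueprint, current_app, jsonify

devices_bp = Blueprint("devices", __name__)  # hypothetical blueprint


@devices_bp.get("/api/engine/devices/count")  # hypothetical route
def device_count():
    factory = current_app.config["ENGINE_DB_CONN_FACTORY"]   # SQLiteConnectionFactory
    conn = factory()
    try:
        (count,) = conn.execute("SELECT COUNT(*) FROM devices").fetchone()
    finally:
        conn.close()
    return jsonify({"devices": count})
```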