Mirror of https://github.com/bunny-lab-io/Borealis.git (synced 2025-10-26 17:41:58 -06:00)
feat: scaffold security modules and TLS foundation
Data/Server/Modules/__init__.py (new file, one empty line)
@@ -0,0 +1 @@
Data/Server/Modules/auth/__init__.py (new file, one empty line)
@@ -0,0 +1 @@
Data/Server/Modules/auth/dpop.py (new file, 109 lines)
@@ -0,0 +1,109 @@
"""
DPoP proof verification helpers.
"""

from __future__ import annotations

import hashlib
import time
from threading import Lock
from typing import Dict, Optional

import jwt

_DPOP_MAX_SKEW = 300.0  # seconds


class DPoPVerificationError(Exception):
    pass


class DPoPReplayError(DPoPVerificationError):
    pass


class DPoPValidator:
    def __init__(self) -> None:
        self._observed_jti: Dict[str, float] = {}
        self._lock = Lock()

    def verify(
        self,
        method: str,
        htu: str,
        proof: str,
        access_token: Optional[str] = None,
    ) -> str:
        """
        Verify the presented DPoP proof. Returns the JWK thumbprint on success.
        """

        if not proof:
            raise DPoPVerificationError("DPoP proof missing")

        try:
            header = jwt.get_unverified_header(proof)
        except Exception as exc:
            raise DPoPVerificationError("invalid DPoP header") from exc

        jwk = header.get("jwk")
        alg = header.get("alg")
        if not jwk or not isinstance(jwk, dict):
            raise DPoPVerificationError("missing jwk in DPoP header")
        if alg not in ("EdDSA", "ES256", "ES384", "ES512"):
            raise DPoPVerificationError(f"unsupported DPoP alg {alg}")

        try:
            key = jwt.PyJWK(jwk)
            public_key = key.key
        except Exception as exc:
            raise DPoPVerificationError("invalid jwk in DPoP header") from exc

        try:
            claims = jwt.decode(
                proof,
                public_key,
                algorithms=[alg],
                options={"require": ["htm", "htu", "jti", "iat"]},
            )
        except Exception as exc:
            raise DPoPVerificationError("invalid DPoP signature") from exc

        htm = claims.get("htm")
        proof_htu = claims.get("htu")
        jti = claims.get("jti")
        iat = claims.get("iat")
        ath = claims.get("ath")

        if not isinstance(htm, str) or htm.lower() != method.lower():
            raise DPoPVerificationError("DPoP htm mismatch")
        if not isinstance(proof_htu, str) or proof_htu != htu:
            raise DPoPVerificationError("DPoP htu mismatch")
        if not isinstance(jti, str):
            raise DPoPVerificationError("DPoP jti missing")
        if not isinstance(iat, (int, float)):
            raise DPoPVerificationError("DPoP iat missing")

        now = time.time()
        if abs(now - float(iat)) > _DPOP_MAX_SKEW:
            raise DPoPVerificationError("DPoP proof outside allowed skew")

        if ath and access_token:
            expected_ath = jwt.utils.base64url_encode(
                hashlib.sha256(access_token.encode("utf-8")).digest()
            ).decode("ascii")
            if expected_ath != ath:
                raise DPoPVerificationError("DPoP ath mismatch")

        with self._lock:
            expiry = self._observed_jti.get(jti)
            if expiry and expiry > now:
                raise DPoPReplayError("DPoP proof replay detected")
            self._observed_jti[jti] = now + _DPOP_MAX_SKEW
            # Opportunistic cleanup of expired jti entries.
            stale = [key for key, exp in self._observed_jti.items() if exp <= now]
            for key in stale:
                self._observed_jti.pop(key, None)

        thumbprint = jwt.PyJWK(jwk).thumbprint()
        return thumbprint.decode("ascii")
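For context, a minimal client-side sketch (not part of this commit) of how an agent could mint a proof this validator accepts. The endpoint URL is a placeholder, and ES256 is just one of the algorithms the allowlist above permits:

import time
import uuid

import jwt
from cryptography.hazmat.primitives.asymmetric import ec

key = ec.generate_private_key(ec.SECP256R1())
numbers = key.public_key().public_numbers()

def _coord(value: int) -> str:
    # P-256 coordinates are 32-byte big-endian integers, base64url without padding.
    return jwt.utils.base64url_encode(value.to_bytes(32, "big")).decode("ascii")

jwk = {"kty": "EC", "crv": "P-256", "x": _coord(numbers.x), "y": _coord(numbers.y)}

proof = jwt.encode(
    {
        "htm": "POST",
        "htu": "https://borealis.example/api/agent/enroll",  # placeholder URL
        "jti": str(uuid.uuid4()),
        "iat": int(time.time()),
    },
    key,
    algorithm="ES256",
    headers={"typ": "dpop+jwt", "jwk": jwk},
)

# The validator returns the JWK thumbprint, which callers can bind to a session.
jkt = DPoPValidator().verify("POST", "https://borealis.example/api/agent/enroll", proof)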
Data/Server/Modules/auth/jwt_service.py (new file, 118 lines)
@@ -0,0 +1,118 @@
"""
JWT access-token helpers backed by an Ed25519 signing key.
"""

from __future__ import annotations

import hashlib
import time
from pathlib import Path
from typing import Any, Dict, Optional

import jwt
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ed25519

_KEY_DIR = Path(__file__).resolve().parent.parent / "keys"
_KEY_FILE = _KEY_DIR / "borealis-jwt-ed25519.key"


class JWTService:
    def __init__(self, private_key: ed25519.Ed25519PrivateKey, key_id: str):
        self._private_key = private_key
        self._public_key = private_key.public_key()
        self._key_id = key_id

    @property
    def key_id(self) -> str:
        return self._key_id

    def issue_access_token(
        self,
        guid: str,
        ssl_key_fingerprint: str,
        token_version: int,
        expires_in: int = 900,
        extra_claims: Optional[Dict[str, Any]] = None,
    ) -> str:
        now = int(time.time())
        payload: Dict[str, Any] = {
            "sub": f"device:{guid}",
            "guid": guid,
            "ssl_key_fingerprint": ssl_key_fingerprint,
            "token_version": int(token_version),
            "iat": now,
            "nbf": now,
            "exp": now + int(expires_in),
        }
        if extra_claims:
            payload.update(extra_claims)

        token = jwt.encode(
            payload,
            self._private_key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.PKCS8,
                encryption_algorithm=serialization.NoEncryption(),
            ),
            algorithm="EdDSA",
            headers={"kid": self._key_id},
        )
        return token

    def decode(self, token: str, *, audience: Optional[str] = None) -> Dict[str, Any]:
        options = {"require": ["exp", "iat", "sub"]}
        public_pem = self._public_key.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        )
        return jwt.decode(
            token,
            public_pem,
            algorithms=["EdDSA"],
            audience=audience,
            options=options,
        )

    def public_jwk(self) -> Dict[str, Any]:
        public_bytes = self._public_key.public_bytes(
            encoding=serialization.Encoding.Raw,
            format=serialization.PublicFormat.Raw,
        )
        # PyJWT expects base64url without padding.
        jwk_x = jwt.utils.base64url_encode(public_bytes).decode("ascii")
        return {
            "kty": "OKP",
            "crv": "Ed25519",
            "kid": self._key_id,
            "alg": "EdDSA",
            "use": "sig",
            "x": jwk_x,
        }


def load_service() -> JWTService:
    private_key = _load_or_create_private_key()
    public_bytes = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    key_id = hashlib.sha256(public_bytes).hexdigest()[:16]
    return JWTService(private_key, key_id)


def _load_or_create_private_key() -> ed25519.Ed25519PrivateKey:
    _KEY_DIR.mkdir(parents=True, exist_ok=True)
    if _KEY_FILE.exists():
        with _KEY_FILE.open("rb") as fh:
            return serialization.load_pem_private_key(fh.read(), password=None)

    private_key = ed25519.Ed25519PrivateKey.generate()
    pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    with _KEY_FILE.open("wb") as fh:
        fh.write(pem)
    try:
        _KEY_FILE.chmod(0o600)
    except Exception:
        pass
    return private_key
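A short usage sketch with made-up identifiers; issuing and decoding round-trip through the same Ed25519 key:

service = load_service()
token = service.issue_access_token(
    guid="11111111-2222-3333-4444-555555555555",  # placeholder GUID
    ssl_key_fingerprint="ab" * 32,                # placeholder SHA-256 hex digest
    token_version=1,
)
claims = service.decode(token)
assert claims["sub"] == "device:11111111-2222-3333-4444-555555555555"
assert claims["token_version"] == 1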
Data/Server/Modules/auth/rate_limit.py (new file, 41 lines)
@@ -0,0 +1,41 @@
"""
Tiny in-memory rate limiter suitable for single-process development servers.
"""

from __future__ import annotations

import time
from collections import deque
from dataclasses import dataclass
from threading import Lock
from typing import Deque, Dict


@dataclass
class RateLimitDecision:
    allowed: bool
    retry_after: float


class SlidingWindowRateLimiter:
    def __init__(self) -> None:
        self._buckets: Dict[str, Deque[float]] = {}
        self._lock = Lock()

    def check(self, key: str, limit: int, window_seconds: float) -> RateLimitDecision:
        now = time.monotonic()
        with self._lock:
            bucket = self._buckets.get(key)
            if bucket is None:
                bucket = deque()
                self._buckets[key] = bucket

            while bucket and now - bucket[0] > window_seconds:
                bucket.popleft()

            if len(bucket) >= limit:
                retry_after = max(0.0, window_seconds - (now - bucket[0]))
                return RateLimitDecision(False, retry_after)

            bucket.append(now)
            return RateLimitDecision(True, 0.0)
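Because the limiter keys on arbitrary strings, callers can scope buckets however they like: per IP, per GUID, per route. A sketch of gating an enrollment route at five attempts per minute:

limiter = SlidingWindowRateLimiter()

decision = limiter.check("enroll:198.51.100.7", limit=5, window_seconds=60.0)
if not decision.allowed:
    # A Flask caller would typically translate this into HTTP 429 with Retry-After.
    print(f"throttled; retry after {decision.retry_after:.1f}s")

Using time.monotonic() keeps the window immune to wall-clock adjustments, at the cost of all buckets resetting on process restart, which fits the single-process scope stated in the docstring.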
Data/Server/Modules/crypto/__init__.py (new file, one empty line)
@@ -0,0 +1 @@
Data/Server/Modules/crypto/certificates.py (new file, 133 lines)
@@ -0,0 +1,133 @@
"""
Self-signed certificate management for Borealis.

The production Flask server and the Vite dev server both consume the same
certificate chain so agents and browsers can pin a single trust anchor during
enrollment.
"""

from __future__ import annotations

import ipaddress
import os
import ssl
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Tuple

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID

_CERT_DIR = Path(__file__).resolve().parent.parent / "certs"
_CERT_FILE = _CERT_DIR / "borealis-server-cert.pem"
_KEY_FILE = _CERT_DIR / "borealis-server-key.pem"
_BUNDLE_FILE = _CERT_DIR / "borealis-server-bundle.pem"

# 100-year lifetime (effectively "never" for self-signed deployments).
_CERT_VALIDITY = timedelta(days=365 * 100)


def ensure_certificate(common_name: str = "Borealis Server") -> Tuple[Path, Path, Path]:
    """
    Ensure the self-signed certificate and key exist on disk.

    Returns (cert_path, key_path, bundle_path).
    """

    _CERT_DIR.mkdir(parents=True, exist_ok=True)

    regenerate = not (_CERT_FILE.exists() and _KEY_FILE.exists())
    if not regenerate:
        try:
            with _CERT_FILE.open("rb") as fh:
                cert = x509.load_pem_x509_certificate(fh.read())
            if cert.not_valid_after.replace(tzinfo=timezone.utc) <= datetime.now(tz=timezone.utc):
                regenerate = True
        except Exception:
            regenerate = True

    if regenerate:
        _generate_certificate(common_name)

    if not _BUNDLE_FILE.exists():
        _BUNDLE_FILE.write_bytes(_CERT_FILE.read_bytes())

    return _CERT_FILE, _KEY_FILE, _BUNDLE_FILE


def _generate_certificate(common_name: str) -> None:
    private_key = ec.generate_private_key(ec.SECP384R1())
    public_key = private_key.public_key()

    now = datetime.now(tz=timezone.utc)
    builder = (
        x509.CertificateBuilder()
        .subject_name(
            x509.Name(
                [
                    x509.NameAttribute(NameOID.COMMON_NAME, common_name),
                    x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Borealis"),
                ]
            )
        )
        .issuer_name(
            x509.Name(
                [
                    x509.NameAttribute(NameOID.COMMON_NAME, common_name),
                ]
            )
        )
        .public_key(public_key)
        .serial_number(x509.random_serial_number())
        .not_valid_before(now - timedelta(minutes=5))
        .not_valid_after(now + _CERT_VALIDITY)
        .add_extension(
            x509.SubjectAlternativeName(
                [
                    x509.DNSName("localhost"),
                    # IP literals must be IPAddress entries (not DNSName) for
                    # verifiers to match them.
                    x509.IPAddress(ipaddress.ip_address("127.0.0.1")),
                    x509.IPAddress(ipaddress.ip_address("::1")),
                ]
            ),
            critical=False,
        )
        .add_extension(x509.BasicConstraints(ca=True, path_length=None), critical=True)
    )

    certificate = builder.sign(private_key=private_key, algorithm=hashes.SHA384())

    _KEY_FILE.write_bytes(
        private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),
        )
    )
    _CERT_FILE.write_bytes(certificate.public_bytes(serialization.Encoding.PEM))

    # Tighten filesystem permissions to limit accidental disclosure.
    _tighten_permissions(_KEY_FILE)
    _tighten_permissions(_CERT_FILE)


def _tighten_permissions(path: Path) -> None:
    try:
        if os.name == "posix":
            path.chmod(0o600)
    except Exception:
        pass


def build_ssl_context() -> ssl.SSLContext:
    cert_path, key_path, _bundle_path = ensure_certificate()
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.minimum_version = ssl.TLSVersion.TLSv1_3
    context.load_cert_chain(certfile=str(cert_path), keyfile=str(key_path))
    return context


def certificate_paths() -> Tuple[str, str, str]:
    cert_path, key_path, bundle_path = ensure_certificate()
    return str(cert_path), str(key_path), str(bundle_path)
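Agents can pin the exported bundle directly when calling the server. A hedged sketch: the health endpoint is a placeholder, and the requests library is assumed available on the agent side:

import requests

cert_path, key_path, bundle_path = certificate_paths()
response = requests.get(
    "https://127.0.0.1:5000/api/health",  # placeholder endpoint
    verify=bundle_path,                   # pin the self-signed trust anchor
    timeout=10,
)
print(response.status_code)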
Data/Server/Modules/crypto/keys.py (new file, 71 lines)
@@ -0,0 +1,71 @@
"""
Utility helpers for working with Ed25519 keys and fingerprints.
"""

from __future__ import annotations

import base64
import hashlib
import re
from typing import Tuple

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.serialization import load_der_public_key
from cryptography.hazmat.primitives.asymmetric import ed25519


def generate_ed25519_keypair() -> Tuple[ed25519.Ed25519PrivateKey, bytes]:
    """
    Generate a new Ed25519 keypair.

    Returns the private key object and the public key encoded as
    SubjectPublicKeyInfo DER bytes.
    """

    private_key = ed25519.Ed25519PrivateKey.generate()
    public_key = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    return private_key, public_key


def normalize_base64(data: str) -> str:
    """
    Collapse whitespace and normalise URL-safe encodings so we can reliably decode.
    """

    cleaned = re.sub(r"\s+", "", data or "")
    cleaned = cleaned.replace("-", "+").replace("_", "/")
    # Restore padding that URL-safe encoders commonly strip, so that strict
    # decoding below does not reject otherwise valid input.
    return cleaned + "=" * (-len(cleaned) % 4)


def spki_der_from_base64(spki_b64: str) -> bytes:
    return base64.b64decode(normalize_base64(spki_b64), validate=True)


def base64_from_spki_der(spki_der: bytes) -> str:
    return base64.b64encode(spki_der).decode("ascii")


def fingerprint_from_spki_der(spki_der: bytes) -> str:
    digest = hashlib.sha256(spki_der).hexdigest()
    return digest.lower()


def fingerprint_from_base64_spki(spki_b64: str) -> str:
    return fingerprint_from_spki_der(spki_der_from_base64(spki_b64))


def private_key_to_pem(private_key: ed25519.Ed25519PrivateKey) -> bytes:
    return private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )


def public_key_to_pem(public_spki_der: bytes) -> bytes:
    public_key = load_der_public_key(public_spki_der)
    return public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
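The helpers compose into a stable round trip; the fingerprint is the lowercase SHA-256 hex digest of the DER-encoded SubjectPublicKeyInfo:

private_key, spki_der = generate_ed25519_keypair()
spki_b64 = base64_from_spki_der(spki_der)

assert fingerprint_from_base64_spki(spki_b64) == fingerprint_from_spki_der(spki_der)
assert len(fingerprint_from_spki_der(spki_der)) == 64  # SHA-256 as hex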
Data/Server/Modules/crypto/signing.py (new file, 70 lines)
@@ -0,0 +1,70 @@
"""
Code-signing helpers for delivering scripts to agents.
"""

from __future__ import annotations

from pathlib import Path
from typing import Tuple

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ed25519

from .keys import base64_from_spki_der

_KEY_DIR = Path(__file__).resolve().parent.parent / "keys"
_SIGNING_KEY_FILE = _KEY_DIR / "borealis-script-ed25519.key"
_SIGNING_PUB_FILE = _KEY_DIR / "borealis-script-ed25519.pub"


class ScriptSigner:
    def __init__(self, private_key: ed25519.Ed25519PrivateKey):
        self._private = private_key
        self._public = private_key.public_key()

    def sign(self, payload: bytes) -> bytes:
        return self._private.sign(payload)

    def public_spki_der(self) -> bytes:
        return self._public.public_bytes(
            encoding=serialization.Encoding.DER,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        )

    def public_base64_spki(self) -> str:
        return base64_from_spki_der(self.public_spki_der())


def load_signer() -> ScriptSigner:
    private_key = _load_or_create()
    return ScriptSigner(private_key)


def _load_or_create() -> ed25519.Ed25519PrivateKey:
    _KEY_DIR.mkdir(parents=True, exist_ok=True)
    if _SIGNING_KEY_FILE.exists():
        with _SIGNING_KEY_FILE.open("rb") as fh:
            return serialization.load_pem_private_key(fh.read(), password=None)

    private_key = ed25519.Ed25519PrivateKey.generate()
    pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    with _SIGNING_KEY_FILE.open("wb") as fh:
        fh.write(pem)
    try:
        _SIGNING_KEY_FILE.chmod(0o600)
    except Exception:
        pass

    pub_der = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    _SIGNING_PUB_FILE.write_bytes(pub_der)

    return private_key
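A sketch of the intended verify path on the agent side, using only the DER public key the server would publish:

from cryptography.hazmat.primitives.serialization import load_der_public_key

signer = load_signer()
payload = b"Write-Host 'hello from Borealis'"  # placeholder script body
signature = signer.sign(payload)

public_key = load_der_public_key(signer.public_spki_der())
public_key.verify(signature, payload)  # raises InvalidSignature on tampering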
Data/Server/Modules/db_migrations.py (new file, 375 lines)
@@ -0,0 +1,375 @@
"""
Database migration helpers for Borealis.

This module centralises schema evolution so the main server module can stay
focused on request handling. The migration functions are intentionally
idempotent: they can run repeatedly without changing state once the schema
matches the desired shape.
"""

from __future__ import annotations

import sqlite3
import uuid
from datetime import datetime, timezone
from typing import List, Optional, Sequence, Tuple


DEVICE_TABLE = "devices"


def apply_all(conn: sqlite3.Connection) -> None:
    """
    Run all known schema migrations against the provided sqlite3 connection.
    """

    _ensure_devices_table(conn)
    _ensure_device_aux_tables(conn)
    _ensure_refresh_token_table(conn)
    _ensure_install_code_table(conn)
    _ensure_device_approval_table(conn)

    conn.commit()


def _ensure_devices_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    if not _table_exists(cur, DEVICE_TABLE):
        _create_devices_table(cur)
        return

    column_info = _table_info(cur, DEVICE_TABLE)
    col_names = [c[1] for c in column_info]
    pk_cols = [c[1] for c in column_info if c[5]]

    needs_rebuild = pk_cols != ["guid"]
    required_columns = {
        "guid": "TEXT",
        "hostname": "TEXT",
        "description": "TEXT",
        "created_at": "INTEGER",
        "agent_hash": "TEXT",
        "memory": "TEXT",
        "network": "TEXT",
        "software": "TEXT",
        "storage": "TEXT",
        "cpu": "TEXT",
        "device_type": "TEXT",
        "domain": "TEXT",
        "external_ip": "TEXT",
        "internal_ip": "TEXT",
        "last_reboot": "TEXT",
        "last_seen": "INTEGER",
        "last_user": "TEXT",
        "operating_system": "TEXT",
        "uptime": "INTEGER",
        "agent_id": "TEXT",
        "ansible_ee_ver": "TEXT",
        "connection_type": "TEXT",
        "connection_endpoint": "TEXT",
        "ssl_key_fingerprint": "TEXT",
        "token_version": "INTEGER",
        "status": "TEXT",
        "key_added_at": "TEXT",
    }

    missing_columns = [col for col in required_columns if col not in col_names]
    if missing_columns:
        needs_rebuild = True

    if needs_rebuild:
        _rebuild_devices_table(conn, column_info)
    else:
        _ensure_column_defaults(cur)

    _ensure_device_indexes(cur)


def _ensure_device_aux_tables(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS device_keys (
            id TEXT PRIMARY KEY,
            guid TEXT NOT NULL,
            ssl_key_fingerprint TEXT NOT NULL,
            added_at TEXT NOT NULL,
            retired_at TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE UNIQUE INDEX IF NOT EXISTS uq_device_keys_guid_fingerprint
        ON device_keys(guid, ssl_key_fingerprint)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_device_keys_guid
        ON device_keys(guid)
        """
    )


def _ensure_refresh_token_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS refresh_tokens (
            id TEXT PRIMARY KEY,
            guid TEXT NOT NULL,
            token_hash TEXT NOT NULL,
            dpop_jkt TEXT,
            created_at TEXT NOT NULL,
            expires_at TEXT NOT NULL,
            revoked_at TEXT,
            last_used_at TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_refresh_tokens_guid
        ON refresh_tokens(guid)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_refresh_tokens_expires_at
        ON refresh_tokens(expires_at)
        """
    )


def _ensure_install_code_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS enrollment_install_codes (
            id TEXT PRIMARY KEY,
            code TEXT NOT NULL UNIQUE,
            expires_at TEXT NOT NULL,
            created_by_user_id TEXT,
            used_at TEXT,
            used_by_guid TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_eic_expires_at
        ON enrollment_install_codes(expires_at)
        """
    )


def _ensure_device_approval_table(conn: sqlite3.Connection) -> None:
    cur = conn.cursor()
    cur.execute(
        """
        CREATE TABLE IF NOT EXISTS device_approvals (
            id TEXT PRIMARY KEY,
            approval_reference TEXT NOT NULL UNIQUE,
            guid TEXT,
            hostname_claimed TEXT NOT NULL,
            ssl_key_fingerprint_claimed TEXT NOT NULL,
            enrollment_code_id TEXT NOT NULL,
            status TEXT NOT NULL,
            client_nonce TEXT NOT NULL,
            server_nonce TEXT NOT NULL,
            agent_pubkey_der BLOB NOT NULL,
            created_at TEXT NOT NULL,
            updated_at TEXT NOT NULL,
            approved_by_user_id TEXT
        )
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_da_status
        ON device_approvals(status)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_da_fp_status
        ON device_approvals(ssl_key_fingerprint_claimed, status)
        """
    )


def _create_devices_table(cur: sqlite3.Cursor) -> None:
    cur.execute(
        """
        CREATE TABLE devices (
            guid TEXT PRIMARY KEY,
            hostname TEXT,
            description TEXT,
            created_at INTEGER,
            agent_hash TEXT,
            memory TEXT,
            network TEXT,
            software TEXT,
            storage TEXT,
            cpu TEXT,
            device_type TEXT,
            domain TEXT,
            external_ip TEXT,
            internal_ip TEXT,
            last_reboot TEXT,
            last_seen INTEGER,
            last_user TEXT,
            operating_system TEXT,
            uptime INTEGER,
            agent_id TEXT,
            ansible_ee_ver TEXT,
            connection_type TEXT,
            connection_endpoint TEXT,
            ssl_key_fingerprint TEXT,
            token_version INTEGER DEFAULT 1,
            status TEXT DEFAULT 'active',
            key_added_at TEXT
        )
        """
    )
    _ensure_device_indexes(cur)


def _ensure_device_indexes(cur: sqlite3.Cursor) -> None:
    cur.execute(
        """
        CREATE UNIQUE INDEX IF NOT EXISTS uq_devices_hostname
        ON devices(hostname)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_devices_ssl_key
        ON devices(ssl_key_fingerprint)
        """
    )
    cur.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_devices_status
        ON devices(status)
        """
    )


def _ensure_column_defaults(cur: sqlite3.Cursor) -> None:
    cur.execute(
        """
        UPDATE devices
        SET token_version = COALESCE(token_version, 1)
        WHERE token_version IS NULL
        """
    )
    cur.execute(
        """
        UPDATE devices
        SET status = COALESCE(status, 'active')
        WHERE status IS NULL OR status = ''
        """
    )


def _rebuild_devices_table(conn: sqlite3.Connection, column_info: Sequence[Tuple]) -> None:
    cur = conn.cursor()
    cur.execute("PRAGMA foreign_keys=OFF")
    cur.execute("BEGIN IMMEDIATE")

    cur.execute("ALTER TABLE devices RENAME TO devices_legacy")
    _create_devices_table(cur)

    legacy_columns = [c[1] for c in column_info]
    cur.execute(f"SELECT {', '.join(legacy_columns)} FROM devices_legacy")
    rows = cur.fetchall()

    insert_sql = (
        """
        INSERT OR REPLACE INTO devices (
            guid, hostname, description, created_at, agent_hash, memory,
            network, software, storage, cpu, device_type, domain, external_ip,
            internal_ip, last_reboot, last_seen, last_user, operating_system,
            uptime, agent_id, ansible_ee_ver, connection_type, connection_endpoint,
            ssl_key_fingerprint, token_version, status, key_added_at
        )
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """
    )

    for row in rows:
        record = dict(zip(legacy_columns, row))
        guid = _normalized_guid(record.get("guid"))
        if not guid:
            guid = str(uuid.uuid4())
        hostname = record.get("hostname")
        created_at = record.get("created_at")
        key_added_at = record.get("key_added_at")
        if key_added_at is None:
            key_added_at = _default_key_added_at(created_at)

        params: Tuple = (
            guid,
            hostname,
            record.get("description"),
            created_at,
            record.get("agent_hash"),
            record.get("memory"),
            record.get("network"),
            record.get("software"),
            record.get("storage"),
            record.get("cpu"),
            record.get("device_type"),
            record.get("domain"),
            record.get("external_ip"),
            record.get("internal_ip"),
            record.get("last_reboot"),
            record.get("last_seen"),
            record.get("last_user"),
            record.get("operating_system"),
            record.get("uptime"),
            record.get("agent_id"),
            record.get("ansible_ee_ver"),
            record.get("connection_type"),
            record.get("connection_endpoint"),
            record.get("ssl_key_fingerprint"),
            record.get("token_version") or 1,
            record.get("status") or "active",
            key_added_at,
        )
        cur.execute(insert_sql, params)

    cur.execute("DROP TABLE devices_legacy")
    cur.execute("COMMIT")
    cur.execute("PRAGMA foreign_keys=ON")


def _default_key_added_at(created_at: Optional[int]) -> Optional[str]:
    if created_at:
        try:
            dt = datetime.fromtimestamp(int(created_at), tz=timezone.utc)
            return dt.isoformat()
        except Exception:
            pass
    return datetime.now(tz=timezone.utc).isoformat()


def _table_exists(cur: sqlite3.Cursor, name: str) -> bool:
    cur.execute(
        "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?",
        (name,),
    )
    return cur.fetchone() is not None


def _table_info(cur: sqlite3.Cursor, name: str) -> List[Tuple]:
    cur.execute(f"PRAGMA table_info({name})")
    return cur.fetchall()


def _normalized_guid(value: Optional[str]) -> str:
    if not value:
        return ""
    return str(value).strip()
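Since every migration is guarded by IF NOT EXISTS checks or explicit column inspection, apply_all is safe to call on each server boot. A quick sketch against a throwaway database:

import sqlite3

conn = sqlite3.connect(":memory:")  # stand-in for the real database file
apply_all(conn)
apply_all(conn)  # second pass is a no-op once the schema matches

cur = conn.cursor()
cur.execute("PRAGMA table_info(devices)")
assert "token_version" in [row[1] for row in cur.fetchall()]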
Data/Server/Modules/enrollment/__init__.py (new file, one empty line)
@@ -0,0 +1 @@
Vite dev-server config (modified; likely vite.config.js, full path not captured)
@@ -2,6 +2,20 @@
 import { defineConfig } from 'vite';
 import react from '@vitejs/plugin-react';
 import path from 'path';
+import fs from 'fs';
+
+const defaultCert = path.resolve(__dirname, '../certs/borealis-server-cert.pem');
+const defaultKey = path.resolve(__dirname, '../certs/borealis-server-key.pem');
+
+const certPath = process.env.BOREALIS_TLS_CERT ?? defaultCert;
+const keyPath = process.env.BOREALIS_TLS_KEY ?? defaultKey;
+
+const httpsOptions = fs.existsSync(certPath) && fs.existsSync(keyPath)
+  ? {
+      cert: fs.readFileSync(certPath),
+      key: fs.readFileSync(keyPath),
+    }
+  : undefined;
 
 export default defineConfig({
   plugins: [react()],
@@ -12,13 +26,20 @@ export default defineConfig({
     // Allow LAN/IP access during dev (so other devices can reach Vite)
     // If you want to restrict, replace `true` with an explicit allowlist.
     allowedHosts: true,
+    https: httpsOptions,
     proxy: {
-      // Ensure cookies/headers are forwarded correctly to Flask
+      // Ensure cookies/headers are forwarded correctly to Flask over TLS
      '/api': {
-        target: 'http://127.0.0.1:5000',
+        target: 'https://127.0.0.1:5000',
        changeOrigin: true,
+        secure: false,
      },
-      '/socket.io': { target:'ws://127.0.0.1:5000', ws:true, changeOrigin: true }
+      '/socket.io': {
+        target: 'wss://127.0.0.1:5000',
+        ws: true,
+        changeOrigin: true,
+        secure: false,
+      }
     }
   },
   build: {
Python dependency list (modified; filename not captured)
@@ -11,6 +11,7 @@ flask_socketio
 flask-cors
 eventlet
 cryptography
+PyJWT[crypto]
 pyotp
 qrcode
Flask server module (modified; filename not captured). The hunks below wire the new modules into startup.
@@ -48,6 +48,10 @@ from threading import Lock
 
 from datetime import datetime, timezone
 
+from Modules import db_migrations
+from Modules.auth import jwt_service as jwt_service_module
+from Modules.crypto import certificates
+
 try:
     from cryptography.fernet import Fernet  # type: ignore
 except Exception:
@@ -136,6 +140,13 @@ os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
 _GITHUB_TOKEN_CACHE: Dict[str, Any] = {"token": None, "loaded_at": 0.0, "known": False}
 _GITHUB_TOKEN_LOCK = Lock()
 
+TLS_CERT_PATH, TLS_KEY_PATH, TLS_BUNDLE_PATH = certificates.certificate_paths()
+os.environ.setdefault("BOREALIS_TLS_CERT", TLS_CERT_PATH)
+os.environ.setdefault("BOREALIS_TLS_KEY", TLS_KEY_PATH)
+os.environ.setdefault("BOREALIS_TLS_BUNDLE", TLS_BUNDLE_PATH)
+
+JWT_SERVICE = jwt_service_module.load_service()
+
 
 def _set_cached_github_token(token: Optional[str]) -> None:
     with _GITHUB_TOKEN_LOCK:
@@ -509,6 +520,7 @@ app.config.update(
     SESSION_COOKIE_SAMESITE=os.environ.get('BOREALIS_COOKIE_SAMESITE', 'Lax'),  # set to 'None' when UI/API are on different sites
     SESSION_COOKIE_SECURE=(os.environ.get('BOREALIS_COOKIE_SECURE', '0').lower() in ('1', 'true', 'yes')),
 )
+app.config.setdefault("PREFERRED_URL_SCHEME", "https")
 
 # Optionally pin cookie domain if served under a fixed hostname (leave unset for host-only/IP dev)
 _cookie_domain = os.environ.get('BOREALIS_COOKIE_DOMAIN')  # e.g. ".example.com" or "borealis.bunny-lab.io"
@@ -3773,11 +3785,11 @@ _DEVICE_JSON_OBJECT_FIELDS = {
 
 
 _DEVICE_TABLE_COLUMNS = [
+    "guid",
     "hostname",
     "description",
     "created_at",
     "agent_hash",
-    "guid",
     "memory",
     "network",
     "software",
@@ -3796,6 +3808,10 @@ _DEVICE_TABLE_COLUMNS = [
     "ansible_ee_ver",
     "connection_type",
     "connection_endpoint",
+    "ssl_key_fingerprint",
+    "token_version",
+    "status",
+    "key_added_at",
 ]
@@ -4407,6 +4423,7 @@ def _secret_fingerprint(secret_blob: Optional[bytes]) -> str:
 def init_db():
     """Initialize all required tables in the unified database."""
     conn = _db_conn()
+    db_migrations.apply_all(conn)
     cur = conn.cursor()
 
     # Device table (renamed from historical device_details)
@@ -7946,4 +7963,5 @@ def relay_ansible_run(data):
 
 if __name__ == "__main__":
     # Use SocketIO runner so WebSocket transport works with eventlet.
-    socketio.run(app, host="0.0.0.0", port=5000)
+    ssl_context = certificates.build_ssl_context()
+    socketio.run(app, host="0.0.0.0", port=5000, ssl_context=ssl_context)