mirror of https://github.com/bunny-lab-io/Borealis.git

Commit: Port core API routes for sites and devices
		| @@ -24,8 +24,12 @@ __all__ = [ | ||||
| try:  # pragma: no cover - optional dependency shim | ||||
|     from .device_repository import SQLiteDeviceRepository | ||||
|     from .enrollment_repository import SQLiteEnrollmentRepository | ||||
|     from .device_inventory_repository import SQLiteDeviceInventoryRepository | ||||
|     from .device_view_repository import SQLiteDeviceViewRepository | ||||
|     from .credential_repository import SQLiteCredentialRepository | ||||
|     from .github_repository import SQLiteGitHubRepository | ||||
|     from .job_repository import SQLiteJobRepository | ||||
|     from .site_repository import SQLiteSiteRepository | ||||
|     from .token_repository import SQLiteRefreshTokenRepository | ||||
|     from .user_repository import SQLiteUserRepository | ||||
| except ModuleNotFoundError as exc:  # pragma: no cover - triggered when auth deps missing | ||||
| @@ -36,8 +40,12 @@ except ModuleNotFoundError as exc:  # pragma: no cover - triggered when auth dep | ||||
|  | ||||
|     SQLiteDeviceRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteEnrollmentRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteDeviceInventoryRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteDeviceViewRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteCredentialRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteGitHubRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteJobRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteSiteRepository = _missing_repo  # type: ignore[assignment] | ||||
|     SQLiteRefreshTokenRepository = _missing_repo  # type: ignore[assignment] | ||||
| else: | ||||
|     __all__ += [ | ||||
| @@ -45,6 +53,10 @@ else: | ||||
|         "SQLiteRefreshTokenRepository", | ||||
|         "SQLiteJobRepository", | ||||
|         "SQLiteEnrollmentRepository", | ||||
|         "SQLiteDeviceInventoryRepository", | ||||
|         "SQLiteDeviceViewRepository", | ||||
|         "SQLiteCredentialRepository", | ||||
|         "SQLiteGitHubRepository", | ||||
|         "SQLiteUserRepository", | ||||
|         "SQLiteSiteRepository", | ||||
|     ] | ||||
|   | ||||
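The hunk above extends the package's optional-dependency shim: the four new repository classes are imported eagerly, and when the import chain fails with ModuleNotFoundError each name is rebound to the `_missing_repo` placeholder so that `Data.Engine.repositories.sqlite` itself stays importable. A minimal re-creation of the pattern for a single class is sketched below; the placeholder body is an assumption, since the real `_missing_repo` is defined in lines elided from this hunk.

    # Illustrative sketch of the shim pattern, not the Borealis source.
    try:
        from .credential_repository import SQLiteCredentialRepository
    except ModuleNotFoundError as exc:
        _import_error = exc

        def _missing_repo(*args, **kwargs):
            # Assumed behaviour: surface the original import failure on first use.
            raise RuntimeError("sqlite repositories are unavailable") from _import_error

        SQLiteCredentialRepository = _missing_repo  # type: ignore[assignment]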
							
								
								
									
Data/Engine/repositories/sqlite/credential_repository.py (new file, 103 lines)
							| @@ -0,0 +1,103 @@ | ||||
| """SQLite access for operator credential metadata.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import json | ||||
| import logging | ||||
| import sqlite3 | ||||
| from contextlib import closing | ||||
| from typing import Dict, List, Optional | ||||
|  | ||||
| from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory | ||||
|  | ||||
| __all__ = ["SQLiteCredentialRepository"] | ||||
|  | ||||
|  | ||||
| class SQLiteCredentialRepository: | ||||
|     def __init__( | ||||
|         self, | ||||
|         connection_factory: SQLiteConnectionFactory, | ||||
|         *, | ||||
|         logger: Optional[logging.Logger] = None, | ||||
|     ) -> None: | ||||
|         self._connections = connection_factory | ||||
|         self._log = logger or logging.getLogger("borealis.engine.repositories.credentials") | ||||
|  | ||||
|     def list_credentials( | ||||
|         self, | ||||
|         *, | ||||
|         site_id: Optional[int] = None, | ||||
|         connection_type: Optional[str] = None, | ||||
|     ) -> List[Dict[str, object]]: | ||||
|         sql = """ | ||||
|             SELECT c.id, | ||||
|                    c.name, | ||||
|                    c.description, | ||||
|                    c.credential_type, | ||||
|                    c.connection_type, | ||||
|                    c.username, | ||||
|                    c.site_id, | ||||
|                    s.name AS site_name, | ||||
|                    c.become_method, | ||||
|                    c.become_username, | ||||
|                    c.metadata_json, | ||||
|                    c.created_at, | ||||
|                    c.updated_at, | ||||
|                    c.password_encrypted, | ||||
|                    c.private_key_encrypted, | ||||
|                    c.private_key_passphrase_encrypted, | ||||
|                    c.become_password_encrypted | ||||
|               FROM credentials c | ||||
|          LEFT JOIN sites s ON s.id = c.site_id | ||||
|         """ | ||||
|         clauses: List[str] = [] | ||||
|         params: List[object] = [] | ||||
|         if site_id is not None: | ||||
|             clauses.append("c.site_id = ?") | ||||
|             params.append(site_id) | ||||
|         if connection_type: | ||||
|             clauses.append("LOWER(c.connection_type) = LOWER(?)") | ||||
|             params.append(connection_type) | ||||
|         if clauses: | ||||
|             sql += " WHERE " + " AND ".join(clauses) | ||||
|         sql += " ORDER BY LOWER(c.name) ASC" | ||||
|  | ||||
|         with closing(self._connections()) as conn: | ||||
|             conn.row_factory = sqlite3.Row  # type: ignore[attr-defined] | ||||
|             cur = conn.cursor() | ||||
|             cur.execute(sql, params) | ||||
|             rows = cur.fetchall() | ||||
|  | ||||
|         results: List[Dict[str, object]] = [] | ||||
|         for row in rows: | ||||
|             metadata_json = row["metadata_json"] if "metadata_json" in row.keys() else None | ||||
|             metadata = {} | ||||
|             if metadata_json: | ||||
|                 try: | ||||
|                     candidate = json.loads(metadata_json) | ||||
|                     if isinstance(candidate, dict): | ||||
|                         metadata = candidate | ||||
|                 except Exception: | ||||
|                     metadata = {} | ||||
|             results.append( | ||||
|                 { | ||||
|                     "id": row["id"], | ||||
|                     "name": row["name"], | ||||
|                     "description": row["description"] or "", | ||||
|                     "credential_type": row["credential_type"] or "machine", | ||||
|                     "connection_type": row["connection_type"] or "ssh", | ||||
|                     "site_id": row["site_id"], | ||||
|                     "site_name": row["site_name"], | ||||
|                     "username": row["username"] or "", | ||||
|                     "become_method": row["become_method"] or "", | ||||
|                     "become_username": row["become_username"] or "", | ||||
|                     "metadata": metadata, | ||||
|                     "created_at": int(row["created_at"] or 0), | ||||
|                     "updated_at": int(row["updated_at"] or 0), | ||||
|                     "has_password": bool(row["password_encrypted"]), | ||||
|                     "has_private_key": bool(row["private_key_encrypted"]), | ||||
|                     "has_private_key_passphrase": bool(row["private_key_passphrase_encrypted"]), | ||||
|                     "has_become_password": bool(row["become_password_encrypted"]), | ||||
|                 } | ||||
|             ) | ||||
|         return results | ||||
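A hedged usage sketch for the new credential repository follows. The database path, site id, and the plain-function stand-in for SQLiteConnectionFactory are illustrative; as `list_credentials` shows, the repository only ever calls the factory to obtain a DB-API connection, and it reports secrets only as boolean `has_*` flags rather than decrypting them.

    import sqlite3

    from Data.Engine.repositories.sqlite.credential_repository import SQLiteCredentialRepository

    def factory() -> sqlite3.Connection:
        # Stand-in for SQLiteConnectionFactory: the repository only calls factory().
        return sqlite3.connect("/opt/borealis/borealis.db")  # path is illustrative

    creds = SQLiteCredentialRepository(factory)

    # SSH credentials scoped to a single (hypothetical) site.
    for cred in creds.list_credentials(site_id=1, connection_type="ssh"):
        print(cred["name"], cred["username"], cred["has_password"])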
							
								
								
									
Data/Engine/repositories/sqlite/device_inventory_repository.py (new file, 253 lines)
							| @@ -0,0 +1,253 @@ | ||||
| """Device inventory operations backed by SQLite.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import logging | ||||
| import sqlite3 | ||||
| import time | ||||
| from contextlib import closing | ||||
| from typing import Any, Dict, List, Optional, Tuple | ||||
|  | ||||
| from Data.Engine.domain.devices import ( | ||||
|     DEVICE_TABLE, | ||||
|     DEVICE_TABLE_COLUMNS, | ||||
|     assemble_device_snapshot, | ||||
|     clean_device_str, | ||||
|     coerce_int, | ||||
|     device_column_sql, | ||||
|     row_to_device_dict, | ||||
|     serialize_device_json, | ||||
| ) | ||||
| from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory | ||||
|  | ||||
| __all__ = ["SQLiteDeviceInventoryRepository"] | ||||
|  | ||||
|  | ||||
| class SQLiteDeviceInventoryRepository: | ||||
|     def __init__( | ||||
|         self, | ||||
|         connection_factory: SQLiteConnectionFactory, | ||||
|         *, | ||||
|         logger: Optional[logging.Logger] = None, | ||||
|     ) -> None: | ||||
|         self._connections = connection_factory | ||||
|         self._log = logger or logging.getLogger("borealis.engine.repositories.device_inventory") | ||||
|  | ||||
|     def fetch_devices( | ||||
|         self, | ||||
|         *, | ||||
|         connection_type: Optional[str] = None, | ||||
|         hostname: Optional[str] = None, | ||||
|         only_agents: bool = False, | ||||
|     ) -> List[Dict[str, Any]]: | ||||
|         sql = f""" | ||||
|             SELECT {device_column_sql('d')}, s.id, s.name, s.description | ||||
|               FROM {DEVICE_TABLE} d | ||||
|          LEFT JOIN device_sites ds ON ds.device_hostname = d.hostname | ||||
|          LEFT JOIN sites s ON s.id = ds.site_id | ||||
|         """ | ||||
|         clauses: List[str] = [] | ||||
|         params: List[Any] = [] | ||||
|         if connection_type: | ||||
|             clauses.append("LOWER(d.connection_type) = LOWER(?)") | ||||
|             params.append(connection_type) | ||||
|         if hostname: | ||||
|             clauses.append("LOWER(d.hostname) = LOWER(?)") | ||||
|             params.append(hostname.lower()) | ||||
|         if only_agents: | ||||
|             clauses.append("(d.connection_type IS NULL OR TRIM(d.connection_type) = '')") | ||||
|         if clauses: | ||||
|             sql += " WHERE " + " AND ".join(clauses) | ||||
|  | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute(sql, params) | ||||
|             rows = cur.fetchall() | ||||
|  | ||||
|         now = time.time() | ||||
|         devices: List[Dict[str, Any]] = [] | ||||
|         for row in rows: | ||||
|             core = row[: len(DEVICE_TABLE_COLUMNS)] | ||||
|             site_id, site_name, site_description = row[len(DEVICE_TABLE_COLUMNS) :] | ||||
|             record = row_to_device_dict(core, DEVICE_TABLE_COLUMNS) | ||||
|             snapshot = assemble_device_snapshot(record) | ||||
|             summary = snapshot.get("summary", {}) | ||||
|             last_seen = snapshot.get("last_seen") or 0 | ||||
|             status = "Offline" | ||||
|             try: | ||||
|                 if last_seen and (now - float(last_seen)) <= 300: | ||||
|                     status = "Online" | ||||
|             except Exception: | ||||
|                 pass | ||||
|             devices.append( | ||||
|                 { | ||||
|                     **snapshot, | ||||
|                     "site_id": site_id, | ||||
|                     "site_name": site_name or "", | ||||
|                     "site_description": site_description or "", | ||||
|                     "status": status, | ||||
|                 } | ||||
|             ) | ||||
|         return devices | ||||
|  | ||||
|     def load_snapshot(self, *, hostname: Optional[str] = None, guid: Optional[str] = None) -> Optional[Dict[str, Any]]: | ||||
|         if not hostname and not guid: | ||||
|             return None | ||||
|         sql = None | ||||
|         params: Tuple[Any, ...] | ||||
|         if hostname: | ||||
|             sql = f"SELECT {device_column_sql()} FROM {DEVICE_TABLE} WHERE hostname = ?" | ||||
|             params = (hostname,) | ||||
|         else: | ||||
|             sql = f"SELECT {device_column_sql()} FROM {DEVICE_TABLE} WHERE LOWER(guid) = LOWER(?)" | ||||
|             params = (guid,) | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute(sql, params) | ||||
|             row = cur.fetchone() | ||||
|         if not row: | ||||
|             return None | ||||
|         record = row_to_device_dict(row, DEVICE_TABLE_COLUMNS) | ||||
|         return assemble_device_snapshot(record) | ||||
|  | ||||
|     def upsert_device( | ||||
|         self, | ||||
|         hostname: str, | ||||
|         description: Optional[str], | ||||
|         merged_details: Dict[str, Any], | ||||
|         created_at: Optional[int], | ||||
|         *, | ||||
|         agent_hash: Optional[str] = None, | ||||
|         guid: Optional[str] = None, | ||||
|     ) -> None: | ||||
|         if not hostname: | ||||
|             return | ||||
|  | ||||
|         column_values = self._extract_device_columns(merged_details or {}) | ||||
|         normalized_description = description if description is not None else "" | ||||
|         try: | ||||
|             normalized_description = str(normalized_description) | ||||
|         except Exception: | ||||
|             normalized_description = "" | ||||
|  | ||||
|         normalized_hash = clean_device_str(agent_hash) or None | ||||
|         normalized_guid = clean_device_str(guid) or None | ||||
|         created_ts = coerce_int(created_at) or int(time.time()) | ||||
|  | ||||
|         sql = f""" | ||||
|             INSERT INTO {DEVICE_TABLE}( | ||||
|                 hostname, | ||||
|                 description, | ||||
|                 created_at, | ||||
|                 agent_hash, | ||||
|                 guid, | ||||
|                 memory, | ||||
|                 network, | ||||
|                 software, | ||||
|                 storage, | ||||
|                 cpu, | ||||
|                 device_type, | ||||
|                 domain, | ||||
|                 external_ip, | ||||
|                 internal_ip, | ||||
|                 last_reboot, | ||||
|                 last_seen, | ||||
|                 last_user, | ||||
|                 operating_system, | ||||
|                 uptime, | ||||
|                 agent_id, | ||||
|                 ansible_ee_ver, | ||||
|                 connection_type, | ||||
|                 connection_endpoint | ||||
|             ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) | ||||
|             ON CONFLICT(hostname) DO UPDATE SET | ||||
|                 description=excluded.description, | ||||
|                 created_at=COALESCE({DEVICE_TABLE}.created_at, excluded.created_at), | ||||
|                 agent_hash=COALESCE(NULLIF(excluded.agent_hash, ''), {DEVICE_TABLE}.agent_hash), | ||||
|                 guid=COALESCE(NULLIF(excluded.guid, ''), {DEVICE_TABLE}.guid), | ||||
|                 memory=excluded.memory, | ||||
|                 network=excluded.network, | ||||
|                 software=excluded.software, | ||||
|                 storage=excluded.storage, | ||||
|                 cpu=excluded.cpu, | ||||
|                 device_type=COALESCE(NULLIF(excluded.device_type, ''), {DEVICE_TABLE}.device_type), | ||||
|                 domain=COALESCE(NULLIF(excluded.domain, ''), {DEVICE_TABLE}.domain), | ||||
|                 external_ip=COALESCE(NULLIF(excluded.external_ip, ''), {DEVICE_TABLE}.external_ip), | ||||
|                 internal_ip=COALESCE(NULLIF(excluded.internal_ip, ''), {DEVICE_TABLE}.internal_ip), | ||||
|                 last_reboot=COALESCE(NULLIF(excluded.last_reboot, ''), {DEVICE_TABLE}.last_reboot), | ||||
|                 last_seen=COALESCE(NULLIF(excluded.last_seen, 0), {DEVICE_TABLE}.last_seen), | ||||
|                 last_user=COALESCE(NULLIF(excluded.last_user, ''), {DEVICE_TABLE}.last_user), | ||||
|                 operating_system=COALESCE(NULLIF(excluded.operating_system, ''), {DEVICE_TABLE}.operating_system), | ||||
|                 uptime=COALESCE(NULLIF(excluded.uptime, 0), {DEVICE_TABLE}.uptime), | ||||
|                 agent_id=COALESCE(NULLIF(excluded.agent_id, ''), {DEVICE_TABLE}.agent_id), | ||||
|                 ansible_ee_ver=COALESCE(NULLIF(excluded.ansible_ee_ver, ''), {DEVICE_TABLE}.ansible_ee_ver), | ||||
|                 connection_type=COALESCE(NULLIF(excluded.connection_type, ''), {DEVICE_TABLE}.connection_type), | ||||
|                 connection_endpoint=COALESCE(NULLIF(excluded.connection_endpoint, ''), {DEVICE_TABLE}.connection_endpoint) | ||||
|         """ | ||||
|  | ||||
|         params: List[Any] = [ | ||||
|             hostname, | ||||
|             normalized_description, | ||||
|             created_ts, | ||||
|             normalized_hash, | ||||
|             normalized_guid, | ||||
|             column_values.get("memory"), | ||||
|             column_values.get("network"), | ||||
|             column_values.get("software"), | ||||
|             column_values.get("storage"), | ||||
|             column_values.get("cpu"), | ||||
|             column_values.get("device_type"), | ||||
|             column_values.get("domain"), | ||||
|             column_values.get("external_ip"), | ||||
|             column_values.get("internal_ip"), | ||||
|             column_values.get("last_reboot"), | ||||
|             column_values.get("last_seen"), | ||||
|             column_values.get("last_user"), | ||||
|             column_values.get("operating_system"), | ||||
|             column_values.get("uptime"), | ||||
|             column_values.get("agent_id"), | ||||
|             column_values.get("ansible_ee_ver"), | ||||
|             column_values.get("connection_type"), | ||||
|             column_values.get("connection_endpoint"), | ||||
|         ] | ||||
|  | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute(sql, params) | ||||
|             conn.commit() | ||||
|  | ||||
|     def delete_device_by_hostname(self, hostname: str) -> None: | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute("DELETE FROM device_sites WHERE device_hostname = ?", (hostname,)) | ||||
|             cur.execute(f"DELETE FROM {DEVICE_TABLE} WHERE hostname = ?", (hostname,)) | ||||
|             conn.commit() | ||||
|  | ||||
|     def _extract_device_columns(self, details: Dict[str, Any]) -> Dict[str, Any]: | ||||
|         summary = details.get("summary") or {} | ||||
|         payload: Dict[str, Any] = {} | ||||
|         for field in ("memory", "network", "software", "storage"): | ||||
|             payload[field] = serialize_device_json(details.get(field), []) | ||||
|         payload["cpu"] = serialize_device_json(summary.get("cpu") or details.get("cpu"), {}) | ||||
|         payload["device_type"] = clean_device_str(summary.get("device_type") or summary.get("type")) | ||||
|         payload["domain"] = clean_device_str(summary.get("domain")) | ||||
|         payload["external_ip"] = clean_device_str(summary.get("external_ip") or summary.get("public_ip")) | ||||
|         payload["internal_ip"] = clean_device_str(summary.get("internal_ip") or summary.get("private_ip")) | ||||
|         payload["last_reboot"] = clean_device_str(summary.get("last_reboot") or summary.get("last_boot")) | ||||
|         payload["last_seen"] = coerce_int(summary.get("last_seen")) | ||||
|         payload["last_user"] = clean_device_str( | ||||
|             summary.get("last_user") | ||||
|             or summary.get("last_user_name") | ||||
|             or summary.get("logged_in_user") | ||||
|         ) | ||||
|         payload["operating_system"] = clean_device_str( | ||||
|             summary.get("operating_system") or summary.get("os") | ||||
|         ) | ||||
|         payload["uptime"] = coerce_int(summary.get("uptime")) | ||||
|         payload["agent_id"] = clean_device_str(summary.get("agent_id")) | ||||
|         payload["ansible_ee_ver"] = clean_device_str(summary.get("ansible_ee_ver")) | ||||
|         payload["connection_type"] = clean_device_str(summary.get("connection_type")) | ||||
|         payload["connection_endpoint"] = clean_device_str( | ||||
|             summary.get("connection_endpoint") or summary.get("endpoint") | ||||
|         ) | ||||
|         return payload | ||||
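A sketch of `upsert_device` followed by `fetch_devices`, reusing the `factory` stand-in from the credential sketch above. The hostname, description, and summary values are invented, and the `merged_details` shape simply mirrors the keys read by `_extract_device_columns`; the full snapshot layout comes from `assemble_device_snapshot`, which is not part of this diff.

    import time

    from Data.Engine.repositories.sqlite.device_inventory_repository import SQLiteDeviceInventoryRepository

    inventory = SQLiteDeviceInventoryRepository(factory)

    inventory.upsert_device(
        "WS-0042",                     # hypothetical hostname
        "Finance workstation",
        {
            "summary": {
                "operating_system": "Windows 11 Pro",
                "internal_ip": "10.0.4.17",
                "last_seen": int(time.time()),
            },
            "memory": [], "network": [], "software": [], "storage": [],
        },
        None,                          # created_at: None falls back to "now" via coerce_int
    )

    # Devices whose last_seen is within the last 300 seconds are reported as "Online".
    for device in inventory.fetch_devices(only_agents=True):
        print(device.get("hostname"), device["status"], device["site_name"])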
							
								
								
									
Data/Engine/repositories/sqlite/device_view_repository.py (new file, 143 lines)
							| @@ -0,0 +1,143 @@ | ||||
| """SQLite persistence for device list views.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import json | ||||
| import logging | ||||
| import sqlite3 | ||||
| import time | ||||
| from contextlib import closing | ||||
| from typing import Dict, Iterable, List, Optional | ||||
|  | ||||
| from Data.Engine.domain.device_views import DeviceListView | ||||
| from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory | ||||
|  | ||||
| __all__ = ["SQLiteDeviceViewRepository"] | ||||
|  | ||||
|  | ||||
| class SQLiteDeviceViewRepository: | ||||
|     def __init__( | ||||
|         self, | ||||
|         connection_factory: SQLiteConnectionFactory, | ||||
|         *, | ||||
|         logger: Optional[logging.Logger] = None, | ||||
|     ) -> None: | ||||
|         self._connections = connection_factory | ||||
|         self._log = logger or logging.getLogger("borealis.engine.repositories.device_views") | ||||
|  | ||||
|     def list_views(self) -> List[DeviceListView]: | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute( | ||||
|                 "SELECT id, name, columns_json, filters_json, created_at, updated_at\n" | ||||
|                 "  FROM device_list_views ORDER BY name COLLATE NOCASE ASC" | ||||
|             ) | ||||
|             rows = cur.fetchall() | ||||
|         return [self._row_to_view(row) for row in rows] | ||||
|  | ||||
|     def get_view(self, view_id: int) -> Optional[DeviceListView]: | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute( | ||||
|                 "SELECT id, name, columns_json, filters_json, created_at, updated_at\n" | ||||
|                 "  FROM device_list_views WHERE id = ?", | ||||
|                 (view_id,), | ||||
|             ) | ||||
|             row = cur.fetchone() | ||||
|         return self._row_to_view(row) if row else None | ||||
|  | ||||
|     def create_view(self, name: str, columns: List[str], filters: Dict[str, object]) -> DeviceListView: | ||||
|         now = int(time.time()) | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             try: | ||||
|                 cur.execute( | ||||
|                     "INSERT INTO device_list_views(name, columns_json, filters_json, created_at, updated_at)\n" | ||||
|                     "VALUES (?, ?, ?, ?, ?)", | ||||
|                     (name, json.dumps(columns), json.dumps(filters), now, now), | ||||
|                 ) | ||||
|             except sqlite3.IntegrityError as exc: | ||||
|                 raise ValueError("duplicate") from exc | ||||
|             view_id = cur.lastrowid | ||||
|             conn.commit() | ||||
|             cur.execute( | ||||
|                 "SELECT id, name, columns_json, filters_json, created_at, updated_at FROM device_list_views WHERE id = ?", | ||||
|                 (view_id,), | ||||
|             ) | ||||
|             row = cur.fetchone() | ||||
|         if not row: | ||||
|             raise RuntimeError("view missing after insert") | ||||
|         return self._row_to_view(row) | ||||
|  | ||||
|     def update_view( | ||||
|         self, | ||||
|         view_id: int, | ||||
|         *, | ||||
|         name: Optional[str] = None, | ||||
|         columns: Optional[List[str]] = None, | ||||
|         filters: Optional[Dict[str, object]] = None, | ||||
|     ) -> DeviceListView: | ||||
|         fields: List[str] = [] | ||||
|         params: List[object] = [] | ||||
|         if name is not None: | ||||
|             fields.append("name = ?") | ||||
|             params.append(name) | ||||
|         if columns is not None: | ||||
|             fields.append("columns_json = ?") | ||||
|             params.append(json.dumps(columns)) | ||||
|         if filters is not None: | ||||
|             fields.append("filters_json = ?") | ||||
|             params.append(json.dumps(filters)) | ||||
|         fields.append("updated_at = ?") | ||||
|         params.append(int(time.time())) | ||||
|         params.append(view_id) | ||||
|  | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             try: | ||||
|                 cur.execute( | ||||
|                     f"UPDATE device_list_views SET {', '.join(fields)} WHERE id = ?", | ||||
|                     params, | ||||
|                 ) | ||||
|             except sqlite3.IntegrityError as exc: | ||||
|                 raise ValueError("duplicate") from exc | ||||
|             if cur.rowcount == 0: | ||||
|                 raise LookupError("not_found") | ||||
|             conn.commit() | ||||
|             cur.execute( | ||||
|                 "SELECT id, name, columns_json, filters_json, created_at, updated_at FROM device_list_views WHERE id = ?", | ||||
|                 (view_id,), | ||||
|             ) | ||||
|             row = cur.fetchone() | ||||
|         if not row: | ||||
|             raise LookupError("not_found") | ||||
|         return self._row_to_view(row) | ||||
|  | ||||
|     def delete_view(self, view_id: int) -> bool: | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute("DELETE FROM device_list_views WHERE id = ?", (view_id,)) | ||||
|             deleted = cur.rowcount | ||||
|             conn.commit() | ||||
|         return bool(deleted) | ||||
|  | ||||
|     def _row_to_view(self, row: Optional[Iterable[object]]) -> DeviceListView: | ||||
|         if row is None: | ||||
|             raise ValueError("row required") | ||||
|         view_id, name, columns_json, filters_json, created_at, updated_at = row | ||||
|         try: | ||||
|             columns = json.loads(columns_json or "[]") | ||||
|         except Exception: | ||||
|             columns = [] | ||||
|         try: | ||||
|             filters = json.loads(filters_json or "{}") | ||||
|         except Exception: | ||||
|             filters = {} | ||||
|         return DeviceListView( | ||||
|             id=int(view_id), | ||||
|             name=str(name or ""), | ||||
|             columns=list(columns) if isinstance(columns, list) else [], | ||||
|             filters=dict(filters) if isinstance(filters, dict) else {}, | ||||
|             created_at=int(created_at or 0), | ||||
|             updated_at=int(updated_at or 0), | ||||
|         ) | ||||
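A sketch of the device-view CRUD flow, again with the factory stand-in from above. The column and filter values are invented; the `ValueError("duplicate")` and `LookupError("not_found")` contracts are the ones visible in the repository code, and the duplicate case presumably corresponds to a UNIQUE constraint on the view name.

    from Data.Engine.repositories.sqlite.device_view_repository import SQLiteDeviceViewRepository

    views = SQLiteDeviceViewRepository(factory)

    try:
        view = views.create_view(
            "Servers",
            ["hostname", "operating_system", "last_seen"],  # illustrative columns
            {"device_type": "server"},                      # illustrative filters
        )
    except ValueError:
        view = None  # a view named "Servers" already exists

    if view is not None:
        views.update_view(view.id, filters={"device_type": "server", "status": "Online"})
        views.delete_view(view.id)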
							
								
								
									
Data/Engine/repositories/sqlite/site_repository.py (new file, 189 lines)
							| @@ -0,0 +1,189 @@ | ||||
| """SQLite persistence for site management.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import logging | ||||
| import sqlite3 | ||||
| import time | ||||
| from contextlib import closing | ||||
| from typing import Dict, Iterable, List, Optional, Sequence | ||||
|  | ||||
| from Data.Engine.domain.sites import SiteDeviceMapping, SiteSummary | ||||
| from Data.Engine.repositories.sqlite.connection import SQLiteConnectionFactory | ||||
|  | ||||
| __all__ = ["SQLiteSiteRepository"] | ||||
|  | ||||
|  | ||||
| class SQLiteSiteRepository: | ||||
|     """Repository exposing site CRUD and device assignment helpers.""" | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         connection_factory: SQLiteConnectionFactory, | ||||
|         *, | ||||
|         logger: Optional[logging.Logger] = None, | ||||
|     ) -> None: | ||||
|         self._connections = connection_factory | ||||
|         self._log = logger or logging.getLogger("borealis.engine.repositories.sites") | ||||
|  | ||||
|     def list_sites(self) -> List[SiteSummary]: | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute( | ||||
|                 """ | ||||
|                 SELECT s.id, s.name, s.description, s.created_at, | ||||
|                        COALESCE(ds.cnt, 0) AS device_count | ||||
|                   FROM sites s | ||||
|              LEFT JOIN ( | ||||
|                         SELECT site_id, COUNT(*) AS cnt | ||||
|                           FROM device_sites | ||||
|                       GROUP BY site_id | ||||
|                     ) ds | ||||
|                     ON ds.site_id = s.id | ||||
|               ORDER BY LOWER(s.name) ASC | ||||
|                 """ | ||||
|             ) | ||||
|             rows = cur.fetchall() | ||||
|         return [self._row_to_site(row) for row in rows] | ||||
|  | ||||
|     def create_site(self, name: str, description: str) -> SiteSummary: | ||||
|         now = int(time.time()) | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             try: | ||||
|                 cur.execute( | ||||
|                     "INSERT INTO sites(name, description, created_at) VALUES (?, ?, ?)", | ||||
|                     (name, description, now), | ||||
|                 ) | ||||
|             except sqlite3.IntegrityError as exc: | ||||
|                 raise ValueError("duplicate") from exc | ||||
|             site_id = cur.lastrowid | ||||
|             conn.commit() | ||||
|  | ||||
|             cur.execute( | ||||
|                 "SELECT id, name, description, created_at, 0 FROM sites WHERE id = ?", | ||||
|                 (site_id,), | ||||
|             ) | ||||
|             row = cur.fetchone() | ||||
|         if not row: | ||||
|             raise RuntimeError("site not found after insert") | ||||
|         return self._row_to_site(row) | ||||
|  | ||||
|     def delete_sites(self, ids: Sequence[int]) -> int: | ||||
|         if not ids: | ||||
|             return 0 | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             placeholders = ",".join("?" for _ in ids) | ||||
|             try: | ||||
|                 cur.execute( | ||||
|                     f"DELETE FROM device_sites WHERE site_id IN ({placeholders})", | ||||
|                     tuple(ids), | ||||
|                 ) | ||||
|                 cur.execute( | ||||
|                     f"DELETE FROM sites WHERE id IN ({placeholders})", | ||||
|                     tuple(ids), | ||||
|                 ) | ||||
|             except sqlite3.DatabaseError as exc: | ||||
|                 conn.rollback() | ||||
|                 raise | ||||
|             deleted = cur.rowcount | ||||
|             conn.commit() | ||||
|         return deleted | ||||
|  | ||||
|     def rename_site(self, site_id: int, new_name: str) -> SiteSummary: | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             try: | ||||
|                 cur.execute("UPDATE sites SET name = ? WHERE id = ?", (new_name, site_id)) | ||||
|             except sqlite3.IntegrityError as exc: | ||||
|                 raise ValueError("duplicate") from exc | ||||
|             if cur.rowcount == 0: | ||||
|                 raise LookupError("not_found") | ||||
|             conn.commit() | ||||
|             cur.execute( | ||||
|                 """ | ||||
|                 SELECT s.id, s.name, s.description, s.created_at, | ||||
|                        COALESCE(ds.cnt, 0) AS device_count | ||||
|                   FROM sites s | ||||
|              LEFT JOIN ( | ||||
|                         SELECT site_id, COUNT(*) AS cnt | ||||
|                           FROM device_sites | ||||
|                       GROUP BY site_id | ||||
|                     ) ds | ||||
|                     ON ds.site_id = s.id | ||||
|                  WHERE s.id = ? | ||||
|                 """, | ||||
|                 (site_id,), | ||||
|             ) | ||||
|             row = cur.fetchone() | ||||
|         if not row: | ||||
|             raise LookupError("not_found") | ||||
|         return self._row_to_site(row) | ||||
|  | ||||
|     def map_devices(self, hostnames: Optional[Iterable[str]] = None) -> Dict[str, SiteDeviceMapping]: | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             if hostnames: | ||||
|                 normalized = [hn.strip() for hn in hostnames if hn and hn.strip()] | ||||
|                 if not normalized: | ||||
|                     return {} | ||||
|                 placeholders = ",".join("?" for _ in normalized) | ||||
|                 cur.execute( | ||||
|                     f""" | ||||
|                     SELECT ds.device_hostname, s.id, s.name | ||||
|                       FROM device_sites ds | ||||
|                 INNER JOIN sites s ON s.id = ds.site_id | ||||
|                      WHERE ds.device_hostname IN ({placeholders}) | ||||
|                     """, | ||||
|                     tuple(normalized), | ||||
|                 ) | ||||
|             else: | ||||
|                 cur.execute( | ||||
|                     """ | ||||
|                     SELECT ds.device_hostname, s.id, s.name | ||||
|                       FROM device_sites ds | ||||
|                 INNER JOIN sites s ON s.id = ds.site_id | ||||
|                     """ | ||||
|                 ) | ||||
|             rows = cur.fetchall() | ||||
|         mapping: Dict[str, SiteDeviceMapping] = {} | ||||
|         for hostname, site_id, site_name in rows: | ||||
|             mapping[str(hostname)] = SiteDeviceMapping( | ||||
|                 hostname=str(hostname), | ||||
|                 site_id=int(site_id) if site_id is not None else None, | ||||
|                 site_name=str(site_name or ""), | ||||
|             ) | ||||
|         return mapping | ||||
|  | ||||
|     def assign_devices(self, site_id: int, hostnames: Sequence[str]) -> None: | ||||
|         now = int(time.time()) | ||||
|         normalized = [hn.strip() for hn in hostnames if isinstance(hn, str) and hn.strip()] | ||||
|         if not normalized: | ||||
|             return | ||||
|         with closing(self._connections()) as conn: | ||||
|             cur = conn.cursor() | ||||
|             cur.execute("SELECT 1 FROM sites WHERE id = ?", (site_id,)) | ||||
|             if not cur.fetchone(): | ||||
|                 raise LookupError("not_found") | ||||
|             for hostname in normalized: | ||||
|                 cur.execute( | ||||
|                     """ | ||||
|                     INSERT INTO device_sites(device_hostname, site_id, assigned_at) | ||||
|                     VALUES (?, ?, ?) | ||||
|                     ON CONFLICT(device_hostname) | ||||
|                     DO UPDATE SET site_id = excluded.site_id, | ||||
|                                   assigned_at = excluded.assigned_at | ||||
|                     """, | ||||
|                     (hostname, site_id, now), | ||||
|                 ) | ||||
|             conn.commit() | ||||
|  | ||||
|     def _row_to_site(self, row: Sequence[object]) -> SiteSummary: | ||||
|         return SiteSummary( | ||||
|             id=int(row[0]), | ||||
|             name=str(row[1] or ""), | ||||
|             description=str(row[2] or ""), | ||||
|             created_at=int(row[3] or 0), | ||||
|             device_count=int(row[4] or 0), | ||||
|         ) | ||||
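Finally, a sketch tying the site repository to device assignment, with the same factory stand-in; the site and host names are invented. `assign_devices` upserts rows in `device_sites`, and `delete_sites` removes those rows before removing the sites themselves.

    from Data.Engine.repositories.sqlite.site_repository import SQLiteSiteRepository

    sites = SQLiteSiteRepository(factory)

    hq = sites.create_site("Headquarters", "Primary office")  # returns a SiteSummary
    sites.assign_devices(hq.id, ["WS-0042", "WS-0043"])

    mapping = sites.map_devices(["WS-0042"])                  # hostname -> SiteDeviceMapping
    print(mapping["WS-0042"].site_name)                       # "Headquarters"

    sites.rename_site(hq.id, "HQ")
    sites.delete_sites([hq.id])                               # also clears device_sites rows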