From 3990a50e8d439cf2318889b7627d29ecd9bd36b3 Mon Sep 17 00:00:00 2001 From: Nicole Rappe Date: Sat, 27 Sep 2025 18:10:08 -0600 Subject: [PATCH] Changes to Agent Behavior --- Data/Agent/Roles/role_DeviceAudit.py | 264 +++++++++++++++++++++++-- Data/Agent/agent.py | 37 ++-- Data/Server/server.py | 283 ++++++++++++++++++++------- 3 files changed, 477 insertions(+), 107 deletions(-) diff --git a/Data/Agent/Roles/role_DeviceAudit.py b/Data/Agent/Roles/role_DeviceAudit.py index 4ad1b28..1f1961e 100644 --- a/Data/Agent/Roles/role_DeviceAudit.py +++ b/Data/Agent/Roles/role_DeviceAudit.py @@ -20,7 +20,7 @@ except Exception: ROLE_NAME = 'device_audit' -ROLE_CONTEXTS = ['interactive'] +ROLE_CONTEXTS = ['system'] IS_WINDOWS = os.name == 'nt' @@ -124,9 +124,127 @@ def collect_summary(CONFIG): return {'hostname': socket.gethostname()} +def _ps_json(cmd: str, timeout: int = 60): + try: + out = subprocess.run(["powershell", "-NoProfile", "-Command", cmd], capture_output=True, text=True, timeout=timeout) + txt = out.stdout or "" + if txt.strip(): + try: + data = json.loads(txt) + return data + except Exception: + # Sometimes PS emits BOM or warnings; try to find JSON block + try: + start = txt.find('[{') + if start == -1: + start = txt.find('{') + end = txt.rfind('}') + if start != -1 and end != -1 and end > start: + return json.loads(txt[start:end+1]) + except Exception: + pass + return None + except Exception: + return None + + def collect_software(): - # Placeholder: fuller inventory can be added later - return [] + plat = platform.system().lower() + if plat != 'windows': + return [] + # 1) Try PowerShell registry scrape (fast when ConvertTo-Json is available) + try: + ps = r""" +$paths = @( + 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\*', + 'HKLM:\SOFTWARE\WOW6432Node\Microsoft\Windows\CurrentVersion\Uninstall\*', + 'HKCU:\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\*' +) +$list = @() +foreach ($p in $paths) { + try { + $list += Get-ItemProperty -Path $p -ErrorAction SilentlyContinue | + Select-Object DisplayName, DisplayVersion + } catch {} +} +$list = $list | Where-Object { $_.DisplayName -and ("$($_.DisplayName)".Trim().Length -gt 0) } +$list | Sort-Object DisplayName -Unique | ConvertTo-Json -Depth 2 +""" + data = _ps_json(ps, timeout=120) + out = [] + if isinstance(data, dict): + data = [data] + for it in (data or []): + name = str(it.get('DisplayName') or '').strip() + if not name: + continue + ver = str(it.get('DisplayVersion') or '').strip() + out.append({'name': name, 'version': ver}) + if out: + return out + except Exception: + pass + + # 2) Fallback: read registry directly via Python winreg (works on Win7+) + try: + try: + import winreg # type: ignore + except Exception: + return [] + + def _enum_uninstall(root, path, wow_flag=0): + items = [] + try: + key = winreg.OpenKey(root, path, 0, winreg.KEY_READ | wow_flag) + except Exception: + return items + try: + i = 0 + while True: + try: + sub = winreg.EnumKey(key, i) + except OSError: + break + i += 1 + try: + sk = winreg.OpenKey(key, sub, 0, winreg.KEY_READ | wow_flag) + try: + name, _ = winreg.QueryValueEx(sk, 'DisplayName') + except Exception: + name = '' + if name and str(name).strip(): + try: + ver, _ = winreg.QueryValueEx(sk, 'DisplayVersion') + except Exception: + ver = '' + items.append({'name': str(name).strip(), 'version': str(ver or '').strip()}) + except Exception: + continue + except Exception: + pass + return items + + HKLM = getattr(winreg, 'HKEY_LOCAL_MACHINE') + HKCU = getattr(winreg, 
'HKEY_CURRENT_USER') + WOW64_64 = getattr(winreg, 'KEY_WOW64_64KEY', 0) + WOW64_32 = getattr(winreg, 'KEY_WOW64_32KEY', 0) + paths = [ + (HKLM, r"SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall", WOW64_64), + (HKLM, r"SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall", WOW64_32), + (HKLM, r"SOFTWARE\WOW6432Node\Microsoft\Windows\CurrentVersion\Uninstall", 0), + (HKCU, r"SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall", 0), + ] + merged = {} + for root, path, flag in paths: + for it in _enum_uninstall(root, path, flag): + key = (it['name'] or '').lower() + if not key: + continue + if key not in merged: + merged[key] = it + return sorted(merged.values(), key=lambda x: x['name']) + except Exception: + return [] def collect_memory(): @@ -207,19 +325,63 @@ def collect_network(): adapters = [] try: if IS_WINDOWS: + # Try modern Get-NetIPAddress; fallback to ipconfig parsing (Win7) try: ps_cmd = ( - "Get-NetAdapter | Where-Object { $_.Status -eq 'Up' } | " - "ForEach-Object { $_ | Select-Object -Property InterfaceAlias, MacAddress } | ConvertTo-Json" + "try { " + "$ip = Get-NetIPAddress -AddressFamily IPv4 -ErrorAction Stop | " + "Where-Object { $_.IPAddress -and $_.IPAddress -notmatch '^169\\.254\\.' -and $_.IPAddress -ne '127.0.0.1' }; " + "$ad = Get-NetAdapter | ForEach-Object { $_ | Select-Object -Property InterfaceAlias, MacAddress }; " + "$map = @{}; foreach($a in $ad){ $map[$a.InterfaceAlias] = $a.MacAddress }; " + "$out = @(); foreach($e in $ip){ $mac = $map[$e.InterfaceAlias]; $out += [pscustomobject]@{ InterfaceAlias=$e.InterfaceAlias; IPAddress=$e.IPAddress; MacAddress=$mac } } " + "$out | ConvertTo-Json -Depth 3 } catch { '' }" ) - out = subprocess.run(["powershell", "-NoProfile", "-Command", ps_cmd], capture_output=True, text=True, timeout=60) - data = json.loads(out.stdout or "[]") + data = _ps_json(ps_cmd, timeout=60) if isinstance(data, dict): data = [data] - for a in data: - adapters.append({'adapter': a.get('InterfaceAlias', 'unknown'), 'ips': [], 'mac': a.get('MacAddress', 'unknown')}) + tmp = {} + for e in (data or []): + alias = e.get('InterfaceAlias') or 'unknown' + ip = e.get('IPAddress') or '' + mac = e.get('MacAddress') or 'unknown' + if not ip: + continue + item = tmp.setdefault(alias, {'adapter': alias, 'ips': [], 'mac': mac}) + if ip not in item['ips']: + item['ips'].append(ip) + if tmp: + adapters = list(tmp.values()) + else: + raise Exception('empty') except Exception: - pass + # Win7/older fallback: parse ipconfig + try: + out = subprocess.run(["ipconfig"], capture_output=True, text=True, timeout=30) + cur = None + for line in (out.stdout or '').splitlines(): + s = line.strip() + if not s: + continue + if s.endswith(":") and ('adapter' in s.lower() or 'ethernet' in s.lower() or 'wireless' in s.lower()): + cur = {'adapter': s.replace(':','').strip(), 'ips': [], 'mac': 'unknown'} + adapters.append(cur) + continue + if s.lower().startswith('ipv4 address') or s.lower().startswith('ipv4-adresse') or 'ipv4' in s.lower(): + try: + ip = s.split(':')[-1].strip() + except Exception: + ip = '' + if ip and not ip.startswith('169.254.') and ip != '127.0.0.1' and cur: + cur['ips'].append(ip) + if s.lower().startswith('physical address') or s.lower().startswith('mac address'): + try: + mac = s.split(':')[-1].strip() + except Exception: + mac = '' + if mac and cur: + cur['mac'] = mac + except Exception: + pass else: out = subprocess.run(["ip", "-o", "-4", "addr", "show"], capture_output=True, text=True, timeout=60) for line in out.stdout.splitlines(): @@ -233,9 +395,36 @@ 
def collect_network(): return adapters +def detect_device_type(): + try: + plat = platform.system().lower() + if plat != 'windows': + return '' + ps = r""" +function _getCim($cls){ try { return Get-CimInstance $cls -ErrorAction Stop } catch { try { return Get-WmiObject -Class $cls -ErrorAction Stop } catch { return $null } } } +$os = _getCim 'Win32_OperatingSystem' +$cs = _getCim 'Win32_ComputerSystem' +$caption = ""; if ($os) { $caption = [string]$os.Caption } +$model = ""; if ($cs) { $model = [string]$cs.Model } +$manu = ""; if ($cs) { $manu = [string]$cs.Manufacturer } +$virt = $false +if ($model -match 'Virtual' -or $manu -match 'Microsoft Corporation' -and $model -match 'Virtual Machine' -or $manu -match 'VMware' -or $manu -match 'innotek' -or $manu -match 'VirtualBox' -or $manu -match 'QEMU' -or $manu -match 'Xen' -or $manu -match 'Parallels') { $virt = $true } +if ($virt) { 'Virtual Machine' } +elseif ($caption -match 'Server') { 'Server' } +else { 'Workstation' } +""" + out = subprocess.run(["powershell", "-NoProfile", "-Command", ps], capture_output=True, text=True, timeout=15) + s = (out.stdout or '').strip() + return s.splitlines()[0].strip() if s else '' + except Exception: + return '' + + class Role: def __init__(self, ctx): self.ctx = ctx + self._ext_ip = None + self._ext_ip_ts = 0 try: # Set OS string once self.ctx.config.data['agent_operating_system'] = detect_agent_os() @@ -265,6 +454,29 @@ class Role: 'storage': collect_storage(), 'network': collect_network(), } + # Derive additional summary fields + try: + # Internal IP: first IPv4 on first adapter + internal_ip = '' + for a in (details.get('network') or []): + for ip in (a.get('ips') or []): + if ip and not ip.startswith('169.254.') and ip != '127.0.0.1': + internal_ip = ip + break + if internal_ip: + break + details['summary']['internal_ip'] = internal_ip + except Exception: + pass + try: + details['summary']['device_type'] = detect_device_type() + except Exception: + pass + try: + if psutil: + details['summary']['last_reboot'] = int(psutil.boot_time()) + except Exception: + pass url = (self.ctx.config.data.get('borealis_server_url', 'http://localhost:5000') or '').rstrip('/') + '/api/agent/details' payload = { 'agent_id': self.ctx.agent_id, @@ -273,8 +485,38 @@ class Role: } if aiohttp is not None: async with aiohttp.ClientSession() as session: + # External IP: refresh at most every 30 minutes + try: + now = time.time() + if (now - self._ext_ip_ts) > 1800: + # Try ipify JSON; fallback to plain-text ifconfig.me + ok = False + try: + async with session.get('https://api.ipify.org?format=json', timeout=8) as resp: + if resp.status == 200: + j = await resp.json() + self._ext_ip = (j.get('ip') or '').strip() + self._ext_ip_ts = now + ok = True + except Exception: + pass + if not ok: + try: + async with session.get('https://ifconfig.me/ip', timeout=8) as resp2: + if resp2.status == 200: + t = (await resp2.text()) or '' + t = t.strip() + if t: + self._ext_ip = t + self._ext_ip_ts = now + ok = True + except Exception: + pass + if self._ext_ip: + details['summary']['external_ip'] = self._ext_ip + except Exception: + pass await session.post(url, json=payload, timeout=10) except Exception: pass await asyncio.sleep(300) - diff --git a/Data/Agent/agent.py b/Data/Agent/agent.py index 3dd111d..adc5032 100644 --- a/Data/Agent/agent.py +++ b/Data/Agent/agent.py @@ -1016,11 +1016,7 @@ async def connect(): pass await sio.emit('request_config', {"agent_id": AGENT_ID}) - # Kick off a one-time device details post for faster UI population - 
try: - asyncio.create_task(send_agent_details_once()) - except Exception: - pass + # Inventory details posting is managed by the DeviceAudit role (SYSTEM). No one-shot post here. @sio.event async def disconnect(): @@ -1314,17 +1310,19 @@ if __name__=='__main__': hooks=hooks, ) ROLE_MANAGER.load() - # Load system roles when headless or alongside interactive - ROLE_MANAGER_SYS = RoleManager( - base_dir=os.path.dirname(__file__), - context='system', - sio=sio, - agent_id=AGENT_ID, - config=CONFIG, - loop=loop, - hooks=hooks, - ) - ROLE_MANAGER_SYS.load() + # Load system roles only when running in SYSTEM service mode + ROLE_MANAGER_SYS = None + if SYSTEM_SERVICE_MODE: + ROLE_MANAGER_SYS = RoleManager( + base_dir=os.path.dirname(__file__), + context='system', + sio=sio, + agent_id=AGENT_ID, + config=CONFIG, + loop=loop, + hooks=hooks, + ) + ROLE_MANAGER_SYS.load() except Exception as e: try: _bootstrap_log(f'role load init failed: {e}') @@ -1336,11 +1334,8 @@ if __name__=='__main__': background_tasks.append(loop.create_task(idle_task())) # Start periodic heartbeats background_tasks.append(loop.create_task(send_heartbeat())) - # Periodic device details upload so Devices view populates - try: - background_tasks.append(loop.create_task(send_agent_details())) - except Exception: - pass + # Inventory upload is handled by the DeviceAudit role running in SYSTEM context. + # Do not schedule the legacy agent-level details poster to avoid duplicates. # Register unified Quick Job handler last to avoid role override issues @sio.on('quick_job_run') diff --git a/Data/Server/server.py b/Data/Server/server.py index 39d3845..6f6132e 100644 --- a/Data/Server/server.py +++ b/Data/Server/server.py @@ -142,7 +142,17 @@ def _sha512_hex(s: str) -> str: def _db_conn(): - return sqlite3.connect(DB_PATH) + conn = sqlite3.connect(DB_PATH, timeout=15) + try: + cur = conn.cursor() + # Enable better read/write concurrency + cur.execute("PRAGMA journal_mode=WAL") + cur.execute("PRAGMA busy_timeout=5000") + cur.execute("PRAGMA synchronous=NORMAL") + conn.commit() + except Exception: + pass + return conn def _user_row_to_dict(row): @@ -1047,13 +1057,21 @@ os.makedirs(os.path.dirname(DB_PATH), exist_ok=True) def init_db(): """Initialize all required tables in the unified database.""" - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() # Device details table cur.execute( - "CREATE TABLE IF NOT EXISTS device_details (hostname TEXT PRIMARY KEY, description TEXT, details TEXT)" + "CREATE TABLE IF NOT EXISTS device_details (hostname TEXT PRIMARY KEY, description TEXT, details TEXT, created_at INTEGER)" ) + # Backfill missing created_at column on existing installs + try: + cur.execute("PRAGMA table_info(device_details)") + cols = [r[1] for r in cur.fetchall()] + if 'created_at' not in cols: + cur.execute("ALTER TABLE device_details ADD COLUMN created_at INTEGER") + except Exception: + pass # Activity history table for script/job runs cur.execute( @@ -1239,7 +1257,7 @@ def _row_to_site(row): @app.route("/api/sites", methods=["GET"]) def list_sites(): try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( """ @@ -1270,7 +1288,7 @@ def create_site(): return jsonify({"error": "name is required"}), 400 now = int(time.time()) try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( "INSERT INTO sites(name, description, created_at) VALUES (?, ?, ?)", @@ -1308,7 +1326,7 @@ def delete_sites(): if not norm_ids: return jsonify({"status": "ok", 
"deleted": 0}) try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() # Clean assignments first (in case FK ON DELETE CASCADE not enforced) cur.execute( @@ -1342,7 +1360,7 @@ def sites_device_map(): p = part.strip() if p: filter_set.add(p) - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() if filter_set: placeholders = ','.join('?' * len(filter_set)) @@ -1385,7 +1403,7 @@ def assign_devices_to_site(): return jsonify({"error": "hostnames must be a list of strings"}), 400 now = int(time.time()) try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() # Ensure site exists cur.execute("SELECT 1 FROM sites WHERE id = ?", (site_id,)) @@ -1420,7 +1438,7 @@ def _load_device_records(limit: int = 0): hostname, description, last_user, internal_ip, external_ip, site_id, site_name """ try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute("SELECT hostname, description, details FROM device_details") rows = cur.fetchall() @@ -1530,7 +1548,7 @@ def search_suggest(): if field in site_fields: column = site_fields[field] try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute("SELECT id, name, description FROM sites") for sid, name, desc in cur.fetchall(): @@ -1573,7 +1591,7 @@ def _row_to_view(row): @app.route("/api/device_list_views", methods=["GET"]) def list_device_list_views(): try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( "SELECT id, name, columns_json, filters_json, created_at, updated_at FROM device_list_views ORDER BY name COLLATE NOCASE ASC" @@ -1588,7 +1606,7 @@ def list_device_list_views(): @app.route("/api/device_list_views/", methods=["GET"]) def get_device_list_view(view_id: int): try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( "SELECT id, name, columns_json, filters_json, created_at, updated_at FROM device_list_views WHERE id = ?", @@ -1621,7 +1639,7 @@ def create_device_list_view(): now = int(time.time()) try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( "INSERT INTO device_list_views(name, columns_json, filters_json, created_at, updated_at) VALUES (?, ?, ?, ?, ?)", @@ -1676,7 +1694,7 @@ def update_device_list_view(view_id: int): params.append(view_id) try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute(f"UPDATE device_list_views SET {', '.join(fields)} WHERE id = ?", params) if cur.rowcount == 0: @@ -1699,7 +1717,7 @@ def update_device_list_view(view_id: int): @app.route("/api/device_list_views/", methods=["DELETE"]) def delete_device_list_view(view_id: int): try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute("DELETE FROM device_list_views WHERE id = ?", (view_id,)) if cur.rowcount == 0: @@ -1722,27 +1740,31 @@ def _persist_last_seen(hostname: str, last_seen: int, agent_id: str = None): if not hostname or str(hostname).strip().lower() == "unknown": return try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( - "SELECT details, description FROM device_details WHERE hostname = ?", + "SELECT details, description, created_at FROM device_details WHERE hostname = ?", (hostname,), ) row = cur.fetchone() - # Load existing details JSON or create a minimal one if row and row[0]: try: details = json.loads(row[0]) except Exception: details = {} - description = row[1] if len(row) > 1 else "" + description = row[1] or "" + 
created_at = int(row[2] or 0) else: details = {} description = "" + created_at = 0 summary = details.get("summary") or {} summary["hostname"] = summary.get("hostname") or hostname - summary["last_seen"] = int(last_seen or 0) + try: + summary["last_seen"] = int(last_seen or 0) + except Exception: + summary["last_seen"] = int(time.time()) if agent_id: try: summary["agent_id"] = str(agent_id) @@ -1750,9 +1772,27 @@ def _persist_last_seen(hostname: str, last_seen: int, agent_id: str = None): pass details["summary"] = summary + now = int(time.time()) + # Ensure 'created' string aligns with created_at we will store + target_created_at = created_at or now + try: + from datetime import datetime, timezone + human = datetime.fromtimestamp(target_created_at, timezone.utc).strftime('%Y-%m-%d %H:%M:%S') + details.setdefault('summary', {})['created'] = details.get('summary', {}).get('created') or human + except Exception: + pass + + # Single upsert to avoid unique-constraint races cur.execute( - "REPLACE INTO device_details (hostname, description, details) VALUES (?, ?, ?)", - (hostname, description, json.dumps(details)), + """ + INSERT INTO device_details(hostname, description, details, created_at) + VALUES (?, ?, ?, ?) + ON CONFLICT(hostname) DO UPDATE SET + description=excluded.description, + details=excluded.details, + created_at=COALESCE(device_details.created_at, excluded.created_at) + """, + (hostname, description, json.dumps(details), target_created_at), ) conn.commit() conn.close() @@ -1763,7 +1803,7 @@ def _persist_last_seen(hostname: str, last_seen: int, agent_id: str = None): def load_agents_from_db(): """Populate registered_agents with any devices stored in the database.""" try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute("SELECT hostname, details FROM device_details") for hostname, details_json in cur.fetchall(): @@ -1820,6 +1860,28 @@ def get_agents(): ## dayjs_to_ts removed; scheduling parsing now lives in job_scheduler +def _is_empty(v): + return v is None or v == '' or v == [] or v == {} + + +def _deep_merge_preserve(prev: dict, incoming: dict) -> dict: + out = dict(prev or {}) + for k, v in (incoming or {}).items(): + if isinstance(v, dict): + out[k] = _deep_merge_preserve(out.get(k) or {}, v) + elif isinstance(v, list): + # Only replace list if incoming has content; else keep prev + if v: + out[k] = v + else: + # Keep previous non-empty values when incoming is empty + if _is_empty(v): + # do not overwrite + continue + out[k] = v + return out + + @app.route("/api/agent/details", methods=["POST"]) def save_agent_details(): data = request.get_json(silent=True) or {} @@ -1827,63 +1889,104 @@ def save_agent_details(): details = data.get("details") agent_id = data.get("agent_id") if not hostname and isinstance(details, dict): - hostname = details.get("summary", {}).get("hostname") + hostname = (details.get("summary") or {}).get("hostname") if not hostname or not isinstance(details, dict): return jsonify({"error": "invalid payload"}), 400 try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() - # Load existing details/description so we can preserve description and merge last_seen + # Load existing row to preserve description and created_at and merge fields cur.execute( - "SELECT details, description FROM device_details WHERE hostname = ?", + "SELECT details, description, created_at FROM device_details WHERE hostname = ?", (hostname,), ) row = cur.fetchone() prev_details = {} - if row and row[0]: + description = "" + 
created_at = 0 + if row: try: - prev_details = json.loads(row[0]) + prev_details = json.loads(row[0] or '{}') except Exception: prev_details = {} - description = row[1] if row and len(row) > 1 else "" + description = row[1] or "" + try: + created_at = int(row[2] or 0) + except Exception: + created_at = 0 - # Ensure details.summary.last_seen is preserved/merged so it survives restarts - try: - incoming_summary = details.setdefault("summary", {}) - # Attach agent_id and hostname if provided/missing to aid future merges + # Ensure summary exists and attach hostname/agent_id if missing + incoming_summary = details.setdefault("summary", {}) + if agent_id and not incoming_summary.get("agent_id"): try: - if agent_id and not incoming_summary.get("agent_id"): - incoming_summary["agent_id"] = str(agent_id) + incoming_summary["agent_id"] = str(agent_id) except Exception: pass - if hostname and not incoming_summary.get("hostname"): - incoming_summary["hostname"] = hostname - if not incoming_summary.get("last_seen"): - last_seen = None - if agent_id and agent_id in registered_agents: - last_seen = registered_agents[agent_id].get("last_seen") - if not last_seen: - last_seen = (prev_details.get("summary") or {}).get("last_seen") - if last_seen: + if hostname and not incoming_summary.get("hostname"): + incoming_summary["hostname"] = hostname + + # Preserve last_seen if incoming omitted it + if not incoming_summary.get("last_seen"): + last_seen = None + if agent_id and agent_id in registered_agents: + last_seen = registered_agents[agent_id].get("last_seen") + if last_seen is None: + last_seen = (prev_details.get("summary") or {}).get("last_seen") + if last_seen is not None: + try: incoming_summary["last_seen"] = int(last_seen) - # Refresh server-side cache so /api/agents includes latest OS and device type - try: - if agent_id and agent_id in registered_agents: - rec = registered_agents[agent_id] - os_name = incoming_summary.get("operating_system") or incoming_summary.get("agent_operating_system") - if os_name: - rec["agent_operating_system"] = os_name - dt = (incoming_summary.get("device_type") or "").strip() - if dt: - rec["device_type"] = dt - except Exception: - pass + except Exception: + pass + + # Deep-merge incoming over previous, but do not overwrite with empties + merged = _deep_merge_preserve(prev_details, details) + + # Preserve last_user if incoming omitted/empty + try: + prev_last_user = (prev_details.get('summary') or {}).get('last_user') + cur_last_user = (merged.get('summary') or {}).get('last_user') + if _is_empty(cur_last_user) and prev_last_user: + merged.setdefault('summary', {})['last_user'] = prev_last_user except Exception: pass + # Refresh server-side in-memory registry for OS and device type + try: + if agent_id and agent_id in registered_agents: + rec = registered_agents[agent_id] + os_name = (merged.get("summary") or {}).get("operating_system") or (merged.get("summary") or {}).get("agent_operating_system") + if os_name: + rec["agent_operating_system"] = os_name + dt = ((merged.get("summary") or {}).get("device_type") or "").strip() + if dt: + rec["device_type"] = dt + except Exception: + pass + + now = int(time.time()) + # Ensure created_at is set on first insert and mirror into merged.summary.created as human string + if created_at <= 0: + created_at = now + try: + from datetime import datetime, timezone + human = datetime.fromtimestamp(created_at, timezone.utc).strftime('%Y-%m-%d %H:%M:%S') + merged.setdefault('summary', {}) + if not merged['summary'].get('created'): + 
merged['summary']['created'] = human
+        except Exception:
+            pass
+
+        # Upsert row without destroying created_at; keep previous created_at if exists
         cur.execute(
-            "REPLACE INTO device_details (hostname, description, details) VALUES (?, ?, ?)",
-            (hostname, description, json.dumps(details)),
+            """
+            INSERT INTO device_details(hostname, description, details, created_at)
+            VALUES (?,?,?,?)
+            ON CONFLICT(hostname) DO UPDATE SET
+              description=excluded.description,
+              details=excluded.details,
+              created_at=COALESCE(device_details.created_at, excluded.created_at)
+            """,
+            (hostname, description, json.dumps(merged), created_at),
         )
         conn.commit()
         conn.close()
@@ -1895,10 +1998,10 @@ def save_agent_details():
 @app.route("/api/device/details/<hostname>", methods=["GET"])
 def get_device_details(hostname: str):
     try:
-        conn = sqlite3.connect(DB_PATH)
+        conn = _db_conn()
         cur = conn.cursor()
         cur.execute(
-            "SELECT details, description FROM device_details WHERE hostname = ?",
+            "SELECT details, description, created_at FROM device_details WHERE hostname = ?",
             (hostname,),
         )
         row = cur.fetchone()
@@ -1909,8 +2012,16 @@
             except Exception:
                 details = {}
             description = row[1] if len(row) > 1 else ""
+            created_at = int(row[2] or 0) if len(row) > 2 else 0
             if description:
                 details.setdefault("summary", {})["description"] = description
+            # Ensure created string exists from created_at
+            try:
+                if created_at and not (details.get('summary') or {}).get('created'):
+                    from datetime import datetime
+                    details.setdefault('summary', {})['created'] = datetime.utcfromtimestamp(created_at).strftime('%Y-%m-%d %H:%M:%S')
+            except Exception:
+                pass
             return jsonify(details)
     except Exception:
         pass
@@ -1922,12 +2033,19 @@ def set_device_description(hostname: str):
     data = request.get_json(silent=True) or {}
     description = (data.get("description") or "").strip()
     try:
-        conn = sqlite3.connect(DB_PATH)
+        conn = _db_conn()
         cur = conn.cursor()
+        now = int(time.time())
+        # Insert row if missing with created_at; otherwise update description only
         cur.execute(
-            "INSERT INTO device_details(hostname, description, details) VALUES (?, ?, COALESCE((SELECT details FROM device_details WHERE hostname = ?), '{}')) "
-            "ON CONFLICT(hostname) DO UPDATE SET description=excluded.description",
-            (hostname, description, hostname),
+            "INSERT INTO device_details(hostname, description, details, created_at) "
+            "VALUES (?, COALESCE(?, ''), COALESCE((SELECT details FROM device_details WHERE hostname = ?), '{}'), ?) "
+            "ON CONFLICT(hostname) DO NOTHING",
+            (hostname, description, hostname, now),
+        )
+        cur.execute(
+            "UPDATE device_details SET description=? WHERE hostname=?",
WHERE hostname=?", + (description, hostname), ) conn.commit() conn.close() @@ -1995,7 +2113,7 @@ def scripts_quick_run(): for host in hostnames: job_id = None try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( """ @@ -2040,7 +2158,7 @@ def scripts_quick_run(): @app.route("/api/device/activity/", methods=["GET", "DELETE"]) def device_activity(hostname: str): try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() if request.method == "DELETE": cur.execute("DELETE FROM activity_history WHERE hostname = ?", (hostname,)) @@ -2074,7 +2192,7 @@ def device_activity(hostname: str): @app.route("/api/device/activity/job/", methods=["GET"]) def device_activity_job(job_id: int): try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( "SELECT id, hostname, script_name, script_path, script_type, ran_at, status, stdout, stderr FROM activity_history WHERE id = ?", @@ -2111,7 +2229,7 @@ def handle_quick_job_result(data): stdout = data.get("stdout") or "" stderr = data.get("stderr") or "" try: - conn = sqlite3.connect(DB_PATH) + conn = _db_conn() cur = conn.cursor() cur.execute( "UPDATE activity_history SET status=?, stdout=?, stderr=? WHERE id=?", @@ -2140,25 +2258,40 @@ def handle_collector_status(data): rec['collector_active_ts'] = time.time() if last_user and (hostname or rec.get('hostname')): try: - conn = sqlite3.connect(DB_PATH) + host = hostname or rec.get('hostname') + conn = _db_conn() cur = conn.cursor() cur.execute( - "SELECT details, description FROM device_details WHERE hostname = ?", - (hostname or rec.get('hostname'),), + "SELECT details, description, created_at FROM device_details WHERE hostname = ?", + (host,), ) row = cur.fetchone() - details = {} if row and row[0]: try: details = json.loads(row[0]) except Exception: details = {} + description = row[1] or "" + created_at = int(row[2] or 0) + else: + details = {} + description = "" + created_at = 0 summary = details.get('summary') or {} + # Only update last_user if provided; do not clear other fields summary['last_user'] = last_user details['summary'] = summary + now = int(time.time()) cur.execute( - "REPLACE INTO device_details (hostname, description, details) VALUES (?, COALESCE((SELECT description FROM device_details WHERE hostname=?), ''), ?)", - ((hostname or rec.get('hostname')), (hostname or rec.get('hostname')), json.dumps(details)) + """ + INSERT INTO device_details(hostname, description, details, created_at) + VALUES (?,?,?,?) + ON CONFLICT(hostname) DO UPDATE SET + description=excluded.description, + details=excluded.details, + created_at=COALESCE(device_details.created_at, excluded.created_at) + """, + (host, description, json.dumps(details), created_at or now), ) conn.commit() conn.close()