Bundle agent hash into audit payload and surface in UI

2025-10-05 02:21:43 -06:00
parent e6516b64dc
commit c8deb11356
2 changed files with 160 additions and 41 deletions
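
The change has two halves: the agent-side audit code learns to determine its own commit hash, and the device list learns to display it. On the agent, the hash is read from github_repo_hash.txt when that file exists, and otherwise recovered directly from the checkout's .git metadata, with no git subprocess. The sketch below isolates that HEAD-resolution idea; the names (head_commit, repo) are illustrative rather than taken from the diff, and the committed version additionally consults packed-refs and caches results keyed by file mtime.

# Minimal sketch of resolving a commit hash from .git/HEAD, assuming a
# standard .git layout; head_commit and repo are illustrative names.
from pathlib import Path
from typing import Optional

def head_commit(repo: Path) -> Optional[str]:
    head_file = repo / ".git" / "HEAD"
    if not head_file.is_file():
        return None
    head = head_file.read_text(encoding="utf-8").strip()
    if head.startswith("ref:"):  # symbolic ref, e.g. "ref: refs/heads/main"
        ref = head.split(":", 1)[1].strip()
        ref_file = repo.joinpath(".git", *ref.split("/"))
        if ref_file.is_file():
            return ref_file.read_text(encoding="utf-8").strip() or None
        return None  # a packed-refs lookup would go here, as in the real change
    return head or None  # detached HEAD: the file holds the hash itself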


@@ -9,6 +9,7 @@ import shutil
 import string
 import asyncio
 from pathlib import Path
+from typing import Optional
 try:
     import psutil  # type: ignore
@@ -134,24 +135,132 @@ def _project_root():
     return os.getcwd()
-_AGENT_HASH_CACHE = {"path": None, "mtime": None, "value": None}
+_AGENT_HASH_CACHE = {
+    "path": None,
+    "mtime": None,
+    "value": None,
+    "source": None,
+    "extra": None,
+}
+
+
+def _iter_hash_roots():
+    seen = set()
+    root = _project_root()
+    for _ in range(6):
+        if not root or root in seen:
+            break
+        yield root
+        seen.add(root)
+        parent = os.path.dirname(root)
+        if not parent or parent == root:
+            break
+        root = parent
+
+
+def _resolve_git_head_hash(root: str) -> Optional[str]:
+    git_dir = os.path.join(root, ".git")
+    head_path = os.path.join(git_dir, "HEAD")
+    if not os.path.isfile(head_path):
+        return None
+    try:
+        with open(head_path, "r", encoding="utf-8") as fh:
+            head = fh.read().strip()
+    except Exception:
+        return None
+    if not head:
+        return None
+    if head.startswith("ref:"):
+        ref = head.split(" ", 1)[1].strip() if " " in head else head.split(":", 1)[1].strip()
+        if not ref:
+            return None
+        ref_path = os.path.join(git_dir, *ref.split("/"))
+        if os.path.isfile(ref_path):
+            try:
+                with open(ref_path, "r", encoding="utf-8") as rf:
+                    commit = rf.read().strip()
+                return commit or None
+            except Exception:
+                return None
+        packed_refs = os.path.join(git_dir, "packed-refs")
+        if os.path.isfile(packed_refs):
+            try:
+                with open(packed_refs, "r", encoding="utf-8") as pf:
+                    for line in pf:
+                        line = line.strip()
+                        if not line or line.startswith("#") or line.startswith("^"):
+                            continue
+                        try:
+                            commit, ref_name = line.split(" ", 1)
+                        except ValueError:
+                            continue
+                        if ref_name.strip() == ref:
+                            commit = commit.strip()
+                            return commit or None
+            except Exception:
+                return None
+        return None
+    # Detached HEAD contains the commit hash directly
+    commit = head.splitlines()[0].strip()
+    return commit or None
+
+
 def _read_agent_hash():
     try:
-        root = _project_root()
-        path = os.path.join(root, 'github_repo_hash.txt')
         cache = _AGENT_HASH_CACHE
+        for root in _iter_hash_roots():
+            path = os.path.join(root, 'github_repo_hash.txt')
             if not os.path.isfile(path):
-            cache.update({"path": path, "mtime": None, "value": None})
-            return None
+                continue
             mtime = os.path.getmtime(path)
-        if cache.get("path") == path and cache.get("mtime") == mtime:
+            if (
+                cache.get("source") == "file"
+                and cache.get("path") == path
+                and cache.get("mtime") == mtime
+            ):
                 return cache.get("value")
             with open(path, 'r', encoding='utf-8') as fh:
                 value = fh.read().strip()
-        cache.update({"path": path, "mtime": mtime, "value": value or None})
+            cache.update(
+                {
+                    "source": "file",
+                    "path": path,
+                    "mtime": mtime,
+                    "extra": None,
+                    "value": value or None,
+                }
+            )
             return cache.get("value")
+        for root in _iter_hash_roots():
+            git_dir = os.path.join(root, '.git')
+            head_path = os.path.join(git_dir, 'HEAD')
+            if not os.path.isfile(head_path):
+                continue
+            head_mtime = os.path.getmtime(head_path)
+            packed_path = os.path.join(git_dir, 'packed-refs')
+            packed_mtime = os.path.getmtime(packed_path) if os.path.isfile(packed_path) else None
+            if (
+                cache.get("source") == "git"
+                and cache.get("path") == head_path
+                and cache.get("mtime") == head_mtime
+                and cache.get("extra") == packed_mtime
+            ):
+                return cache.get("value")
+            commit = _resolve_git_head_hash(root)
+            cache.update(
+                {
+                    "source": "git",
+                    "path": head_path,
+                    "mtime": head_mtime,
+                    "extra": packed_mtime,
+                    "value": commit or None,
+                }
+            )
+            if commit:
+                return commit
+        cache.update({"source": None, "path": None, "mtime": None, "extra": None, "value": None})
+        return None
     except Exception:
         try:
             _AGENT_HASH_CACHE.update({"value": None})
@@ -803,6 +912,12 @@ def _build_details_fallback() -> dict:
         'storage': collect_storage(),
         'network': network,
     }
+    try:
+        agent_hash_value = _read_agent_hash()
+        if agent_hash_value:
+            details.setdefault('summary', {})['agent_hash'] = agent_hash_value
+    except Exception:
+        pass
     return details
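
With that fallback in place, the hash travels inside the audit details payload as summary.agent_hash, which is the key the UI change below reads. A minimal consumer, assuming only the key this commit adds (the surrounding payload shape is abbreviated):

# Hypothetical consumer of the details payload built above; only
# summary.agent_hash is guaranteed by this change.
from typing import Optional

def agent_hash_from_details(details: dict) -> Optional[str]:
    summary = details.get('summary') or {}
    value = (summary.get('agent_hash') or '').strip()
    return value or None

details = {'storage': {}, 'network': {}, 'summary': {'agent_hash': 'c8deb11356'}}
assert agent_hash_from_details(details) == 'c8deb11356'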


@@ -167,7 +167,7 @@ export default function DeviceList({ onSelectDevice }) {
       const arr = Object.entries(data || {}).map(([id, a]) => {
         const hostname = a.hostname || id || "unknown";
         const details = detailsByHost[hostname] || {};
-        const agentHash = (a.agent_hash || "").trim();
+        const agentHash = (a.agent_hash || details.agentHash || "").trim();
         return {
           id,
           hostname,
@@ -230,9 +230,8 @@ export default function DeviceList({ onSelectDevice }) {
                 const externalIp = summary.external_ip || "";
                 const lastReboot = summary.last_reboot || "";
                 const description = summary.description || "";
-                setDetailsByHost((prev) => ({
-                  ...prev,
-                  [h]: {
+                const agentHashValue = (summary.agent_hash || "").trim();
+                const enriched = {
                   lastUser,
                   created: createdRaw,
                   createdTs,
@@ -241,32 +240,37 @@
                   externalIp,
                   lastReboot,
                   description,
-                  },
+                  agentHash: agentHashValue,
+                };
+                setDetailsByHost((prev) => ({
+                  ...prev,
+                  [h]: enriched,
                 }));
+                setRows((prev) =>
+                  prev.map((r) => {
+                    if (r.hostname !== h) return r;
+                    const nextHash = agentHashValue || r.agentHash;
+                    return {
+                      ...r,
+                      lastUser: enriched.lastUser || r.lastUser,
+                      type: enriched.type || r.type,
+                      created: enriched.created || r.created,
+                      createdTs: enriched.createdTs || r.createdTs,
+                      internalIp: enriched.internalIp || r.internalIp,
+                      externalIp: enriched.externalIp || r.externalIp,
+                      lastReboot: enriched.lastReboot || r.lastReboot,
+                      description: enriched.description || r.description,
+                      agentHash: nextHash,
+                      agentVersion: computeAgentVersion(nextHash, repoSha),
+                    };
+                  })
+                );
               } catch {
                 // ignore per-host failure
               }
             })
           );
         }
-        // After caching, refresh rows to apply newly available details
-        setRows((prev) =>
-          prev.map((r) => {
-            const det = detailsByHost[r.hostname];
-            if (!det) return r;
-            return {
-              ...r,
-              lastUser: det.lastUser || r.lastUser,
-              type: det.type || r.type,
-              created: det.created || r.created,
-              createdTs: det.createdTs || r.createdTs,
-              internalIp: det.internalIp || r.internalIp,
-              externalIp: det.externalIp || r.externalIp,
-              lastReboot: det.lastReboot || r.lastReboot,
-              description: det.description || r.description,
-            };
-          })
-        );
       }
     } catch (e) {
       console.warn("Failed to load agents:", e);
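
Each per-host refresh keeps previously known row values, only overwriting when the freshly fetched summary supplies one, and recomputes the version label from whichever hash wins. The body of computeAgentVersion is not part of this diff; a hypothetical Python analogue of the comparison its call signature suggests (agent hash versus the repo's current SHA) might be:

# Hypothetical analogue of the UI's computeAgentVersion(nextHash, repoSha);
# the real helper's implementation is not shown in this diff.
from typing import Optional

def compute_agent_version(agent_hash: Optional[str], repo_sha: Optional[str]) -> str:
    if not agent_hash:
        return 'unknown'
    if not repo_sha:
        return agent_hash[:10]  # no reference SHA: fall back to the short hash
    shorter, longer = sorted((agent_hash, repo_sha), key=len)
    return 'current' if longer.startswith(shorter) else 'outdated'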