mirror of https://github.com/bunny-lab-io/Borealis.git
synced 2025-10-26 17:21:58 -06:00

Implement server-managed agent update handshake

313 Borealis.ps1
@@ -633,37 +633,74 @@ function Start-AgentScheduledTasks {
    }
}

$BorealisRepoOwner = 'bunny-lab-io'
$BorealisRepoName = 'Borealis'
$BorealisRepoBranch = 'main'

function Get-BorealisServerRepoSha {
function Get-BorealisServerUrl {
    param(
        [Parameter(Mandatory = $true)]
        [string]$BaseUrl,

        [Parameter(Mandatory = $true)]
        [string]$Owner,

        [Parameter(Mandatory = $true)]
        [string]$Repo,

        [Parameter(Mandatory = $true)]
        [string]$Branch
        [string]$AgentRoot
    )

    if ([string]::IsNullOrWhiteSpace($BaseUrl)) {
        throw 'Server URL is blank; cannot query repo hash.'
    $serverBaseUrl = $env:BOREALIS_SERVER_URL
    if (-not $serverBaseUrl) {
        try {
            if (-not $AgentRoot) { $AgentRoot = $scriptDir }
            $settingsDir = Join-Path $AgentRoot 'Settings'
            $serverUrlFile = Join-Path $settingsDir 'server_url.txt'
            if (Test-Path $serverUrlFile -PathType Leaf) {
                $content = Get-Content -Path $serverUrlFile -Raw -ErrorAction Stop
                if ($content) { $serverBaseUrl = $content.Trim() }
            }
        } catch {}
    }

    $base = $BaseUrl.TrimEnd('/')
    $repoParam = [System.Uri]::EscapeDataString("$Owner/$Repo")
    $branchParam = [System.Uri]::EscapeDataString($Branch)
    $uri = "$base/api/agent/repo_hash?repo=$repoParam&branch=$branchParam"
    if (-not $serverBaseUrl) { $serverBaseUrl = 'http://localhost:5000' }
    return $serverBaseUrl.Trim()
}

function Get-AgentServiceId {
    param(
        [string]$AgentRoot
    )

    if (-not $AgentRoot) { $AgentRoot = $scriptDir }
    $settingsDir = Join-Path $AgentRoot 'Settings'
    $candidates = @(
        Join-Path $settingsDir 'agent_settings_svc.json',
        Join-Path $settingsDir 'agent_settings.json'
    )

    foreach ($path in $candidates) {
        try {
            if (Test-Path $path -PathType Leaf) {
                $raw = Get-Content -Path $path -Raw -ErrorAction Stop
                if (-not $raw) { continue }
                $cfg = $raw | ConvertFrom-Json -ErrorAction Stop
                $value = ($cfg.agent_id)
                if ($value) { return ($value.ToString()).Trim() }
            }
        } catch {}
    }

    return ''
}

function Invoke-AgentUpdateCheck {
    param(
        [Parameter(Mandatory = $true)]
        [string]$ServerBaseUrl,

        [Parameter(Mandatory = $true)]
        [string]$AgentId
    )

    if ([string]::IsNullOrWhiteSpace($ServerBaseUrl)) {
        throw 'Server URL is blank; cannot perform update check.'
    }

    $base = $ServerBaseUrl.TrimEnd('/')
    $uri = "$base/api/agent/update_check"
    $payload = @{ agent_id = $AgentId } | ConvertTo-Json -Depth 3
    $headers = @{ 'User-Agent' = 'borealis-agent-updater' }

    $resp = Invoke-WebRequest -Uri $uri -Method GET -Headers $headers -UseBasicParsing -ErrorAction Stop
    $resp = Invoke-WebRequest -Uri $uri -Method Post -Headers $headers -Body $payload -ContentType 'application/json' -UseBasicParsing -ErrorAction Stop
    $json = $resp.Content | ConvertFrom-Json

    if ($resp.StatusCode -ne 200) {
@@ -672,112 +709,86 @@ function Get-BorealisServerRepoSha {
        throw "Borealis server responded with an error: $message"
    }

    $sha = ($json.sha)
    if (-not $sha) {
        $message = $json.error
        if ($message) {
            throw "Borealis server did not return a repository hash: $message"
        }
        throw 'Borealis server did not return a repository hash.'
    }

    return ($sha.ToString()).Trim()
    return $json
}

function Get-BorealisUpdatePlan {
function Get-RepositoryCommitHash {
    param(
        [Parameter(Mandatory = $true)]
        [string]$ScriptDirectory,

        [Parameter(Mandatory = $true)]
        [string]$RepoOwner,

        [Parameter(Mandatory = $true)]
        [string]$RepoName,

        [Parameter(Mandatory = $true)]
        [string]$Branch
        [string]$ProjectRoot
    )

    if ([string]::IsNullOrWhiteSpace($ScriptDirectory)) {
        throw 'Script directory was not provided.'
    }
    $candidates = @($ProjectRoot)
    $agentRootCandidate = Join-Path $ProjectRoot 'Agent\Borealis'
    if (Test-Path $agentRootCandidate -PathType Container) { $candidates += $agentRootCandidate }

    $updateMode = $env:update_mode
    if ($updateMode) {
        $updateMode = $updateMode.ToLowerInvariant()
    } else {
        $updateMode = 'update'
    }
    $forceUpdate = $updateMode -eq 'force_update'

    $agentRootCandidate = Join-Path $ScriptDirectory 'Agent\Borealis'
    $agentRoot = $ScriptDirectory
    if (Test-Path $agentRootCandidate -PathType Container) {
    foreach ($root in $candidates) {
        try {
            $agentRoot = (Resolve-Path -Path $agentRootCandidate -ErrorAction Stop).Path
        } catch {
            $agentRoot = $agentRootCandidate
        }
            $gitDir = Join-Path $root '.git'
            $headPath = Join-Path $gitDir 'HEAD'
            if (-not (Test-Path $headPath -PathType Leaf)) { continue }
            $head = (Get-Content -Path $headPath -Raw -ErrorAction Stop).Trim()
            if (-not $head) { continue }

            if ($head -match '^ref:\s*(.+)$') {
                $ref = $Matches[1].Trim()
                if ($ref) {
                    $refPath = $gitDir
                    foreach ($part in ($ref -split '/')) {
                        if ($part) { $refPath = Join-Path $refPath $part }
                    }
                    if (Test-Path $refPath -PathType Leaf) {
                        $commit = (Get-Content -Path $refPath -Raw -ErrorAction Stop).Trim()
                        if ($commit) { return $commit }
                    }
                    $packedRefs = Join-Path $gitDir 'packed-refs'
                    if (Test-Path $packedRefs -PathType Leaf) {
                        foreach ($line in Get-Content -Path $packedRefs -ErrorAction Stop) {
                            $trim = ($line).Trim()
                            if (-not $trim -or $trim.StartsWith('#') -or $trim.StartsWith('^')) { continue }
                            $parts = $trim.Split(' ', 2)
                            if ($parts.Count -ge 2 -and $parts[1].Trim() -eq $ref) {
                                $candidate = $parts[0].Trim()
                                if ($candidate) { return $candidate }
                            }
                        }
                    }
                }
            } else {
                $detached = $head.Split([Environment]::NewLine, [StringSplitOptions]::RemoveEmptyEntries)
                if ($detached.Length -gt 0) {
                    $candidate = $detached[0].Trim()
                    if ($candidate) { return $candidate }
                }
            }
        } catch {}
    }

    $hashFile = Join-Path $agentRoot 'github_repo_hash.txt'
    $lastSha = $null
    if (Test-Path $hashFile -PathType Leaf) {
        $lastSha = (Get-Content $hashFile -Raw).Trim()
    return ''
    }

function Submit-AgentHash {
    param(
        [Parameter(Mandatory = $true)]
        [string]$ServerBaseUrl,

        [Parameter(Mandatory = $true)]
        [string]$AgentId,

        [Parameter(Mandatory = $true)]
        [string]$AgentHash
    )

    if ([string]::IsNullOrWhiteSpace($ServerBaseUrl) -or [string]::IsNullOrWhiteSpace($AgentId) -or [string]::IsNullOrWhiteSpace($AgentHash)) {
        return
    }

    $serverBaseUrl = $env:BOREALIS_SERVER_URL
    if (-not $serverBaseUrl) {
        $settingsDir = Join-Path $agentRoot 'Settings'
        $serverUrlFile = Join-Path $settingsDir 'server_url.txt'
        if (Test-Path $serverUrlFile -PathType Leaf) {
            $serverBaseUrl = (Get-Content $serverUrlFile -Raw).Trim()
        }
    }
    if (-not $serverBaseUrl) { $serverBaseUrl = 'http://localhost:5000' }
    $serverBaseUrl = $serverBaseUrl.Trim()
    Write-Verbose "Using Borealis server URL: $serverBaseUrl"
    $base = $ServerBaseUrl.TrimEnd('/')
    $uri = "$base/api/agent/agent_hash"
    $payload = @{ agent_id = $AgentId; agent_hash = $AgentHash } | ConvertTo-Json -Depth 3
    $headers = @{ 'User-Agent' = 'borealis-agent-updater' }

    try {
        $newSha = Get-BorealisServerRepoSha -BaseUrl $serverBaseUrl -Owner $RepoOwner -Repo $RepoName -Branch $Branch
    } catch {
        throw "Failed to query Borealis server for latest SHA: $($_.Exception.Message)"
    }

    $changeDetected = $false
    if (-not $lastSha) {
        Write-Verbose 'No prior SHA on disk; treating as first run.'
        $changeDetected = $true
    } elseif ($lastSha -ne $newSha) {
        $changeDetected = $true
        Write-Verbose "Repo updated: $lastSha -> $newSha"
    } else {
        Write-Verbose "SHA unchanged: $newSha"
    }

    $shouldUpdate = $false
    if ($forceUpdate) {
        Write-Verbose 'update_mode=force_update → forcing update.'
        $shouldUpdate = $true
    } elseif ($changeDetected) {
        Write-Verbose 'update_mode=update → repo changed or first run.'
        $shouldUpdate = $true
    } else {
        Write-Verbose 'No change; skipping Borealis.ps1 -SilentUpdate.'
    }

    return [pscustomobject]@{
        ShouldUpdate = $shouldUpdate
        ForceUpdate = $forceUpdate
        ChangeDetected = $changeDetected
        LastSha = $lastSha
        NewSha = $newSha
        HashFile = $hashFile
        AgentRoot = $agentRoot
        ServerBaseUrl = $serverBaseUrl
        UpdateMode = $updateMode
    }
    Invoke-WebRequest -Uri $uri -Method Post -Headers $headers -Body $payload -ContentType 'application/json' -UseBasicParsing -ErrorAction Stop | Out-Null
}

function Invoke-BorealisUpdate {
@@ -850,9 +861,44 @@ function Invoke-BorealisUpdate {
function Invoke-BorealisSilentUpdate {
    Write-Host "Initiating Borealis silent update workflow..." -ForegroundColor DarkCyan

    $plan = Get-BorealisUpdatePlan -ScriptDirectory $scriptDir -RepoOwner $BorealisRepoOwner -RepoName $BorealisRepoName -Branch $BorealisRepoBranch
    $agentRootCandidate = Join-Path $scriptDir 'Agent\Borealis'
    $agentRoot = $scriptDir
    if (Test-Path $agentRootCandidate -PathType Container) {
        try {
            $agentRoot = (Resolve-Path -Path $agentRootCandidate -ErrorAction Stop).Path
        } catch {
            $agentRoot = $agentRootCandidate
        }
    }

    if (-not $plan.ShouldUpdate) {
    $serverBaseUrl = Get-BorealisServerUrl -AgentRoot $agentRoot
    $agentId = Get-AgentServiceId -AgentRoot $agentRoot

    $updateMode = $env:update_mode
    if ($updateMode) { $updateMode = $updateMode.ToLowerInvariant() } else { $updateMode = 'update' }
    $forceUpdate = $updateMode -eq 'force_update'

    $shouldUpdate = $true
    $updateInfo = $null

    if (-not $forceUpdate) {
        if ($agentId) {
            try {
                $updateInfo = Invoke-AgentUpdateCheck -ServerBaseUrl $serverBaseUrl -AgentId $agentId
                $shouldUpdate = [bool]($updateInfo.update_available)
            } catch {
                Write-Verbose ("Update check failed: {0}" -f $_.Exception.Message)
                $shouldUpdate = $true
            }
        } else {
            Write-Verbose 'Agent ID not found; defaulting to update.'
            $shouldUpdate = $true
        }
    } else {
        $shouldUpdate = $true
    }

    if (-not $shouldUpdate) {
        Write-Host "=============================================="
        Write-Host "Borealis Agent Already Up-to-Date"
        Write-Host "=============================================="
@@ -894,36 +940,37 @@ function Invoke-BorealisSilentUpdate {

    $updateSucceeded = $false
    try {
        if ($plan.NewSha) {
            $env:BOREALIS_EXPECTED_SHA = $plan.NewSha
        }

        Invoke-BorealisUpdate -Silent
        $updateSucceeded = $true
    } finally {
        if ($plan.NewSha) {
            Remove-Item Env:BOREALIS_EXPECTED_SHA -ErrorAction SilentlyContinue
        }

        if ($managedTasks.Count -gt 0) {
            Start-AgentScheduledTasks -TaskNames $managedTasks
        }
    }

    if (-not $updateSucceeded) {
        throw 'Borealis.ps1 -SilentUpdate failed; not advancing stored SHA.'
        throw 'Borealis.ps1 -SilentUpdate failed.'
    }

    if ($plan.NewSha) {
        $hashDir = Split-Path $plan.HashFile -Parent
        if ($hashDir -and -not (Test-Path $hashDir -PathType Container)) {
            New-Item -ItemType Directory -Path $hashDir -Force | Out-Null
    $newHash = Get-RepositoryCommitHash -ProjectRoot $scriptDir
    if ($newHash) {
        try {
            if ($agentId) {
                Submit-AgentHash -ServerBaseUrl $serverBaseUrl -AgentId $agentId -AgentHash $newHash
            }
        } catch {
            Write-Verbose ("Failed to submit agent hash: {0}" -f $_.Exception.Message)
        }
        Set-Content -Path $plan.HashFile -Value $plan.NewSha -Encoding ASCII
    }

    $displayHash = $newHash
    if (-not $displayHash -and $updateInfo) {
        $displayHash = ($updateInfo.repo_hash)
    }
    if (-not $displayHash) { $displayHash = 'unknown' }

    Write-Host "=============================================="
    Write-Host ("Borealis Agent Updated - New Github Repository Hash: {0}" -f $plan.NewSha)
    Write-Host ("Borealis Agent Updated - Repository Hash: {0}" -f $displayHash)
    Write-Host "=============================================="
} finally {
    if ($mutex -and $gotMutex) { $mutex.ReleaseMutex() | Out-Null }
@@ -9,7 +9,6 @@ import shutil
import string
import asyncio
from pathlib import Path
from typing import Optional

try:
    import psutil  # type: ignore
@@ -135,166 +134,6 @@ def _project_root():
    return os.getcwd()


_AGENT_HASH_CACHE = {
    "path": None,
    "mtime": None,
    "value": None,
    "source": None,
    "extra": None,
}


def _iter_hash_roots():
    """Yield candidate folders that may contain github_repo_hash.txt."""

    root = _project_root()
    if not root:
        return

    # Breadth-first walk up to a small, bounded set of parents/siblings.
    seen = set()
    queue = [root]

    # Some deployments place the hash file directly under Agent/, while others
    # (including the scheduled updater) write to Agent/Borealis/. The previous
    # implementation only checked the parent chain which skipped Agent/Borealis,
    # so seed the queue with that sibling when available.
    borealis = os.path.join(root, "Borealis")
    if os.path.isdir(borealis):
        queue.append(borealis)

    steps = 0
    while queue and steps < 12:  # hard stop to avoid wandering too far
        steps += 1
        cur = queue.pop(0)
        if not cur or cur in seen:
            continue
        seen.add(cur)
        yield cur

        parent = os.path.dirname(cur)
        if parent and parent != cur and parent not in seen:
            queue.append(parent)

        # If we're currently at Agent/ or its parent, also check for an adjacent
        # Borealis/ folder in case the hash lives there.
        if cur != borealis:
            candidate = os.path.join(cur, "Borealis")
            if os.path.isdir(candidate) and candidate not in seen:
                queue.append(candidate)


def _resolve_git_head_hash(root: str) -> Optional[str]:
    git_dir = os.path.join(root, ".git")
    head_path = os.path.join(git_dir, "HEAD")
    if not os.path.isfile(head_path):
        return None
    try:
        with open(head_path, "r", encoding="utf-8") as fh:
            head = fh.read().strip()
    except Exception:
        return None
    if not head:
        return None
    if head.startswith("ref:"):
        ref = head.split(" ", 1)[1].strip() if " " in head else head.split(":", 1)[1].strip()
        if not ref:
            return None
        ref_path = os.path.join(git_dir, *ref.split("/"))
        if os.path.isfile(ref_path):
            try:
                with open(ref_path, "r", encoding="utf-8") as rf:
                    commit = rf.read().strip()
                    return commit or None
            except Exception:
                return None
        packed_refs = os.path.join(git_dir, "packed-refs")
        if os.path.isfile(packed_refs):
            try:
                with open(packed_refs, "r", encoding="utf-8") as pf:
                    for line in pf:
                        line = line.strip()
                        if not line or line.startswith("#") or line.startswith("^"):
                            continue
                        try:
                            commit, ref_name = line.split(" ", 1)
                        except ValueError:
                            continue
                        if ref_name.strip() == ref:
                            commit = commit.strip()
                            return commit or None
            except Exception:
                return None
        return None
    # Detached head contains the commit hash directly
    commit = head.splitlines()[0].strip()
    return commit or None


def _read_agent_hash():
    try:
        cache = _AGENT_HASH_CACHE
        for root in _iter_hash_roots():
            path = os.path.join(root, 'github_repo_hash.txt')
            if not os.path.isfile(path):
                continue
            mtime = os.path.getmtime(path)
            if (
                cache.get("source") == "file"
                and cache.get("path") == path
                and cache.get("mtime") == mtime
            ):
                return cache.get("value")
            with open(path, 'r', encoding='utf-8') as fh:
                value = fh.read().strip()
            cache.update(
                {
                    "source": "file",
                    "path": path,
                    "mtime": mtime,
                    "extra": None,
                    "value": value or None,
                }
            )
            return cache.get("value")

        for root in _iter_hash_roots():
            git_dir = os.path.join(root, '.git')
            head_path = os.path.join(git_dir, 'HEAD')
            if not os.path.isfile(head_path):
                continue
            head_mtime = os.path.getmtime(head_path)
            packed_path = os.path.join(git_dir, 'packed-refs')
            packed_mtime = os.path.getmtime(packed_path) if os.path.isfile(packed_path) else None
            if (
                cache.get("source") == "git"
                and cache.get("path") == head_path
                and cache.get("mtime") == head_mtime
                and cache.get("extra") == packed_mtime
            ):
                return cache.get("value")
            commit = _resolve_git_head_hash(root)
            cache.update(
                {
                    "source": "git",
                    "path": head_path,
                    "mtime": head_mtime,
                    "extra": packed_mtime,
                    "value": commit or None,
                }
            )
            if commit:
                return commit
        cache.update({"source": None, "path": None, "mtime": None, "extra": None, "value": None})
        return None
    except Exception:
        try:
            _AGENT_HASH_CACHE.update({"value": None})
        except Exception:
            pass
        return None


# Removed Ansible-based audit path; Python collectors provide details directly.


@@ -938,12 +777,6 @@ def _build_details_fallback() -> dict:
        'storage': collect_storage(),
        'network': network,
    }
    try:
        agent_hash_value = _read_agent_hash()
        if agent_hash_value:
            details.setdefault('summary', {})['agent_hash'] = agent_hash_value
    except Exception:
        pass
    return details


@@ -995,12 +828,6 @@ class Role:

        # Always post the latest available details (possibly cached)
        details_to_send = self._last_details or {'summary': collect_summary(self.ctx.config)}
        agent_hash_value = _read_agent_hash()
        if agent_hash_value:
            try:
                details_to_send.setdefault('summary', {})['agent_hash'] = agent_hash_value
            except Exception:
                pass
        get_url = (self.ctx.hooks.get('get_server_url') if isinstance(self.ctx.hooks, dict) else None) or (lambda: 'http://localhost:5000')
        url = (get_url() or '').rstrip('/') + '/api/agent/details'
        payload = {
@@ -1008,8 +835,6 @@ class Role:
            'hostname': details_to_send.get('summary', {}).get('hostname', socket.gethostname()),
            'details': details_to_send,
        }
        if agent_hash_value:
            payload['agent_hash'] = agent_hash_value
        if aiohttp is not None:
            async with aiohttp.ClientSession() as session:
                await session.post(url, json=payload, timeout=10)
@@ -393,6 +393,151 @@ def api_agent_repo_hash():
        _write_service_log('server', f'/api/agent/repo_hash error: {exc}')
        return jsonify({"error": "internal error"}), 500


@app.route("/api/agent/update_check", methods=["POST"])
def api_agent_update_check():
    data = request.get_json(silent=True) or {}
    agent_id = (data.get("agent_id") or "").strip()
    if not agent_id:
        return jsonify({"error": "agent_id required"}), 400

    repo_info = _refresh_default_repo_hash()
    repo_sha = (repo_info.get("sha") or "").strip()
    if not repo_sha:
        payload = {
            "error": repo_info.get("error") or "repository hash unavailable",
            "repo_source": repo_info.get("source"),
        }
        return jsonify(payload), 503

    registry_info = registered_agents.get(agent_id) or {}
    hostname = (registry_info.get("hostname") or "").strip() or None
    stored_hash: Optional[str] = (registry_info.get("agent_hash") or "").strip() or None
    conn = None
    try:
        conn = _db_conn()
        cur = conn.cursor()
        rows = _device_rows_for_agent(cur, agent_id)
    except Exception:
        rows = []
    finally:
        if conn:
            try:
                conn.close()
            except Exception:
                pass

    if rows:
        hostname = rows[0].get("hostname") or hostname
        for row in rows:
            if row.get("matched"):
                hostname = row.get("hostname") or hostname
                candidate = (row.get("agent_hash") or "").strip()
                if not candidate:
                    summary = row.get("details") or {}
                    try:
                        candidate = (summary.get("summary") or {}).get("agent_hash") or ""
                    except Exception:
                        candidate = ""
                candidate = candidate.strip()
                stored_hash = candidate or None
                break
        if stored_hash is None:
            first = rows[0]
            candidate = (first.get("agent_hash") or "").strip()
            if not candidate:
                details = first.get("details") or {}
                try:
                    candidate = (details.get("summary") or {}).get("agent_hash") or ""
                except Exception:
                    candidate = ""
            candidate = candidate.strip()
            stored_hash = candidate or None

    update_available = (not stored_hash) or (stored_hash.strip() != repo_sha)

    payload = {
        "agent_id": agent_id,
        "hostname": hostname,
        "repo_hash": repo_sha,
        "agent_hash": stored_hash,
        "update_available": bool(update_available),
        "repo_source": repo_info.get("source"),
    }
    if repo_info.get("cached") is not None:
        payload["cached"] = bool(repo_info.get("cached"))
    if repo_info.get("age_seconds") is not None:
        payload["age_seconds"] = repo_info.get("age_seconds")
    if repo_info.get("error"):
        payload["repo_error"] = repo_info.get("error")
    return jsonify(payload)


@app.route("/api/agent/agent_hash", methods=["POST"])
def api_agent_agent_hash_post():
    data = request.get_json(silent=True) or {}
    agent_id = (data.get("agent_id") or "").strip()
    agent_hash = (data.get("agent_hash") or "").strip()
    if not agent_id or not agent_hash:
        return jsonify({"error": "agent_id and agent_hash required"}), 400

    conn = None
    hostname = None
    try:
        conn = _db_conn()
        cur = conn.cursor()
        rows = _device_rows_for_agent(cur, agent_id)
        target = None
        for row in rows:
            if row.get("matched"):
                target = row
                break
        if not target:
            if conn:
                conn.close()
            return jsonify({"status": "ignored"}), 200

        hostname = target.get("hostname")
        details = target.get("details") or {}
        summary = details.setdefault("summary", {})
        summary["agent_hash"] = agent_hash
        cur.execute(
            "UPDATE device_details SET agent_hash=?, details=? WHERE hostname=?",
            (agent_hash, json.dumps(details), hostname),
        )
        conn.commit()
    except Exception as exc:
        if conn:
            try:
                conn.rollback()
            except Exception:
                pass
        _write_service_log('server', f'/api/agent/agent_hash error: {exc}')
        return jsonify({"error": "internal error"}), 500
    finally:
        if conn:
            try:
                conn.close()
            except Exception:
                pass

    normalized_hash = agent_hash
    if agent_id in registered_agents:
        registered_agents[agent_id]["agent_hash"] = normalized_hash
    try:
        for aid, rec in registered_agents.items():
            if rec.get("hostname") and hostname and rec["hostname"] == hostname:
                rec["agent_hash"] = normalized_hash
    except Exception:
        pass

    return jsonify({
        "status": "ok",
        "agent_id": agent_id,
        "hostname": hostname,
        "agent_hash": agent_hash,
    })

# ---------------------------------------------
# Server Time Endpoint
# ---------------------------------------------
@@ -2887,6 +3032,57 @@ def load_agents_from_db():

load_agents_from_db()


def _extract_hostname_from_agent(agent_id: str) -> Optional[str]:
    try:
        agent_id = (agent_id or "").strip()
        if not agent_id:
            return None
        lower = agent_id.lower()
        marker = "-agent"
        idx = lower.find(marker)
        if idx <= 0:
            return None
        return agent_id[:idx]
    except Exception:
        return None


def _device_rows_for_agent(cur, agent_id: str) -> List[Dict[str, Any]]:
    results: List[Dict[str, Any]] = []
    normalized_id = (agent_id or "").strip()
    if not normalized_id:
        return results
    base_host = _extract_hostname_from_agent(normalized_id)
    if not base_host:
        return results
    try:
        cur.execute(
            "SELECT hostname, agent_hash, details FROM device_details WHERE LOWER(hostname) = ?",
            (base_host.lower(),),
        )
        rows = cur.fetchall()
    except Exception:
        return results
    for hostname, agent_hash, details_json in rows or []:
        try:
            details = json.loads(details_json or "{}")
        except Exception:
            details = {}
        summary = details.get("summary") or {}
        summary_agent = (summary.get("agent_id") or "").strip()
        matched = summary_agent.lower() == normalized_id.lower() if summary_agent else False
        results.append(
            {
                "hostname": hostname,
                "agent_hash": (agent_hash or "").strip(),
                "details": details,
                "summary_agent_id": summary_agent,
                "matched": matched,
            }
        )
    return results

@app.route("/api/agents")
def get_agents():
    """Return agents with collector activity indicator."""
@@ -3085,7 +3281,7 @@ def get_device_details(hostname: str):
        conn = _db_conn()
        cur = conn.cursor()
        cur.execute(
            "SELECT details, description, created_at FROM device_details WHERE hostname = ?",
            "SELECT details, description, created_at, agent_hash FROM device_details WHERE hostname = ?",
            (hostname,),
        )
        row = cur.fetchone()
@@ -3097,6 +3293,7 @@ def get_device_details(hostname: str):
            details = {}
        description = row[1] if len(row) > 1 else ""
        created_at = int(row[2] or 0) if len(row) > 2 else 0
        agent_hash = (row[3] or "").strip() if len(row) > 3 else ""
        if description:
            details.setdefault("summary", {})["description"] = description
        # Ensure created string exists from created_at
@@ -3106,6 +3303,13 @@ def get_device_details(hostname: str):
            details.setdefault('summary', {})['created'] = datetime.utcfromtimestamp(created_at).strftime('%Y-%m-%d %H:%M:%S')
        except Exception:
            pass
        if agent_hash:
            try:
                details.setdefault('summary', {})
                if not details['summary'].get('agent_hash'):
                    details['summary']['agent_hash'] = agent_hash
            except Exception:
                pass
        return jsonify(details)
    except Exception:
        pass
@@ -3140,28 +3344,12 @@ def set_device_description(hostname: str):

@app.route("/api/agent/hash/<path:agent_id>", methods=["GET"])
def get_agent_hash(agent_id: str):
    """Return the last known github_repo_hash for a specific agent."""
    """Return the last known repository hash for a specific agent."""

    agent_id = (agent_id or "").strip()
    if not agent_id:
        return jsonify({"error": "invalid agent id"}), 400

    def _extract_hostname_from_agent(agent: str) -> Optional[str]:
        try:
            agent = (agent or "").strip()
            if not agent:
                return None
            lower = agent.lower()
            marker = "-agent"
            if marker not in lower:
                return None
            idx = lower.index(marker)
            if idx <= 0:
                return None
            return agent[:idx]
        except Exception:
            return None

    # Prefer the in-memory registry (updated on every heartbeat/details post).
    info = registered_agents.get(agent_id) or {}
    candidate = (info.get("agent_hash") or "").strip()
@@ -3178,87 +3366,66 @@ def get_agent_hash(agent_id: str):
    try:
        conn = _db_conn()
        cur = conn.cursor()

        parsed_hostname = _extract_hostname_from_agent(agent_id)
        host_candidates = []
        if hostname:
            host_candidates.append(hostname)
        if parsed_hostname and not any(
            parsed_hostname.lower() == (h or "").lower() for h in host_candidates
        ):
            host_candidates.append(parsed_hostname)

        def _load_row_for_host(host_value: str):
            if not host_value:
                return None, None
            try:
                cur.execute(
                    "SELECT agent_hash, details, hostname FROM device_details WHERE LOWER(hostname) = ?",
                    (host_value.lower(),),
                )
                return cur.fetchone()
            except Exception:
                return None

        row = None
        matched_hostname = hostname

        for host_value in host_candidates:
            fetched = _load_row_for_host(host_value)
            if fetched:
                row = fetched
                matched_hostname = fetched[2] or matched_hostname or host_value
                break
        if not row:
            # No hostname available or found; scan for a matching agent_id in the JSON payload.
            cur.execute("SELECT hostname, agent_hash, details FROM device_details")
            for host, db_hash, details_json in cur.fetchall():
        rows = _device_rows_for_agent(cur, agent_id)
        if rows:
            if not hostname:
                hostname = rows[0].get("hostname") or hostname
            for row in rows:
                if row.get("matched"):
                    normalized_hash = (row.get("agent_hash") or "").strip()
                    if not normalized_hash:
                        details = row.get("details") or {}
                        try:
                            normalized_hash = ((details.get("summary") or {}).get("agent_hash") or "").strip()
                        except Exception:
                            normalized_hash = ""
                    if normalized_hash:
                        effective_hostname = row.get("hostname") or hostname
                        return jsonify({
                            "agent_id": agent_id,
                            "agent_hash": normalized_hash,
                            "hostname": effective_hostname,
                            "source": "database",
                        })
            first = rows[0]
            fallback_hash = (first.get("agent_hash") or "").strip()
            if not fallback_hash:
                details = first.get("details") or {}
                try:
                    data = json.loads(details_json or "{}")
                    fallback_hash = ((details.get("summary") or {}).get("agent_hash") or "").strip()
                except Exception:
                    data = {}
                summary = data.get("summary") or {}
                summary_agent_id = (summary.get("agent_id") or "").strip()
                summary_hostname = (summary.get("hostname") or "").strip()
                if summary_agent_id == agent_id:
                    row = (db_hash, details_json, host)
                    matched_hostname = host or summary_hostname or matched_hostname
                    break
                if (
                    not row
                    and parsed_hostname
                    and summary_hostname
                    and summary_hostname.lower() == parsed_hostname.lower()
                ):
                    row = (db_hash, details_json, host)
                    matched_hostname = host or summary_hostname
        conn.close()
                    fallback_hash = ""
            if fallback_hash:
                effective_hostname = first.get("hostname") or hostname
                return jsonify({
                    "agent_id": agent_id,
                    "agent_hash": fallback_hash,
                    "hostname": effective_hostname,
                    "source": "database",
                })

        if row:
            db_hash = (row[0] or "").strip()
            effective_hostname = matched_hostname or hostname or parsed_hostname
            if db_hash:
                return jsonify({
                    "agent_id": agent_id,
                    "agent_hash": db_hash,
                    "hostname": effective_hostname,
                    "source": "database",
                })
            # Hash column may be empty if only stored inside details JSON.
        # As a final fallback, scan the table for any matching agent_id in case hostname inference failed.
        cur.execute("SELECT hostname, agent_hash, details FROM device_details")
        for host, db_hash, details_json in cur.fetchall():
            try:
                details = json.loads(row[1] if len(row) > 1 else "{}")
                data = json.loads(details_json or "{}")
            except Exception:
                details = {}
                summary = details.get("summary") or {}
                data = {}
            summary = data.get("summary") or {}
            summary_agent_id = (summary.get("agent_id") or "").strip()
            if summary_agent_id != agent_id:
                continue
            summary_hash = (summary.get("agent_hash") or "").strip()
            if summary_hash:
            normalized_hash = (db_hash or "").strip() or summary_hash
            if normalized_hash:
                effective_hostname = host or summary.get("hostname") or hostname
                return jsonify({
                    "agent_id": agent_id,
                    "agent_hash": summary_hash,
                    "agent_hash": normalized_hash,
                    "hostname": effective_hostname,
                    "source": "database",
                })
        conn.close()

        return jsonify({"error": "agent hash not found"}), 404
    except Exception as e:
63 Docs/agent_update_tracking.md Normal file
@@ -0,0 +1,63 @@
# Agent Update Tracking Options

## Current Flow (Summary)
- A scheduled job triggers the **Remote Agent Update [WIN]** PowerShell script, which resolves the agent's working directory and invokes `Borealis.ps1 -SilentUpdate` from the repository root.【F:Assemblies/Scripts/Borealis/Remote_Agent_Update_WIN.json†L1-L46】
- `Borealis.ps1 -SilentUpdate` reads the service agent ID from `Agent\Borealis\Settings\agent_settings_svc.json`, queries the server's `/api/agent/update_check` endpoint, and only proceeds when the server reports that the agent hash differs from the cached GitHub head.【F:Borealis.ps1†L840-L931】【F:Data/Server/server.py†L127-L221】
- When an update runs, the script finishes by reading the local `.git` HEAD to determine the commit it just deployed and POSTs that value (along with the agent ID) to `/api/agent/agent_hash`. The server stores the commit in `device_details.agent_hash` and updates any in-memory registry entries.【F:Borealis.ps1†L904-L931】【F:Data/Server/server.py†L221-L311】
- Agents no longer track hashes on disk; they simply send routine device details. The dashboard uses the server's stored hash (surfaced via `/api/agents` and device details) and compares it against the server-side cached GitHub head resolved through `/api/agent/repo_hash`.【F:Data/Agent/Roles/role_DeviceAudit.py†L1-L200】【F:Data/Server/server.py†L285-L382】【F:Data/Server/WebUI/src/Devices/Device_List.jsx†L168-L236】
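
For orientation, here is a minimal Python sketch of that handshake as a client might drive it (it assumes the third-party `requests` package; the server URL, agent ID, and commit value are placeholders, and in the real flow `Borealis.ps1` performs these calls from PowerShell):

```python
import requests

SERVER = "http://localhost:5000"   # placeholder server URL
AGENT_ID = "HOSTNAME-agent"        # placeholder service agent ID

# 1) Ask the server whether this agent's recorded hash differs from the repo head.
check = requests.post(
    f"{SERVER}/api/agent/update_check",
    json={"agent_id": AGENT_ID},
    timeout=10,
)
check.raise_for_status()
info = check.json()

if info.get("update_available"):
    # 2) ...run the update itself (Borealis.ps1 -SilentUpdate handles this part)...
    new_commit = "<commit hash read from .git/HEAD after the update>"  # placeholder

    # 3) Report the deployed commit so the server stores it in device_details.agent_hash.
    requests.post(
        f"{SERVER}/api/agent/agent_hash",
        json={"agent_id": AGENT_ID, "agent_hash": new_commit},
        timeout=10,
    )
```

The field names (`agent_id`, `update_available`, `agent_hash`) mirror the endpoints added in this commit.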

This pipeline works but depends on GitHub API availability and manual file comparisons. Below are alternative approaches that keep things simple while avoiding the clunky text-file hash matching.

## Option 1 — Server-Side HEAD Tracking
1. Let the server treat its own checkout as the source of truth: read `.git/HEAD` (using the same helper that `role_DeviceAudit` already ships) on boot and whenever `/api/agent/repo_hash` is queried.
2. Cache the value in memory/SQLite to avoid repeated disk reads.
3. Agents continue to report the hash they are running; the dashboard compares against the locally computed HEAD instead of calling GitHub.
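
A rough sketch of steps 1–2, modeled loosely on the `_resolve_git_head_hash` logic that `role_DeviceAudit.py` previously carried (the function names, in-memory cache, and TTL here are illustrative, not existing code):

```python
import os
import time
from typing import Optional

_HEAD_CACHE = {"value": None, "read_at": 0.0}

def _read_git_head(repo_root: str) -> Optional[str]:
    """Resolve the commit hash behind .git/HEAD without shelling out to git."""
    git_dir = os.path.join(repo_root, ".git")
    try:
        with open(os.path.join(git_dir, "HEAD"), "r", encoding="utf-8") as fh:
            head = fh.read().strip()
    except OSError:
        return None
    if not head.startswith("ref:"):
        return head or None  # detached HEAD stores the commit hash directly
    ref = head.partition(":")[2].strip()
    if not ref:
        return None
    ref_path = os.path.join(git_dir, *ref.split("/"))
    try:
        with open(ref_path, "r", encoding="utf-8") as fh:
            return fh.read().strip() or None
    except OSError:
        return None  # packed-refs lookup omitted for brevity

def expected_head(repo_root: str, ttl: float = 60.0) -> Optional[str]:
    """Return the server's own HEAD, cached so repeated API calls avoid disk reads."""
    now = time.time()
    if _HEAD_CACHE["value"] and now - _HEAD_CACHE["read_at"] < ttl:
        return _HEAD_CACHE["value"]
    _HEAD_CACHE.update({"value": _read_git_head(repo_root), "read_at": now})
    return _HEAD_CACHE["value"]
```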

**Pros**
- Eliminates all external GitHub API calls and their rate limits.
- Reuses the existing hash wiring (agents keep posting hashes, server keeps storing them).

**Trade-offs**
- Only works when the server itself is always updated to the commit you consider “latest.” If you sometimes stage updates without updating the server repo, you need a flag to override the expected HEAD.

## Option 2 — Version Manifest File
1. Add a tracked file such as `Data/Agent/VERSION.json` that contains a semantic version or build number (`{"agent": "1.4.2"}`).
2. Update the manifest whenever you cut a release or land breaking changes. (It can be part of the release checklist or automated by CI.)
3. Agents read the manifest alongside `agent.py`, post the version string, and optionally persist it under `Settings/`.
4. The server or UI reads the same manifest from disk (or embeds it during packaging) and simply compares version strings.
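
A minimal sketch of the manifest read/compare, reusing the file path and key from the example above (`Data/Agent/VERSION.json` does not exist yet; everything here is hypothetical):

```python
import json
from pathlib import Path

def read_agent_version(manifest_path: str = "Data/Agent/VERSION.json") -> str:
    """Read the version string from the tracked manifest, e.g. {"agent": "1.4.2"}."""
    try:
        return json.loads(Path(manifest_path).read_text(encoding="utf-8")).get("agent", "")
    except (OSError, ValueError):
        return ""

def is_up_to_date(reported_version: str, expected_version: str) -> bool:
    """Plain string comparison; a semantic-version parser could be swapped in if ordering matters."""
    return bool(reported_version) and reported_version == expected_version
```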

**Pros**
- Very clear to operators: the UI can show “Agent 1.4.2” without exposing opaque hashes.
- Works even if you bundle Borealis without a `.git` folder (e.g., PyInstaller builds).

**Trade-offs**
- Requires someone to bump the version file. If that step is forgotten, the UI will not reflect new changes even though the code moved.
- Agents need a tiny change to read and report the version string, but no new endpoints are required.

## Option 3 — Server-Published Release Manifest
1. Teach the server to store a `latest_agent_release` row (either in SQLite or a JSON file). The record would contain fields such as `commit`, `version`, and `released_at`.
2. Expose it via a lightweight endpoint (e.g., `GET /api/agent/release`) that agents can poll or receive during config bootstrap.
3. When `Borealis.ps1 -SilentUpdate` succeeds, have it `POST` the new release metadata back to the server (commit hash + timestamp). This keeps the server’s expectation in sync automatically.
4. Agents compare their local hash/version to the manifest and report both values in their heartbeat so the dashboard can show `Up-to-Date`, `Behind`, or `Ahead` statuses without calling GitHub.
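
Sketched with Flask to match the existing server style, the release record and endpoint could look roughly like this (the route, field names, and in-memory storage are illustrative only; the real implementation would persist the record to SQLite):

```python
import time
from flask import Flask, jsonify, request

app = Flask(__name__)

# Stored in SQLite in practice; kept in memory here for brevity.
latest_agent_release = {"commit": None, "version": None, "released_at": None}

@app.route("/api/agent/release", methods=["GET"])
def get_agent_release():
    # Agents poll this during bootstrap or on a schedule.
    return jsonify(latest_agent_release)

@app.route("/api/agent/release", methods=["POST"])
def set_agent_release():
    # The silent update script reports the commit it just deployed.
    data = request.get_json(silent=True) or {}
    commit = (data.get("commit") or "").strip()
    if not commit:
        return jsonify({"error": "commit required"}), 400
    latest_agent_release.update(
        {
            "commit": commit,
            "version": (data.get("version") or "").strip() or None,
            "released_at": int(time.time()),
        }
    )
    return jsonify({"status": "ok", **latest_agent_release})
```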

**Pros**
- Single source of truth that survives restarts (stored in SQLite) and can be manually overridden from the server UI if needed.
- Works for fleets that are offline from GitHub but can still reach the Borealis server.

**Trade-offs**
- Requires one new endpoint and a small update to the silent update script so it reports success.
- Still relies on the update workflow to report back; if an operator copies files manually, the manifest will not update until someone tells the server.

## Option 4 — Timestamp-Based Drift Detection
1. When `Borealis.ps1 -SilentUpdate` completes, record the completion timestamp in a simple file (`Agent/Borealis/Settings/last_update.txt`) and include it in the device details payload.
2. The server keeps the timestamp from the last time it downloaded a release (or last server update) as the expected baseline.
3. The UI compares timestamps to show whether a device has consumed the most recent update batch.
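
A small sketch of the comparison, assuming `last_update.txt` and the server baseline both hold ISO-8601 timestamps (the format choice is an assumption, not something the current code defines):

```python
from datetime import datetime

def parse_ts(value: str) -> datetime:
    """last_update.txt is assumed to hold an ISO-8601 timestamp, e.g. 2025-10-26T17:21:58."""
    return datetime.fromisoformat(value.strip())

def device_is_current(device_last_update: str, rollout_baseline: str) -> bool:
    """A device counts as current if it updated at or after the server's rollout baseline."""
    return parse_ts(device_last_update) >= parse_ts(rollout_baseline)
```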

**Pros**
- No hashes at all; just lightweight timestamps that are easy to understand.
- Useful when you only care about “updated since Tuesday’s rollout” rather than specific commits.

**Trade-offs**
- Detects relative freshness, not exact build parity. If two different commits are released on the same day without updating the expected timestamp, the UI may report agents as “current” even when they aren’t at the newest commit.

Each option keeps the wiring simple while avoiding the clunky GitHub polling and text-file comparisons. We can mix and match (e.g., manifest + timestamp) depending on how much fidelity versus operational simplicity you want.