diff --git a/Assemblies/Ansible_Playbooks/Examples/Write Canary to C Drive Root.yml b/Assemblies/Ansible_Playbooks/Examples/Write Canary to C Drive Root.yml
new file mode 100644
index 0000000..b4b2ab2
--- /dev/null
+++ b/Assemblies/Ansible_Playbooks/Examples/Write Canary to C Drive Root.yml
@@ -0,0 +1,11 @@
+---
+- name: Create Canary.txt on local Windows machine
+  hosts: localhost
+  connection: local
+  gather_facts: no
+
+  tasks:
+    - name: Write Canary.txt to C:\
+      ansible.windows.win_copy:
+        content: "This is a canary file created by Ansible."
+        dest: C:\Canary.txt
diff --git a/Data/Agent/Roles/role_PlaybookExec_SYSTEM.py b/Data/Agent/Roles/role_PlaybookExec_SYSTEM.py
new file mode 100644
index 0000000..0dd39dc
--- /dev/null
+++ b/Data/Agent/Roles/role_PlaybookExec_SYSTEM.py
@@ -0,0 +1,448 @@
+import os
+import sys
+import asyncio
+import tempfile
+import uuid
+import time
+import json
+import socket
+import subprocess
+
+
+ROLE_NAME = 'playbook_exec_system'
+ROLE_CONTEXTS = ['system']
+
+
+def _project_root():
+    try:
+        return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
+    except Exception:
+        return os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+
+
+def _scripts_bin():
+    # Return the venv Scripts (Windows) or bin (POSIX) path
+    base = os.path.join(_project_root(), 'Agent', 'Scripts')
+    if os.path.isdir(base):
+        return base
+    # Fallback to PATH
+    return None
+
+
+def _ansible_playbook_cmd():
+    exe = 'ansible-playbook.exe' if os.name == 'nt' else 'ansible-playbook'
+    sdir = _scripts_bin()
+    if sdir:
+        cand = os.path.join(sdir, exe)
+        if os.path.isfile(cand):
+            return cand
+    return exe
+
+
+class Role:
+ def __init__(self, ctx):
+ self.ctx = ctx
+ self._runs = {} # run_id -> { proc, task, cancel }
+
+ def _server_base(self) -> str:
+ try:
+ fn = (self.ctx.hooks or {}).get('get_server_url')
+ if callable(fn):
+ return (fn() or 'http://localhost:5000').rstrip('/')
+ except Exception:
+ pass
+ return 'http://localhost:5000'
+
+ async def _post_recap(self, payload: dict):
+ try:
+ import aiohttp
+ url = self._server_base().rstrip('/') + '/api/ansible/recap/report'
+ timeout = aiohttp.ClientTimeout(total=30)
+ async with aiohttp.ClientSession(timeout=timeout) as sess:
+ async with sess.post(url, json=payload) as resp:
+ # best-effort; ignore body
+ await resp.read()
+ except Exception:
+ pass
+
+ async def _run_playbook_runner(self, run_id: str, playbook_content: str, playbook_name: str = '', activity_job_id=None, connection: str = 'local'):
+ try:
+ import ansible_runner # type: ignore
+ except Exception:
+ return False
+
+ tmp_dir = os.path.join(_project_root(), 'Temp')
+ os.makedirs(tmp_dir, exist_ok=True)
+ pd = tempfile.mkdtemp(prefix='ar_', dir=tmp_dir)
+ project = os.path.join(pd, 'project')
+ inventory_dir = os.path.join(pd, 'inventory')
+ env_dir = os.path.join(pd, 'env')
+ os.makedirs(project, exist_ok=True)
+ os.makedirs(inventory_dir, exist_ok=True)
+ os.makedirs(env_dir, exist_ok=True)
+
+ play_rel = 'playbook.yml'
+ play_abs = os.path.join(project, play_rel)
+ with open(play_abs, 'w', encoding='utf-8', newline='\n') as fh:
+ fh.write(playbook_content or '')
+ with open(os.path.join(inventory_dir, 'hosts'), 'w', encoding='utf-8', newline='\n') as fh:
+ fh.write('localhost,\n')
+ # Set connection via envvars
+ with open(os.path.join(env_dir, 'envvars'), 'w', encoding='utf-8', newline='\n') as fh:
+ json.dump({ 'ANSIBLE_FORCE_COLOR': '0', 'ANSIBLE_STDOUT_CALLBACK': 'default' }, fh)
+
+ hostname = socket.gethostname()
+ agent_id = self.ctx.agent_id
+ started = int(time.time())
+ await self._post_recap({
+ 'run_id': run_id,
+ 'hostname': hostname,
+ 'agent_id': agent_id,
+ 'playbook_path': play_abs,
+ 'playbook_name': playbook_name or os.path.basename(play_abs),
+ 'activity_job_id': activity_job_id,
+ 'status': 'Running',
+ 'started_ts': started,
+ })
+
+ lines = []
+ recap_json = None
+
+ def _on_event(ev):
+ nonlocal lines, recap_json
+ try:
+ if not isinstance(ev, dict):
+ return
+ # Capture minimal textual progress
+ tx = ev.get('stdout') or ''
+ if tx:
+ lines.append(str(tx))
+ if len(lines) > 5000:
+ lines = lines[-2500:]
+ # Capture final stats
+ if (ev.get('event') or '') == 'playbook_on_stats':
+ d = ev.get('event_data') or {}
+ # ansible-runner provides per-host stats under 'res'
+ recap_json = d.get('res') or d.get('stats') or d
+ except Exception:
+ pass
+
+ cancel_token = self._runs.get(run_id)
+ def _cancel_cb():
+ try:
+ return bool(cancel_token and cancel_token.get('cancel'))
+ except Exception:
+ return False
+
+ try:
+ ansible_runner.interface.run(
+ private_data_dir=pd,
+ playbook=play_rel,
+ inventory=os.path.join(inventory_dir, 'hosts'),
+ quiet=True,
+ event_handler=_on_event,
+ cancel_callback=_cancel_cb,
+ extravars={}
+ )
+ status = 'Cancelled' if _cancel_cb() else 'Success'
+ except Exception:
+ status = 'Failed'
+
+ # Synthesize recap text from recap_json if available
+ recap_text = ''
+ try:
+ if isinstance(recap_json, dict):
+ # Expect a single host 'localhost'
+ stats = recap_json.get('localhost') or recap_json
+ ok = int(stats.get('ok') or 0)
+ changed = int(stats.get('changed') or 0)
+ unreachable = int(stats.get('unreachable') or 0)
+ failed = int(stats.get('failures') or stats.get('failed') or 0)
+ skipped = int(stats.get('skipped') or 0)
+ rescued = int(stats.get('rescued') or 0)
+ ignored = int(stats.get('ignored') or 0)
+ recap_text = (
+ 'PLAY RECAP *********************************************************************\n'
+ f"localhost : ok={ok} changed={changed} unreachable={unreachable} failed={failed} skipped={skipped} rescued={rescued} ignored={ignored}"
+ )
+ except Exception:
+ recap_text = ''
+
+ await self._post_recap({
+ 'run_id': run_id,
+ 'hostname': hostname,
+ 'agent_id': agent_id,
+ 'status': status,
+ 'recap_text': recap_text,
+ 'recap_json': recap_json,
+ 'finished_ts': int(time.time()),
+ })
+ return True
+
+ async def _run_playbook(self, run_id: str, playbook_content: str, playbook_name: str = '', activity_job_id=None, connection: str = 'local'):
+ # Write playbook temp
+ tmp_dir = os.path.join(_project_root(), 'Temp')
+ os.makedirs(tmp_dir, exist_ok=True)
+ fd, path = tempfile.mkstemp(prefix='pb_', suffix='.yml', dir=tmp_dir, text=True)
+ with os.fdopen(fd, 'w', encoding='utf-8', newline='\n') as fh:
+ fh.write(playbook_content or '')
+
+ hostname = socket.gethostname()
+ agent_id = self.ctx.agent_id
+
+ started = int(time.time())
+ await self._post_recap({
+ 'run_id': run_id,
+ 'hostname': hostname,
+ 'agent_id': agent_id,
+ 'playbook_path': path,
+ 'playbook_name': playbook_name or os.path.basename(path),
+ 'activity_job_id': activity_job_id,
+ 'status': 'Running',
+ 'started_ts': started,
+ })
+
+ conn = (connection or 'local').strip().lower()
+ if conn not in ('local', 'winrm', 'psrp'):
+ conn = 'local'
+ cmd = [_ansible_playbook_cmd(), path, '-i', 'localhost,', '-c', conn]
+ # Ensure clean, plain output and correct interpreter for localhost
+ env = os.environ.copy()
+ env.setdefault('ANSIBLE_FORCE_COLOR', '0')
+ env.setdefault('ANSIBLE_NOCOLOR', '1')
+ env.setdefault('PYTHONIOENCODING', 'utf-8')
+ env.setdefault('ANSIBLE_STDOUT_CALLBACK', 'default')
+ # Help Ansible pick the correct python for localhost
+ env.setdefault('ANSIBLE_LOCALHOST_WARNING', '0')
+
+ creationflags = 0
+ if os.name == 'nt':
+ # CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW
+ creationflags = 0x00000200 | 0x08000000
+
+ proc = None
+ try:
+ # Prefer ansible-runner when available and enabled
+ try:
+ if os.environ.get('BOREALIS_USE_ANSIBLE_RUNNER', '0').lower() not in ('0', 'false', 'no'):
+ used = await self._run_playbook_runner(run_id, playbook_content, playbook_name=playbook_name, activity_job_id=activity_job_id, connection=connection)
+ if used:
+ return
+ except Exception:
+ pass
+
+ proc = await asyncio.create_subprocess_exec(
+ *cmd,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.STDOUT,
+ cwd=os.path.dirname(path),
+ env=env,
+ creationflags=creationflags,
+ )
+ except Exception as e:
+ await self._post_recap({
+ 'run_id': run_id,
+ 'hostname': hostname,
+ 'agent_id': agent_id,
+ 'status': 'Failed',
+ 'recap_text': f'Failed to launch ansible-playbook: {e}',
+ 'finished_ts': int(time.time()),
+ })
+ try:
+ os.remove(path)
+ except Exception:
+ pass
+ return
+
+ # Track run for cancellation
+ self._runs[run_id]['proc'] = proc
+
+ lines = []
+ recap_buffer = []
+ seen_recap = False
+ last_emit = 0
+
+ async def emit_update(force=False):
+ nonlocal last_emit
+ now = time.time()
+ if not force and (now - last_emit) < 1.0:
+ return
+ last_emit = now
+ txt = '\n'.join(recap_buffer[-80:]) if recap_buffer else ''
+ if not txt and lines:
+ # show tail while running
+ txt = '\n'.join(lines[-25:])
+ if txt:
+ await self._post_recap({
+ 'run_id': run_id,
+ 'hostname': hostname,
+ 'agent_id': agent_id,
+ 'recap_text': txt,
+ 'status': 'Running',
+ })
+
+ try:
+ # Read combined stdout
+ while True:
+ if proc.stdout is None:
+ break
+ bs = await proc.stdout.readline()
+ if not bs:
+ break
+ try:
+ line = bs.decode('utf-8', errors='replace').rstrip('\r\n')
+ except Exception:
+ line = str(bs)
+ lines.append(line)
+ if len(lines) > 5000:
+ lines = lines[-2500:]
+ # Detect recap section
+ if not seen_recap and line.strip().upper().startswith('PLAY RECAP'):
+ seen_recap = True
+ recap_buffer.append(line)
+ elif seen_recap:
+ recap_buffer.append(line)
+ await emit_update(False)
+ finally:
+ try:
+ await proc.wait()
+ except Exception:
+ pass
+
+ rc = proc.returncode if proc else -1
+ status = 'Success' if rc == 0 else ('Cancelled' if self._runs.get(run_id, {}).get('cancel') else 'Failed')
+
+ # Final recap text
+ final_txt = '\n'.join(recap_buffer[-120:]) if recap_buffer else ('\n'.join(lines[-60:]) if lines else '')
+ await self._post_recap({
+ 'run_id': run_id,
+ 'hostname': hostname,
+ 'agent_id': agent_id,
+ 'status': status,
+ 'recap_text': final_txt,
+ 'finished_ts': int(time.time()),
+ })
+
+ # Cleanup
+ try:
+ os.remove(path)
+ except Exception:
+ pass
+ self._runs.pop(run_id, None)
+
+ def register_events(self):
+ sio = self.ctx.sio
+
+ @sio.on('ansible_playbook_run')
+ async def _on_ansible_playbook_run(payload):
+ try:
+ hostname = socket.gethostname()
+ target = (payload.get('target_hostname') or '').strip().lower()
+ if target and target != hostname.lower():
+ return
+ # Accept provided run_id or generate one
+ run_id = (payload.get('run_id') or '').strip() or uuid.uuid4().hex
+ content = payload.get('playbook_content') or ''
+ p_name = payload.get('playbook_name') or ''
+ act_id = payload.get('activity_job_id')
+ sched_job_id = payload.get('scheduled_job_id')
+ sched_run_id = payload.get('scheduled_run_id')
+ conn = (payload.get('connection') or 'local')
+ # Track run
+ self._runs[run_id] = {'cancel': False, 'proc': None}
+ # Include scheduled ids on first recap post
+ async def run_and_tag():
+ # First recap (Running) will include activity_job_id and scheduled ids
+ # by temporarily monkey patching _post_recap for initial call only
+ first = {'done': False}
+ orig = self._post_recap
+ async def _wrapped(payload2: dict):
+ if not first['done']:
+ if sched_job_id is not None:
+ payload2['scheduled_job_id'] = sched_job_id
+ if sched_run_id is not None:
+ payload2['scheduled_run_id'] = sched_run_id
+ first['done'] = True
+ await orig(payload2)
+ self._post_recap = _wrapped
+ try:
+ await self._run_playbook(run_id, content, playbook_name=p_name, activity_job_id=act_id, connection=conn)
+ finally:
+ self._post_recap = orig
+ asyncio.create_task(run_and_tag())
+ except Exception:
+ pass
+
+ @sio.on('ansible_playbook_cancel')
+ async def _on_ansible_playbook_cancel(payload):
+ try:
+ run_id = (payload.get('run_id') or '').strip()
+ if not run_id:
+ return
+ obj = self._runs.get(run_id)
+ if not obj:
+ return
+ obj['cancel'] = True
+ proc = obj.get('proc')
+ if proc and proc.returncode is None:
+ try:
+ if os.name == 'nt':
+ proc.terminate()
+ await asyncio.sleep(0.5)
+ if proc.returncode is None:
+ proc.kill()
+ else:
+ proc.terminate()
+ await asyncio.sleep(0.5)
+ if proc.returncode is None:
+ proc.kill()
+ except Exception:
+ pass
+ except Exception:
+ pass
+
+ @sio.on('quick_job_run')
+ async def _compat_quick_job_run(payload):
+ """Compatibility: allow scheduled jobs to dispatch .yml as ansible playbooks.
+ Expects payload fields similar to script quick runs but with script_type='ansible'.
+ """
+ try:
+ stype = (payload.get('script_type') or '').lower()
+ if stype != 'ansible':
+ return
+ hostname = socket.gethostname()
+ target = (payload.get('target_hostname') or '').strip().lower()
+ if target and target != hostname.lower():
+ return
+ run_id = uuid.uuid4().hex
+ content = payload.get('script_content') or ''
+ p_name = payload.get('script_name') or ''
+ self._runs[run_id] = {'cancel': False, 'proc': None}
+ asyncio.create_task(self._run_playbook(run_id, content, playbook_name=p_name, activity_job_id=payload.get('job_id'), connection='local'))
+ except Exception:
+ pass
+
+ def on_config(self, roles):
+ # No scheduled tasks to manage for now
+ return
+
+ def stop_all(self):
+ # Attempt to cancel any running playbooks
+ for rid, obj in list(self._runs.items()):
+ try:
+ obj['cancel'] = True
+ p = obj.get('proc')
+ if p and p.returncode is None:
+ try:
+ if os.name == 'nt':
+ p.terminate()
+ time.sleep(0.2)
+ if p.returncode is None:
+ p.kill()
+ else:
+ p.terminate()
+ except Exception:
+ pass
+ except Exception:
+ pass
+ self._runs.clear()
diff --git a/Data/Agent/agent-requirements.txt b/Data/Agent/agent-requirements.txt
index c2cb6d7..157d100 100644
--- a/Data/Agent/agent-requirements.txt
+++ b/Data/Agent/agent-requirements.txt
@@ -29,3 +29,9 @@ pywin32; platform_system == "Windows"
# Ansible Libraries
ansible-core
+ansible-runner
+pywinrm
+requests-credssp
+requests-ntlm
+pypsrp
+psutil
diff --git a/Data/Server/WebUI/src/Devices/Device_Details.jsx b/Data/Server/WebUI/src/Devices/Device_Details.jsx
index 0f5242c..7f557e4 100644
--- a/Data/Server/WebUI/src/Devices/Device_Details.jsx
+++ b/Data/Server/WebUI/src/Devices/Device_Details.jsx
@@ -133,8 +133,12 @@ export default function DeviceDetails({ device, onBack }) {
}
}, [device]);
+
+
useEffect(() => { loadHistory(); }, [loadHistory]);
+ // No explicit live recap tab; recaps are recorded into Activity History
+
const clearHistory = async () => {
if (!device?.hostname) return;
try {
@@ -771,13 +775,10 @@ export default function DeviceDetails({ device, onBack }) {
+ Assembly
- handleHistorySort("script_name")}
- >
- Script Executed
+ handleHistorySort("script_name")}>
+ Task
@@ -806,6 +807,7 @@ export default function DeviceDetails({ device, onBack }) {
{sortedHistory.map((r) => (
+ {(r.script_type || '').toLowerCase() === 'ansible' ? 'Ansible Playbook' : 'Script'}
{r.script_name}
{formatTimestamp(r.ran_at)}
@@ -839,15 +841,15 @@ export default function DeviceDetails({ device, onBack }) {
))}
{sortedHistory.length === 0 && (
-
- No activity yet.
-
+ No activity yet.
)}
);
+
+
const tabs = [
{ label: "Summary", content: renderSummary() },
{ label: "Installed Software", content: renderSoftware() },
@@ -959,6 +961,8 @@ export default function DeviceDetails({ device, onBack }) {
+ {/* Recap dialog removed; recaps flow into Activity History stdout */}
+
setClearDialogOpen(false)}
diff --git a/Data/Server/WebUI/src/Scheduling/Create_Job.jsx b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx
index f42f6fa..4ef4191 100644
--- a/Data/Server/WebUI/src/Scheduling/Create_Job.jsx
+++ b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx
@@ -87,6 +87,11 @@ function buildScriptTree(scripts, folders) {
return { root: [rootNode], map };
}
+// --- Ansible tree helpers (reuse scripts tree builder) ---
+function buildAnsibleTree(playbooks, folders) {
+ return buildScriptTree(playbooks, folders);
+}
+
// --- Workflows tree helpers (reuse approach from Workflow_List) ---
function buildWorkflowTree(workflows, folders) {
const map = {};
@@ -177,6 +182,7 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
const [compTab, setCompTab] = useState("scripts");
const [scriptTree, setScriptTree] = useState([]); const [scriptMap, setScriptMap] = useState({});
const [workflowTree, setWorkflowTree] = useState([]); const [workflowMap, setWorkflowMap] = useState({});
+ const [ansibleTree, setAnsibleTree] = useState([]); const [ansibleMap, setAnsibleMap] = useState({});
const [selectedNodeId, setSelectedNodeId] = useState("");
const [addTargetOpen, setAddTargetOpen] = useState(false);
@@ -382,10 +388,19 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
setWorkflowTree(root); setWorkflowMap(map);
} else { setWorkflowTree([]); setWorkflowMap({}); }
} catch { setWorkflowTree([]); setWorkflowMap({}); }
+ try {
+ // ansible playbooks
+ const aResp = await fetch("/api/assembly/list?island=ansible");
+ if (aResp.ok) {
+ const aData = await aResp.json();
+ const { root, map } = buildAnsibleTree(aData.items || [], aData.folders || []);
+ setAnsibleTree(root); setAnsibleMap(map);
+ } else { setAnsibleTree([]); setAnsibleMap({}); }
+ } catch { setAnsibleTree([]); setAnsibleMap({}); }
};
const addSelectedComponent = () => {
- const map = compTab === "scripts" ? scriptMap : workflowMap;
+ const map = compTab === "scripts" ? scriptMap : (compTab === "ansible" ? ansibleMap : workflowMap);
const node = map[selectedNodeId];
if (!node || node.isFolder) return false;
if (compTab === "scripts" && node.script) {
@@ -396,6 +411,13 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
]);
setSelectedNodeId("");
return true;
+ } else if (compTab === "ansible" && node.script) {
+ setComponents((prev) => [
+ ...prev,
+ { type: "ansible", path: node.path, name: node.fileName || node.label, description: node.path }
+ ]);
+ setSelectedNodeId("");
+ return true;
} else if (compTab === "workflows" && node.workflow) {
alert("Workflows within Scheduled Jobs are not supported yet");
return false;
@@ -453,7 +475,7 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
const tabDefs = useMemo(() => {
const base = [
{ key: "name", label: "Job Name" },
- { key: "components", label: "Scripts/Workflows" },
+ { key: "components", label: "Assemblies" },
{ key: "targets", label: "Targets" },
{ key: "schedule", label: "Schedule" },
{ key: "context", label: "Execution Context" }
@@ -520,16 +542,16 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
{tab === 1 && (
} onClick={openAddComponent}
sx={{ color: "#58a6ff", borderColor: "#58a6ff" }} variant="outlined">
- Add Component
+ Add Assembly
)}
/>
{components.length === 0 && (
- No components added yet.
+ No assemblies added yet.
)}
{components.map((c, idx) => (
))}
{components.length === 0 && (
- At least one component is required.
+ At least one assembly is required.
)}
)}
@@ -731,13 +753,17 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
);
}
+
diff --git a/Data/Server/job_scheduler.py b/Data/Server/job_scheduler.py
index 6db5e57..eaf9a6c 100644
--- a/Data/Server/job_scheduler.py
+++ b/Data/Server/job_scheduler.py
@@ -161,6 +161,70 @@ class JobScheduler:
return "bash"
return "unknown"
+ def _ansible_root(self) -> str:
+ import os
+ return os.path.abspath(
+ os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Ansible_Playbooks")
+ )
+
+ def _dispatch_ansible(self, hostname: str, rel_path: str, scheduled_job_id: int, scheduled_run_id: int) -> None:
+ try:
+ import os, json, uuid
+ ans_root = self._ansible_root()
+ rel_norm = (rel_path or "").replace("\\", "/").lstrip("/")
+ abs_path = os.path.abspath(os.path.join(ans_root, rel_norm))
+ if (not abs_path.startswith(ans_root)) or (not os.path.isfile(abs_path)):
+ return
+ try:
+ with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
+ content = fh.read()
+ except Exception:
+ return
+
+ # Record in activity_history for UI parity
+ now = _now_ts()
+ act_id = None
+ conn = self._conn()
+ cur = conn.cursor()
+ try:
+ cur.execute(
+ """
+ INSERT INTO activity_history(hostname, script_path, script_name, script_type, ran_at, status, stdout, stderr)
+ VALUES(?,?,?,?,?,?,?,?)
+ """,
+ (
+ str(hostname),
+ rel_norm,
+ os.path.basename(abs_path),
+ "ansible",
+ now,
+ "Running",
+ "",
+ "",
+ ),
+ )
+ act_id = cur.lastrowid
+ conn.commit()
+ finally:
+ conn.close()
+
+ payload = {
+ "run_id": uuid.uuid4().hex,
+ "target_hostname": str(hostname),
+ "playbook_name": os.path.basename(abs_path),
+ "playbook_content": content,
+ "activity_job_id": act_id,
+ "scheduled_job_id": int(scheduled_job_id),
+ "scheduled_run_id": int(scheduled_run_id),
+ "connection": "local",
+ }
+ try:
+ self.socketio.emit("ansible_playbook_run", payload)
+ except Exception:
+ pass
+ except Exception:
+ pass
+
def _dispatch_script(self, hostname: str, rel_path: str, run_mode: str) -> None:
"""Emit a quick_job_run event to agents for the given script/host.
Mirrors /api/scripts/quick_run behavior for scheduled jobs.
@@ -457,12 +521,18 @@ class JobScheduler:
except Exception:
comps = []
script_paths = []
+ ansible_paths = []
for c in comps:
try:
- if (c or {}).get("type") == "script":
+ ctype = (c or {}).get("type")
+ if ctype == "script":
p = (c.get("path") or c.get("script_path") or "").strip()
if p:
script_paths.append(p)
+ elif ctype == "ansible":
+ p = (c.get("path") or "").strip()
+ if p:
+ ansible_paths.append(p)
except Exception:
continue
run_mode = (execution_context or "system").strip().lower()
@@ -549,6 +619,7 @@ class JobScheduler:
"INSERT INTO scheduled_job_runs (job_id, target_hostname, scheduled_ts, started_ts, status, created_at, updated_at) VALUES (?,?,?,?,?,?,?)",
(job_id, host, occ, ts_now, "Running", ts_now, ts_now),
)
+ run_row_id = c2.lastrowid or 0
conn2.commit()
# Dispatch all script components for this job to the target host
for sp in script_paths:
@@ -556,6 +627,12 @@ class JobScheduler:
self._dispatch_script(host, sp, run_mode)
except Exception:
continue
+ # Dispatch ansible playbooks for this job to the target host
+ for ap in ansible_paths:
+ try:
+ self._dispatch_ansible(host, ap, job_id, run_row_id)
+ except Exception:
+ continue
except Exception:
pass
finally:
diff --git a/Data/Server/server.py b/Data/Server/server.py
index 33fe5d4..0abd59a 100644
--- a/Data/Server/server.py
+++ b/Data/Server/server.py
@@ -1458,6 +1458,36 @@ def init_db():
"""
)
+ # Ansible play recap storage (one row per playbook run/session)
+ cur.execute(
+ """
+ CREATE TABLE IF NOT EXISTS ansible_play_recaps (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ run_id TEXT UNIQUE NOT NULL,
+ hostname TEXT,
+ agent_id TEXT,
+ playbook_path TEXT,
+ playbook_name TEXT,
+ scheduled_job_id INTEGER,
+ scheduled_run_id INTEGER,
+ activity_job_id INTEGER,
+ status TEXT,
+ recap_text TEXT,
+ recap_json TEXT,
+ started_ts INTEGER,
+ finished_ts INTEGER,
+ created_at INTEGER,
+ updated_at INTEGER
+ )
+ """
+ )
+ try:
+ # Helpful lookups for device views and run correlation
+ cur.execute("CREATE INDEX IF NOT EXISTS idx_ansible_recaps_host_created ON ansible_play_recaps(hostname, created_at)")
+ cur.execute("CREATE INDEX IF NOT EXISTS idx_ansible_recaps_status ON ansible_play_recaps(status)")
+ except Exception:
+ pass
+
conn.commit()
# Scheduled jobs table
cur.execute(
@@ -2512,6 +2542,55 @@ def scripts_quick_run():
return jsonify({"results": results})
+@app.route("/api/ansible/quick_run", methods=["POST"])
+def ansible_quick_run():
+ """Queue an Ansible Playbook Quick Job via WebSocket to targeted agents.
+
+ Payload: { playbook_path: str, hostnames: [str] }
+ The playbook_path is relative to the Ansible island (e.g., "folder/play.yml").
+ """
+ data = request.get_json(silent=True) or {}
+ rel_path = (data.get("playbook_path") or "").strip()
+ hostnames = data.get("hostnames") or []
+ if not rel_path or not isinstance(hostnames, list) or not hostnames:
+ return jsonify({"error": "Missing playbook_path or hostnames[]"}), 400
+ try:
+ root, abs_path, _ = _resolve_assembly_path('ansible', rel_path)
+ if not os.path.isfile(abs_path):
+ return jsonify({"error": "Playbook not found"}), 404
+ try:
+ with open(abs_path, 'r', encoding='utf-8', errors='replace') as fh:
+ content = fh.read()
+ except Exception as e:
+ return jsonify({"error": f"Failed to read playbook: {e}"}), 500
+
+ results = []
+ for host in hostnames:
+ run_id = None
+ try:
+ import uuid as _uuid
+ run_id = _uuid.uuid4().hex
+ except Exception:
+ run_id = str(int(time.time() * 1000))
+ payload = {
+ "run_id": run_id,
+ "target_hostname": str(host),
+ "playbook_name": os.path.basename(abs_path),
+ "playbook_content": content,
+ "connection": "local",
+ }
+ try:
+ socketio.emit("ansible_playbook_run", payload)
+ except Exception:
+ pass
+ results.append({"hostname": host, "run_id": run_id, "status": "Queued"})
+ return jsonify({"results": results})
+ except ValueError as ve:
+ return jsonify({"error": str(ve)}), 400
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
@app.route("/api/device/activity/<hostname>", methods=["GET", "DELETE"])
def device_activity(hostname: str):
try:
@@ -2598,6 +2677,309 @@ def handle_quick_job_result(data):
print(f"[ERROR] quick_job_result DB update failed for job {job_id}: {e}")
+# ---------------------------------------------
+# Ansible Runtime API (Play Recaps)
+# ---------------------------------------------
+def _json_dump_safe(obj) -> str:
+ try:
+ if isinstance(obj, str):
+ # Accept pre-serialized JSON strings as-is
+ json.loads(obj)
+ return obj
+ return json.dumps(obj or {})
+ except Exception:
+ return json.dumps({})
+
+
+@app.route("/api/ansible/recap/report", methods=["POST"])
+def api_ansible_recap_report():
+ """Create or update an Ansible recap row for a running/finished playbook.
+
+ Expects JSON body with fields:
+ run_id: str (required) – unique id for this playbook run (uuid recommended)
+ hostname: str (optional)
+ agent_id: str (optional)
+ playbook_path: str (optional)
+ playbook_name: str (optional)
+ scheduled_job_id: int (optional)
+ scheduled_run_id: int (optional)
+ activity_job_id: int (optional)
+ status: str (Running|Success|Failed|Cancelled) (optional)
+ recap_text: str (optional)
+ recap_json: object or str (optional)
+ started_ts: int (optional)
+ finished_ts: int (optional)
+ """
+ data = request.get_json(silent=True) or {}
+ run_id = (data.get("run_id") or "").strip()
+ if not run_id:
+ return jsonify({"error": "run_id is required"}), 400
+
+ now = _now_ts()
+ hostname = (data.get("hostname") or "").strip()
+ agent_id = (data.get("agent_id") or "").strip()
+ playbook_path = (data.get("playbook_path") or "").strip()
+ playbook_name = (data.get("playbook_name") or "").strip() or (os.path.basename(playbook_path) if playbook_path else "")
+ status = (data.get("status") or "").strip()
+ recap_text = data.get("recap_text")
+ recap_json = data.get("recap_json")
+
+ # IDs to correlate with other subsystems (optional)
+ try:
+ scheduled_job_id = int(data.get("scheduled_job_id")) if data.get("scheduled_job_id") is not None else None
+ except Exception:
+ scheduled_job_id = None
+ try:
+ scheduled_run_id = int(data.get("scheduled_run_id")) if data.get("scheduled_run_id") is not None else None
+ except Exception:
+ scheduled_run_id = None
+ try:
+ activity_job_id = int(data.get("activity_job_id")) if data.get("activity_job_id") is not None else None
+ except Exception:
+ activity_job_id = None
+
+ try:
+ started_ts = int(data.get("started_ts")) if data.get("started_ts") is not None else None
+ except Exception:
+ started_ts = None
+ try:
+ finished_ts = int(data.get("finished_ts")) if data.get("finished_ts") is not None else None
+ except Exception:
+ finished_ts = None
+
+ recap_json_str = _json_dump_safe(recap_json) if recap_json is not None else None
+
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+
+ # Attempt update by run_id first
+ cur.execute(
+ "SELECT id FROM ansible_play_recaps WHERE run_id = ?",
+ (run_id,)
+ )
+ row = cur.fetchone()
+ if row:
+ recap_id = int(row[0])
+ cur.execute(
+ """
+ UPDATE ansible_play_recaps
+ SET hostname = COALESCE(?, hostname),
+ agent_id = COALESCE(?, agent_id),
+ playbook_path = COALESCE(?, playbook_path),
+ playbook_name = COALESCE(?, playbook_name),
+ scheduled_job_id = COALESCE(?, scheduled_job_id),
+ scheduled_run_id = COALESCE(?, scheduled_run_id),
+ activity_job_id = COALESCE(?, activity_job_id),
+ status = COALESCE(?, status),
+ recap_text = CASE WHEN ? IS NOT NULL THEN ? ELSE recap_text END,
+ recap_json = CASE WHEN ? IS NOT NULL THEN ? ELSE recap_json END,
+ started_ts = COALESCE(?, started_ts),
+ finished_ts = COALESCE(?, finished_ts),
+ updated_at = ?
+ WHERE run_id = ?
+ """,
+ (
+ hostname or None,
+ agent_id or None,
+ playbook_path or None,
+ playbook_name or None,
+ scheduled_job_id,
+ scheduled_run_id,
+ activity_job_id,
+ status or None,
+ recap_text, recap_text,
+ recap_json_str, recap_json_str,
+ started_ts,
+ finished_ts,
+ now,
+ run_id,
+ )
+ )
+ conn.commit()
+ else:
+ cur.execute(
+ """
+ INSERT INTO ansible_play_recaps (
+ run_id, hostname, agent_id, playbook_path, playbook_name,
+ scheduled_job_id, scheduled_run_id, activity_job_id,
+ status, recap_text, recap_json, started_ts, finished_ts,
+ created_at, updated_at
+ ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
+ """,
+ (
+ run_id,
+ hostname or None,
+ agent_id or None,
+ playbook_path or None,
+ playbook_name or None,
+ scheduled_job_id,
+ scheduled_run_id,
+ activity_job_id,
+ status or None,
+ recap_text if recap_text is not None else None,
+ recap_json_str,
+ started_ts,
+ finished_ts,
+ now,
+ now,
+ )
+ )
+ recap_id = cur.lastrowid
+ conn.commit()
+
+ # If linked to an activity_history row, mirror status/stdout for Activity tab UX
+ try:
+ if activity_job_id:
+ cur.execute(
+ "UPDATE activity_history SET status = COALESCE(?, status), stdout = CASE WHEN ? IS NOT NULL THEN ? ELSE stdout END WHERE id = ?",
+ (status or None, recap_text, recap_text, activity_job_id)
+ )
+ conn.commit()
+ except Exception:
+ pass
+
+ # Return the latest row
+ cur.execute(
+ "SELECT id, run_id, hostname, agent_id, playbook_path, playbook_name, scheduled_job_id, scheduled_run_id, activity_job_id, status, recap_text, recap_json, started_ts, finished_ts, created_at, updated_at FROM ansible_play_recaps WHERE id=?",
+ (recap_id,)
+ )
+ row = cur.fetchone()
+ conn.close()
+
+ # Broadcast to connected clients for live updates
+ try:
+ payload = {
+ "id": row[0],
+ "run_id": row[1],
+ "hostname": row[2] or "",
+ "agent_id": row[3] or "",
+ "playbook_path": row[4] or "",
+ "playbook_name": row[5] or "",
+ "scheduled_job_id": row[6],
+ "scheduled_run_id": row[7],
+ "activity_job_id": row[8],
+ "status": row[9] or "",
+ "recap_text": row[10] or "",
+ "recap_json": json.loads(row[11]) if (row[11] or "").strip() else None,
+ "started_ts": row[12],
+ "finished_ts": row[13],
+ "created_at": row[14],
+ "updated_at": row[15],
+ }
+ socketio.emit("ansible_recap_update", payload)
+ except Exception:
+ pass
+
+ return jsonify({
+ "id": row[0],
+ "run_id": row[1],
+ "hostname": row[2] or "",
+ "agent_id": row[3] or "",
+ "playbook_path": row[4] or "",
+ "playbook_name": row[5] or "",
+ "scheduled_job_id": row[6],
+ "scheduled_run_id": row[7],
+ "activity_job_id": row[8],
+ "status": row[9] or "",
+ "recap_text": row[10] or "",
+ "recap_json": json.loads(row[11]) if (row[11] or "").strip() else None,
+ "started_ts": row[12],
+ "finished_ts": row[13],
+ "created_at": row[14],
+ "updated_at": row[15],
+ })
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@app.route("/api/ansible/recaps", methods=["GET"])
+def api_ansible_recaps_list():
+ """List Ansible play recaps. Optional query params: hostname, limit (default 50)"""
+ hostname = (request.args.get("hostname") or "").strip()
+ try:
+ limit = int(request.args.get("limit") or 50)
+ except Exception:
+ limit = 50
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+ if hostname:
+ cur.execute(
+ """
+ SELECT id, run_id, hostname, playbook_name, status, created_at, updated_at, started_ts, finished_ts
+ FROM ansible_play_recaps
+ WHERE hostname = ?
+ ORDER BY COALESCE(updated_at, created_at) DESC, id DESC
+ LIMIT ?
+ """,
+ (hostname, limit)
+ )
+ else:
+ cur.execute(
+ """
+ SELECT id, run_id, hostname, playbook_name, status, created_at, updated_at, started_ts, finished_ts
+ FROM ansible_play_recaps
+ ORDER BY COALESCE(updated_at, created_at) DESC, id DESC
+ LIMIT ?
+ """,
+ (limit,)
+ )
+ rows = cur.fetchall()
+ conn.close()
+ out = []
+ for r in rows:
+ out.append({
+ "id": r[0],
+ "run_id": r[1],
+ "hostname": r[2] or "",
+ "playbook_name": r[3] or "",
+ "status": r[4] or "",
+ "created_at": r[5],
+ "updated_at": r[6],
+ "started_ts": r[7],
+ "finished_ts": r[8],
+ })
+ return jsonify({"recaps": out})
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
+@app.route("/api/ansible/recap/<int:recap_id>", methods=["GET"])
+def api_ansible_recap_get(recap_id: int):
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+ cur.execute(
+ "SELECT id, run_id, hostname, agent_id, playbook_path, playbook_name, scheduled_job_id, scheduled_run_id, activity_job_id, status, recap_text, recap_json, started_ts, finished_ts, created_at, updated_at FROM ansible_play_recaps WHERE id=?",
+ (recap_id,)
+ )
+ row = cur.fetchone()
+ conn.close()
+ if not row:
+ return jsonify({"error": "Not found"}), 404
+ return jsonify({
+ "id": row[0],
+ "run_id": row[1],
+ "hostname": row[2] or "",
+ "agent_id": row[3] or "",
+ "playbook_path": row[4] or "",
+ "playbook_name": row[5] or "",
+ "scheduled_job_id": row[6],
+ "scheduled_run_id": row[7],
+ "activity_job_id": row[8],
+ "status": row[9] or "",
+ "recap_text": row[10] or "",
+ "recap_json": json.loads(row[11]) if (row[11] or "").strip() else None,
+ "started_ts": row[12],
+ "finished_ts": row[13],
+ "created_at": row[14],
+ "updated_at": row[15],
+ })
+ except Exception as e:
+ return jsonify({"error": str(e)}), 500
+
+
@socketio.on("collector_status")
def handle_collector_status(data):
"""Collector agent reports activity and optional last_user.
@@ -2992,6 +3374,21 @@ def handle_agent_window_list(data):
# Relay the list to all interested clients
socketio.emit("agent_window_list", data)
+# Relay Ansible control messages from UI to agents
+@socketio.on("ansible_playbook_cancel")
+def relay_ansible_cancel(data):
+    """Rebroadcast a UI cancel request so agents can abort a playbook run.
+
+    NOTE(review): this emits to every connected client, not a targeted
+    room -- presumably agents filter on fields inside ``data`` (e.g. a
+    run id or hostname); confirm, and consider room-based targeting.
+    """
+    try:
+        socketio.emit("ansible_playbook_cancel", data)
+    except Exception:
+        # Best-effort relay: a failed emit must not crash the handler.
+        pass
+
+@socketio.on("ansible_playbook_run")
+def relay_ansible_run(data):
+    """Rebroadcast a UI run request so agents can start a playbook.
+
+    NOTE(review): broadcast to all clients, mirroring
+    relay_ansible_cancel -- agents are presumably expected to filter on
+    the payload; verify that ``data`` carries a target identifier.
+    """
+    try:
+        socketio.emit("ansible_playbook_run", data)
+    except Exception:
+        # Best-effort relay: swallow emit failures rather than crash.
+        pass
+
# ---------------------------------------------
# Server Launch
# ---------------------------------------------
diff --git a/qj_old.txt b/qj_old.txt
new file mode 100644
index 0000000..b8f8160
--- /dev/null
+++ b/qj_old.txt
@@ -0,0 +1,226 @@
+import React, { useEffect, useMemo, useState, useCallback } from "react";
+import {
+ Dialog,
+ DialogTitle,
+ DialogContent,
+ DialogActions,
+ Button,
+ Box,
+ Typography,
+ Paper,
+ FormControlLabel,
+ Checkbox
+} from "@mui/material";
+import { Folder as FolderIcon, Description as DescriptionIcon } from "@mui/icons-material";
+import { SimpleTreeView, TreeItem } from "@mui/x-tree-view";
+
+// Build the tree model consumed by the MUI TreeView from a flat listing.
+//
+// `items`  : file entries carrying `rel_path` ("a/b/file.yml") and `file_name`.
+// `folders`: folder paths ("a/b") to materialize even when empty.
+// Returns { root, map }: `root` is a one-element array holding a synthetic
+// root node; `map` resolves a TreeView item id (the node's path) back to
+// its node object.
+// NOTE(review): `map` is only populated when a node is first created, and
+// nodes are keyed by full path -- duplicate rel_paths are silently merged.
+function buildTree(items, folders, rootLabel = "Scripts") {
+  const map = {};
+  // Synthetic root so the view always has exactly one top-level entry.
+  const rootNode = {
+    id: "root",
+    label: rootLabel,
+    path: "",
+    isFolder: true,
+    children: []
+  };
+  map[rootNode.id] = rootNode;
+
+  // Pass 1: create a folder node for every path segment, so folders with
+  // no files still appear in the tree.
+  (folders || []).forEach((f) => {
+    const parts = (f || "").split("/");
+    let children = rootNode.children;
+    let parentPath = "";
+    parts.forEach((part) => {
+      const path = parentPath ? `${parentPath}/${part}` : part;
+      let node = children.find((n) => n.id === path);
+      if (!node) {
+        node = { id: path, label: part, path, isFolder: true, children: [] };
+        children.push(node);
+        map[path] = node;
+      }
+      children = node.children;
+      parentPath = path;
+    });
+  });
+
+  // Pass 2: walk each file's rel_path, reusing folder nodes from pass 1
+  // (or creating missing intermediates); the final segment becomes a leaf
+  // that carries the original item in `script`.
+  (items || []).forEach((s) => {
+    const parts = (s.rel_path || "").split("/");
+    let children = rootNode.children;
+    let parentPath = "";
+    parts.forEach((part, idx) => {
+      const path = parentPath ? `${parentPath}/${part}` : part;
+      const isFile = idx === parts.length - 1;
+      let node = children.find((n) => n.id === path);
+      if (!node) {
+        node = {
+          id: path,
+          label: isFile ? s.file_name : part,
+          path,
+          isFolder: !isFile,
+          fileName: s.file_name,
+          script: isFile ? s : null,
+          children: []
+        };
+        children.push(node);
+        map[path] = node;
+      }
+      // Descend only through folders; a leaf ends this rel_path's walk.
+      if (!isFile) {
+        children = node.children;
+        parentPath = path;
+      }
+    });
+  });
+
+  return { root: [rootNode], map };
+}
+
+// Dialog that lets the user pick a script or Ansible playbook from a tree
+// and quick-run it against the supplied hostnames.
+// NOTE(review): this file (qj_old.txt) is an archived copy whose JSX
+// element tags were stripped somewhere in extraction -- `renderNodes` and
+// the final `return` below are missing their markup and will not parse;
+// recover the original from version control before reusing this code.
+export default function QuickJob({ open, onClose, hostnames = [] }) {
+  const [tree, setTree] = useState([]);          // root node array for the TreeView
+  const [nodeMap, setNodeMap] = useState({});    // item id -> node, for selection lookups
+  const [selectedPath, setSelectedPath] = useState("");
+  const [running, setRunning] = useState(false); // disables Run while a request is in flight
+  const [error, setError] = useState("");
+  const [runAsCurrentUser, setRunAsCurrentUser] = useState(false);
+  const [mode, setMode] = useState("scripts"); // 'scripts' | 'ansible'
+
+  // Fetch the assembly listing for the active island and rebuild the tree.
+  const loadTree = useCallback(async () => {
+    try {
+      const island = mode === 'ansible' ? 'ansible' : 'scripts';
+      const resp = await fetch(`/api/assembly/list?island=${island}`);
+      if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
+      const data = await resp.json();
+      const { root, map } = buildTree(data.items || [], data.folders || [], mode === 'ansible' ? 'Ansible Playbooks' : 'Scripts');
+      setTree(root);
+      setNodeMap(map);
+    } catch (err) {
+      // On failure, show an empty tree instead of surfacing the error.
+      console.error("Failed to load scripts:", err);
+      setTree([]);
+      setNodeMap({});
+    }
+  }, [mode]);
+
+  // Reset selection/error and reload whenever the dialog opens (and when
+  // `mode` changes while open, via loadTree's dependency).
+  useEffect(() => {
+    if (open) {
+      setSelectedPath("");
+      setError("");
+      loadTree();
+    }
+  }, [open, loadTree]);
+
+  // Recursively render tree nodes.
+  // NOTE(review): JSX element tags are missing here (stripped markup);
+  // the stray `}` / `>` tokens are residue of the lost tags.
+  const renderNodes = (nodes = []) =>
+    nodes.map((n) => (
+
+      {n.isFolder ? (
+
+      ) : (
+
+      )}
+      {n.label}
+
+    }
+    >
+      {n.children && n.children.length ? renderNodes(n.children) : null}
+
+    ));
+
+  // Only leaf (file) nodes are selectable; clicking folders is a no-op.
+  const onItemSelect = (_e, itemId) => {
+    const node = nodeMap[itemId];
+    if (node && !node.isFolder) {
+      setSelectedPath(node.path);
+      setError("");
+    }
+  };
+
+  // Kick off a quick run of the selected script/playbook on all hostnames.
+  const onRun = async () => {
+    if (!selectedPath) {
+      setError(mode === 'ansible' ? "Please choose a playbook to run." : "Please choose a script to run.");
+      return;
+    }
+    setRunning(true);
+    setError("");
+    try {
+      let resp;
+      if (mode === 'ansible') {
+        const playbook_path = selectedPath; // relative to ansible island
+        resp = await fetch("/api/ansible/quick_run", {
+          method: "POST",
+          headers: { "Content-Type": "application/json" },
+          body: JSON.stringify({ playbook_path, hostnames })
+        });
+      } else {
+        // quick_run expects a path relative to Assemblies root with 'Scripts/' prefix
+        const script_path = selectedPath.startsWith('Scripts/') ? selectedPath : `Scripts/${selectedPath}`;
+        resp = await fetch("/api/scripts/quick_run", {
+          method: "POST",
+          headers: { "Content-Type": "application/json" },
+          body: JSON.stringify({ script_path, hostnames, run_mode: runAsCurrentUser ? "current_user" : "system" })
+        });
+      }
+      const data = await resp.json();
+      if (!resp.ok) throw new Error(data.error || `HTTP ${resp.status}`);
+      // Close the dialog on success; errors keep it open with a message.
+      onClose && onClose();
+    } catch (err) {
+      setError(String(err.message || err));
+    } finally {
+      setRunning(false);
+    }
+  };
+
+  // NOTE(review): the dialog markup was stripped in extraction; the
+  // original presumably returned a <Dialog> tree built from the MUI
+  // imports at the top of this file -- confirm against version control.
+  return (
+
+  );
+}
+