diff --git a/Data/Server/WebUI/src/Access_Management/Credential_Editor.jsx b/Data/Server/WebUI/src/Access_Management/Credential_Editor.jsx
new file mode 100644
index 0000000..88b293d
--- /dev/null
+++ b/Data/Server/WebUI/src/Access_Management/Credential_Editor.jsx
@@ -0,0 +1,549 @@
+import React, { useEffect, useMemo, useState } from "react";
+import {
+ Box,
+ Button,
+ Dialog,
+ DialogActions,
+ DialogContent,
+ DialogTitle,
+ FormControl,
+ InputLabel,
+ MenuItem,
+ Select,
+ TextField,
+ Typography,
+ IconButton,
+ Tooltip,
+ CircularProgress
+} from "@mui/material";
+import UploadIcon from "@mui/icons-material/UploadFile";
+import ClearIcon from "@mui/icons-material/Clear";
+
+const CREDENTIAL_TYPES = [
+ { value: "machine", label: "Machine" },
+ { value: "domain", label: "Domain" },
+ { value: "token", label: "Token" }
+];
+
+const CONNECTION_TYPES = [
+ { value: "ssh", label: "SSH" },
+ { value: "winrm", label: "WinRM" }
+];
+
+const BECOME_METHODS = [
+ { value: "", label: "None" },
+ { value: "sudo", label: "sudo" },
+ { value: "su", label: "su" },
+ { value: "runas", label: "runas" },
+ { value: "enable", label: "enable" }
+];
+
+function emptyForm() {
+ return {
+ name: "",
+ description: "",
+ site_id: "",
+ credential_type: "machine",
+ connection_type: "ssh",
+ username: "",
+ password: "",
+ private_key: "",
+ private_key_passphrase: "",
+ become_method: "",
+ become_username: "",
+ become_password: ""
+ };
+}
+
+function normalizeSiteId(value) {
+ if (value === null || typeof value === "undefined" || value === "") return "";
+ const num = Number(value);
+ if (Number.isNaN(num)) return "";
+ return String(num);
+}
+
+export default function CredentialEditor({
+ open,
+ mode = "create",
+ credential,
+ onClose,
+ onSaved
+}) {
+ const isEdit = mode === "edit" && credential && credential.id;
+ const [form, setForm] = useState(emptyForm);
+ const [sites, setSites] = useState([]);
+ const [loading, setLoading] = useState(false);
+ const [error, setError] = useState("");
+ const [passwordDirty, setPasswordDirty] = useState(false);
+ const [privateKeyDirty, setPrivateKeyDirty] = useState(false);
+ const [passphraseDirty, setPassphraseDirty] = useState(false);
+ const [becomePasswordDirty, setBecomePasswordDirty] = useState(false);
+ const [clearPassword, setClearPassword] = useState(false);
+ const [clearPrivateKey, setClearPrivateKey] = useState(false);
+ const [clearPassphrase, setClearPassphrase] = useState(false);
+ const [clearBecomePassword, setClearBecomePassword] = useState(false);
+ const [fetchingDetail, setFetchingDetail] = useState(false);
+
+ const credentialId = credential?.id;
+
+ useEffect(() => {
+ if (!open) return;
+ let canceled = false;
+ (async () => {
+ try {
+ const resp = await fetch("/api/sites");
+ if (!resp.ok) return;
+ const data = await resp.json();
+ if (canceled) return;
+ const parsed = Array.isArray(data?.sites)
+ ? data.sites
+ .filter((s) => s && s.id)
+ .map((s) => ({
+ id: s.id,
+ name: s.name || `Site ${s.id}`
+ }))
+ : [];
+ parsed.sort((a, b) => String(a.name || "").localeCompare(String(b.name || "")));
+ setSites(parsed);
+ } catch {
+ if (!canceled) setSites([]);
+ }
+ })();
+ return () => {
+ canceled = true;
+ };
+ }, [open]);
+
+ useEffect(() => {
+ if (!open) return;
+ setError("");
+ setPasswordDirty(false);
+ setPrivateKeyDirty(false);
+ setPassphraseDirty(false);
+ setBecomePasswordDirty(false);
+ setClearPassword(false);
+ setClearPrivateKey(false);
+ setClearPassphrase(false);
+ setClearBecomePassword(false);
+ if (isEdit && credentialId) {
+ const applyData = (detail) => {
+ const next = emptyForm();
+ next.name = detail?.name || "";
+ next.description = detail?.description || "";
+ next.site_id = normalizeSiteId(detail?.site_id);
+ next.credential_type = (detail?.credential_type || "machine").toLowerCase();
+ next.connection_type = (detail?.connection_type || "ssh").toLowerCase();
+ next.username = detail?.username || "";
+ next.become_method = (detail?.become_method || "").toLowerCase();
+ next.become_username = detail?.become_username || "";
+ setForm(next);
+ };
+
+ if (credential?.name) {
+ applyData(credential);
+ } else {
+ setFetchingDetail(true);
+ (async () => {
+ try {
+ const resp = await fetch(`/api/credentials/${credentialId}`);
+ if (resp.ok) {
+ const data = await resp.json();
+ applyData(data?.credential || {});
+ }
+ } catch {
+ /* ignore */
+ } finally {
+ setFetchingDetail(false);
+ }
+ })();
+ }
+ } else {
+ setForm(emptyForm());
+ }
+ }, [open, isEdit, credentialId, credential]);
+
+ const currentCredentialFlags = useMemo(() => ({
+ hasPassword: Boolean(credential?.has_password),
+ hasPrivateKey: Boolean(credential?.has_private_key),
+ hasPrivateKeyPassphrase: Boolean(credential?.has_private_key_passphrase),
+ hasBecomePassword: Boolean(credential?.has_become_password)
+ }), [credential]);
+
+ const disableSave = loading || fetchingDetail;
+
+ const updateField = (key) => (event) => {
+ const value = event?.target?.value ?? "";
+ setForm((prev) => ({ ...prev, [key]: value }));
+ if (key === "password") {
+ setPasswordDirty(true);
+ setClearPassword(false);
+ } else if (key === "private_key") {
+ setPrivateKeyDirty(true);
+ setClearPrivateKey(false);
+ } else if (key === "private_key_passphrase") {
+ setPassphraseDirty(true);
+ setClearPassphrase(false);
+ } else if (key === "become_password") {
+ setBecomePasswordDirty(true);
+ setClearBecomePassword(false);
+ }
+ };
+
+ const handlePrivateKeyUpload = async (event) => {
+ const file = event.target.files?.[0];
+ if (!file) return;
+ try {
+ const text = await file.text();
+ setForm((prev) => ({ ...prev, private_key: text }));
+ setPrivateKeyDirty(true);
+ setClearPrivateKey(false);
+ } catch {
+ setError("Unable to read private key file.");
+ } finally {
+ event.target.value = "";
+ }
+ };
+
+ const handleCancel = () => {
+ if (loading) return;
+ onClose && onClose();
+ };
+
+ const validate = () => {
+ if (!form.name.trim()) {
+ setError("Credential name is required.");
+ return false;
+ }
+ setError("");
+ return true;
+ };
+
+ const buildPayload = () => {
+ const payload = {
+ name: form.name.trim(),
+ description: form.description.trim(),
+ credential_type: (form.credential_type || "machine").toLowerCase(),
+ connection_type: (form.connection_type || "ssh").toLowerCase(),
+ username: form.username.trim(),
+ become_method: form.become_method.trim(),
+ become_username: form.become_username.trim()
+ };
+ const siteId = normalizeSiteId(form.site_id);
+ if (siteId) {
+ payload.site_id = Number(siteId);
+ } else {
+ payload.site_id = null;
+ }
+ if (passwordDirty) {
+ payload.password = form.password;
+ }
+ if (privateKeyDirty) {
+ payload.private_key = form.private_key;
+ }
+ if (passphraseDirty) {
+ payload.private_key_passphrase = form.private_key_passphrase;
+ }
+ if (becomePasswordDirty) {
+ payload.become_password = form.become_password;
+ }
+ if (clearPassword) payload.clear_password = true;
+ if (clearPrivateKey) payload.clear_private_key = true;
+ if (clearPassphrase) payload.clear_private_key_passphrase = true;
+ if (clearBecomePassword) payload.clear_become_password = true;
+ return payload;
+ };
+
+ const handleSave = async () => {
+ if (!validate()) return;
+ setLoading(true);
+ setError("");
+ const payload = buildPayload();
+ try {
+ const resp = await fetch(
+ isEdit ? `/api/credentials/${credentialId}` : "/api/credentials",
+ {
+ method: isEdit ? "PUT" : "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(payload)
+ }
+ );
+ const data = await resp.json();
+ if (!resp.ok) {
+ throw new Error(data?.error || `Request failed (${resp.status})`);
+ }
+ onSaved && onSaved(data?.credential || null);
+ } catch (err) {
+ setError(String(err.message || err));
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ const title = isEdit ? "Edit Credential" : "Create Credential";
+ const helperStyle = { fontSize: 12, color: "#8a8a8a", mt: 0.5 };
+
+ return (
+
+ );
+}
diff --git a/Data/Server/WebUI/src/Access_Management/Credential_List.jsx b/Data/Server/WebUI/src/Access_Management/Credential_List.jsx
new file mode 100644
index 0000000..353e231
--- /dev/null
+++ b/Data/Server/WebUI/src/Access_Management/Credential_List.jsx
@@ -0,0 +1,320 @@
+import React, { useCallback, useEffect, useMemo, useState } from "react";
+import {
+ Box,
+ Button,
+ IconButton,
+ Menu,
+ MenuItem,
+ Paper,
+ Table,
+ TableBody,
+ TableCell,
+ TableHead,
+ TableRow,
+ TableSortLabel,
+ Typography,
+ CircularProgress
+} from "@mui/material";
+import MoreVertIcon from "@mui/icons-material/MoreVert";
+import AddIcon from "@mui/icons-material/Add";
+import RefreshIcon from "@mui/icons-material/Refresh";
+import LockIcon from "@mui/icons-material/Lock";
+import WifiIcon from "@mui/icons-material/Wifi";
+import ComputerIcon from "@mui/icons-material/Computer";
+import CredentialEditor from "./Credential_Editor.jsx";
+import { ConfirmDeleteDialog } from "../Dialogs.jsx";
+
+const tablePaperSx = { m: 2, p: 0, bgcolor: "#1e1e1e", borderRadius: 2 };
+const tableSx = {
+ minWidth: 840,
+ "& th, & td": {
+ color: "#ddd",
+ borderColor: "#2a2a2a",
+ fontSize: 13,
+ py: 0.9
+ },
+ "& th .MuiTableSortLabel-root": { color: "#ddd" },
+ "& th .MuiTableSortLabel-root.Mui-active": { color: "#ddd" }
+};
+
+const columns = [
+ { id: "name", label: "Name" },
+ { id: "credential_type", label: "Credential Type" },
+ { id: "connection_type", label: "Connection" },
+ { id: "site_name", label: "Site" },
+ { id: "username", label: "Username" },
+ { id: "updated_at", label: "Updated" },
+ { id: "actions", label: "" }
+];
+
+function formatTs(ts) {
+ if (!ts) return "-";
+ const date = new Date(Number(ts) * 1000);
+ if (Number.isNaN(date?.getTime())) return "-";
+ return `${date.toLocaleDateString()} ${date.toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" })}`;
+}
+
+function titleCase(value) {
+ if (!value) return "-";
+ const lower = String(value).toLowerCase();
+ return lower.replace(/(^|\s)\w/g, (c) => c.toUpperCase());
+}
+
+function connectionIcon(connection) {
+ const val = (connection || "").toLowerCase();
+ if (val === "ssh") return ;
+ if (val === "winrm") return ;
+ return ;
+}
+
+export default function CredentialList({ isAdmin = false }) {
+ const [rows, setRows] = useState([]);
+ const [orderBy, setOrderBy] = useState("name");
+ const [order, setOrder] = useState("asc");
+ const [loading, setLoading] = useState(false);
+ const [error, setError] = useState("");
+ const [menuAnchor, setMenuAnchor] = useState(null);
+ const [menuRow, setMenuRow] = useState(null);
+ const [editorOpen, setEditorOpen] = useState(false);
+ const [editorMode, setEditorMode] = useState("create");
+ const [editingCredential, setEditingCredential] = useState(null);
+ const [deleteTarget, setDeleteTarget] = useState(null);
+ const [deleteBusy, setDeleteBusy] = useState(false);
+
+ const sortedRows = useMemo(() => {
+ const sorted = [...rows];
+ sorted.sort((a, b) => {
+ const aVal = (a?.[orderBy] ?? "").toString().toLowerCase();
+ const bVal = (b?.[orderBy] ?? "").toString().toLowerCase();
+ if (aVal < bVal) return order === "asc" ? -1 : 1;
+ if (aVal > bVal) return order === "asc" ? 1 : -1;
+ return 0;
+ });
+ return sorted;
+ }, [rows, order, orderBy]);
+
+ const fetchCredentials = useCallback(async () => {
+ setLoading(true);
+ setError("");
+ try {
+ const resp = await fetch("/api/credentials");
+ if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
+ const data = await resp.json();
+ const list = Array.isArray(data?.credentials) ? data.credentials : [];
+ list.sort((a, b) => String(a?.name || "").localeCompare(String(b?.name || "")));
+ setRows(list);
+ } catch (err) {
+ setRows([]);
+ setError(String(err.message || err));
+ } finally {
+ setLoading(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ fetchCredentials();
+ }, [fetchCredentials]);
+
+ const handleSort = (columnId) => () => {
+ if (orderBy === columnId) {
+ setOrder((prev) => (prev === "asc" ? "desc" : "asc"));
+ } else {
+ setOrderBy(columnId);
+ setOrder("asc");
+ }
+ };
+
+ const openMenu = (event, row) => {
+ setMenuAnchor(event.currentTarget);
+ setMenuRow(row);
+ };
+
+ const closeMenu = () => {
+ setMenuAnchor(null);
+ setMenuRow(null);
+ };
+
+ const handleCreate = () => {
+ setEditorMode("create");
+ setEditingCredential(null);
+ setEditorOpen(true);
+ };
+
+ const handleEdit = (row) => {
+ closeMenu();
+ setEditorMode("edit");
+ setEditingCredential(row);
+ setEditorOpen(true);
+ };
+
+ const handleDelete = (row) => {
+ closeMenu();
+ setDeleteTarget(row);
+ };
+
+ const doDelete = async () => {
+ if (!deleteTarget?.id) return;
+ setDeleteBusy(true);
+ try {
+ const resp = await fetch(`/api/credentials/${deleteTarget.id}`, { method: "DELETE" });
+ if (!resp.ok) {
+ const data = await resp.json().catch(() => ({}));
+ throw new Error(data?.error || `HTTP ${resp.status}`);
+ }
+ setDeleteTarget(null);
+ await fetchCredentials();
+ } catch (err) {
+ setError(String(err.message || err));
+ } finally {
+ setDeleteBusy(false);
+ }
+ };
+
+ const handleEditorSaved = async () => {
+ setEditorOpen(false);
+ setEditingCredential(null);
+ await fetchCredentials();
+ };
+
+ if (!isAdmin) {
+ return (
+
+
+ Access denied
+
+
+ You do not have permission to manage credentials.
+
+
+ );
+ }
+
+ return (
+ <>
+
+
+
+
+ Credentials
+
+
+ Stored credentials for remote automation tasks and Ansible playbook runs.
+
+
+
+ }
+ sx={{ borderColor: "#58a6ff", color: "#58a6ff" }}
+ onClick={fetchCredentials}
+ disabled={loading}
+ >
+ Refresh
+
+ }
+ sx={{ bgcolor: "#58a6ff", color: "#0b0f19" }}
+ onClick={handleCreate}
+ >
+ New Credential
+
+
+
+ {loading && (
+
+
+ Loading credentials…
+
+ )}
+ {error && (
+
+ {error}
+
+ )}
+
+
+
+
+ {columns.map((col) => (
+
+ {col.id === "actions" ? null : (
+
+ {col.label}
+
+ )}
+
+ ))}
+
+
+
+ {!sortedRows.length && !loading ? (
+
+
+ No credentials have been created yet.
+
+
+ ) : (
+ sortedRows.map((row) => (
+
+ {row.name || "-"}
+ {titleCase(row.credential_type)}
+
+
+ {connectionIcon(row.connection_type)}
+ {titleCase(row.connection_type)}
+
+
+ {row.site_name || "-"}
+ {row.username || "-"}
+ {formatTs(row.updated_at || row.created_at)}
+
+ openMenu(e, row)} sx={{ color: "#7db7ff" }}>
+
+
+
+
+ ))
+ )}
+
+
+
+
+
+
+ {
+ setEditorOpen(false);
+ setEditingCredential(null);
+ }}
+ onSaved={handleEditorSaved}
+ />
+
+ setDeleteTarget(null)}
+ onConfirm={doDelete}
+ confirmDisabled={deleteBusy}
+ message={
+ deleteTarget
+ ? `Delete credential '${deleteTarget.name || ""}'? Any jobs referencing it will require an update.`
+ : ""
+ }
+ />
+ </>
+ );
+}
diff --git a/Data/Server/WebUI/src/Admin/User_Management.jsx b/Data/Server/WebUI/src/Access_Management/Users.jsx
similarity index 100%
rename from Data/Server/WebUI/src/Admin/User_Management.jsx
rename to Data/Server/WebUI/src/Access_Management/Users.jsx
diff --git a/Data/Server/WebUI/src/App.jsx b/Data/Server/WebUI/src/App.jsx
index e0eaf62..7af4573 100644
--- a/Data/Server/WebUI/src/App.jsx
+++ b/Data/Server/WebUI/src/App.jsx
@@ -39,7 +39,8 @@ import AssemblyList from "./Assemblies/Assembly_List";
import AssemblyEditor from "./Assemblies/Assembly_Editor";
import ScheduledJobsList from "./Scheduling/Scheduled_Jobs_List";
import CreateJob from "./Scheduling/Create_Job.jsx";
-import UserManagement from "./Admin/User_Management.jsx";
+import CredentialList from "./Access_Management/Credential_List.jsx";
+import UserManagement from "./Access_Management/Users.jsx";
import ServerInfo from "./Admin/Server_Info.jsx";
// Networking Imports
@@ -201,12 +202,16 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state";
items.push({ label: "Automation", page: "jobs" });
items.push({ label: "Community Content", page: "community" });
break;
- case "admin_users":
- items.push({ label: "Admin Settings", page: "admin_users" });
- items.push({ label: "User Management", page: "admin_users" });
+ case "access_credentials":
+ items.push({ label: "Access Management", page: "access_credentials" });
+ items.push({ label: "Credentials", page: "access_credentials" });
+ break;
+ case "access_users":
+ items.push({ label: "Access Management", page: "access_credentials" });
+ items.push({ label: "Users", page: "access_users" });
break;
case "server_info":
- items.push({ label: "Admin Settings", page: "admin_users" });
+ items.push({ label: "Admin Settings" });
items.push({ label: "Server Info", page: "server_info" });
break;
case "filters":
@@ -551,7 +556,7 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state";
const isAdmin = (String(userRole || '').toLowerCase() === 'admin');
useEffect(() => {
- if (!isAdmin && (currentPage === 'admin_users' || currentPage === 'server_info')) {
+ if (!isAdmin && (currentPage === 'server_info' || currentPage === 'access_credentials' || currentPage === 'access_users')) {
setNotAuthorizedOpen(true);
setCurrentPage('devices');
}
@@ -705,7 +710,10 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state";
/>
);
- case "admin_users":
+ case "access_credentials":
+ return ;
+
+ case "access_users":
return ;
case "server_info":
diff --git a/Data/Server/WebUI/src/Navigation_Sidebar.jsx b/Data/Server/WebUI/src/Navigation_Sidebar.jsx
index e91189b..266ed69 100644
--- a/Data/Server/WebUI/src/Navigation_Sidebar.jsx
+++ b/Data/Server/WebUI/src/Navigation_Sidebar.jsx
@@ -22,7 +22,7 @@ import {
Apps as AssembliesIcon
} from "@mui/icons-material";
import { LocationCity as SitesIcon } from "@mui/icons-material";
-import { ManageAccounts as AdminUsersIcon, Dns as ServerInfoIcon } from "@mui/icons-material";
+import { Dns as ServerInfoIcon, VpnKey as CredentialIcon, PersonOutline as UserIcon } from "@mui/icons-material";
function NavigationSidebar({ currentPage, onNavigate, isAdmin = false }) {
const [expandedNav, setExpandedNav] = useState({
@@ -30,6 +30,7 @@ function NavigationSidebar({ currentPage, onNavigate, isAdmin = false }) {
devices: true,
automation: true,
filters: true,
+ access: true,
admin: true
});
@@ -289,10 +290,57 @@ function NavigationSidebar({ currentPage, onNavigate, isAdmin = false }) {
);
})()}
+ {/* Access Management */}
+ {(() => {
+ if (!isAdmin) return null;
+ const groupActive = currentPage === "access_credentials" || currentPage === "access_users";
+ return (
+ setExpandedNav((s) => ({ ...s, access: e }))}
+ square
+ disableGutters
+ sx={{ "&:before": { display: "none" }, margin: 0, border: 0 }}
+ >
+ }
+ sx={{
+ position: "relative",
+ background: groupActive
+ ? "linear-gradient(90deg, rgba(88,166,255,0.08) 0%, rgba(88,166,255,0.00) 100%)"
+ : "#2c2c2c",
+ minHeight: "36px",
+ "& .MuiAccordionSummary-content": { margin: 0 },
+ "&::before": {
+ content: '""',
+ position: "absolute",
+ left: 0,
+ top: 0,
+ bottom: 0,
+ width: groupActive ? 3 : 0,
+ bgcolor: "#58a6ff",
+ borderTopRightRadius: 2,
+ borderBottomRightRadius: 2,
+ transition: "width 160ms ease"
+ }
+ }}
+ >
+
+ Access Management
+
+
+
+ } label="Credentials" pageKey="access_credentials" />
+ } label="Users" pageKey="access_users" />
+
+
+ );
+ })()}
+
{/* Admin */}
{(() => {
if (!isAdmin) return null;
- const groupActive = currentPage === "admin_users" || currentPage === "server_info";
+ const groupActive = currentPage === "server_info";
return (
- } label="User Management" pageKey="admin_users" />
} label="Server Info" pageKey="server_info" />
diff --git a/Data/Server/WebUI/src/Scheduling/Create_Job.jsx b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx
index 10b5aaa..e281867 100644
--- a/Data/Server/WebUI/src/Scheduling/Create_Job.jsx
+++ b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx
@@ -9,8 +9,10 @@ import {
Button,
IconButton,
Checkbox,
+ FormControl,
FormControlLabel,
Select,
+ InputLabel,
Menu,
MenuItem,
Divider,
@@ -24,7 +26,8 @@ import {
TableCell,
TableBody,
TableSortLabel,
- GlobalStyles
+ GlobalStyles,
+ CircularProgress
} from "@mui/material";
import {
Add as AddIcon,
@@ -34,7 +37,8 @@ import {
Sync as SyncIcon,
Timer as TimerIcon,
Check as CheckIcon,
- Error as ErrorIcon
+ Error as ErrorIcon,
+ Refresh as RefreshIcon
} from "@mui/icons-material";
import { SimpleTreeView, TreeItem } from "@mui/x-tree-view";
import { LocalizationProvider } from "@mui/x-date-pickers/LocalizationProvider";
@@ -421,6 +425,52 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
const [stopAfterEnabled, setStopAfterEnabled] = useState(false);
const [expiration, setExpiration] = useState("no_expire");
const [execContext, setExecContext] = useState("system");
+ const [credentials, setCredentials] = useState([]);
+ const [credentialLoading, setCredentialLoading] = useState(false);
+ const [credentialError, setCredentialError] = useState("");
+ const [selectedCredentialId, setSelectedCredentialId] = useState("");
+
+ const loadCredentials = useCallback(async () => {
+ setCredentialLoading(true);
+ setCredentialError("");
+ try {
+ const resp = await fetch("/api/credentials");
+ if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
+ const data = await resp.json();
+ const list = Array.isArray(data?.credentials) ? data.credentials : [];
+ list.sort((a, b) => String(a?.name || "").localeCompare(String(b?.name || "")));
+ setCredentials(list);
+ } catch (err) {
+ setCredentials([]);
+ setCredentialError(String(err.message || err));
+ } finally {
+ setCredentialLoading(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ loadCredentials();
+ }, [loadCredentials]);
+
+ const remoteExec = useMemo(() => execContext === "ssh" || execContext === "winrm", [execContext]);
+ const filteredCredentials = useMemo(() => {
+ if (!remoteExec) return credentials;
+ const target = execContext === "winrm" ? "winrm" : "ssh";
+ return credentials.filter((cred) => String(cred.connection_type || "").toLowerCase() === target);
+ }, [credentials, remoteExec, execContext]);
+
+ useEffect(() => {
+ if (!remoteExec) {
+ return;
+ }
+ if (!filteredCredentials.length) {
+ setSelectedCredentialId("");
+ return;
+ }
+ if (!selectedCredentialId || !filteredCredentials.some((cred) => String(cred.id) === String(selectedCredentialId))) {
+ setSelectedCredentialId(String(filteredCredentials[0].id));
+ }
+ }, [remoteExec, filteredCredentials, selectedCredentialId]);
// dialogs state
const [addCompOpen, setAddCompOpen] = useState(false);
@@ -827,11 +877,12 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
const isValid = useMemo(() => {
const base = jobName.trim().length > 0 && components.length > 0 && targets.length > 0;
if (!base) return false;
+ if (remoteExec && !selectedCredentialId) return false;
if (scheduleType !== "immediately") {
return !!startDateTime;
}
return true;
- }, [jobName, components.length, targets.length, scheduleType, startDateTime]);
+ }, [jobName, components.length, targets.length, scheduleType, startDateTime, remoteExec, selectedCredentialId]);
const [confirmOpen, setConfirmOpen] = useState(false);
const editing = !!(initialJob && initialJob.id);
@@ -1306,6 +1357,7 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
setStopAfterEnabled(Boolean(initialJob.duration_stop_enabled));
setExpiration(initialJob.expiration || "no_expire");
setExecContext(initialJob.execution_context || "system");
+ setSelectedCredentialId(initialJob.credential_id ? String(initialJob.credential_id) : "");
const comps = Array.isArray(initialJob.components) ? initialJob.components : [];
const hydrated = await hydrateExistingComponents(comps);
if (!canceled) {
@@ -1316,6 +1368,7 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
setPageTitleJobName("");
setComponents([]);
setComponentVarErrors({});
+ setSelectedCredentialId("");
}
};
hydrate();
@@ -1411,6 +1464,10 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
};
const handleCreate = async () => {
+ if (remoteExec && !selectedCredentialId) {
+ alert("Please select a credential for this execution context.");
+ return;
+ }
const requiredErrors = {};
components.forEach((comp) => {
if (!comp || !comp.localId) return;
@@ -1438,7 +1495,8 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
targets,
schedule: { type: scheduleType, start: scheduleType !== "immediately" ? (() => { try { const d = startDateTime?.toDate?.() || new Date(startDateTime); d.setSeconds(0,0); return d.toISOString(); } catch { return startDateTime; } })() : null },
duration: { stopAfterEnabled, expiration },
- execution_context: execContext
+ execution_context: execContext,
+ credential_id: remoteExec && selectedCredentialId ? Number(selectedCredentialId) : null
};
try {
const resp = await fetch(initialJob && initialJob.id ? `/api/scheduled_jobs/${initialJob.id}` : "/api/scheduled_jobs", {
@@ -1665,10 +1723,61 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
{tab === 4 && (
-
)}
diff --git a/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx b/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx
index 59033b0..d83ac31 100644
--- a/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx
+++ b/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx
@@ -10,7 +10,12 @@ import {
Paper,
FormControlLabel,
Checkbox,
- TextField
+ TextField,
+ FormControl,
+ InputLabel,
+ Select,
+ MenuItem,
+ CircularProgress
} from "@mui/material";
import { Folder as FolderIcon, Description as DescriptionIcon } from "@mui/icons-material";
import { SimpleTreeView, TreeItem } from "@mui/x-tree-view";
@@ -82,6 +87,10 @@ export default function QuickJob({ open, onClose, hostnames = [] }) {
const [error, setError] = useState("");
const [runAsCurrentUser, setRunAsCurrentUser] = useState(false);
const [mode, setMode] = useState("scripts"); // 'scripts' | 'ansible'
+ const [credentials, setCredentials] = useState([]);
+ const [credentialsLoading, setCredentialsLoading] = useState(false);
+ const [credentialsError, setCredentialsError] = useState("");
+ const [selectedCredentialId, setSelectedCredentialId] = useState("");
const [variables, setVariables] = useState([]);
const [variableValues, setVariableValues] = useState({});
const [variableErrors, setVariableErrors] = useState({});
@@ -115,6 +124,53 @@ export default function QuickJob({ open, onClose, hostnames = [] }) {
}
}, [open, loadTree]);
+ useEffect(() => {
+ if (!open || mode !== "ansible") return;
+ let canceled = false;
+ setCredentialsLoading(true);
+ setCredentialsError("");
+ (async () => {
+ try {
+ const resp = await fetch("/api/credentials");
+ if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
+ const data = await resp.json();
+ if (canceled) return;
+ const list = Array.isArray(data?.credentials)
+ ? data.credentials.filter((cred) => String(cred.connection_type || "").toLowerCase() === "ssh")
+ : [];
+ list.sort((a, b) => String(a?.name || "").localeCompare(String(b?.name || "")));
+ setCredentials(list);
+ } catch (err) {
+ if (!canceled) {
+ setCredentials([]);
+ setCredentialsError(String(err.message || err));
+ }
+ } finally {
+ if (!canceled) setCredentialsLoading(false);
+ }
+ })();
+ return () => {
+ canceled = true;
+ };
+ }, [open, mode]);
+
+ useEffect(() => {
+ if (!open) {
+ setSelectedCredentialId("");
+ }
+ }, [open]);
+
+ useEffect(() => {
+ if (mode !== "ansible") return;
+ if (!credentials.length) {
+ setSelectedCredentialId("");
+ return;
+ }
+ if (!selectedCredentialId || !credentials.some((cred) => String(cred.id) === String(selectedCredentialId))) {
+ setSelectedCredentialId(String(credentials[0].id));
+ }
+ }, [mode, credentials, selectedCredentialId]);
+
const renderNodes = (nodes = []) =>
nodes.map((n) => (
{
@@ -314,7 +374,12 @@ export default function QuickJob({ open, onClose, hostnames = [] }) {
resp = await fetch("/api/ansible/quick_run", {
method: "POST",
headers: { "Content-Type": "application/json" },
- body: JSON.stringify({ playbook_path, hostnames, variable_values: variableOverrides })
+ body: JSON.stringify({
+ playbook_path,
+ hostnames,
+ variable_values: variableOverrides,
+ credential_id: selectedCredentialId ? Number(selectedCredentialId) : null
+ })
});
} else {
// quick_run expects a path relative to Assemblies root with 'Scripts/' prefix
@@ -340,6 +405,9 @@ export default function QuickJob({ open, onClose, hostnames = [] }) {
}
};
+ const credentialRequired = mode === "ansible";
+ const disableRun = running || !selectedPath || (credentialRequired && (!selectedCredentialId || !credentials.length));
+
return (
);
}
-
diff --git a/Data/Server/job_scheduler.py b/Data/Server/job_scheduler.py
index 44a8634..bf95d77 100644
--- a/Data/Server/job_scheduler.py
+++ b/Data/Server/job_scheduler.py
@@ -309,6 +309,8 @@ class JobScheduler:
self.RETENTION_DAYS = int(os.environ.get("BOREALIS_JOB_HISTORY_DAYS", "30"))
# Callback to retrieve current set of online hostnames
self._online_lookup: Optional[Callable[[], List[str]]] = None
+ # Optional callback to execute Ansible directly from the server
+ self._server_ansible_runner: Optional[Callable[..., str]] = None
# Ensure run-history table exists
self._init_tables()
@@ -475,7 +477,15 @@ class JobScheduler:
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Ansible_Playbooks")
)
- def _dispatch_ansible(self, hostname: str, component: Dict[str, Any], scheduled_job_id: int, scheduled_run_id: int) -> Optional[Dict[str, Any]]:
+ def _dispatch_ansible(
+ self,
+ hostname: str,
+ component: Dict[str, Any],
+ scheduled_job_id: int,
+ scheduled_run_row_id: int,
+ run_mode: str,
+ credential_id: Optional[int] = None,
+ ) -> Optional[Dict[str, Any]]:
try:
import os, uuid
ans_root = self._ansible_root()
@@ -511,6 +521,8 @@ class JobScheduler:
encoded_content = _encode_script_content(content)
variables = doc.get("variables") or []
files = doc.get("files") or []
+ run_mode_norm = (run_mode or "system").strip().lower()
+ server_run = run_mode_norm in ("ssh", "winrm")
# Record in activity_history for UI parity
now = _now_ts()
@@ -539,24 +551,68 @@ class JobScheduler:
finally:
conn.close()
- payload = {
- "run_id": uuid.uuid4().hex,
- "target_hostname": str(hostname),
- "playbook_name": doc.get("name") or os.path.basename(abs_path),
- "playbook_content": encoded_content,
- "playbook_encoding": "base64",
- "activity_job_id": act_id,
- "scheduled_job_id": int(scheduled_job_id),
- "scheduled_run_id": int(scheduled_run_id),
- "connection": "winrm",
- "variables": variables,
- "files": files,
- "variable_values": overrides_map,
- }
- try:
- self.socketio.emit("ansible_playbook_run", payload)
- except Exception:
- pass
+ if server_run:
+ if not credential_id:
+ raise RuntimeError("Remote execution requires a credential_id")
+ if not callable(self._server_ansible_runner):
+ raise RuntimeError("Server-side Ansible runner is not configured")
+ try:
+ self._server_ansible_runner(
+ hostname=str(hostname),
+ playbook_abs_path=abs_path,
+ playbook_rel_path=rel_norm,
+ playbook_name=doc.get("name") or os.path.basename(abs_path),
+ credential_id=int(credential_id),
+ variable_values=overrides_map,
+ source="scheduled_job",
+ activity_id=act_id,
+ scheduled_job_id=scheduled_job_id,
+ scheduled_run_id=scheduled_run_row_id,
+ scheduled_job_run_row_id=scheduled_run_row_id,
+ )
+ except Exception as exc:
+ try:
+ self.app.logger.warning(
+ "[Scheduler] Server-side Ansible queue failed job=%s run=%s host=%s err=%s",
+ scheduled_job_id,
+ scheduled_run_row_id,
+ hostname,
+ exc,
+ )
+ except Exception:
+ print(f"[Scheduler] Server-side Ansible queue failed job={scheduled_job_id} host={hostname} err={exc}")
+ if act_id:
+ try:
+ conn_fail = self._conn()
+ cur_fail = conn_fail.cursor()
+ cur_fail.execute(
+ "UPDATE activity_history SET status='Failed', stderr=?, ran_at=? WHERE id=?",
+ (str(exc), _now_ts(), act_id),
+ )
+ conn_fail.commit()
+ conn_fail.close()
+ except Exception:
+ pass
+ raise
+ else:
+ payload = {
+ "run_id": uuid.uuid4().hex,
+ "target_hostname": str(hostname),
+ "playbook_name": doc.get("name") or os.path.basename(abs_path),
+ "playbook_content": encoded_content,
+ "playbook_encoding": "base64",
+ "activity_job_id": act_id,
+ "scheduled_job_id": int(scheduled_job_id),
+ "scheduled_run_id": int(scheduled_run_row_id),
+ "connection": "winrm",
+ "variables": variables,
+ "files": files,
+ "variable_values": overrides_map,
+ }
+ try:
+ self.socketio.emit("ansible_playbook_run", payload)
+ except Exception:
+ pass
if act_id:
return {
"activity_id": int(act_id),
@@ -898,7 +954,7 @@ class JobScheduler:
pass
try:
cur.execute(
- "SELECT id, components_json, targets_json, schedule_type, start_ts, expiration, execution_context, created_at FROM scheduled_jobs WHERE enabled=1 ORDER BY id ASC"
+ "SELECT id, components_json, targets_json, schedule_type, start_ts, expiration, execution_context, credential_id, created_at FROM scheduled_jobs WHERE enabled=1 ORDER BY id ASC"
)
jobs = cur.fetchall()
except Exception:
@@ -916,7 +972,7 @@ class JobScheduler:
five_min = 300
now_min = _now_minute()
- for (job_id, components_json, targets_json, schedule_type, start_ts, expiration, execution_context, created_at) in jobs:
+ for (job_id, components_json, targets_json, schedule_type, start_ts, expiration, execution_context, credential_id, created_at) in jobs:
try:
# Targets list for this job
try:
@@ -951,6 +1007,11 @@ class JobScheduler:
except Exception:
continue
run_mode = (execution_context or "system").strip().lower()
+ job_credential_id = None
+ try:
+ job_credential_id = int(credential_id) if credential_id is not None else None
+ except Exception:
+ job_credential_id = None
exp_seconds = _parse_expiration(expiration)
@@ -1037,54 +1098,78 @@ class JobScheduler:
run_row_id = c2.lastrowid or 0
conn2.commit()
activity_links: List[Dict[str, Any]] = []
- # Dispatch all script components for this job to the target host
- for comp in script_components:
- try:
- link = self._dispatch_script(host, comp, run_mode)
- if link and link.get("activity_id"):
- activity_links.append({
- "run_id": run_row_id,
- "activity_id": int(link["activity_id"]),
- "component_kind": link.get("component_kind") or "script",
- "script_type": link.get("script_type") or "powershell",
- "component_path": link.get("component_path") or "",
- "component_name": link.get("component_name") or "",
- })
- except Exception:
- continue
- # Dispatch ansible playbooks for this job to the target host
- for comp in ansible_components:
- try:
- link = self._dispatch_ansible(host, comp, job_id, run_row_id)
- if link and link.get("activity_id"):
- activity_links.append({
- "run_id": run_row_id,
- "activity_id": int(link["activity_id"]),
- "component_kind": link.get("component_kind") or "ansible",
- "script_type": link.get("script_type") or "ansible",
- "component_path": link.get("component_path") or "",
- "component_name": link.get("component_name") or "",
- })
- except Exception:
- continue
- if activity_links:
- try:
- for link in activity_links:
- c2.execute(
- "INSERT OR IGNORE INTO scheduled_job_run_activity(run_id, activity_id, component_kind, script_type, component_path, component_name, created_at) VALUES (?,?,?,?,?,?,?)",
- (
- int(link["run_id"]),
- int(link["activity_id"]),
- link.get("component_kind") or "",
- link.get("script_type") or "",
- link.get("component_path") or "",
- link.get("component_name") or "",
- ts_now,
- ),
+ remote_requires_cred = run_mode in ("ssh", "winrm")
+ if remote_requires_cred and not job_credential_id:
+ err_msg = "Credential required for remote execution"
+ c2.execute(
+ "UPDATE scheduled_job_runs SET status='Failed', finished_ts=?, updated_at=?, error=? WHERE id=?",
+ (ts_now, ts_now, err_msg, run_row_id),
+ )
+ conn2.commit()
+ else:
+ # Dispatch all script components for this job to the target host
+ for comp in script_components:
+ try:
+ link = self._dispatch_script(host, comp, run_mode)
+ if link and link.get("activity_id"):
+ activity_links.append({
+ "run_id": run_row_id,
+ "activity_id": int(link["activity_id"]),
+ "component_kind": link.get("component_kind") or "script",
+ "script_type": link.get("script_type") or "powershell",
+ "component_path": link.get("component_path") or "",
+ "component_name": link.get("component_name") or "",
+ })
+ except Exception:
+ continue
+ # Dispatch ansible playbooks for this job to the target host
+ for comp in ansible_components:
+ try:
+ link = self._dispatch_ansible(
+ host,
+ comp,
+ job_id,
+ run_row_id,
+ run_mode,
+ job_credential_id,
)
- conn2.commit()
- except Exception:
- pass
+ if link and link.get("activity_id"):
+ activity_links.append({
+ "run_id": run_row_id,
+ "activity_id": int(link["activity_id"]),
+ "component_kind": link.get("component_kind") or "ansible",
+ "script_type": link.get("script_type") or "ansible",
+ "component_path": link.get("component_path") or "",
+ "component_name": link.get("component_name") or "",
+ })
+ except Exception as exc:
+ try:
+ c2.execute(
+ "UPDATE scheduled_job_runs SET status='Failed', finished_ts=?, updated_at=?, error=? WHERE id=?",
+ (ts_now, ts_now, str(exc)[:512], run_row_id),
+ )
+ conn2.commit()
+ except Exception:
+ pass
+ continue
+ if activity_links:
+ try:
+ for link in activity_links:
+ c2.execute(
+ "INSERT OR IGNORE INTO scheduled_job_run_activity(run_id, activity_id, component_kind, script_type, component_path, component_name, created_at) VALUES (?,?,?,?,?,?,?)",
+ (
+ int(link["run_id"]),
+ int(link["activity_id"]),
+ link.get("component_kind") or "",
+ link.get("script_type") or "",
+ link.get("component_path") or "",
+ link.get("component_name") or "",
+ ts_now,
+ ),
+ )
+ conn2.commit()
+ except Exception:
+ pass
except Exception:
pass
finally:
@@ -1157,9 +1242,10 @@ class JobScheduler:
"duration_stop_enabled": bool(r[6] or 0),
"expiration": r[7] or "no_expire",
"execution_context": r[8] or "system",
- "enabled": bool(r[9] or 0),
- "created_at": r[10] or 0,
- "updated_at": r[11] or 0,
+ "credential_id": r[9],
+ "enabled": bool(r[10] or 0),
+ "created_at": r[11] or 0,
+ "updated_at": r[12] or 0,
}
# Attach computed status summary for latest occurrence
try:
@@ -1236,7 +1322,7 @@ class JobScheduler:
cur.execute(
"""
SELECT id, name, components_json, targets_json, schedule_type, start_ts,
- duration_stop_enabled, expiration, execution_context, enabled, created_at, updated_at
+ duration_stop_enabled, expiration, execution_context, credential_id, enabled, created_at, updated_at
FROM scheduled_jobs
ORDER BY created_at DESC
"""
@@ -1259,6 +1345,11 @@ class JobScheduler:
duration_stop_enabled = int(bool((data.get("duration") or {}).get("stopAfterEnabled") or data.get("duration_stop_enabled")))
expiration = (data.get("duration") or {}).get("expiration") or data.get("expiration") or "no_expire"
execution_context = (data.get("execution_context") or "system").strip().lower()
+ credential_id = data.get("credential_id")
+ try:
+ credential_id = int(credential_id) if credential_id is not None else None
+ except Exception:
+ credential_id = None
enabled = int(bool(data.get("enabled", True)))
if not name or not components or not targets:
return json.dumps({"error": "name, components, targets required"}), 400, {"Content-Type": "application/json"}
@@ -1269,8 +1360,8 @@ class JobScheduler:
cur.execute(
"""
INSERT INTO scheduled_jobs
- (name, components_json, targets_json, schedule_type, start_ts, duration_stop_enabled, expiration, execution_context, enabled, created_at, updated_at)
- VALUES (?,?,?,?,?,?,?,?,?,?,?)
+ (name, components_json, targets_json, schedule_type, start_ts, duration_stop_enabled, expiration, execution_context, credential_id, enabled, created_at, updated_at)
+ VALUES (?,?,?,?,?,?,?,?,?,?,?,?)
""",
(
name,
@@ -1281,6 +1372,7 @@ class JobScheduler:
duration_stop_enabled,
expiration,
execution_context,
+ credential_id,
enabled,
now,
now,
@@ -1291,7 +1383,7 @@ class JobScheduler:
cur.execute(
"""
SELECT id, name, components_json, targets_json, schedule_type, start_ts,
- duration_stop_enabled, expiration, execution_context, enabled, created_at, updated_at
+ duration_stop_enabled, expiration, execution_context, credential_id, enabled, created_at, updated_at
FROM scheduled_jobs WHERE id=?
""",
(job_id,),
@@ -1310,7 +1402,7 @@ class JobScheduler:
cur.execute(
"""
SELECT id, name, components_json, targets_json, schedule_type, start_ts,
- duration_stop_enabled, expiration, execution_context, enabled, created_at, updated_at
+ duration_stop_enabled, expiration, execution_context, credential_id, enabled, created_at, updated_at
FROM scheduled_jobs WHERE id=?
""",
(job_id,),
@@ -1344,6 +1436,15 @@ class JobScheduler:
fields["expiration"] = (data.get("duration") or {}).get("expiration") or data.get("expiration") or "no_expire"
if "execution_context" in data:
fields["execution_context"] = (data.get("execution_context") or "system").strip().lower()
+ if "credential_id" in data:
+ cred_val = data.get("credential_id")
+ if cred_val in (None, "", "null"):
+ fields["credential_id"] = None
+ else:
+ try:
+ fields["credential_id"] = int(cred_val)
+ except Exception:
+ fields["credential_id"] = None
if "enabled" in data:
fields["enabled"] = int(bool(data.get("enabled")))
if not fields:
@@ -1361,7 +1462,7 @@ class JobScheduler:
cur.execute(
"""
SELECT id, name, components_json, targets_json, schedule_type, start_ts,
- duration_stop_enabled, expiration, execution_context, enabled, created_at, updated_at
+ duration_stop_enabled, expiration, execution_context, credential_id, enabled, created_at, updated_at
FROM scheduled_jobs WHERE id=?
""",
(job_id,),
@@ -1385,7 +1486,7 @@ class JobScheduler:
return json.dumps({"error": "not found"}), 404, {"Content-Type": "application/json"}
conn.commit()
cur.execute(
- "SELECT id, name, components_json, targets_json, schedule_type, start_ts, duration_stop_enabled, expiration, execution_context, enabled, created_at, updated_at FROM scheduled_jobs WHERE id=?",
+ "SELECT id, name, components_json, targets_json, schedule_type, start_ts, duration_stop_enabled, expiration, execution_context, credential_id, enabled, created_at, updated_at FROM scheduled_jobs WHERE id=?",
(job_id,),
)
row = cur.fetchone()
@@ -1633,3 +1734,7 @@ def register(app, socketio, db_path: str) -> JobScheduler:
def set_online_lookup(scheduler: JobScheduler, fn: Callable[[], List[str]]):
scheduler._online_lookup = fn
+
+
+def set_server_ansible_runner(scheduler: JobScheduler, fn: Callable[..., str]):
+ scheduler._server_ansible_runner = fn
diff --git a/Data/Server/server-requirements.txt b/Data/Server/server-requirements.txt
index 9c60208..09a26af 100644
--- a/Data/Server/server-requirements.txt
+++ b/Data/Server/server-requirements.txt
@@ -30,3 +30,10 @@ Pillow # Image processing (Windows)
# WebRTC Video Libraries
###aiortc # Python library for WebRTC in async environments
###av # Required by aiortc for video/audio codecs
+
+# Ansible Execution (server-side playbooks)
+ansible-core
+ansible-compat
+ansible-runner
+paramiko
+pywinrm
diff --git a/Data/Server/server.py b/Data/Server/server.py
index 796fca0..2c62e80 100644
--- a/Data/Server/server.py
+++ b/Data/Server/server.py
@@ -39,6 +39,9 @@ from typing import List, Dict, Tuple, Optional, Any, Set, Sequence
import sqlite3
import io
import uuid
+import subprocess, shutil
+import stat
+import traceback
from threading import Lock
from datetime import datetime, timezone
@@ -322,6 +325,7 @@ from Python_API_Endpoints.ocr_engines import run_ocr_on_base64
from Python_API_Endpoints.script_engines import run_powershell_script
from job_scheduler import register as register_job_scheduler
from job_scheduler import set_online_lookup as scheduler_set_online_lookup
+from job_scheduler import set_server_ansible_runner as scheduler_set_server_runner
# =============================================================================
# Section: Runtime Stack Configuration
@@ -1593,6 +1597,675 @@ def api_users_toggle_mfa(username):
except Exception as exc:
return jsonify({"error": str(exc)}), 500
+# =============================================================================
+# Section: Access Management - Credentials
+# =============================================================================
+
+
+@app.route("/api/credentials", methods=["GET", "POST"])
+def api_credentials_collection():
+ chk = _require_admin()
+ if chk:
+ return chk
+ if request.method == "GET":
+ site_filter = _coerce_int(request.args.get("site_id"))
+ connection_filter = request.args.get("connection_type")
+ where_parts: List[str] = []
+ params: List[Any] = []
+ if site_filter is not None:
+ where_parts.append("c.site_id = ?")
+ params.append(site_filter)
+ if connection_filter:
+ where_parts.append("LOWER(c.connection_type) = ?")
+ params.append(_normalize_connection_type(connection_filter))
+ where_clause = " AND ".join(where_parts)
+ creds = _query_credentials(where_clause, tuple(params))
+ return jsonify({"credentials": creds})
+
+ data = request.get_json(silent=True) or {}
+ name = (data.get("name") or "").strip()
+ if not name:
+ return jsonify({"error": "name is required"}), 400
+ credential_type = _normalize_credential_type(data.get("credential_type"))
+ connection_type = _normalize_connection_type(data.get("connection_type"))
+ username = (data.get("username") or "").strip()
+ description = (data.get("description") or "").strip()
+ site_id = _coerce_int(data.get("site_id"))
+ metadata = data.get("metadata") if isinstance(data.get("metadata"), dict) else None
+ metadata_json = json.dumps(metadata) if metadata else None
+
+ password_blob = _secret_from_payload(data.get("password"))
+ private_key_val = data.get("private_key")
+ if isinstance(private_key_val, str) and private_key_val and not private_key_val.endswith("\n"):
+ private_key_val = private_key_val + "\n"
+ private_key_blob = _secret_from_payload(private_key_val)
+ private_key_passphrase_blob = _secret_from_payload(data.get("private_key_passphrase"))
+
+ become_method = _normalize_become_method(data.get("become_method"))
+ become_username = (data.get("become_username") or "").strip()
+ become_password_blob = _secret_from_payload(data.get("become_password"))
+
+ now = _now_ts()
+ conn = None
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ INSERT INTO credentials (
+ name,
+ description,
+ site_id,
+ credential_type,
+ connection_type,
+ username,
+ password_encrypted,
+ private_key_encrypted,
+ private_key_passphrase_encrypted,
+ become_method,
+ become_username,
+ become_password_encrypted,
+ metadata_json,
+ created_at,
+ updated_at
+ ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
+ """,
+ (
+ name,
+ description,
+ site_id,
+ credential_type,
+ connection_type,
+ username,
+ password_blob,
+ private_key_blob,
+ private_key_passphrase_blob,
+ become_method,
+ become_username,
+ become_password_blob,
+ metadata_json,
+ now,
+ now,
+ ),
+ )
+ conn.commit()
+ cred_id = int(cur.lastrowid or 0)
+ conn.close()
+ except sqlite3.IntegrityError:
+ if conn:
+ conn.close()
+ return jsonify({"error": "credential name already exists"}), 409
+ except Exception as exc:
+ if conn:
+ conn.close()
+ return jsonify({"error": str(exc)}), 500
+
+ record = _fetch_credential_record(cred_id)
+ return jsonify({"credential": record}), 201
+
+
+@app.route("/api/credentials/<int:credential_id>", methods=["GET", "PUT", "DELETE"])
+def api_credentials_detail(credential_id: int):
+ chk = _require_admin()
+ if chk:
+ return chk
+
+ if request.method == "GET":
+ conn = None
+ try:
+ conn = _db_conn()
+ conn.row_factory = sqlite3.Row # type: ignore[attr-defined]
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT c.*, s.name AS site_name
+ FROM credentials c
+ LEFT JOIN sites s ON s.id = c.site_id
+ WHERE c.id = ?
+ """,
+ (credential_id,),
+ )
+ row = cur.fetchone()
+ except Exception as exc:
+ if conn:
+ conn.close()
+ return jsonify({"error": str(exc)}), 500
+ if conn:
+ conn.close()
+ if not row:
+ return jsonify({"error": "credential not found"}), 404
+ row_map = dict(row)
+ row_map["has_password"] = 1 if row_map.get("password_encrypted") else 0
+ row_map["has_private_key"] = 1 if row_map.get("private_key_encrypted") else 0
+ row_map["has_become_password"] = 1 if row_map.get("become_password_encrypted") else 0
+ detail = _credential_row_to_dict(row_map)
+ detail["has_private_key_passphrase"] = bool(row_map.get("private_key_passphrase_encrypted"))
+ detail["password_fingerprint"] = _secret_fingerprint(row_map.get("password_encrypted"))
+ detail["private_key_fingerprint"] = _secret_fingerprint(row_map.get("private_key_encrypted"))
+ detail["become_password_fingerprint"] = _secret_fingerprint(row_map.get("become_password_encrypted"))
+ return jsonify({"credential": detail})
+
+ if request.method == "DELETE":
+ conn = None
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+ cur.execute("UPDATE scheduled_jobs SET credential_id=NULL WHERE credential_id=?", (credential_id,))
+ cur.execute("DELETE FROM credentials WHERE id=?", (credential_id,))
+ if cur.rowcount == 0:
+ conn.close()
+ return jsonify({"error": "credential not found"}), 404
+ conn.commit()
+ conn.close()
+ return jsonify({"status": "ok"})
+ except Exception as exc:
+ if conn:
+ conn.close()
+ return jsonify({"error": str(exc)}), 500
+
+ data = request.get_json(silent=True) or {}
+ updates: Dict[str, Any] = {}
+ if "name" in data:
+ name = (data.get("name") or "").strip()
+ if not name:
+ return jsonify({"error": "name cannot be empty"}), 400
+ updates["name"] = name
+ if "description" in data:
+ updates["description"] = (data.get("description") or "").strip()
+ if "site_id" in data:
+ updates["site_id"] = _coerce_int(data.get("site_id"))
+ if "credential_type" in data:
+ updates["credential_type"] = _normalize_credential_type(data.get("credential_type"))
+ if "connection_type" in data:
+ updates["connection_type"] = _normalize_connection_type(data.get("connection_type"))
+ if "username" in data:
+ updates["username"] = (data.get("username") or "").strip()
+ if "become_method" in data:
+ updates["become_method"] = _normalize_become_method(data.get("become_method"))
+ if "become_username" in data:
+ updates["become_username"] = (data.get("become_username") or "").strip()
+ if "metadata" in data:
+ metadata = data.get("metadata")
+ if metadata is None:
+ updates["metadata_json"] = None
+ elif isinstance(metadata, dict):
+ updates["metadata_json"] = json.dumps(metadata)
+ if data.get("clear_password"):
+ updates["password_encrypted"] = None
+ elif "password" in data:
+ updates["password_encrypted"] = _secret_from_payload(data.get("password"))
+ if data.get("clear_private_key"):
+ updates["private_key_encrypted"] = None
+ elif "private_key" in data:
+ pk_val = data.get("private_key")
+ if isinstance(pk_val, str) and pk_val and not pk_val.endswith("\n"):
+ pk_val = pk_val + "\n"
+ updates["private_key_encrypted"] = _secret_from_payload(pk_val)
+ if data.get("clear_private_key_passphrase"):
+ updates["private_key_passphrase_encrypted"] = None
+ elif "private_key_passphrase" in data:
+ updates["private_key_passphrase_encrypted"] = _secret_from_payload(data.get("private_key_passphrase"))
+ if data.get("clear_become_password"):
+ updates["become_password_encrypted"] = None
+ elif "become_password" in data:
+ updates["become_password_encrypted"] = _secret_from_payload(data.get("become_password"))
+
+ if not updates:
+ return jsonify({"error": "no fields to update"}), 400
+ updates["updated_at"] = _now_ts()
+
+ conn = None
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+ set_clause = ", ".join([f"{col}=?" for col in updates.keys()])
+ params = list(updates.values()) + [credential_id]
+ cur.execute(f"UPDATE credentials SET {set_clause} WHERE id=?", params)
+ if cur.rowcount == 0:
+ conn.close()
+ return jsonify({"error": "credential not found"}), 404
+ conn.commit()
+ conn.close()
+ except sqlite3.IntegrityError:
+ if conn:
+ conn.close()
+ return jsonify({"error": "credential name already exists"}), 409
+ except Exception as exc:
+ if conn:
+ conn.close()
+ return jsonify({"error": str(exc)}), 500
+
+ record = _fetch_credential_record(credential_id)
+ return jsonify({"credential": record})
+
+
+# =============================================================================
+# Section: Server-Side Ansible Execution
+# =============================================================================
+
+_ANSIBLE_WORKSPACE_DIR = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), "..", "..", "Logs", "Server", "AnsibleRuns")
+)
+
+
+def _ensure_ansible_workspace() -> str:
+ try:
+ os.makedirs(_ANSIBLE_WORKSPACE_DIR, exist_ok=True)
+ except Exception:
+ pass
+ return _ANSIBLE_WORKSPACE_DIR
+
+
+def _fetch_credential_with_secrets(credential_id: int) -> Optional[Dict[str, Any]]:
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT
+ id,
+ name,
+ credential_type,
+ connection_type,
+ username,
+ password_encrypted,
+ private_key_encrypted,
+ private_key_passphrase_encrypted,
+ become_method,
+ become_username,
+ become_password_encrypted
+ FROM credentials
+ WHERE id=?
+ """,
+ (credential_id,),
+ )
+ row = cur.fetchone()
+ conn.close()
+ except Exception:
+ return None
+ if not row:
+ return None
+ return {
+ "id": row[0],
+ "name": row[1],
+ "credential_type": (row[2] or "machine").lower(),
+ "connection_type": (row[3] or "ssh").lower(),
+ "username": row[4] or "",
+ "password": _decrypt_secret(row[5]) if row[5] else "",
+ "private_key": _decrypt_secret(row[6]) if row[6] else "",
+ "private_key_passphrase": _decrypt_secret(row[7]) if row[7] else "",
+ "become_method": _normalize_become_method(row[8]),
+ "become_username": row[9] or "",
+ "become_password": _decrypt_secret(row[10]) if row[10] else "",
+ }
+
+
+def _emit_ansible_recap_from_row(row):
+ if not row:
+ return
+ try:
+ payload = {
+ "id": row[0],
+ "run_id": row[1],
+ "hostname": row[2] or "",
+ "agent_id": row[3] or "",
+ "playbook_path": row[4] or "",
+ "playbook_name": row[5] or "",
+ "scheduled_job_id": row[6],
+ "scheduled_run_id": row[7],
+ "activity_job_id": row[8],
+ "status": row[9] or "",
+ "recap_text": row[10] or "",
+ "recap_json": json.loads(row[11]) if (row[11] or "").strip() else None,
+ "started_ts": row[12],
+ "finished_ts": row[13],
+ "created_at": row[14],
+ "updated_at": row[15],
+ }
+ socketio.emit("ansible_recap_update", payload)
+ if payload.get("activity_job_id"):
+ socketio.emit(
+ "device_activity_changed",
+ {
+ "hostname": payload.get("hostname") or "",
+ "activity_id": payload.get("activity_job_id"),
+ "status": payload.get("status") or "",
+ "change": "updated",
+ "source": "ansible",
+ },
+ )
+ except Exception:
+ pass
+
+
+def _record_ansible_recap_start(
+ run_id: str,
+ hostname: str,
+ playbook_rel_path: str,
+ playbook_name: str,
+ activity_id: Optional[int],
+ scheduled_job_id: Optional[int],
+ scheduled_run_id: Optional[int],
+):
+ try:
+ now = _now_ts()
+ conn = _db_conn()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ INSERT INTO ansible_play_recaps (
+ run_id,
+ hostname,
+ agent_id,
+ playbook_path,
+ playbook_name,
+ scheduled_job_id,
+ scheduled_run_id,
+ activity_job_id,
+ status,
+ recap_text,
+ recap_json,
+ started_ts,
+ created_at,
+ updated_at
+ )
+ VALUES (?, ?, 'server', ?, ?, ?, ?, ?, 'Running', '', '', ?, ?, ?)
+ ON CONFLICT(run_id) DO UPDATE SET
+ hostname=excluded.hostname,
+ playbook_path=excluded.playbook_path,
+ playbook_name=excluded.playbook_name,
+ scheduled_job_id=excluded.scheduled_job_id,
+ scheduled_run_id=excluded.scheduled_run_id,
+ activity_job_id=excluded.activity_job_id,
+ status='Running',
+ started_ts=COALESCE(ansible_play_recaps.started_ts, excluded.started_ts),
+ updated_at=excluded.updated_at
+ """,
+ (
+ run_id,
+ hostname,
+ playbook_rel_path,
+ playbook_name,
+ scheduled_job_id,
+ scheduled_run_id,
+ activity_id,
+ now,
+ now,
+ now,
+ ),
+ )
+ conn.commit()
+ conn.close()
+ except Exception as exc:
+ _ansible_log_server(f"[server_run] failed to record recap start run_id={run_id}: {exc}")
+
+
+def _queue_server_ansible_run(
+ *,
+ hostname: str,
+ playbook_abs_path: str,
+ playbook_rel_path: str,
+ playbook_name: str,
+ credential_id: int,
+ variable_values: Dict[str, Any],
+ source: str,
+ activity_id: Optional[int] = None,
+ scheduled_job_id: Optional[int] = None,
+ scheduled_run_id: Optional[int] = None,
+ scheduled_job_run_row_id: Optional[int] = None,
+) -> str:
+ try:
+ run_id = uuid.uuid4().hex
+ except Exception:
+ run_id = str(int(time.time() * 1000))
+
+ _record_ansible_recap_start(
+ run_id,
+ hostname,
+ playbook_rel_path,
+ playbook_name,
+ activity_id,
+ scheduled_job_id,
+ scheduled_run_id,
+ )
+
+ ctx = {
+ "run_id": run_id,
+ "hostname": hostname,
+ "playbook_abs_path": playbook_abs_path,
+ "playbook_rel_path": playbook_rel_path,
+ "playbook_name": playbook_name,
+ "credential_id": credential_id,
+ "variable_values": variable_values or {},
+ "activity_id": activity_id,
+ "scheduled_job_id": scheduled_job_id,
+ "scheduled_run_id": scheduled_run_id,
+ "scheduled_job_run_row_id": scheduled_job_run_row_id,
+ "source": source,
+ "started_ts": _now_ts(),
+ }
+ try:
+ socketio.start_background_task(_execute_server_ansible_run, ctx)
+ except Exception as exc:
+ _ansible_log_server(f"[server_run] failed to queue background task run_id={run_id}: {exc}")
+ _execute_server_ansible_run(ctx, immediate_error=str(exc))
+ return run_id
+
+
+def _execute_server_ansible_run(ctx: Dict[str, Any], immediate_error: Optional[str] = None):
+ run_id = ctx.get("run_id") or uuid.uuid4().hex
+ hostname = ctx.get("hostname") or ""
+ playbook_abs_path = ctx.get("playbook_abs_path") or ""
+ playbook_rel_path = ctx.get("playbook_rel_path") or os.path.basename(playbook_abs_path)
+ playbook_name = ctx.get("playbook_name") or os.path.basename(playbook_abs_path)
+ credential_id = ctx.get("credential_id")
+ variable_values = ctx.get("variable_values") or {}
+ activity_id = ctx.get("activity_id")
+ scheduled_job_id = ctx.get("scheduled_job_id")
+ scheduled_run_id = ctx.get("scheduled_run_id")
+ scheduled_job_run_row_id = ctx.get("scheduled_job_run_row_id")
+ started_ts = ctx.get("started_ts") or _now_ts()
+ source = ctx.get("source") or "ansible"
+
+ status = "Failed"
+ stdout = ""
+ stderr = ""
+ error_message = immediate_error or ""
+ finished_ts = started_ts
+ workspace = None
+
+ try:
+ credential = _fetch_credential_with_secrets(int(credential_id))
+ if not credential:
+ raise RuntimeError("Credential not found")
+ connection_type = credential.get("connection_type", "ssh")
+ if connection_type not in ("ssh",):
+ raise RuntimeError(f"Unsupported credential connection type '{connection_type}' for server execution")
+
+ workspace_root = _ensure_ansible_workspace()
+ workspace = os.path.join(workspace_root, run_id)
+ os.makedirs(workspace, exist_ok=True)
+
+ inventory_path = os.path.join(workspace, "inventory.json")
+ extra_vars_path = None
+ key_path = None
+
+ host_entry: Dict[str, Any] = {
+ "ansible_host": hostname,
+ "ansible_user": credential.get("username") or "",
+ "ansible_connection": "ssh",
+ "ansible_ssh_common_args": "-o StrictHostKeyChecking=no",
+ }
+ if credential.get("password"):
+ host_entry["ansible_password"] = credential["password"]
+ host_entry["ansible_ssh_pass"] = credential["password"]
+ if credential.get("private_key"):
+ key_path = os.path.join(workspace, "ssh_key")
+ with open(key_path, "w", encoding="utf-8") as fh:
+ fh.write(credential["private_key"])
+ try:
+ os.chmod(key_path, stat.S_IRUSR | stat.S_IWUSR)
+ except Exception:
+ pass
+ host_entry["ansible_ssh_private_key_file"] = key_path
+ become_method = credential.get("become_method") or ""
+ become_username = credential.get("become_username") or ""
+ become_password = credential.get("become_password") or ""
+ if become_method or become_username or become_password:
+ host_entry["ansible_become"] = True
+ if become_method:
+ host_entry["ansible_become_method"] = become_method
+ if become_username:
+ host_entry["ansible_become_user"] = become_username
+ if become_password:
+ host_entry["ansible_become_password"] = become_password
+
+ with open(inventory_path, "w", encoding="utf-8") as fh:
+ json.dump({"all": {"hosts": {hostname: host_entry}}}, fh)
+
+ if variable_values:
+ extra_vars_path = os.path.join(workspace, "extra_vars.json")
+ with open(extra_vars_path, "w", encoding="utf-8") as fh:
+ json.dump(variable_values, fh)
+
+ env = os.environ.copy()
+ env.setdefault("ANSIBLE_STDOUT_CALLBACK", "yaml")
+ env["ANSIBLE_HOST_KEY_CHECKING"] = "False"
+ env.setdefault("ANSIBLE_RETRY_FILES_ENABLED", "False")
+
+ cmd = ["ansible-playbook", playbook_abs_path, "-i", inventory_path]
+ if extra_vars_path:
+ cmd.extend(["--extra-vars", f"@{extra_vars_path}"])
+ if become_method or become_username or become_password:
+ cmd.append("--become")
+ if become_method:
+ cmd.extend(["--become-method", become_method])
+ if become_username:
+ cmd.extend(["--become-user", become_username])
+
+ _ansible_log_server(
+ f"[server_run] start run_id={run_id} host='{hostname}' playbook='{playbook_rel_path}' credential={credential_id}"
+ )
+
+ proc = subprocess.run(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True,
+ env=env,
+ cwd=os.path.dirname(playbook_abs_path) or None,
+ )
+
+ stdout = proc.stdout or ""
+ stderr = proc.stderr or ""
+ status = "Success" if proc.returncode == 0 else "Failed"
+ finished_ts = _now_ts()
+ if proc.returncode != 0 and not error_message:
+ error_message = stderr or stdout or f"ansible-playbook exited with {proc.returncode}"
+ except Exception as exc:
+ finished_ts = _now_ts()
+ status = "Failed"
+ if not error_message:
+ error_message = str(exc)
+ if not stderr:
+ stderr = f"{exc}"
+ _ansible_log_server(
+ f"[server_run] error run_id={run_id} host='{hostname}' err={exc}\n{traceback.format_exc()}"
+ )
+ finally:
+ if workspace:
+ try:
+ shutil.rmtree(workspace, ignore_errors=True)
+ except Exception:
+ pass
+
+    recap_json, recap_row = "{}", None
+
+ try:
+ conn = _db_conn()
+ cur = conn.cursor()
+ if activity_id:
+ try:
+ cur.execute(
+ "UPDATE activity_history SET status=?, stdout=?, stderr=?, ran_at=? WHERE id=?",
+ (status, stdout, stderr, finished_ts, int(activity_id)),
+ )
+ except Exception:
+ pass
+ if scheduled_job_run_row_id:
+ try:
+ cur.execute(
+ "UPDATE scheduled_job_runs SET status=?, finished_ts=?, updated_at=?, error=? WHERE id=?",
+ (
+ status,
+ finished_ts,
+ finished_ts,
+ (error_message or "")[:1024],
+ int(scheduled_job_run_row_id),
+ ),
+ )
+ except Exception:
+ pass
+ try:
+ cur.execute(
+ """
+ UPDATE ansible_play_recaps
+ SET status=?,
+ recap_text=?,
+ recap_json=?,
+ finished_ts=?,
+ updated_at=?,
+ hostname=?,
+ playbook_path=?,
+ playbook_name=?,
+ scheduled_job_id=?,
+ scheduled_run_id=?,
+ activity_job_id=?
+ WHERE run_id=?
+ """,
+ (
+ status,
+ stdout,
+ recap_json,
+ finished_ts,
+ finished_ts,
+ hostname,
+ playbook_rel_path,
+ playbook_name,
+ scheduled_job_id,
+ scheduled_run_id,
+ activity_id,
+ run_id,
+ ),
+ )
+ except Exception as exc:
+ _ansible_log_server(f"[server_run] failed to update recap run_id={run_id}: {exc}")
+ try:
+ cur.execute(
+ "SELECT id, run_id, hostname, agent_id, playbook_path, playbook_name, scheduled_job_id, scheduled_run_id, activity_job_id, status, recap_text, recap_json, started_ts, finished_ts, created_at, updated_at FROM ansible_play_recaps WHERE run_id=?",
+ (run_id,),
+ )
+ recap_row = cur.fetchone()
+ except Exception:
+ recap_row = None
+ conn.commit()
+ conn.close()
+ except Exception as exc:
+ _ansible_log_server(f"[server_run] database update failed run_id={run_id}: {exc}")
+
+ if recap_row:
+ _emit_ansible_recap_from_row(recap_row)
+
+ if status == "Success":
+ _ansible_log_server(
+ f"[server_run] completed run_id={run_id} host='{hostname}' status=Success duration={finished_ts - started_ts}s"
+ )
+ else:
+ _ansible_log_server(
+ f"[server_run] completed run_id={run_id} host='{hostname}' status=Failed duration={finished_ts - started_ts}s"
+ )
+
+
# =============================================================================
# Section: Python Sidecar Services
# =============================================================================
@@ -3251,6 +3924,174 @@ def _decrypt_secret(blob: Optional[bytes]) -> str:
return ''
+def _normalize_credential_type(value: Optional[str]) -> str:
+ val = (value or '').strip().lower()
+ if val in {"machine", "ssh", "domain", "token", "api"}:
+ return val if val != "ssh" else "machine"
+ return "machine"
+
+
+def _normalize_connection_type(value: Optional[str]) -> str:
+ val = (value or '').strip().lower()
+ if val in {"ssh", "linux", "unix"}:
+ return "ssh"
+ if val in {"winrm", "windows"}:
+ return "winrm"
+ if val in {"api", "http"}:
+ return "api"
+ return "ssh"
+
+
+def _normalize_become_method(value: Optional[str]) -> str:
+ val = (value or '').strip().lower()
+ if val in {"", "none", "no", "false"}:
+ return ""
+ if val in {"sudo", "su", "runas", "enable"}:
+ return val
+ return ""
+
+
+def _secret_from_payload(value) -> Optional[bytes]:
+ if value is None:
+ return None
+ if isinstance(value, str):
+ if value.strip() == "":
+ return None
+ return _encrypt_secret(value)
+ text = str(value)
+ if not text.strip():
+ return None
+ return _encrypt_secret(text)
+
+
+def _coerce_int(value) -> Optional[int]:
+ try:
+ if value is None:
+ return None
+ if isinstance(value, str) and value.strip() == "":
+ return None
+ return int(value)
+ except Exception:
+ return None
+
+
+def _credential_row_to_dict(row) -> Dict[str, Any]:
+ if not row:
+ return {}
+ # Support both sqlite3.Row and tuple
+ try:
+ getter = row.__getitem__
+ keys = row.keys() if hasattr(row, "keys") else None
+ except Exception:
+ getter = None
+ keys = None
+
+ def _get(field, index=None):
+ if getter and keys:
+ try:
+ return getter(field)
+ except Exception:
+ pass
+ if index is not None:
+ try:
+ return row[index]
+ except Exception:
+ return None
+ return None
+
+ metadata_json = _get("metadata_json")
+ metadata = {}
+ if metadata_json:
+ try:
+ metadata = json.loads(metadata_json)
+ if not isinstance(metadata, dict):
+ metadata = {}
+ except Exception:
+ metadata = {}
+ created_at = _get("created_at")
+ updated_at = _get("updated_at")
+ out = {
+ "id": _get("id"),
+ "name": _get("name"),
+ "description": _get("description") or "",
+ "credential_type": _get("credential_type") or "machine",
+ "connection_type": _get("connection_type") or "ssh",
+ "site_id": _get("site_id"),
+ "site_name": _get("site_name"),
+ "username": _get("username") or "",
+ "become_method": _get("become_method") or "",
+ "become_username": _get("become_username") or "",
+ "has_password": bool(_get("has_password")),
+ "has_private_key": bool(_get("has_private_key")),
+ "has_private_key_passphrase": bool(_get("has_private_key_passphrase")),
+ "has_become_password": bool(_get("has_become_password")),
+ "metadata": metadata,
+ "created_at": int(created_at or 0),
+ "updated_at": int(updated_at or 0),
+ }
+ return out
+
+
+def _query_credentials(where_clause: str = "", params: Sequence[Any] = ()) -> List[Dict[str, Any]]:
+ try:
+ conn = _db_conn()
+ conn.row_factory = sqlite3.Row # type: ignore[attr-defined]
+ cur = conn.cursor()
+ sql = """
+ SELECT
+ c.id,
+ c.name,
+ c.description,
+ c.credential_type,
+ c.connection_type,
+ c.username,
+ c.site_id,
+ s.name AS site_name,
+ c.become_method,
+ c.become_username,
+ c.metadata_json,
+ c.created_at,
+ c.updated_at,
+ CASE WHEN c.password_encrypted IS NOT NULL AND LENGTH(c.password_encrypted) > 0 THEN 1 ELSE 0 END AS has_password,
+ CASE WHEN c.private_key_encrypted IS NOT NULL AND LENGTH(c.private_key_encrypted) > 0 THEN 1 ELSE 0 END AS has_private_key,
+ CASE WHEN c.private_key_passphrase_encrypted IS NOT NULL AND LENGTH(c.private_key_passphrase_encrypted) > 0 THEN 1 ELSE 0 END AS has_private_key_passphrase,
+ CASE WHEN c.become_password_encrypted IS NOT NULL AND LENGTH(c.become_password_encrypted) > 0 THEN 1 ELSE 0 END AS has_become_password
+ FROM credentials c
+ LEFT JOIN sites s ON s.id = c.site_id
+ """
+ if where_clause:
+ sql += f" WHERE {where_clause} "
+ sql += " ORDER BY LOWER(c.name)"
+ cur.execute(sql, params)
+ rows = cur.fetchall()
+ conn.close()
+ return [_credential_row_to_dict(r) for r in rows]
+ except Exception as exc:
+ _write_service_log("server", f"credential query failure: {exc}")
+ return []
+
+
+def _fetch_credential_record(credential_id: int) -> Optional[Dict[str, Any]]:
+ rows = _query_credentials("c.id = ?", (credential_id,))
+ if rows:
+ return rows[0]
+ return None
+
+
+def _secret_fingerprint(secret_blob: Optional[bytes]) -> str:
+ if not secret_blob:
+ return ""
+ try:
+ import hashlib
+ plaintext = _decrypt_secret(secret_blob)
+ if not plaintext:
+ return ""
+ digest = hashlib.sha256(plaintext.encode("utf-8")).hexdigest()
+ return digest[:16]
+ except Exception:
+ return ""
+
+
def init_db():
"""Initialize all required tables in the unified database."""
conn = _db_conn()
@@ -3544,7 +4385,55 @@ def init_db():
)
conn.commit()
+ # Central credential vault for remote execution
+ cur.execute(
+ """
+ CREATE TABLE IF NOT EXISTS credentials (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL UNIQUE,
+ description TEXT,
+ site_id INTEGER,
+ credential_type TEXT NOT NULL DEFAULT 'machine',
+ connection_type TEXT NOT NULL DEFAULT 'ssh',
+ username TEXT,
+ password_encrypted BLOB,
+ private_key_encrypted BLOB,
+ private_key_passphrase_encrypted BLOB,
+ become_method TEXT,
+ become_username TEXT,
+ become_password_encrypted BLOB,
+ metadata_json TEXT,
+ created_at INTEGER NOT NULL,
+ updated_at INTEGER NOT NULL,
+ FOREIGN KEY(site_id) REFERENCES sites(id) ON DELETE SET NULL
+ )
+ """
+ )
+ try:
+ cur.execute("PRAGMA table_info(credentials)")
+ cred_cols = [row[1] for row in cur.fetchall()]
+ if "connection_type" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN connection_type TEXT NOT NULL DEFAULT 'ssh'")
+ if "credential_type" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN credential_type TEXT NOT NULL DEFAULT 'machine'")
+ if "metadata_json" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN metadata_json TEXT")
+ if "private_key_passphrase_encrypted" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN private_key_passphrase_encrypted BLOB")
+ if "become_method" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN become_method TEXT")
+ if "become_username" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN become_username TEXT")
+ if "become_password_encrypted" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN become_password_encrypted BLOB")
+ if "site_id" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN site_id INTEGER")
+ if "description" not in cred_cols:
+ cur.execute("ALTER TABLE credentials ADD COLUMN description TEXT")
+ except Exception:
+ pass
conn.commit()
+
# Scheduled jobs table
cur.execute(
"""
@@ -3558,12 +4447,20 @@ def init_db():
duration_stop_enabled INTEGER DEFAULT 0,
expiration TEXT,
execution_context TEXT NOT NULL,
+ credential_id INTEGER,
enabled INTEGER DEFAULT 1,
created_at INTEGER,
updated_at INTEGER
)
"""
)
+ try:
+ cur.execute("PRAGMA table_info(scheduled_jobs)")
+ sj_cols = [row[1] for row in cur.fetchall()]
+ if "credential_id" not in sj_cols:
+ cur.execute("ALTER TABLE scheduled_jobs ADD COLUMN credential_id INTEGER")
+ except Exception:
+ pass
conn.commit()
conn.close()
@@ -3622,6 +4519,7 @@ ensure_default_admin()
# Connect the Flask app to the background job scheduler and helpers.
job_scheduler = register_job_scheduler(app, socketio, DB_PATH)
+scheduler_set_server_runner(job_scheduler, _queue_server_ansible_run)
job_scheduler.start()
# Provide scheduler with online device lookup based on registered agents
@@ -5196,9 +6094,21 @@ def ansible_quick_run():
data = request.get_json(silent=True) or {}
rel_path = (data.get("playbook_path") or "").strip()
hostnames = data.get("hostnames") or []
+ credential_id = data.get("credential_id")
if not rel_path or not isinstance(hostnames, list) or not hostnames:
_ansible_log_server(f"[quick_run] invalid payload rel_path='{rel_path}' hostnames={hostnames}")
return jsonify({"error": "Missing playbook_path or hostnames[]"}), 400
+ server_mode = False
+ cred_id_int = None
+ if credential_id not in (None, "", "null"):
+ try:
+ cred_id_int = int(credential_id)
+ if cred_id_int <= 0:
+ cred_id_int = None
+ else:
+ server_mode = True
+ except Exception:
+ return jsonify({"error": "Invalid credential_id"}), 400
try:
root, abs_path, _ = _resolve_assembly_path('ansible', rel_path)
if not os.path.isfile(abs_path):
@@ -5219,6 +6129,19 @@ def ansible_quick_run():
continue
variable_values[name] = val
+ if server_mode and not cred_id_int:
+ return jsonify({"error": "credential_id is required for server-side execution"}), 400
+
+ if server_mode:
+ cred = _fetch_credential_with_secrets(cred_id_int)
+ if not cred:
+ return jsonify({"error": "Credential not found"}), 404
+ conn_type = (cred.get("connection_type") or "ssh").lower()
+ if conn_type not in ("ssh",):
+ return jsonify({"error": f"Credential connection '{conn_type}' not supported for server execution"}), 400
+ # Avoid keeping decrypted secrets in memory longer than necessary
+ del cred
+
results = []
for host in hostnames:
# Create activity_history row so UI shows running state and can receive recap mirror
@@ -5267,18 +6190,51 @@ def ansible_quick_run():
"variable_values": variable_values,
}
try:
- _ansible_log_server(f"[quick_run] emit ansible_playbook_run host='{host}' run_id={run_id} job_id={job_id} path={rel_path}")
- socketio.emit("ansible_playbook_run", payload)
- if job_id:
- socketio.emit("device_activity_changed", {
- "hostname": str(host),
- "activity_id": job_id,
- "change": "created",
- "source": "ansible",
- })
+ if server_mode and cred_id_int:
+ run_id = _queue_server_ansible_run(
+ hostname=str(host),
+ playbook_abs_path=abs_path,
+ playbook_rel_path=rel_path.replace(os.sep, "/"),
+ playbook_name=friendly_name,
+ credential_id=cred_id_int,
+ variable_values=variable_values,
+ source="quick_job",
+ activity_id=job_id,
+ )
+ if job_id:
+ socketio.emit("device_activity_changed", {
+ "hostname": str(host),
+ "activity_id": job_id,
+ "change": "created",
+ "source": "ansible",
+ })
+ results.append({"hostname": host, "run_id": run_id, "status": "Queued", "activity_job_id": job_id, "execution": "server"})
+ else:
+ _ansible_log_server(f"[quick_run] emit ansible_playbook_run host='{host}' run_id={run_id} job_id={job_id} path={rel_path}")
+ socketio.emit("ansible_playbook_run", payload)
+ if job_id:
+ socketio.emit("device_activity_changed", {
+ "hostname": str(host),
+ "activity_id": job_id,
+ "change": "created",
+ "source": "ansible",
+ })
+ results.append({"hostname": host, "run_id": run_id, "status": "Queued", "activity_job_id": job_id, "execution": "agent"})
except Exception as ex:
_ansible_log_server(f"[quick_run] emit failed host='{host}' run_id={run_id} err={ex}")
- results.append({"hostname": host, "run_id": run_id, "status": "Queued", "activity_job_id": job_id})
+ if job_id:
+ try:
+ conn_fail = _db_conn()
+ cur_fail = conn_fail.cursor()
+ cur_fail.execute(
+ "UPDATE activity_history SET status='Failed', stderr=?, ran_at=? WHERE id=?",
+ (str(ex), int(time.time()), job_id),
+ )
+ conn_fail.commit()
+ conn_fail.close()
+ except Exception:
+ pass
+ results.append({"hostname": host, "run_id": run_id, "status": "Failed", "activity_job_id": job_id, "error": str(ex)})
return jsonify({"results": results})
except ValueError as ve:
return jsonify({"error": str(ve)}), 400