diff --git a/Data/Agent/Roles/role_ScriptExec_CURRENTUSER.py b/Data/Agent/Roles/role_ScriptExec_CURRENTUSER.py
index 4170b75..b4c3d25 100644
--- a/Data/Agent/Roles/role_ScriptExec_CURRENTUSER.py
+++ b/Data/Agent/Roles/role_ScriptExec_CURRENTUSER.py
@@ -1,8 +1,10 @@
 import os
 import sys
+import re
 import asyncio
 import tempfile
 import uuid
+from typing import Dict, List

 from PyQt5 import QtWidgets, QtGui
@@ -13,12 +15,62 @@ ROLE_CONTEXTS = ['interactive']
 IS_WINDOWS = os.name == 'nt'


-def _write_temp_script(content: str, suffix: str):
+def _sanitize_env_map(raw) -> Dict[str, str]:
+    env: Dict[str, str] = {}
+    if isinstance(raw, dict):
+        for key, value in raw.items():
+            if key is None:
+                continue
+            name = str(key).strip()
+            if not name:
+                continue
+            env_key = re.sub(r"[^A-Za-z0-9_]", "_", name).upper()
+            if not env_key:
+                continue
+            if isinstance(value, bool):
+                env_val = "True" if value else "False"
+            elif value is None:
+                env_val = ""
+            else:
+                env_val = str(value)
+            env[env_key] = env_val
+    return env
+
+
+def _ps_literal(value: str) -> str:
+    return "'" + value.replace("'", "''") + "'"
+
+
+def _build_wrapped_script(content: str, env_map: Dict[str, str], timeout_seconds: int) -> str:
+    inner_lines: List[str] = []
+    for key, value in (env_map or {}).items():
+        if not key:
+            continue
+        inner_lines.append(f"$Env:{key} = {_ps_literal(value)}")
+    inner_lines.append(content or "")
+    inner = "\n".join(line for line in inner_lines if line is not None)
+    script_block = "$__BorealisScript = {\n" + inner + "\n}\n"
+    if timeout_seconds and timeout_seconds > 0:
+        block = (
+            "$job = Start-Job -ScriptBlock $__BorealisScript\n"
+            f"if (Wait-Job -Job $job -Timeout {timeout_seconds}) {{\n"
+            "    Receive-Job $job\n"
+            "} else {\n"
+            "    Stop-Job $job -Force\n"
+            f"    throw \"Script timed out after {timeout_seconds} seconds\"\n"
+            "}\n"
+        )
+        return script_block + block
+    return script_block + "& $__BorealisScript\n"
+
+
+def _write_temp_script(content: str, suffix: str, env_map: Dict[str, str], timeout_seconds: int):
     temp_dir = os.path.join(tempfile.gettempdir(), "Borealis", "quick_jobs")
     os.makedirs(temp_dir, exist_ok=True)
     fd, path = tempfile.mkstemp(prefix="bj_", suffix=suffix, dir=temp_dir, text=True)
+    final_content = _build_wrapped_script(content or "", env_map, timeout_seconds)
     with os.fdopen(fd, 'w', encoding='utf-8', newline='\n') as fh:
-        fh.write(content or "")
+        fh.write(final_content)
     return path


@@ -45,7 +97,7 @@ async def _run_powershell_local(path: str):
         return -1, "", str(e)


-async def _run_powershell_via_user_task(content: str):
+async def _run_powershell_via_user_task(content: str, env_map: Dict[str, str], timeout_seconds: int):
     if not IS_WINDOWS:
         return -999, '', 'Windows only'
     ps = os.path.expandvars(r"%SystemRoot%\\System32\\WindowsPowerShell\\v1.0\\powershell.exe")
@@ -58,8 +110,9 @@ async def _run_powershell_via_user_task(content: str):
         temp_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'Temp'))
         os.makedirs(temp_dir, exist_ok=True)
         fd, path = _tf.mkstemp(prefix='usr_task_', suffix='.ps1', dir=temp_dir, text=True)
+        final_content = _build_wrapped_script(content or '', env_map, timeout_seconds)
         with os.fdopen(fd, 'w', encoding='utf-8', newline='\n') as f:
-            f.write(content or '')
+            f.write(final_content)
         out_path = os.path.join(temp_dir, f'out_{uuid.uuid4().hex}.txt')
         name = f"Borealis Agent - Task - {uuid.uuid4().hex} @ CurrentUser"
         task_ps = f"""
@@ -84,7 +137,7 @@ Get-ScheduledTask -TaskName $task | Out-Null
             return -999, '', (err_b or out_b or
b'').decode(errors='replace') # Wait a short time for output file; best-effort import time as _t - deadline = _t.time() + 30 + deadline = _t.time() + (timeout_seconds if timeout_seconds > 0 else 30) out_data = '' while _t.time() < deadline: try: @@ -139,6 +192,29 @@ class Role: script_type = (payload.get('script_type') or '').lower() run_mode = (payload.get('run_mode') or 'current_user').lower() content = payload.get('script_content') or '' + raw_env = payload.get('environment') + env_map = _sanitize_env_map(raw_env) + variables = payload.get('variables') if isinstance(payload.get('variables'), list) else [] + for var in variables: + if not isinstance(var, dict): + continue + name = str(var.get('name') or '').strip() + if not name: + continue + key = re.sub(r"[^A-Za-z0-9_]", "_", name).upper() + if key in env_map: + continue + default_val = var.get('default') + if isinstance(default_val, bool): + env_map[key] = "True" if default_val else "False" + elif default_val is None: + env_map[key] = "" + else: + env_map[key] = str(default_val) + try: + timeout_seconds = max(0, int(payload.get('timeout_seconds') or 0)) + except Exception: + timeout_seconds = 0 if run_mode == 'system': return if script_type != 'powershell': @@ -147,10 +223,17 @@ class Role: if run_mode == 'admin': rc, out, err = -1, '', 'Admin credentialed runs are disabled; use SYSTEM or Current User.' else: - rc, out, err = await _run_powershell_via_user_task(content) + rc, out, err = await _run_powershell_via_user_task(content, env_map, timeout_seconds) if rc == -999: - path = _write_temp_script(content, '.ps1') - rc, out, err = await _run_powershell_local(path) + path = _write_temp_script(content, '.ps1', env_map, timeout_seconds) + try: + rc, out, err = await _run_powershell_local(path) + finally: + try: + if path and os.path.isfile(path): + os.remove(path) + except Exception: + pass status = 'Success' if rc == 0 else 'Failed' await sio.emit('quick_job_result', { 'job_id': job_id, diff --git a/Data/Agent/Roles/role_ScriptExec_SYSTEM.py b/Data/Agent/Roles/role_ScriptExec_SYSTEM.py index abb6a41..c3c4794 100644 --- a/Data/Agent/Roles/role_ScriptExec_SYSTEM.py +++ b/Data/Agent/Roles/role_ScriptExec_SYSTEM.py @@ -1,9 +1,11 @@ import os +import re import asyncio import tempfile import uuid import time import subprocess +from typing import Dict, List ROLE_NAME = 'script_exec_system' @@ -14,23 +16,74 @@ def _project_root(): return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) -def _run_powershell_script_content(content: str): +def _sanitize_env_map(raw) -> Dict[str, str]: + env: Dict[str, str] = {} + if isinstance(raw, dict): + for key, value in raw.items(): + if key is None: + continue + name = str(key).strip() + if not name: + continue + env_key = re.sub(r"[^A-Za-z0-9_]", "_", name).upper() + if not env_key: + continue + if isinstance(value, bool): + env_val = "True" if value else "False" + elif value is None: + env_val = "" + else: + env_val = str(value) + env[env_key] = env_val + return env + + +def _ps_literal(value: str) -> str: + return "'" + value.replace("'", "''") + "'" + + +def _build_wrapped_script(content: str, env_map: Dict[str, str], timeout_seconds: int) -> str: + inner_lines: List[str] = [] + for key, value in (env_map or {}).items(): + if not key: + continue + inner_lines.append(f"$Env:{key} = {_ps_literal(value)}") + inner_lines.append(content or "") + inner = "\n".join(line for line in inner_lines if line is not None) + script_block = "$__BorealisScript = {\n" + inner + "\n}\n" + if 
timeout_seconds and timeout_seconds > 0: + block = ( + "$job = Start-Job -ScriptBlock $__BorealisScript\n" + f"if (Wait-Job -Job $job -Timeout {timeout_seconds}) {{\n" + " Receive-Job $job\n" + "} else {\n" + " Stop-Job $job -Force\n" + f" throw \"Script timed out after {timeout_seconds} seconds\"\n" + "}\n" + ) + return script_block + block + return script_block + "& $__BorealisScript\n" + + +def _run_powershell_script_content(content: str, env_map: Dict[str, str], timeout_seconds: int): temp_dir = os.path.join(_project_root(), "Temp") os.makedirs(temp_dir, exist_ok=True) fd, path = tempfile.mkstemp(prefix="sj_", suffix=".ps1", dir=temp_dir, text=True) + final_content = _build_wrapped_script(content or "", env_map, timeout_seconds) with os.fdopen(fd, 'w', encoding='utf-8', newline='\n') as fh: - fh.write(content or "") + fh.write(final_content) ps = os.path.expandvars(r"%SystemRoot%\\System32\\WindowsPowerShell\\v1.0\\powershell.exe") if not os.path.isfile(ps): ps = "powershell.exe" try: flags = 0x08000000 if os.name == 'nt' else 0 + proc_timeout = timeout_seconds + 30 if timeout_seconds else 60 * 60 proc = subprocess.run( [ps, "-ExecutionPolicy", "Bypass", "-NoProfile", "-File", path], capture_output=True, text=True, - timeout=60*60, + timeout=proc_timeout, creationflags=flags, ) return proc.returncode, proc.stdout or "", proc.stderr or "" @@ -44,15 +97,16 @@ def _run_powershell_script_content(content: str): pass -def _run_powershell_via_system_task(content: str): +def _run_powershell_via_system_task(content: str, env_map: Dict[str, str], timeout_seconds: int): ps_exe = os.path.expandvars(r"%SystemRoot%\System32\WindowsPowerShell\v1.0\powershell.exe") if not os.path.isfile(ps_exe): ps_exe = 'powershell.exe' try: os.makedirs(os.path.join(_project_root(), 'Temp'), exist_ok=True) script_fd, script_path = tempfile.mkstemp(prefix='sys_task_', suffix='.ps1', dir=os.path.join(_project_root(), 'Temp'), text=True) + final_content = _build_wrapped_script(content or '', env_map, timeout_seconds) with os.fdopen(script_fd, 'w', encoding='utf-8', newline='\n') as f: - f.write(content or '') + f.write(final_content) try: log_dir = os.path.join(_project_root(), 'Logs', 'Agent') os.makedirs(log_dir, exist_ok=True) @@ -131,6 +185,29 @@ class Role: job_id = payload.get('job_id') script_type = (payload.get('script_type') or '').lower() content = payload.get('script_content') or '' + raw_env = payload.get('environment') + env_map = _sanitize_env_map(raw_env) + variables = payload.get('variables') if isinstance(payload.get('variables'), list) else [] + for var in variables: + if not isinstance(var, dict): + continue + name = str(var.get('name') or '').strip() + if not name: + continue + key = re.sub(r"[^A-Za-z0-9_]", "_", name).upper() + if key in env_map: + continue + default_val = var.get('default') + if isinstance(default_val, bool): + env_map[key] = "True" if default_val else "False" + elif default_val is None: + env_map[key] = "" + else: + env_map[key] = str(default_val) + try: + timeout_seconds = max(0, int(payload.get('timeout_seconds') or 0)) + except Exception: + timeout_seconds = 0 if script_type != 'powershell': await sio.emit('quick_job_result', { 'job_id': job_id, @@ -139,9 +216,9 @@ class Role: 'stderr': f"Unsupported type: {script_type}" }) return - rc, out, err = _run_powershell_via_system_task(content) + rc, out, err = _run_powershell_via_system_task(content, env_map, timeout_seconds) if rc == -999: - rc, out, err = _run_powershell_script_content(content) + rc, out, err = 
_run_powershell_script_content(content, env_map, timeout_seconds) status = 'Success' if rc == 0 else 'Failed' await sio.emit('quick_job_result', { 'job_id': job_id, diff --git a/Data/Server/WebUI/src/App.jsx b/Data/Server/WebUI/src/App.jsx index 2e2a05d..e0eaf62 100644 --- a/Data/Server/WebUI/src/App.jsx +++ b/Data/Server/WebUI/src/App.jsx @@ -36,7 +36,7 @@ import SiteList from "./Sites/Site_List"; import DeviceList from "./Devices/Device_List"; import DeviceDetails from "./Devices/Device_Details"; import AssemblyList from "./Assemblies/Assembly_List"; -import ScriptEditor from "./Assemblies/Script_Editor"; +import AssemblyEditor from "./Assemblies/Assembly_Editor"; import ScheduledJobsList from "./Scheduling/Scheduled_Jobs_List"; import CreateJob from "./Scheduling/Create_Job.jsx"; import UserManagement from "./Admin/User_Management.jsx"; @@ -106,7 +106,7 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state"; const [userDisplayName, setUserDisplayName] = useState(null); const [editingJob, setEditingJob] = useState(null); const [jobsRefreshToken, setJobsRefreshToken] = useState(0); - const [scriptToEdit, setScriptToEdit] = useState(null); // { path, mode: 'scripts'|'ansible' } + const [assemblyEditorState, setAssemblyEditorState] = useState(null); // { path, mode, context, nonce } const [notAuthorizedOpen, setNotAuthorizedOpen] = useState(false); // Top-bar search state @@ -631,8 +631,14 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state"; setActiveTabId(newId); setCurrentPage("workflow-editor"); }} - onOpenScript={(rel, mode) => { - setScriptToEdit({ path: rel, mode }); + onOpenScript={(rel, mode, context) => { + const nonce = Date.now(); + setAssemblyEditorState({ + path: rel || '', + mode, + context: context ? { ...context, nonce } : null, + nonce + }); setCurrentPage(mode === 'ansible' ? 'ansible_editor' : 'scripts'); }} /> @@ -660,8 +666,14 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state"; setActiveTabId(newId); setCurrentPage("workflow-editor"); }} - onOpenScript={(rel, mode) => { - setScriptToEdit({ path: rel, mode }); + onOpenScript={(rel, mode, context) => { + const nonce = Date.now(); + setAssemblyEditorState({ + path: rel || '', + mode, + context: context ? { ...context, nonce } : null, + nonce + }); setCurrentPage(mode === 'ansible' ? 'ansible_editor' : 'scripts'); }} /> @@ -669,20 +681,26 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state"; case "scripts": return ( - setScriptToEdit(null)} + initialPath={assemblyEditorState?.mode === 'scripts' ? (assemblyEditorState?.path || '') : ''} + initialContext={assemblyEditorState?.mode === 'scripts' ? assemblyEditorState?.context : null} + onConsumeInitialData={() => + setAssemblyEditorState((prev) => (prev && prev.mode === 'scripts' ? null : prev)) + } onSaved={() => setCurrentPage('assemblies')} /> ); case "ansible_editor": return ( - setScriptToEdit(null)} + initialPath={assemblyEditorState?.mode === 'ansible' ? (assemblyEditorState?.path || '') : ''} + initialContext={assemblyEditorState?.mode === 'ansible' ? assemblyEditorState?.context : null} + onConsumeInitialData={() => + setAssemblyEditorState((prev) => (prev && prev.mode === 'ansible' ? 
null : prev)) + } onSaved={() => setCurrentPage('assemblies')} /> ); diff --git a/Data/Server/WebUI/src/Assemblies/Assembly_Editor.jsx b/Data/Server/WebUI/src/Assemblies/Assembly_Editor.jsx new file mode 100644 index 0000000..ba03b6f --- /dev/null +++ b/Data/Server/WebUI/src/Assemblies/Assembly_Editor.jsx @@ -0,0 +1,908 @@ +import React, { useEffect, useMemo, useRef, useState } from "react"; +import { + Box, + Paper, + Typography, + Button, + Select, + FormControl, + InputLabel, + TextField, + MenuItem, + Grid, + RadioGroup, + FormControlLabel, + Radio, + Checkbox, + IconButton, + Tooltip, + Dialog, + DialogTitle, + DialogContent, + DialogActions +} from "@mui/material"; +import { Add as AddIcon, Delete as DeleteIcon, UploadFile as UploadFileIcon } from "@mui/icons-material"; +import Prism from "prismjs"; +import "prismjs/components/prism-yaml"; +import "prismjs/components/prism-bash"; +import "prismjs/components/prism-powershell"; +import "prismjs/components/prism-batch"; +import "prismjs/themes/prism-okaidia.css"; +import Editor from "react-simple-code-editor"; +import { ConfirmDeleteDialog } from "../Dialogs"; + +const TYPE_OPTIONS_ALL = [ + { key: "ansible", label: "Ansible Playbook", prism: "yaml" }, + { key: "powershell", label: "PowerShell Script", prism: "powershell" }, + { key: "batch", label: "Batch Script", prism: "batch" }, + { key: "bash", label: "Bash Script", prism: "bash" } +]; + +const CATEGORY_OPTIONS = [ + { key: "script", label: "Script" }, + { key: "application", label: "Application" } +]; + +const VARIABLE_TYPE_OPTIONS = [ + { key: "string", label: "String" }, + { key: "number", label: "Number" }, + { key: "boolean", label: "Boolean" }, + { key: "credential", label: "Credential" } +]; + +function keyBy(arr) { + return Object.fromEntries(arr.map((o) => [o.key, o])); +} + +const TYPE_MAP = keyBy(TYPE_OPTIONS_ALL); + +function highlightedHtml(code, prismLang) { + try { + const grammar = Prism.languages[prismLang] || Prism.languages.markup; + return Prism.highlight(code ?? "", grammar, prismLang); + } catch { + return (code ?? "").replace(/[&<>]/g, (c) => ({ "&": "&", "<": "<", ">": ">" }[c])); + } +} + +function sanitizeFileName(name = "") { + const base = name.trim().replace(/[^a-zA-Z0-9._-]+/g, "_") || "assembly"; + return base.endsWith(".json") ? base : `${base}.json`; +} + +function normalizeFolderPath(path = "") { + if (!path) return ""; + return path + .replace(/\\/g, "/") + .replace(/^\/+|\/+$/g, "") + .replace(/\/+/g, "/"); +} + +function formatBytes(size) { + if (!size || Number.isNaN(size)) return "0 B"; + if (size < 1024) return `${size} B`; + const units = ["KB", "MB", "GB", "TB"]; + let idx = -1; + let s = size; + while (s >= 1024 && idx < units.length - 1) { + s /= 1024; + idx += 1; + } + return `${s.toFixed(1)} ${units[idx]}`; +} + +function defaultAssembly(defaultType = "powershell") { + return { + name: "", + description: "", + category: defaultType === "ansible" ? "application" : "script", + type: defaultType, + script: "", + timeoutSeconds: 0, + sites: { mode: "all", values: [] }, + variables: [], + files: [] + }; +} + +function normalizeVariablesFromServer(vars = []) { + return (Array.isArray(vars) ? vars : []).map((v, idx) => ({ + id: `${Date.now()}_${idx}_${Math.random().toString(36).slice(2, 8)}`, + name: v?.name || v?.key || "", + label: v?.label || "", + type: v?.type || "string", + defaultValue: v?.default ?? v?.default_value ?? 
"", + required: Boolean(v?.required), + description: v?.description || "" + })); +} + +function normalizeFilesFromServer(files = []) { + return (Array.isArray(files) ? files : []).map((f, idx) => ({ + id: `${Date.now()}_${idx}_${Math.random().toString(36).slice(2, 8)}`, + fileName: f?.file_name || f?.name || "file.bin", + size: f?.size || 0, + mimeType: f?.mime_type || f?.mimeType || "", + data: f?.data || "" + })); +} + +function fromServerDocument(doc = {}, defaultType = "powershell") { + const assembly = defaultAssembly(defaultType); + if (doc && typeof doc === "object") { + assembly.name = doc.name || doc.display_name || assembly.name; + assembly.description = doc.description || ""; + assembly.category = doc.category || assembly.category; + assembly.type = doc.type || assembly.type; + assembly.script = doc.script ?? doc.content ?? ""; + const timeout = doc.timeout_seconds ?? doc.timeout ?? 0; + assembly.timeoutSeconds = Number.isFinite(Number(timeout)) ? Number(timeout) : 0; + const sites = doc.sites || {}; + assembly.sites = { + mode: sites.mode || (Array.isArray(sites.values) && sites.values.length ? "specific" : "all"), + values: Array.isArray(sites.values) ? sites.values : [] + }; + assembly.variables = normalizeVariablesFromServer(doc.variables); + assembly.files = normalizeFilesFromServer(doc.files); + } + return assembly; +} + +function toServerDocument(assembly) { + return { + version: 1, + name: assembly.name?.trim() || "", + description: assembly.description || "", + category: assembly.category || "script", + type: assembly.type || "powershell", + script: assembly.script ?? "", + timeout_seconds: Number.isFinite(Number(assembly.timeoutSeconds)) ? Number(assembly.timeoutSeconds) : 0, + sites: { + mode: assembly.sites?.mode === "specific" ? "specific" : "all", + values: Array.isArray(assembly.sites?.values) + ? assembly.sites.values.filter((v) => v && v.trim()).map((v) => v.trim()) + : [] + }, + variables: (assembly.variables || []).map((v) => ({ + name: v.name?.trim() || "", + label: v.label || "", + type: v.type || "string", + default: v.defaultValue ?? "", + required: Boolean(v.required), + description: v.description || "" + })), + files: (assembly.files || []).map((f) => ({ + file_name: f.fileName || "file.bin", + size: f.size || 0, + mime_type: f.mimeType || "", + data: f.data || "" + })) + }; +} + +function RenameFileDialog({ open, value, onChange, onCancel, onSave }) { + return ( + + Rename Assembly File + + onChange(e.target.value)} + sx={{ + "& .MuiOutlinedInput-root": { + backgroundColor: "#1e1e1e", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + + + + + + + ); +} + +export default function AssemblyEditor({ + mode = "scripts", + initialPath = "", + initialContext = null, + onConsumeInitialData, + onSaved +}) { + const isAnsible = mode === "ansible"; + const defaultType = isAnsible ? 
"ansible" : "powershell"; + const [assembly, setAssembly] = useState(() => defaultAssembly(defaultType)); + const [currentPath, setCurrentPath] = useState(""); + const [fileName, setFileName] = useState(""); + const [folderPath, setFolderPath] = useState(() => normalizeFolderPath(initialContext?.folder || "")); + const [renameOpen, setRenameOpen] = useState(false); + const [renameValue, setRenameValue] = useState(""); + const [deleteOpen, setDeleteOpen] = useState(false); + const [saving, setSaving] = useState(false); + const contextNonceRef = useRef(null); + + const TYPE_OPTIONS = useMemo( + () => (isAnsible ? TYPE_OPTIONS_ALL.filter((o) => o.key === "ansible") : TYPE_OPTIONS_ALL.filter((o) => o.key !== "ansible")), + [isAnsible] + ); + + const island = isAnsible ? "ansible" : "scripts"; + + useEffect(() => { + if (!initialPath) return; + let canceled = false; + (async () => { + try { + const resp = await fetch(`/api/assembly/load?island=${encodeURIComponent(island)}&path=${encodeURIComponent(initialPath)}`); + if (!resp.ok) throw new Error(`HTTP ${resp.status}`); + const data = await resp.json(); + if (canceled) return; + const rel = data.rel_path || initialPath; + setCurrentPath(rel); + setFolderPath(normalizeFolderPath(rel.split("/").slice(0, -1).join("/"))); + setFileName(data.file_name || rel.split("/").pop() || ""); + const doc = fromServerDocument(data.assembly || data, defaultType); + setAssembly(doc); + } catch (err) { + console.error("Failed to load assembly:", err); + } finally { + if (!canceled && onConsumeInitialData) onConsumeInitialData(); + } + })(); + return () => { + canceled = true; + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [initialPath, island]); + + useEffect(() => { + const ctx = initialContext; + if (!ctx || !ctx.nonce) return; + if (contextNonceRef.current === ctx.nonce) return; + contextNonceRef.current = ctx.nonce; + const doc = defaultAssembly(ctx.defaultType || defaultType); + if (ctx.name) doc.name = ctx.name; + if (ctx.description) doc.description = ctx.description; + if (ctx.category) doc.category = ctx.category; + if (ctx.type) doc.type = ctx.type; + setAssembly(doc); + setCurrentPath(""); + const suggested = ctx.suggestedFileName || ctx.name || ""; + setFileName(suggested ? sanitizeFileName(suggested) : ""); + setFolderPath(normalizeFolderPath(ctx.folder || "")); + if (onConsumeInitialData) onConsumeInitialData(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [initialContext?.nonce]); + + const prismLanguage = TYPE_MAP[assembly.type]?.prism || "powershell"; + + const updateAssembly = (partial) => { + setAssembly((prev) => ({ ...prev, ...partial })); + }; + + const handleSitesChange = (modeValue, values) => { + setAssembly((prev) => ({ + ...prev, + sites: { + mode: modeValue, + values: Array.isArray(values) + ? values + : ((values || "").split(/\r?\n/).map((v) => v.trim()).filter(Boolean)) + } + })); + }; + + const addVariable = () => { + setAssembly((prev) => ({ + ...prev, + variables: [ + ...prev.variables, + { + id: `${Date.now()}_${Math.random().toString(36).slice(2, 8)}`, + name: "", + label: "", + type: "string", + defaultValue: "", + required: false, + description: "" + } + ] + })); + }; + + const updateVariable = (id, partial) => { + setAssembly((prev) => ({ + ...prev, + variables: prev.variables.map((v) => (v.id === id ? 
{ ...v, ...partial } : v)) + })); + }; + + const removeVariable = (id) => { + setAssembly((prev) => ({ + ...prev, + variables: prev.variables.filter((v) => v.id !== id) + })); + }; + + const handleFileUpload = async (event) => { + const files = Array.from(event.target.files || []); + if (!files.length) return; + const reads = files.map((file) => new Promise((resolve) => { + const reader = new FileReader(); + reader.onload = () => { + const result = reader.result || ""; + const base64 = typeof result === "string" && result.includes(",") ? result.split(",", 2)[1] : result; + resolve({ + id: `${Date.now()}_${Math.random().toString(36).slice(2, 8)}`, + fileName: file.name, + size: file.size, + mimeType: file.type, + data: base64 + }); + }; + reader.onerror = () => resolve(null); + reader.readAsDataURL(file); + })); + const uploaded = (await Promise.all(reads)).filter(Boolean); + if (uploaded.length) { + setAssembly((prev) => ({ ...prev, files: [...prev.files, ...uploaded] })); + } + event.target.value = ""; + }; + + const removeFile = (id) => { + setAssembly((prev) => ({ ...prev, files: prev.files.filter((f) => f.id !== id) })); + }; + + const computeTargetPath = () => { + if (currentPath) return currentPath; + const baseName = sanitizeFileName(fileName || assembly.name || (isAnsible ? "playbook" : "assembly")); + const folder = normalizeFolderPath(folderPath); + return folder ? `${folder}/${baseName}` : baseName; + }; + + const saveAssembly = async () => { + if (!assembly.name.trim()) { + alert("Assembly Name is required."); + return; + } + const payload = toServerDocument(assembly); + payload.type = assembly.type; + const targetPath = computeTargetPath(); + if (!targetPath) { + alert("Unable to determine file path."); + return; + } + setSaving(true); + try { + if (currentPath) { + const resp = await fetch(`/api/assembly/edit`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, path: currentPath, content: payload }) + }); + const data = await resp.json().catch(() => ({})); + if (!resp.ok) { + throw new Error(data?.error || `HTTP ${resp.status}`); + } + if (data?.rel_path) { + setCurrentPath(data.rel_path); + setFolderPath(normalizeFolderPath(data.rel_path.split("/").slice(0, -1).join("/"))); + setFileName(data.rel_path.split("/").pop() || fileName); + } + } else { + const resp = await fetch(`/api/assembly/create`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: "file", path: targetPath, content: payload, type: assembly.type }) + }); + const data = await resp.json(); + if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`); + if (data.rel_path) { + setCurrentPath(data.rel_path); + setFolderPath(data.rel_path.split("/").slice(0, -1).join("/")); + setFileName(data.rel_path.split("/").pop() || ""); + } else { + setCurrentPath(targetPath); + setFileName(targetPath.split("/").pop() || ""); + } + } + onSaved && onSaved(); + } catch (err) { + console.error("Failed to save assembly:", err); + alert(err.message || "Failed to save assembly"); + } finally { + setSaving(false); + } + }; + + const saveRename = async () => { + try { + const nextName = sanitizeFileName(renameValue || fileName || assembly.name); + const resp = await fetch(`/api/assembly/rename`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: "file", path: currentPath, new_name: nextName, type: assembly.type }) + }); + const data = await resp.json(); + if 
(!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`); + const rel = data.rel_path || currentPath; + setCurrentPath(rel); + setFolderPath(rel.split("/").slice(0, -1).join("/")); + setFileName(rel.split("/").pop() || nextName); + setRenameOpen(false); + } catch (err) { + console.error("Failed to rename assembly:", err); + alert(err.message || "Failed to rename"); + setRenameOpen(false); + } + }; + + const deleteAssembly = async () => { + if (!currentPath) { + setDeleteOpen(false); + return; + } + try { + const resp = await fetch(`/api/assembly/delete`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ island, kind: "file", path: currentPath }) + }); + if (!resp.ok) { + const data = await resp.json().catch(() => ({})); + throw new Error(data?.error || `HTTP ${resp.status}`); + } + setDeleteOpen(false); + setAssembly(defaultAssembly(defaultType)); + setCurrentPath(""); + setFileName(""); + onSaved && onSaved(); + } catch (err) { + console.error("Failed to delete assembly:", err); + alert(err.message || "Failed to delete assembly"); + setDeleteOpen(false); + } + }; + + const siteValuesText = (assembly.sites?.values || []).join("\n"); + + return ( + + + + Assembly Editor + + + Create and edit variables, scripts, and other fields related to assemblies. + + + + + + + + Assembly Details + + + {currentPath ? ( + + + + ) : null} + {currentPath ? ( + + + + ) : null} + + + + + + updateAssembly({ name: e.target.value })} + fullWidth + variant="outlined" + sx={{ + mb: 2, + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + updateAssembly({ description: e.target.value })} + multiline + minRows={3} + fullWidth + variant="outlined" + sx={{ + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + + + + Category + + + + + Type + + + + + + + + Script Content + + + updateAssembly({ script: value })} + highlight={(src) => highlightedHtml(src, prismLanguage)} + padding={12} + placeholder={currentPath ? `Editing: ${currentPath}` : "Start typing your script..."} + style={{ + fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace', + fontSize: 14, + color: "#e6edf3", + background: "#121212", + outline: "none", + minHeight: 320, + lineHeight: 1.45, + caretColor: "#58a6ff" + }} + /> + + + + + + { + const val = Number(e.target.value); + updateAssembly({ timeoutSeconds: Number.isNaN(val) ? 0 : val }); + }} + fullWidth + variant="outlined" + sx={{ + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + helperText="Timeout this script if not completed within X seconds" + /> + + + + Sites + + handleSitesChange(e.target.value, assembly.sites.values)} + sx={{ color: "#e6edf3" }} + > + } label="All Sites" /> + } label="Specific Sites" /> + + {assembly.sites.mode === "specific" ? 
( + handleSitesChange("specific", e.target.value)} + multiline + minRows={3} + fullWidth + variant="outlined" + sx={{ + mt: 1, + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + ) : null} + + + + + + Variables + + + Variables are passed into the execution environment as environment variables at runtime. + + {(assembly.variables || []).length ? ( + + {assembly.variables.map((variable) => ( + + + + updateVariable(variable.id, { name: e.target.value })} + fullWidth + variant="outlined" + sx={{ + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + + + updateVariable(variable.id, { label: e.target.value })} + fullWidth + variant="outlined" + sx={{ + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + + + + Type + + + + + {variable.type === "boolean" ? ( + updateVariable(variable.id, { defaultValue: e.target.checked })} + sx={{ color: "#58a6ff" }} + /> + } + label="Default Value" + /> + ) : ( + updateVariable(variable.id, { defaultValue: e.target.value })} + fullWidth + variant="outlined" + sx={{ + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + )} + + + + updateVariable(variable.id, { required: e.target.checked })} + sx={{ color: "#58a6ff" }} + /> + + + + updateVariable(variable.id, { description: e.target.value })} + fullWidth + multiline + minRows={2} + variant="outlined" + sx={{ + "& .MuiOutlinedInput-root": { + bgcolor: "#121212", + color: "#e6edf3", + "& fieldset": { borderColor: "#333" }, + "&:hover fieldset": { borderColor: "#555" } + }, + "& .MuiInputLabel-root": { color: "#aaa" } + }} + /> + + + removeVariable(variable.id)} sx={{ color: "#ff6b6b" }}> + + + + + + ))} + + ) : ( + + No variables have been defined. + + )} + + + + + + Files + + + Upload supporting files. They will be embedded as Base64 and available to the assembly at runtime. + + {(assembly.files || []).length ? ( + + {assembly.files.map((file) => ( + + + {file.fileName} + {formatBytes(file.size)}{file.mimeType ? ` • ${file.mimeType}` : ""} + + removeFile(file.id)} sx={{ color: "#ff6b6b" }}> + + + + ))} + + ) : ( + + No files uploaded yet. + + )} + + + + + + setRenameOpen(false)} + onSave={saveRename} + /> + setDeleteOpen(false)} + onConfirm={deleteAssembly} + /> + + ); +} diff --git a/Data/Server/WebUI/src/Assemblies/Assembly_List.jsx b/Data/Server/WebUI/src/Assemblies/Assembly_List.jsx index 052bb40..d180de0 100644 --- a/Data/Server/WebUI/src/Assemblies/Assembly_List.jsx +++ b/Data/Server/WebUI/src/Assemblies/Assembly_List.jsx @@ -423,7 +423,7 @@ function buildFileTree(rootLabel, items, folders) { if (!node) { node = { id: path, - label: isFile ? (s.file_name || part) : part, + label: isFile ? (s.name || s.display_name || s.file_name || part) : part, path, isFolder: !isFile, fileName: s.file_name, @@ -591,32 +591,21 @@ function ScriptsLikeIsland({ } }; - const createNewItem = async () => { - try { - const folder = selectedNode?.isFolder ? 
selectedNode.path : (selectedNode?.path?.split("/").slice(0, -1).join("/") || ""); - let name = newItemName || "new"; - const hasExt = /\.[^./\\]+$/i.test(name); - if (!hasExt) { - if (String(baseApi || '').endsWith('/api/ansible')) name += '.yml'; - else name += '.ps1'; - } - const newPath = folder ? `${folder}/${name}` : name; - // create empty file via unified API - const res = await fetch(`/api/assembly/create`, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ island, kind: 'file', path: newPath, content: "", type: island === 'ansible' ? 'ansible' : 'powershell' }) - }); - if (!res.ok) { - const data = await res.json().catch(() => ({})); - throw new Error(data?.error || `HTTP ${res.status}`); - } - setNewItemOpen(false); - setNewItemName(""); - loadTree(); - } catch (err) { - console.error("Failed to create:", err); - } + const createNewItem = () => { + const trimmedName = (newItemName || '').trim(); + const folder = selectedNode?.isFolder + ? selectedNode.path + : (selectedNode?.path?.split("/").slice(0, -1).join("/") || ""); + const context = { + folder, + suggestedFileName: trimmedName, + defaultType: island === 'ansible' ? 'ansible' : 'powershell', + type: island === 'ansible' ? 'ansible' : 'powershell', + category: island === 'ansible' ? 'application' : 'script' + }; + setNewItemOpen(false); + setNewItemName(""); + onEdit && onEdit(null, context); }; const renderItems = (nodes) => @@ -754,7 +743,7 @@ export default function AssemblyList({ onOpenWorkflow, onOpenScript }) { rootLabel="Scripts" baseApi="/api/scripts" newItemLabel="New Script" - onEdit={(rel) => onOpenScript && onOpenScript(rel, 'scripts')} + onEdit={(rel, ctx) => onOpenScript && onOpenScript(rel, 'scripts', ctx)} /> {/* Right: Ansible Playbooks */} @@ -764,7 +753,7 @@ export default function AssemblyList({ onOpenWorkflow, onOpenScript }) { rootLabel="Ansible Playbooks" baseApi="/api/ansible" newItemLabel="New Playbook" - onEdit={(rel) => onOpenScript && onOpenScript(rel, 'ansible')} + onEdit={(rel, ctx) => onOpenScript && onOpenScript(rel, 'ansible', ctx)} /> diff --git a/Data/Server/WebUI/src/Assemblies/Script_Editor.jsx b/Data/Server/WebUI/src/Assemblies/Script_Editor.jsx deleted file mode 100644 index 7384b95..0000000 --- a/Data/Server/WebUI/src/Assemblies/Script_Editor.jsx +++ /dev/null @@ -1,223 +0,0 @@ -import React, { useState, useEffect, useMemo } from "react"; -import { Paper, Box, Typography, Button, Select, FormControl, InputLabel, TextField, MenuItem } from "@mui/material"; -import Prism from "prismjs"; -import "prismjs/components/prism-yaml"; -import "prismjs/components/prism-bash"; -import "prismjs/components/prism-powershell"; -import "prismjs/components/prism-batch"; -import "prismjs/themes/prism-okaidia.css"; -import Editor from "react-simple-code-editor"; -import { ConfirmDeleteDialog } from "../Dialogs"; - -const TYPE_OPTIONS_ALL = [ - { key: "ansible", label: "Ansible Playbook", ext: ".yml", prism: "yaml" }, - { key: "powershell", label: "Powershell Script", ext: ".ps1", prism: "powershell" }, - { key: "batch", label: "Batch Script", ext: ".bat", prism: "batch" }, - { key: "bash", label: "Bash Script", ext: ".sh", prism: "bash" } -]; - -const keyBy = (arr) => Object.fromEntries(arr.map((o) => [o.key, o])); - -function typeFromFilename(name = "") { - const n = name.toLowerCase(); - if (n.endsWith(".yml")) return "ansible"; - if (n.endsWith(".ps1")) return "powershell"; - if (n.endsWith(".bat")) return "batch"; - if (n.endsWith(".sh")) return "bash"; - 
return "powershell"; -} - -function ensureExt(baseName, t) { - if (!baseName) return baseName; - if (/\.[^./\\]+$/i.test(baseName)) return baseName; - const TYPES = keyBy(TYPE_OPTIONS_ALL); - const type = TYPES[t] || TYPES.powershell; - return baseName + type.ext; -} - -function highlightedHtml(code, prismLang) { - try { - const grammar = Prism.languages[prismLang] || Prism.languages.markup; - return Prism.highlight(code ?? "", grammar, prismLang); - } catch { - return (code ?? "").replace(/[&<>]/g, (c) => ({ "&": "&", "<": "<", ">": ">" }[c])); - } -} - -function RenameFileDialog({ open, value, onChange, onCancel, onSave }) { - if (!open) return null; - return ( -
- - Rename - onChange(e.target.value)} - sx={{ "& .MuiOutlinedInput-root": { backgroundColor: "#2a2a2a", color: "#ccc", "& fieldset": { borderColor: "#444" }, "&:hover fieldset": { borderColor: "#666" } }, label: { color: "#aaa" }, mt: 1 }} /> - - - - - -
- ); -} - -function NewItemDialog({ open, name, type, typeOptions, onChangeName, onChangeType, onCancel, onCreate }) { - if (!open) return null; - return ( -
- - New - onChangeName(e.target.value)} - sx={{ "& .MuiOutlinedInput-root": { backgroundColor: "#2a2a2a", color: "#ccc", "& fieldset": { borderColor: "#444" }, "&:hover fieldset": { borderColor: "#666" } }, label: { color: "#aaa" }, mt: 1 }} /> - - Type - - - - - - - -
- ); -} - -export default function ScriptEditor({ mode = "scripts", initialPath = "", onConsumedInitialPath, onSaved }) { - const isAnsible = mode === "ansible"; - const TYPE_OPTIONS = useMemo(() => (isAnsible ? TYPE_OPTIONS_ALL.filter(o => o.key === 'ansible') : TYPE_OPTIONS_ALL.filter(o => o.key !== 'ansible')), [isAnsible]); - - const [currentPath, setCurrentPath] = useState(""); - const [fileName, setFileName] = useState(""); - const [type, setType] = useState(isAnsible ? "ansible" : "powershell"); - const [code, setCode] = useState(""); - - const [renameOpen, setRenameOpen] = useState(false); - const [renameValue, setRenameValue] = useState(""); - const [newOpen, setNewOpen] = useState(false); - const [newName, setNewName] = useState(""); - const [newType, setNewType] = useState(isAnsible ? "ansible" : "powershell"); - const [deleteOpen, setDeleteOpen] = useState(false); - - const island = useMemo(() => (isAnsible ? 'ansible' : 'scripts'), [isAnsible]); - - useEffect(() => { - (async () => { - if (!initialPath) return; - try { - const resp = await fetch(`/api/assembly/load?island=${encodeURIComponent(island)}&path=${encodeURIComponent(initialPath)}`); - if (resp.ok) { - const data = await resp.json(); - setCurrentPath(data.rel_path || initialPath); - const fname = data.file_name || initialPath.split('/').pop() || ''; - setFileName(fname); - setType(typeFromFilename(fname)); - setCode(data.content || ""); - } - } catch {} - if (onConsumedInitialPath) onConsumedInitialPath(); - })(); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [initialPath, island]); - - const saveFile = async () => { - if (!currentPath && !fileName) { - setNewName(""); - setNewType(isAnsible ? "ansible" : type); - setNewOpen(true); - return; - } - const island = isAnsible ? 'ansible' : 'scripts'; - const normalizedName = currentPath ? currentPath : ensureExt(fileName, type); - try { - // If we already have a path, edit; otherwise create - if (currentPath) { - const resp = await fetch(`/api/assembly/edit`, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ island, path: currentPath, content: code }) - }); - if (!resp.ok) { - const data = await resp.json().catch(() => ({})); - throw new Error(data?.error || `HTTP ${resp.status}`); - } - onSaved && onSaved(); - } else { - const resp = await fetch(`/api/assembly/create`, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ island, kind: 'file', path: normalizedName, content: code, type }) - }); - const data = await resp.json(); - if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`); - if (data.rel_path) { - setCurrentPath(data.rel_path); - const fname = data.rel_path.split('/').pop(); - setFileName(fname); - setType(typeFromFilename(fname)); - onSaved && onSaved(); - } - } - } catch (err) { - console.error("Failed to save:", err); - } - }; - - const saveRenameFile = async () => { - try { - const island = isAnsible ? 
'ansible' : 'scripts'; - const finalName = ensureExt(renameValue, type); - const res = await fetch(`/api/assembly/rename`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ island, kind: 'file', path: currentPath, new_name: finalName, type }) }); - const data = await res.json(); - if (!res.ok) throw new Error(data?.error || `HTTP ${res.status}`); - setCurrentPath(data.rel_path || currentPath); - const fname = (data.rel_path || currentPath).split('/').pop(); - setFileName(fname); - setType(typeFromFilename(fname)); - setRenameOpen(false); - } catch (err) { - console.error("Failed to rename file:", err); - setRenameOpen(false); - } - }; - - const createNew = () => { - const finalName = ensureExt(newName || (isAnsible ? "playbook" : "script"), newType); - setCurrentPath(finalName); - setFileName(finalName); - setType(newType); - setCode(""); - setNewOpen(false); - }; - - return ( - - - - - Type - - - - {fileName && ( - - )} - - - - highlightedHtml(src, (keyBy(TYPE_OPTIONS_ALL)[type]?.prism || 'yaml'))} padding={12} placeholder={currentPath ? `Editing: ${currentPath}` : (isAnsible ? "New Playbook..." : "New Script...")} - style={{ fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace', fontSize: 14, color: "#e6edf3", background: "#121212", outline: "none", minHeight: 300, lineHeight: 1.4, caretColor: "#58a6ff" }} /> - - - - {/* Dialogs */} - setRenameOpen(false)} onSave={saveRenameFile} /> - setNewOpen(false)} onCreate={createNew} /> - setDeleteOpen(false)} onConfirm={() => { setDeleteOpen(false); onSaved && onSaved(); }} /> - - ); -} diff --git a/Data/Server/WebUI/src/Scheduling/Create_Job.jsx b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx index 4ef4191..ad0ad9f 100644 --- a/Data/Server/WebUI/src/Scheduling/Create_Job.jsx +++ b/Data/Server/WebUI/src/Scheduling/Create_Job.jsx @@ -78,7 +78,7 @@ function buildScriptTree(scripts, folders) { const isFile = idx === parts.length - 1; let node = children.find((n) => n.id === path); if (!node) { - node = { id: path, label: isFile ? s.file_name : part, path, isFolder: !isFile, fileName: s.file_name, script: isFile ? s : null, children: [] }; + node = { id: path, label: isFile ? (s.name || s.file_name || part) : part, path, isFolder: !isFile, fileName: s.file_name, script: isFile ? s : null, children: [] }; children.push(node); map[path] = node; } if (!isFile) { children = node.children; parentPath = path; } diff --git a/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx b/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx index 0e8de36..0aea259 100644 --- a/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx +++ b/Data/Server/WebUI/src/Scheduling/Quick_Job.jsx @@ -53,7 +53,7 @@ function buildTree(items, folders, rootLabel = "Scripts") { if (!node) { node = { id: path, - label: isFile ? s.file_name : part, + label: isFile ? 
(s.name || s.file_name || part) : part,
           path,
           isFolder: !isFile,
           fileName: s.file_name,
diff --git a/Data/Server/job_scheduler.py b/Data/Server/job_scheduler.py
index 1518687..2e53f73 100644
--- a/Data/Server/job_scheduler.py
+++ b/Data/Server/job_scheduler.py
@@ -1,6 +1,8 @@
 import os
 import time
 import json
+import os
+import re
 import sqlite3
 from typing import Any, Dict, List, Optional, Tuple, Callable
@@ -150,17 +152,111 @@ class JobScheduler:
         return False

     def _detect_script_type(self, filename: str) -> str:
-        fn = (filename or "").lower()
-        if fn.endswith(".yml"):
-            return "ansible"
-        if fn.endswith(".ps1"):
+        fn_lower = (filename or "").lower()
+        if fn_lower.endswith(".json") and os.path.isfile(filename):
+            try:
+                with open(filename, "r", encoding="utf-8") as fh:
+                    data = json.load(fh)
+                if isinstance(data, dict):
+                    typ = str(data.get("type") or data.get("script_type") or "").strip().lower()
+                    if typ in ("powershell", "batch", "bash", "ansible"):
+                        return typ
+            except Exception:
+                pass
             return "powershell"
-        if fn.endswith(".bat"):
+        if fn_lower.endswith(".yml"):
+            return "ansible"
+        if fn_lower.endswith(".ps1"):
+            return "powershell"
+        if fn_lower.endswith(".bat"):
             return "batch"
-        if fn.endswith(".sh"):
+        if fn_lower.endswith(".sh"):
             return "bash"
         return "unknown"

+    def _load_assembly_document(self, abs_path: str, default_type: str) -> Dict[str, Any]:
+        base_name = os.path.splitext(os.path.basename(abs_path))[0]
+        doc: Dict[str, Any] = {
+            "name": base_name,
+            "description": "",
+            "category": "application" if default_type == "ansible" else "script",
+            "type": default_type,
+            "script": "",
+            "variables": [],
+            "files": [],
+            "timeout_seconds": 0,
+        }
+        if abs_path.lower().endswith(".json") and os.path.isfile(abs_path):
+            try:
+                with open(abs_path, "r", encoding="utf-8") as fh:
+                    data = json.load(fh)
+            except Exception:
+                data = {}
+            if isinstance(data, dict):
+                doc["name"] = str(data.get("name") or doc["name"])
+                doc["description"] = str(data.get("description") or "")
+                cat = str(data.get("category") or doc["category"]).strip().lower()
+                if cat in ("application", "script"):
+                    doc["category"] = cat
+                typ = str(data.get("type") or data.get("script_type") or default_type).strip().lower()
+                if typ in ("powershell", "batch", "bash", "ansible"):
+                    doc["type"] = typ
+                script_val = data.get("script")
+                if isinstance(script_val, str):
+                    doc["script"] = script_val
+                else:
+                    content_val = data.get("content")
+                    if isinstance(content_val, str):
+                        doc["script"] = content_val
+                try:
+                    doc["timeout_seconds"] = max(0, int(data.get("timeout_seconds") or 0))
+                except Exception:
+                    doc["timeout_seconds"] = 0
+                vars_in = data.get("variables") if isinstance(data.get("variables"), list) else []
+                doc["variables"] = []
+                for v in vars_in:
+                    if not isinstance(v, dict):
+                        continue
+                    name = str(v.get("name") or v.get("key") or "").strip()
+                    if not name:
+                        continue
+                    vtype = str(v.get("type") or "string").strip().lower()
+                    if vtype not in ("string", "number", "boolean", "credential"):
+                        vtype = "string"
+                    doc["variables"].append({
+                        "name": name,
+                        "label": str(v.get("label") or ""),
+                        "type": vtype,
+                        "default": v.get("default", v.get("default_value")),
+                        "required": bool(v.get("required")),
+                        "description": str(v.get("description") or ""),
+                    })
+                files_in = data.get("files") if isinstance(data.get("files"), list) else []
+                doc["files"] = []
+                for f in files_in:
+                    if not isinstance(f, dict):
+                        continue
+                    fname = f.get("file_name") or f.get("name")
+                    if not fname or not isinstance(f.get("data"), str):
+
continue + try: + size_val = int(f.get("size") or 0) + except Exception: + size_val = 0 + doc["files"].append({ + "file_name": str(fname), + "size": size_val, + "mime_type": str(f.get("mime_type") or f.get("mimeType") or ""), + "data": f.get("data"), + }) + return doc + try: + with open(abs_path, "r", encoding="utf-8", errors="replace") as fh: + doc["script"] = fh.read() + except Exception: + doc["script"] = "" + return doc + def _ansible_root(self) -> str: import os return os.path.abspath( @@ -175,11 +271,10 @@ class JobScheduler: abs_path = os.path.abspath(os.path.join(ans_root, rel_norm)) if (not abs_path.startswith(ans_root)) or (not os.path.isfile(abs_path)): return - try: - with open(abs_path, "r", encoding="utf-8", errors="replace") as fh: - content = fh.read() - except Exception: - return + doc = self._load_assembly_document(abs_path, "ansible") + content = doc.get("script") or "" + variables = doc.get("variables") or [] + files = doc.get("files") or [] # Record in activity_history for UI parity now = _now_ts() @@ -217,6 +312,8 @@ class JobScheduler: "scheduled_job_id": int(scheduled_job_id), "scheduled_run_id": int(scheduled_run_id), "connection": "winrm", + "variables": variables, + "files": files, } try: self.socketio.emit("ansible_playbook_run", payload) @@ -236,15 +333,33 @@ class JobScheduler: abs_path = os.path.abspath(os.path.join(scripts_root, path_norm)) if (not abs_path.startswith(scripts_root)) or (not self._is_valid_scripts_relpath(path_norm)) or (not os.path.isfile(abs_path)): return - stype = self._detect_script_type(abs_path) + doc = self._load_assembly_document(abs_path, "powershell") + stype = (doc.get("type") or "powershell").lower() # For now, only PowerShell is supported by agents for scheduled jobs if stype != "powershell": return + content = doc.get("script") or "" + env_map: Dict[str, str] = {} + for var in doc.get("variables") or []: + if not isinstance(var, dict): + continue + name = str(var.get("name") or "").strip() + if not name: + continue + env_key = re.sub(r"[^A-Za-z0-9_]", "_", name.upper()) + default_val = var.get("default") + if isinstance(default_val, bool): + env_val = "True" if default_val else "False" + elif default_val is None: + env_val = "" + else: + env_val = str(default_val) + env_map[env_key] = env_val + timeout_seconds = 0 try: - with open(abs_path, "r", encoding="utf-8", errors="replace") as fh: - content = fh.read() + timeout_seconds = max(0, int(doc.get("timeout_seconds") or 0)) except Exception: - return + timeout_seconds = 0 # Insert into activity_history for device for parity with Quick Job import sqlite3 @@ -281,6 +396,10 @@ class JobScheduler: "script_name": os.path.basename(abs_path), "script_path": path_norm, "script_content": content, + "environment": env_map, + "variables": doc.get("variables") or [], + "timeout_seconds": timeout_seconds, + "files": doc.get("files") or [], "run_mode": (run_mode or "system").strip().lower(), "admin_user": "", "admin_pass": "", diff --git a/Data/Server/server.py b/Data/Server/server.py index 0ebf6e6..ae92d2b 100644 --- a/Data/Server/server.py +++ b/Data/Server/server.py @@ -5,6 +5,7 @@ import eventlet eventlet.monkey_patch() import requests +import re import base64 from flask import Flask, request, jsonify, Response, send_from_directory, make_response, session from flask_socketio import SocketIO, emit, join_room @@ -16,7 +17,7 @@ import time import os # To Read Production ReactJS Server Folder import json # For reading workflow JSON files import shutil # For moving workflow files and folders 
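For reference, a sketch of what _build_wrapped_script hands to the agent: assuming a single variable mapped into the environment (SERVER_URL is an invented name, the URL an invented value) and timeout_seconds=30, the temporary .ps1 written by the role would come out roughly like this, with the payload script body inlined inside the scriptblock:

$__BorealisScript = {
$Env:SERVER_URL = 'https://borealis.example'
Write-Host "payload script body runs here"
}
$job = Start-Job -ScriptBlock $__BorealisScript
if (Wait-Job -Job $job -Timeout 30) {
    Receive-Job $job
} else {
    Stop-Job $job -Force
    throw "Script timed out after 30 seconds"
}

When timeout_seconds is 0 the Start-Job/Wait-Job wrapper is skipped and the block is invoked directly with & $__BorealisScript.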
-from typing import List, Dict, Tuple, Optional +from typing import List, Dict, Tuple, Optional, Any import sqlite3 import io from datetime import datetime, timezone @@ -650,16 +651,145 @@ def _default_ext_for_island(island: str, item_type: str = "") -> str: if isl in ("workflows", "workflow"): return ".json" if isl in ("ansible", "ansible_playbooks", "ansible-playbooks", "playbooks"): - return ".yml" - # scripts: use hint or default to .ps1 + return ".json" + if isl in ("scripts", "script"): + return ".json" t = (item_type or "").lower().strip() if t == "bash": - return ".sh" + return ".json" if t == "batch": - return ".bat" + return ".json" if t == "powershell": - return ".ps1" - return ".ps1" + return ".json" + return ".json" + + +def _default_type_for_island(island: str, item_type: str = "") -> str: + isl = (island or "").lower().strip() + if isl in ("ansible", "ansible_playbooks", "ansible-playbooks", "playbooks"): + return "ansible" + t = (item_type or "").lower().strip() + if t in ("powershell", "batch", "bash", "ansible"): + return t + return "powershell" + + +def _empty_assembly_document(default_type: str = "powershell") -> Dict[str, Any]: + return { + "version": 1, + "name": "", + "description": "", + "category": "application" if (default_type or "").lower() == "ansible" else "script", + "type": default_type or "powershell", + "script": "", + "timeout_seconds": 0, + "sites": {"mode": "all", "values": []}, + "variables": [], + "files": [] + } + + +def _normalize_assembly_document(obj: Any, default_type: str, base_name: str) -> Dict[str, Any]: + doc = _empty_assembly_document(default_type) + if not isinstance(obj, dict): + obj = {} + base = (base_name or "assembly").strip() + doc["name"] = str(obj.get("name") or obj.get("display_name") or base) + doc["description"] = str(obj.get("description") or "") + category = str(obj.get("category") or doc["category"]).strip().lower() + if category in ("script", "application"): + doc["category"] = category + typ = str(obj.get("type") or obj.get("script_type") or default_type or "powershell").strip().lower() + if typ in ("powershell", "batch", "bash", "ansible"): + doc["type"] = typ + script_val = obj.get("script") + if isinstance(script_val, str): + doc["script"] = script_val + else: + content_val = obj.get("content") + if isinstance(content_val, str): + doc["script"] = content_val + timeout_val = obj.get("timeout_seconds", obj.get("timeout")) + if timeout_val is not None: + try: + doc["timeout_seconds"] = max(0, int(timeout_val)) + except Exception: + pass + sites = obj.get("sites") if isinstance(obj.get("sites"), dict) else {} + values = sites.get("values") if isinstance(sites.get("values"), list) else [] + mode = str(sites.get("mode") or ("specific" if values else "all")).strip().lower() + if mode not in ("all", "specific"): + mode = "all" + doc["sites"] = { + "mode": mode, + "values": [str(v).strip() for v in values if isinstance(v, (str, int, float)) and str(v).strip()] + } + vars_in = obj.get("variables") if isinstance(obj.get("variables"), list) else [] + doc_vars: List[Dict[str, Any]] = [] + for v in vars_in: + if not isinstance(v, dict): + continue + name = str(v.get("name") or v.get("key") or "").strip() + if not name: + continue + vtype = str(v.get("type") or "string").strip().lower() + if vtype not in ("string", "number", "boolean", "credential"): + vtype = "string" + default_val = v.get("default", v.get("default_value")) + doc_vars.append({ + "name": name, + "label": str(v.get("label") or ""), + "type": vtype, + "default": 
default_val, + "required": bool(v.get("required")), + "description": str(v.get("description") or "") + }) + doc["variables"] = doc_vars + files_in = obj.get("files") if isinstance(obj.get("files"), list) else [] + doc_files: List[Dict[str, Any]] = [] + for f in files_in: + if not isinstance(f, dict): + continue + fname = f.get("file_name") or f.get("name") + data = f.get("data") + if not fname or not isinstance(data, str): + continue + size_val = f.get("size") + try: + size_int = int(size_val) + except Exception: + size_int = 0 + doc_files.append({ + "file_name": str(fname), + "size": size_int, + "mime_type": str(f.get("mime_type") or f.get("mimeType") or ""), + "data": data + }) + doc["files"] = doc_files + try: + doc["version"] = int(obj.get("version") or doc["version"]) + except Exception: + pass + return doc + + +def _load_assembly_document(abs_path: str, island: str, type_hint: str = "") -> Dict[str, Any]: + base_name = os.path.splitext(os.path.basename(abs_path))[0] + default_type = _default_type_for_island(island, type_hint) + if abs_path.lower().endswith(".json"): + data = _safe_read_json(abs_path) + return _normalize_assembly_document(data, default_type, base_name) + try: + with open(abs_path, "r", encoding="utf-8", errors="replace") as fh: + content = fh.read() + except Exception: + content = "" + doc = _empty_assembly_document(default_type) + doc["name"] = base_name + doc["script"] = content + if default_type == "ansible": + doc["category"] = "application" + return doc @app.route("/api/assembly/create", methods=["POST"]) @@ -682,7 +812,7 @@ def assembly_create(): if not ext: abs_path = base + _default_ext_for_island(island, item_type) os.makedirs(os.path.dirname(abs_path), exist_ok=True) - # Workflows expect JSON; others raw text + # Workflows expect JSON; scripts/ansible use assembly documents if (island or "").lower() in ("workflows", "workflow"): obj = content if isinstance(obj, str): @@ -699,8 +829,22 @@ def assembly_create(): with open(abs_path, "w", encoding="utf-8") as fh: json.dump(obj, fh, indent=2) else: - with open(abs_path, "w", encoding="utf-8", newline="\n") as fh: - fh.write(str(content or "")) + obj = content + if isinstance(obj, str): + try: + obj = json.loads(obj) + except Exception: + obj = {} + if not isinstance(obj, dict): + obj = {} + base_name = os.path.splitext(os.path.basename(abs_path))[0] + normalized = _normalize_assembly_document( + obj, + _default_type_for_island(island, item_type), + base_name, + ) + with open(abs_path, "w", encoding="utf-8") as fh: + json.dump(normalized, fh, indent=2) rel_new = os.path.relpath(abs_path, root).replace(os.sep, "/") return jsonify({"status": "ok", "rel_path": rel_new}) else: @@ -721,18 +865,42 @@ def assembly_edit(): root, abs_path, _ = _resolve_assembly_path(island, path) if not os.path.isfile(abs_path): return jsonify({"error": "file not found"}), 404 + target_abs = abs_path + if not abs_path.lower().endswith(".json"): + base, _ = os.path.splitext(abs_path) + target_abs = base + _default_ext_for_island(island, data.get("type")) if (island or "").lower() in ("workflows", "workflow"): obj = content if isinstance(obj, str): obj = json.loads(obj) if not isinstance(obj, dict): return jsonify({"error": "invalid content for workflow"}), 400 - with open(abs_path, "w", encoding="utf-8") as fh: + with open(target_abs, "w", encoding="utf-8") as fh: json.dump(obj, fh, indent=2) else: - with open(abs_path, "w", encoding="utf-8", newline="\n") as fh: - fh.write(str(content or "")) - return jsonify({"status": "ok"}) + obj = 
@@ -885,7 +1053,7 @@ def assembly_list():
                     "last_edited_epoch": mtime
                 })
         elif isl in ("scripts", "script"):
-            exts = (".ps1", ".bat", ".sh")
+            exts = (".json", ".ps1", ".bat", ".sh")
             for r, dirs, files in os.walk(root):
                 rel_root = os.path.relpath(r, root)
                 if rel_root != ".":
@@ -899,15 +1067,20 @@
                     mtime = os.path.getmtime(fp)
                 except Exception:
                     mtime = 0.0
+                stype = _detect_script_type(fp)
+                doc = _load_assembly_document(fp, "scripts", stype)
                 items.append({
                     "file_name": fname,
                     "rel_path": rel_path,
-                    "type": _detect_script_type(fname),
+                    "type": doc.get("type", stype),
+                    "name": doc.get("name"),
+                    "category": doc.get("category"),
+                    "description": doc.get("description"),
                     "last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
                     "last_edited_epoch": mtime
                 })
         else:  # ansible
-            exts = (".yml",)
+            exts = (".json", ".yml")
             for r, dirs, files in os.walk(root):
                 rel_root = os.path.relpath(r, root)
                 if rel_root != ".":
@@ -921,10 +1094,15 @@
                     mtime = os.path.getmtime(fp)
                 except Exception:
                     mtime = 0.0
+                stype = _detect_script_type(fp)
+                doc = _load_assembly_document(fp, "ansible", stype)
                 items.append({
                     "file_name": fname,
                     "rel_path": rel_path,
-                    "type": "ansible",
+                    "type": doc.get("type", "ansible"),
+                    "name": doc.get("name"),
+                    "category": doc.get("category"),
+                    "description": doc.get("description"),
                     "last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
                     "last_edited_epoch": mtime
                 })
@@ -951,14 +1129,16 @@
             obj = _safe_read_json(abs_path)
             return jsonify(obj)
         else:
-            with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
-                content = fh.read()
-            return jsonify({
+            doc = _load_assembly_document(abs_path, island)
+            rel = os.path.relpath(abs_path, root).replace(os.sep, "/")
+            result = {
                 "file_name": os.path.basename(abs_path),
-                "rel_path": os.path.relpath(abs_path, root).replace(os.sep, "/"),
-                "type": ("ansible" if isl.startswith("ansible") else _detect_script_type(abs_path)),
-                "content": content
-            })
+                "rel_path": rel,
+                "type": doc.get("type"),
+                "assembly": doc,
+                "content": doc.get("script")
+            }
+            return jsonify(result)
     except ValueError as ve:
         return jsonify({"error": str(ve)}), 400
     except Exception as e:
@@ -991,29 +1171,33 @@ def _is_valid_scripts_relpath(rel_path: str) -> bool:


 def _detect_script_type(filename: str) -> str:
-    fn = (filename or "").lower()
-    if fn.endswith(".yml"):
-        return "ansible"
-    if fn.endswith(".ps1"):
+    fn_lower = (filename or "").lower()
+    if fn_lower.endswith(".json") and os.path.isfile(filename):
+        try:
+            obj = _safe_read_json(filename)
+            if isinstance(obj, dict):
+                typ = str(obj.get("type") or obj.get("script_type") or "").strip().lower()
+                if typ in ("powershell", "batch", "bash", "ansible"):
+                    return typ
+        except Exception:
+            pass
         return "powershell"
-    if fn.endswith(".bat"):
+    if fn_lower.endswith(".yml"):
+        return "ansible"
+    if fn_lower.endswith(".ps1"):
+        return "powershell"
+    if fn_lower.endswith(".bat"):
         return "batch"
-    if fn.endswith(".sh"):
+    if fn_lower.endswith(".sh"):
        return "bash"
     return "unknown"


 def _ext_for_type(script_type: str) -> str:
     t = (script_type or "").lower()
-    if t == "ansible":
-        return ".yml"
-    if t == "powershell":
-        return ".ps1"
-    if t == "batch":
-        return ".bat"
-    if t == "bash":
-        return ".sh"
-    return ""
+    if t in ("ansible", "powershell", "batch", "bash"):
+        return ".json"
+    return ".json"
 """
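A short illustration of the new detection and extension rules, assuming the helpers above; the paths are hypothetical:

    # An existing .json assembly reports the type declared in the document; a
    # .json document without a valid "type" falls back to "powershell".
    _detect_script_type("Assemblies/Scripts/clean_temp.json")  # e.g. "bash"
    # Legacy raw files are still classified by extension.
    _detect_script_type("Assemblies/Scripts/clean_temp.sh")    # "bash"
    # New and re-saved assemblies are always persisted as .json, whatever the type.
    _ext_for_type("bash")                                      # ".json"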
@@ -2594,14 +2778,24 @@ def set_device_description(hostname: str):

 # Quick Job Execution + Activity History
 # ---------------------------------------------
 def _detect_script_type(fn: str) -> str:
-    fn = (fn or "").lower()
-    if fn.endswith(".yml"):
-        return "ansible"
-    if fn.endswith(".ps1"):
+    fn_lower = (fn or "").lower()
+    if fn_lower.endswith(".json") and os.path.isfile(fn):
+        try:
+            obj = _safe_read_json(fn)
+            if isinstance(obj, dict):
+                typ = str(obj.get("type") or obj.get("script_type") or "").strip().lower()
+                if typ in ("powershell", "batch", "bash", "ansible"):
+                    return typ
+        except Exception:
+            pass
         return "powershell"
-    if fn.endswith(".bat"):
+    if fn_lower.endswith(".yml"):
+        return "ansible"
+    if fn_lower.endswith(".ps1"):
+        return "powershell"
+    if fn_lower.endswith(".bat"):
         return "batch"
-    if fn.endswith(".sh"):
+    if fn_lower.endswith(".sh"):
         return "bash"
     return "unknown"
@@ -2634,15 +2828,34 @@ def scripts_quick_run():
     if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
         return jsonify({"error": "Script not found"}), 404

-    script_type = _detect_script_type(abs_path)
+    doc = _load_assembly_document(abs_path, "scripts")
+    script_type = (doc.get("type") or "powershell").lower()
     if script_type != "powershell":
         return jsonify({"error": f"Unsupported script type '{script_type}'. Only powershell is supported for Quick Job currently."}), 400
+    content = doc.get("script") or ""
+    variables = doc.get("variables") if isinstance(doc.get("variables"), list) else []
+    env_map: Dict[str, str] = {}
+    for var in variables:
+        if not isinstance(var, dict):
+            continue
+        name = str(var.get("name") or "").strip()
+        if not name:
+            continue
+        env_key = re.sub(r"[^A-Za-z0-9_]", "_", name.upper())
+        default_val = var.get("default")
+        if isinstance(default_val, bool):
+            env_val = "True" if default_val else "False"
+        elif default_val is None:
+            env_val = ""
+        else:
+            env_val = str(default_val)
+        env_map[env_key] = env_val
+    timeout_seconds = 0
     try:
-        with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
-            content = fh.read()
-    except Exception as e:
-        return jsonify({"error": f"Failed to read script: {e}"}), 500
+        timeout_seconds = max(0, int(doc.get("timeout_seconds") or 0))
+    except Exception:
+        timeout_seconds = 0

     now = int(time.time())
     results = []
@@ -2680,6 +2893,10 @@
             "script_name": _safe_filename(rel_path),
             "script_path": rel_path.replace(os.sep, "/"),
             "script_content": content,
+            "environment": env_map,
+            "variables": variables,
+            "timeout_seconds": timeout_seconds,
+            "files": doc.get("files") if isinstance(doc.get("files"), list) else [],
             "run_mode": run_mode,
             "admin_user": admin_user,
             "admin_pass": admin_pass,
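As a rough sketch of the variable-to-environment mapping performed in scripts_quick_run above (the variable names are invented for illustration):

    # Names are upper-cased, characters outside [A-Za-z0-9_] become underscores,
    # and defaults are stringified before being handed to the agent.
    variables = [
        {"name": "Install Path", "default": "C:\\Tools"},
        {"name": "silent",       "default": True},
        {"name": "retries",      "default": None},
    ]
    # Resulting env_map carried in the quick-run payload shown above:
    # {"INSTALL_PATH": "C:\\Tools", "SILENT": "True", "RETRIES": ""}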
@@ -2709,12 +2926,10 @@ def ansible_quick_run():
     if not os.path.isfile(abs_path):
         _ansible_log_server(f"[quick_run] playbook not found path={abs_path}")
         return jsonify({"error": "Playbook not found"}), 404
-    try:
-        with open(abs_path, 'r', encoding='utf-8', errors='replace') as fh:
-            content = fh.read()
-    except Exception as e:
-        _ansible_log_server(f"[quick_run] read error: {e}")
-        return jsonify({"error": f"Failed to read playbook: {e}"}), 500
+    doc = _load_assembly_document(abs_path, 'ansible')
+    content = doc.get('script') or ''
+    variables = doc.get('variables') if isinstance(doc.get('variables'), list) else []
+    files = doc.get('files') if isinstance(doc.get('files'), list) else []

     results = []
     for host in hostnames:
@@ -2757,6 +2972,8 @@
             "playbook_name": os.path.basename(abs_path),
             "playbook_content": content,
             "connection": "winrm",
+            "variables": variables,
+            "files": files,
             "activity_job_id": job_id,
         }
         try:
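Finally, a sketch of how a legacy playbook file is represented after this change (the path is hypothetical):

    # A pre-existing raw .yml playbook is wrapped on the fly rather than migrated.
    doc = _load_assembly_document("Assemblies/Ansible/patch_windows.yml", "ansible")
    # doc["type"]     -> "ansible"
    # doc["category"] -> "application"
    # doc["script"]   -> the raw YAML text of the playbook
    # doc["variables"] and doc["files"] -> [], so legacy playbooks simply contribute
    # empty "variables"/"files" entries to the payload above.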