diff --git a/Scripts/Examples/Write Canary to C Drive Root.ps1 b/Assemblies/Scripts/Examples/Write Canary to C Drive Root.ps1
similarity index 100%
rename from Scripts/Examples/Write Canary to C Drive Root.ps1
rename to Assemblies/Scripts/Examples/Write Canary to C Drive Root.ps1
diff --git a/Scripts/Examples/Write Canary to CurrentUser Desktop.ps1 b/Assemblies/Scripts/Examples/Write Canary to CurrentUser Desktop.ps1
similarity index 100%
rename from Scripts/Examples/Write Canary to CurrentUser Desktop.ps1
rename to Assemblies/Scripts/Examples/Write Canary to CurrentUser Desktop.ps1
diff --git a/Scripts/Examples/Write Canary to Specific User Folder.ps1 b/Assemblies/Scripts/Examples/Write Canary to Specific User Folder.ps1
similarity index 100%
rename from Scripts/Examples/Write Canary to Specific User Folder.ps1
rename to Assemblies/Scripts/Examples/Write Canary to Specific User Folder.ps1
diff --git a/Workflows/Development Testing/Multiple Agent Screenshot Testing.json b/Assemblies/Workflows/Development Testing/Multiple Agent Screenshot Testing.json
similarity index 100%
rename from Workflows/Development Testing/Multiple Agent Screenshot Testing.json
rename to Assemblies/Workflows/Development Testing/Multiple Agent Screenshot Testing.json
diff --git a/Workflows/Examples/API Requests/Value Parser.json b/Assemblies/Workflows/Examples/API Requests/Value Parser.json
similarity index 100%
rename from Workflows/Examples/API Requests/Value Parser.json
rename to Assemblies/Workflows/Examples/API Requests/Value Parser.json
diff --git a/Workflows/Examples/Basic/Logic Comparison.json b/Assemblies/Workflows/Examples/Basic/Logic Comparison.json
similarity index 100%
rename from Workflows/Examples/Basic/Logic Comparison.json
rename to Assemblies/Workflows/Examples/Basic/Logic Comparison.json
diff --git a/Workflows/Examples/Basic/Math Operations.json b/Assemblies/Workflows/Examples/Basic/Math Operations.json
similarity index 100%
rename from Workflows/Examples/Basic/Math Operations.json
rename to Assemblies/Workflows/Examples/Basic/Math Operations.json
diff --git a/Workflows/Examples/Image Processing/Black and White Conversion.json b/Assemblies/Workflows/Examples/Image Processing/Black and White Conversion.json
similarity index 100%
rename from Workflows/Examples/Image Processing/Black and White Conversion.json
rename to Assemblies/Workflows/Examples/Image Processing/Black and White Conversion.json
diff --git a/Workflows/Examples/OCR/Text Recognition.json b/Assemblies/Workflows/Examples/OCR/Text Recognition.json
similarity index 100%
rename from Workflows/Examples/OCR/Text Recognition.json
rename to Assemblies/Workflows/Examples/OCR/Text Recognition.json
diff --git a/Workflows/Games/Flyff Universe/Character Status Breakdown.json b/Assemblies/Workflows/Games/Flyff Universe/Character Status Breakdown.json
similarity index 100%
rename from Workflows/Games/Flyff Universe/Character Status Breakdown.json
rename to Assemblies/Workflows/Games/Flyff Universe/Character Status Breakdown.json
diff --git a/Workflows/Games/Flyff Universe/Chat Text Search Alerter.json b/Assemblies/Workflows/Games/Flyff Universe/Chat Text Search Alerter.json
similarity index 100%
rename from Workflows/Games/Flyff Universe/Chat Text Search Alerter.json
rename to Assemblies/Workflows/Games/Flyff Universe/Chat Text Search Alerter.json
diff --git a/Data/Agent/agent-requirements.txt b/Data/Agent/agent-requirements.txt
index 3df67a5..c2cb6d7 100644
--- a/Data/Agent/agent-requirements.txt
+++ b/Data/Agent/agent-requirements.txt
@@ -27,6 +27,5 @@
 sounddevice
 numpy
 pywin32; platform_system == "Windows"
-# Ansible-based inventory collection (Windows local-only)
-# Note: ansible-core is heavy; enable via config flag in DeviceAudit role
+# Ansible Libraries
 ansible-core
diff --git a/Data/Server/WebUI/src/Scripting/Script_Editor.jsx b/Data/Server/WebUI/src/Scripting/Script_Editor.jsx
index 0f93e72..151c0e1 100644
--- a/Data/Server/WebUI/src/Scripting/Script_Editor.jsx
+++ b/Data/Server/WebUI/src/Scripting/Script_Editor.jsx
@@ -34,7 +34,6 @@ import Editor from "react-simple-code-editor";
 
 // ---------- helpers ----------
 const TYPE_OPTIONS = [
-  { key: "ansible", label: "Ansible Playbook", ext: ".yml", prism: "yaml" },
   { key: "powershell", label: "Powershell Script", ext: ".ps1", prism: "powershell" },
   { key: "batch", label: "Batch Script", ext: ".bat", prism: "batch" },
   { key: "bash", label: "Bash Script", ext: ".sh", prism: "bash" }
@@ -45,18 +44,18 @@ const TYPES = keyBy(TYPE_OPTIONS);
 
 function typeFromFilename(name = "") {
   const n = name.toLowerCase();
-  if (n.endsWith(".yml")) return "ansible";
   if (n.endsWith(".ps1")) return "powershell";
   if (n.endsWith(".bat")) return "batch";
   if (n.endsWith(".sh")) return "bash";
-  return "ansible"; // default
+  // Default editor type
+  return "powershell";
 }
 
 function ensureExt(baseName, typeKey) {
   if (!baseName) return baseName;
   // If user already provided any extension, keep it.
   if (/\.[^./\\]+$/i.test(baseName)) return baseName;
-  const t = TYPES[typeKey] || TYPES.ansible;
+  const t = TYPES[typeKey] || TYPES.powershell;
   return baseName + t.ext;
 }
 
@@ -258,7 +257,7 @@ export default function ScriptEditor() {
   const [currentPath, setCurrentPath] = useState("");
   const [currentFolder, setCurrentFolder] = useState("");
   const [fileName, setFileName] = useState("");
-  const [type, setType] = useState("ansible");
+  const [type, setType] = useState("powershell");
   const [code, setCode] = useState("");
 
   // Dialog state
@@ -268,7 +267,7 @@ export default function ScriptEditor() {
   const [folderDialogMode, setFolderDialogMode] = useState("rename");
   const [newScriptOpen, setNewScriptOpen] = useState(false);
   const [newScriptName, setNewScriptName] = useState("");
-  const [newScriptType, setNewScriptType] = useState("ansible");
+  const [newScriptType, setNewScriptType] = useState("powershell");
   const [deleteOpen, setDeleteOpen] = useState(false);
 
   const prismLang = useMemo(() => (TYPES[type]?.prism || "yaml"), [type]);
@@ -401,7 +400,7 @@ export default function ScriptEditor() {
     if (!selectedNode) return;
     setContextMenu(null);
     setNewScriptName("");
-    setNewScriptType("ansible");
+    setNewScriptType("powershell");
     setNewScriptOpen(true);
   };
 
diff --git a/Data/Server/job_scheduler.py b/Data/Server/job_scheduler.py
index 89708a2..6db5e57 100644
--- a/Data/Server/job_scheduler.py
+++ b/Data/Server/job_scheduler.py
@@ -133,10 +133,22 @@ class JobScheduler:
     # ---------- Helpers for dispatching scripts ----------
    def _scripts_root(self) -> str:
        import os
+        # Unified Assemblies root. Scheduled script paths must include the
+        # "Scripts" top-level folder (enforced by _is_valid_scripts_relpath).
        return os.path.abspath(
-            os.path.join(os.path.dirname(__file__), "..", "..", "Scripts")
+            os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies")
        )
 
+    def _is_valid_scripts_relpath(self, rel_path: str) -> bool:
+        try:
+            p = (rel_path or "").replace("\\", "/").lstrip("/")
+            if not p:
+                return False
+            top = p.split("/", 1)[0]
+            return top in ("Scripts",)
+        except Exception:
+            return False
+
     def _detect_script_type(self, filename: str) -> str:
         fn = (filename or "").lower()
         if fn.endswith(".yml"):
@@ -158,7 +170,7 @@
         import os
         path_norm = (rel_path or "").replace("\\", "/")
         abs_path = os.path.abspath(os.path.join(scripts_root, path_norm))
-        if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+        if (not abs_path.startswith(scripts_root)) or (not self._is_valid_scripts_relpath(path_norm)) or (not os.path.isfile(abs_path)):
             return
         stype = self._detect_script_type(abs_path)
         # For now, only PowerShell is supported by agents for scheduled jobs
diff --git a/Data/Server/server.py b/Data/Server/server.py
index 86a80d5..de8ce66 100644
--- a/Data/Server/server.py
+++ b/Data/Server/server.py
@@ -513,7 +513,7 @@ def move_workflow():
     rel_path = (data.get("path") or "").strip()
     new_rel = (data.get("new_path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
     new_abs = os.path.abspath(os.path.join(workflows_root, new_rel))
@@ -534,7 +534,7 @@ def delete_workflow():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not abs_path.startswith(workflows_root) or not os.path.isfile(abs_path):
@@ -551,7 +551,7 @@ def delete_folder():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not abs_path.startswith(workflows_root) or not os.path.isdir(abs_path):
@@ -567,7 +567,7 @@ def create_folder():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not abs_path.startswith(workflows_root):
@@ -585,7 +585,7 @@ def rename_folder():
     rel_path = (data.get("path") or "").strip()
     new_name = (data.get("new_name") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not old_abs.startswith(workflows_root) or not os.path.isdir(old_abs):
@@ -628,11 +628,11 @@ def _extract_tab_name(obj: Dict) -> str:
 @app.route("/api/storage/load_workflows", methods=["GET"])
 def load_workflows():
     """
-    Scan /Workflows for *.json files and return a table-friendly list.
+    Scan /Assemblies/Workflows for *.json files and return a table-friendly list.
     """
-    # Resolve /Workflows relative to this file at /Data/server.py
+    # Resolve /Assemblies/Workflows relative to this file at /Data/server.py
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     results: List[Dict] = []
     folders: List[str] = []
@@ -695,7 +695,7 @@ def load_workflow():
     """Load a single workflow JSON by its relative path."""
     rel_path = request.args.get("path", "")
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
 
@@ -716,7 +716,7 @@ def save_workflow():
         return jsonify({"error": "Invalid payload"}), 400
 
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     os.makedirs(workflows_root, exist_ok=True)
 
@@ -749,7 +749,7 @@ def rename_workflow():
     rel_path = (data.get("path") or "").strip()
     new_name = (data.get("new_name") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not old_abs.startswith(workflows_root) or not os.path.isfile(old_abs):
@@ -780,10 +780,26 @@ def rename_workflow():
 # ---------------------------------------------
 # Scripts Storage API Endpoints
 # ---------------------------------------------
 def _scripts_root() -> str:
+    # Scripts live under Assemblies. The listing is unified under Assemblies,
+    # but the Scripts API only allows access within the "Scripts" top-level folder.
     return os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Scripts")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies")
     )
 
+def _scripts_allowed_top_levels() -> List[str]:
+    # Scripts API is scoped strictly to the Scripts top-level.
+    return ["Scripts"]
+
+def _is_valid_scripts_relpath(rel_path: str) -> bool:
+    try:
+        p = (rel_path or "").replace("\\", "/").lstrip("/")
+        if not p:
+            return False
+        top = p.split("/", 1)[0]
+        return top in _scripts_allowed_top_levels()
+    except Exception:
+        return False
+
 def _detect_script_type(filename: str) -> str:
     fn = (filename or "").lower()
@@ -813,7 +829,7 @@ def _ext_for_type(script_type: str) -> str:
 
 @app.route("/api/scripts/list", methods=["GET"])
 def list_scripts():
-    """Scan /Scripts for known script files and return list + folders."""
+    """Scan /Assemblies/Scripts for script files and return list + folders."""
     scripts_root = _scripts_root()
     results: List[Dict] = []
     folders: List[str] = []
@@ -826,35 +842,39 @@ def list_scripts():
         }), 200
 
     exts = (".yml", ".ps1", ".bat", ".sh")
-    for root, dirs, files in os.walk(scripts_root):
-        rel_root = os.path.relpath(root, scripts_root)
-        if rel_root != ".":
-            folders.append(rel_root.replace(os.sep, "/"))
-        for fname in files:
-            if not fname.lower().endswith(exts):
-                continue
+    for top in _scripts_allowed_top_levels():
+        base_dir = os.path.join(scripts_root, top)
+        if not os.path.isdir(base_dir):
+            continue
+        for root, dirs, files in os.walk(base_dir):
+            rel_root = os.path.relpath(root, scripts_root)
+            if rel_root != ".":
+                folders.append(rel_root.replace(os.sep, "/"))
+            for fname in files:
+                if not fname.lower().endswith(exts):
+                    continue
 
-            full_path = os.path.join(root, fname)
-            rel_path = os.path.relpath(full_path, scripts_root)
-            parts = rel_path.split(os.sep)
-            folder_parts = parts[:-1]
-            breadcrumb_prefix = " > ".join(folder_parts) if folder_parts else ""
-            display_name = f"{breadcrumb_prefix} > {fname}" if breadcrumb_prefix else fname
+                full_path = os.path.join(root, fname)
+                rel_path = os.path.relpath(full_path, scripts_root)
+                parts = rel_path.split(os.sep)
+                folder_parts = parts[:-1]
+                breadcrumb_prefix = " > ".join(folder_parts) if folder_parts else ""
+                display_name = f"{breadcrumb_prefix} > {fname}" if breadcrumb_prefix else fname
 
-            try:
-                mtime = os.path.getmtime(full_path)
-            except Exception:
-                mtime = 0.0
+                try:
+                    mtime = os.path.getmtime(full_path)
+                except Exception:
+                    mtime = 0.0
 
-            results.append({
-                "name": display_name,
-                "breadcrumb_prefix": breadcrumb_prefix,
-                "file_name": fname,
-                "rel_path": rel_path.replace(os.sep, "/"),
-                "type": _detect_script_type(fname),
-                "last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
-                "last_edited_epoch": mtime
-            })
+                results.append({
+                    "name": display_name,
+                    "breadcrumb_prefix": breadcrumb_prefix,
+                    "file_name": fname,
+                    "rel_path": rel_path.replace(os.sep, "/"),
+                    "type": _detect_script_type(fname),
+                    "last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
+                    "last_edited_epoch": mtime
+                })
 
     results.sort(key=lambda x: x.get("last_edited_epoch", 0.0), reverse=True)
 
@@ -870,7 +890,7 @@ def load_script():
     rel_path = request.args.get("path", "")
     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-    if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
        return jsonify({"error": "Script not found"}), 404
     try:
         with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
@@ -908,6 +928,8 @@ def save_script():
         if desired_ext:
             rel_path = base + desired_ext
         abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
+        if not _is_valid_scripts_relpath(rel_path):
+            return jsonify({"error": "Invalid path (must be under 'Scripts')"}), 400
     else:
         if not name:
             return jsonify({"error": "Missing name"}), 400
@@ -916,7 +938,10 @@ def save_script():
         if not ext:
             desired_ext = _ext_for_type(script_type) or ".txt"
             name = os.path.splitext(name)[0] + desired_ext
-        abs_path = os.path.abspath(os.path.join(scripts_root, os.path.basename(name)))
+        # Default top-level folder is Scripts only (Playbooks handled separately)
+        if (script_type or "").lower() == "ansible":
+            return jsonify({"error": "Ansible playbooks are managed separately from scripts."}), 400
+        abs_path = os.path.abspath(os.path.join(scripts_root, "Scripts", os.path.basename(name)))
 
     if not abs_path.startswith(scripts_root):
         return jsonify({"error": "Invalid path"}), 400
@@ -967,7 +992,7 @@ def move_script_file():
     new_abs = os.path.abspath(os.path.join(scripts_root, new_rel))
     if not old_abs.startswith(scripts_root) or not os.path.isfile(old_abs):
         return jsonify({"error": "File not found"}), 404
-    if not new_abs.startswith(scripts_root):
+    if (not new_abs.startswith(scripts_root)) or (not _is_valid_scripts_relpath(new_rel)):
         return jsonify({"error": "Invalid destination"}), 400
     os.makedirs(os.path.dirname(new_abs), exist_ok=True)
     try:
@@ -983,7 +1008,7 @@ def delete_script_file():
     rel_path = (data.get("path") or "").strip()
     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-    if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
         return jsonify({"error": "File not found"}), 404
     try:
         os.remove(abs_path)
@@ -997,6 +1022,10 @@ def scripts_create_folder():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     scripts_root = _scripts_root()
+    # If caller provided a path that does not include a valid top-level,
+    # default to creating under the "Scripts" top-level for convenience.
+    if not _is_valid_scripts_relpath(rel_path):
+        rel_path = os.path.join("Scripts", rel_path) if rel_path else "Scripts"
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
     if not abs_path.startswith(scripts_root):
         return jsonify({"error": "Invalid path"}), 400
@@ -1013,8 +1042,11 @@ def scripts_delete_folder():
     rel_path = (data.get("path") or "").strip()
     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-    if not abs_path.startswith(scripts_root) or not os.path.isdir(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isdir(abs_path)):
         return jsonify({"error": "Folder not found"}), 404
+    rel_norm = (rel_path or "").replace("\\", "/").strip("/")
+    if rel_norm in ("Scripts", "Ansible Playbooks"):
+        return jsonify({"error": "Cannot delete top-level folder"}), 400
     try:
         shutil.rmtree(abs_path)
         return jsonify({"status": "ok"})
@@ -1033,6 +1065,9 @@ def scripts_rename_folder():
         return jsonify({"error": "Folder not found"}), 404
     if not new_name:
         return jsonify({"error": "Invalid new_name"}), 400
+    rel_norm = (rel_path or "").replace("\\", "/").strip("/")
+    if rel_norm in ("Scripts", "Ansible Playbooks"):
+        return jsonify({"error": "Cannot rename top-level folder"}), 400
     new_abs = os.path.join(os.path.dirname(old_abs), new_name)
     try:
         os.rename(old_abs, new_abs)
@@ -2138,7 +2173,7 @@ def scripts_quick_run():
     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
 
-    if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
         return jsonify({"error": "Script not found"}), 404
 
     script_type = _detect_script_type(abs_path)
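
For reference, a minimal standalone sketch of the top-level scoping rule that the new _is_valid_scripts_relpath helpers in job_scheduler.py and server.py apply; the function and constant names here are illustrative simplifications, not the server code itself, and the sample relative paths are hypothetical.

    # Sketch: reject any relative path whose first segment is not an allowed top-level folder.
    ALLOWED_TOP_LEVELS = ("Scripts",)

    def is_valid_scripts_relpath(rel_path: str) -> bool:
        # Normalize separators, drop a leading slash, then check the first path segment.
        p = (rel_path or "").replace("\\", "/").lstrip("/")
        if not p:
            return False
        return p.split("/", 1)[0] in ALLOWED_TOP_LEVELS

    # Hypothetical example paths:
    assert is_valid_scripts_relpath("Scripts/Examples/Hello.ps1")
    assert not is_valid_scripts_relpath("Workflows/Examples/Basic/Math Operations.json")
    assert not is_valid_scripts_relpath("../outside.ps1")  # callers still pair this with the abspath startswith() guard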