Restructured Scripts and Workflows into "Assemblies"
@@ -27,6 +27,5 @@ sounddevice
 numpy
 pywin32; platform_system == "Windows"

-# Ansible-based inventory collection (Windows local-only)
-# Note: ansible-core is heavy; enable via config flag in DeviceAudit role
+# Ansible Libraries
 ansible-core

@@ -34,7 +34,6 @@ import Editor from "react-simple-code-editor";

 // ---------- helpers ----------
 const TYPE_OPTIONS = [
-  { key: "ansible", label: "Ansible Playbook", ext: ".yml", prism: "yaml" },
   { key: "powershell", label: "Powershell Script", ext: ".ps1", prism: "powershell" },
   { key: "batch", label: "Batch Script", ext: ".bat", prism: "batch" },
   { key: "bash", label: "Bash Script", ext: ".sh", prism: "bash" }

@@ -45,18 +44,18 @@ const TYPES = keyBy(TYPE_OPTIONS);

 function typeFromFilename(name = "") {
   const n = name.toLowerCase();
-  if (n.endsWith(".yml")) return "ansible";
   if (n.endsWith(".ps1")) return "powershell";
   if (n.endsWith(".bat")) return "batch";
   if (n.endsWith(".sh")) return "bash";
-  return "ansible"; // default
+  // Default editor type
+  return "powershell";
 }

 function ensureExt(baseName, typeKey) {
   if (!baseName) return baseName;
   // If user already provided any extension, keep it.
   if (/\.[^./\\]+$/i.test(baseName)) return baseName;
-  const t = TYPES[typeKey] || TYPES.ansible;
+  const t = TYPES[typeKey] || TYPES.powershell;
   return baseName + t.ext;
 }

@@ -258,7 +257,7 @@ export default function ScriptEditor() {
   const [currentPath, setCurrentPath] = useState("");
   const [currentFolder, setCurrentFolder] = useState("");
   const [fileName, setFileName] = useState("");
-  const [type, setType] = useState("ansible");
+  const [type, setType] = useState("powershell");
   const [code, setCode] = useState("");

   // Dialog state
@@ -268,7 +267,7 @@ export default function ScriptEditor() {
   const [folderDialogMode, setFolderDialogMode] = useState("rename");
   const [newScriptOpen, setNewScriptOpen] = useState(false);
   const [newScriptName, setNewScriptName] = useState("");
-  const [newScriptType, setNewScriptType] = useState("ansible");
+  const [newScriptType, setNewScriptType] = useState("powershell");
   const [deleteOpen, setDeleteOpen] = useState(false);

   const prismLang = useMemo(() => (TYPES[type]?.prism || "yaml"), [type]);
@@ -401,7 +400,7 @@ export default function ScriptEditor() {
     if (!selectedNode) return;
     setContextMenu(null);
     setNewScriptName("");
-    setNewScriptType("ansible");
+    setNewScriptType("powershell");
     setNewScriptOpen(true);
   };

@@ -133,10 +133,22 @@ class JobScheduler:
     # ---------- Helpers for dispatching scripts ----------
     def _scripts_root(self) -> str:
         import os
+        # Unified Assemblies root; script paths should include top-level
+        # folder such as "Scripts" or "Ansible Playbooks".
         return os.path.abspath(
-            os.path.join(os.path.dirname(__file__), "..", "..", "Scripts")
+            os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies")
         )

+    def _is_valid_scripts_relpath(self, rel_path: str) -> bool:
+        try:
+            p = (rel_path or "").replace("\\", "/").lstrip("/")
+            if not p:
+                return False
+            top = p.split("/", 1)[0]
+            return top in ("Scripts",)
+        except Exception:
+            return False
+
     def _detect_script_type(self, filename: str) -> str:
         fn = (filename or "").lower()
         if fn.endswith(".yml"):
@@ -158,7 +170,7 @@ class JobScheduler:
         import os
         path_norm = (rel_path or "").replace("\\", "/")
         abs_path = os.path.abspath(os.path.join(scripts_root, path_norm))
-        if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+        if (not abs_path.startswith(scripts_root)) or (not self._is_valid_scripts_relpath(path_norm)) or (not os.path.isfile(abs_path)):
             return
         stype = self._detect_script_type(abs_path)
         # For now, only PowerShell is supported by agents for scheduled jobs
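The scheduler-side guard above combines three checks before dispatching: the resolved absolute path must stay under the Assemblies root, the relative path must begin with the "Scripts" top-level, and the target must be an existing file. A minimal standalone sketch of that combined check (the helper name and example paths are illustrative, not taken from the repository):

import os

def is_dispatchable(scripts_root: str, rel_path: str) -> bool:
    # Same three guards the scheduler applies: containment, top-level scope, existence.
    path_norm = (rel_path or "").replace("\\", "/")
    abs_path = os.path.abspath(os.path.join(scripts_root, path_norm))
    inside_root = abs_path.startswith(os.path.abspath(scripts_root))
    in_scripts_top = path_norm.lstrip("/").split("/", 1)[0] == "Scripts"
    return inside_root and in_scripts_top and os.path.isfile(abs_path)

# e.g. "../../etc/passwd" fails containment, "Ansible Playbooks/site.yml" fails the
# top-level scope, and "Scripts/missing.ps1" fails the existence check.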
@@ -513,7 +513,7 @@ def move_workflow():
     rel_path = (data.get("path") or "").strip()
     new_rel = (data.get("new_path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
     new_abs = os.path.abspath(os.path.join(workflows_root, new_rel))
@@ -534,7 +534,7 @@ def delete_workflow():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not abs_path.startswith(workflows_root) or not os.path.isfile(abs_path):
@@ -551,7 +551,7 @@ def delete_folder():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not abs_path.startswith(workflows_root) or not os.path.isdir(abs_path):
@@ -567,7 +567,7 @@ def create_folder():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not abs_path.startswith(workflows_root):
@@ -585,7 +585,7 @@ def rename_folder():
     rel_path = (data.get("path") or "").strip()
     new_name = (data.get("new_name") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not old_abs.startswith(workflows_root) or not os.path.isdir(old_abs):
@@ -628,11 +628,11 @@ def _extract_tab_name(obj: Dict) -> str:
 @app.route("/api/storage/load_workflows", methods=["GET"])
 def load_workflows():
     """
-    Scan <ProjectRoot>/Workflows for *.json files and return a table-friendly list.
+    Scan <ProjectRoot>/Assemblies/Workflows for *.json files and return a table-friendly list.
     """
-    # Resolve <ProjectRoot>/Workflows relative to this file at <ProjectRoot>/Data/server.py
+    # Resolve <ProjectRoot>/Assemblies/Workflows relative to this file at <ProjectRoot>/Data/server.py
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     results: List[Dict] = []
     folders: List[str] = []
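Every workflow endpoint in this commit re-roots its path resolution from <ProjectRoot>/Workflows to <ProjectRoot>/Assemblies/Workflows. A quick sketch of how that resolution plays out, assuming a hypothetical install location for server.py (the path below is invented for illustration):

import os

server_file = "/opt/Borealis/Data/Server/server.py"  # hypothetical location of server.py

workflows_root = os.path.abspath(
    os.path.join(os.path.dirname(server_file), "..", "..", "Assemblies", "Workflows")
)
print(workflows_root)  # /opt/Borealis/Assemblies/Workflows under the assumed layout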
@@ -695,7 +695,7 @@ def load_workflow():
     """Load a single workflow JSON by its relative path."""
     rel_path = request.args.get("path", "")
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))

@@ -716,7 +716,7 @@ def save_workflow():
         return jsonify({"error": "Invalid payload"}), 400

     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     os.makedirs(workflows_root, exist_ok=True)

@@ -749,7 +749,7 @@ def rename_workflow():
     rel_path = (data.get("path") or "").strip()
     new_name = (data.get("new_name") or "").strip()
     workflows_root = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Workflows")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
     )
     old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
     if not old_abs.startswith(workflows_root) or not os.path.isfile(old_abs):
@@ -780,10 +780,26 @@ def rename_workflow():
 # Scripts Storage API Endpoints
 # ---------------------------------------------
 def _scripts_root() -> str:
+    # Scripts live under Assemblies. We unify listing under Assemblies and
+    # only allow access within top-level folders: "Scripts" and "Ansible Playbooks".
     return os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "..", "Scripts")
+        os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies")
     )

+def _scripts_allowed_top_levels() -> List[str]:
+    # Scripts API is scoped strictly to the Scripts top-level.
+    return ["Scripts"]
+
+def _is_valid_scripts_relpath(rel_path: str) -> bool:
+    try:
+        p = (rel_path or "").replace("\\", "/").lstrip("/")
+        if not p:
+            return False
+        top = p.split("/", 1)[0]
+        return top in _scripts_allowed_top_levels()
+    except Exception:
+        return False
+

 def _detect_script_type(filename: str) -> str:
     fn = (filename or "").lower()
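The _is_valid_scripts_relpath() helper introduced above is what scopes the Scripts API to the "Scripts" top-level inside Assemblies. A standalone sketch of its accept/reject behavior (same logic re-stated outside the server module, with invented sample paths):

def is_valid_scripts_relpath(rel_path: str) -> bool:
    # Normalize separators, strip a leading slash, then require the first
    # path component to be an allowed top-level folder.
    p = (rel_path or "").replace("\\", "/").lstrip("/")
    if not p:
        return False
    return p.split("/", 1)[0] in ["Scripts"]

assert is_valid_scripts_relpath("Scripts/Maintenance/cleanup.ps1")
assert is_valid_scripts_relpath("Scripts\\cleanup.ps1")              # backslashes are normalized
assert not is_valid_scripts_relpath("Ansible Playbooks/site.yml")    # outside the Scripts scope
assert not is_valid_scripts_relpath("")                              # empty paths are rejected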
@@ -813,7 +829,7 @@ def _ext_for_type(script_type: str) -> str:

 @app.route("/api/scripts/list", methods=["GET"])
 def list_scripts():
-    """Scan <ProjectRoot>/Scripts for known script files and return list + folders."""
+    """Scan <ProjectRoot>/Assemblies/Scripts for script files and return list + folders."""
     scripts_root = _scripts_root()
     results: List[Dict] = []
     folders: List[str] = []
@@ -826,35 +842,39 @@ def list_scripts():
     }), 200

     exts = (".yml", ".ps1", ".bat", ".sh")
-    for root, dirs, files in os.walk(scripts_root):
-        rel_root = os.path.relpath(root, scripts_root)
-        if rel_root != ".":
-            folders.append(rel_root.replace(os.sep, "/"))
-        for fname in files:
-            if not fname.lower().endswith(exts):
-                continue
+    for top in _scripts_allowed_top_levels():
+        base_dir = os.path.join(scripts_root, top)
+        if not os.path.isdir(base_dir):
+            continue
+        for root, dirs, files in os.walk(base_dir):
+            rel_root = os.path.relpath(root, scripts_root)
+            if rel_root != ".":
+                folders.append(rel_root.replace(os.sep, "/"))
+            for fname in files:
+                if not fname.lower().endswith(exts):
+                    continue

                 full_path = os.path.join(root, fname)
                 rel_path = os.path.relpath(full_path, scripts_root)
                 parts = rel_path.split(os.sep)
                 folder_parts = parts[:-1]
                 breadcrumb_prefix = " > ".join(folder_parts) if folder_parts else ""
                 display_name = f"{breadcrumb_prefix} > {fname}" if breadcrumb_prefix else fname

                 try:
                     mtime = os.path.getmtime(full_path)
                 except Exception:
                     mtime = 0.0

                 results.append({
                     "name": display_name,
                     "breadcrumb_prefix": breadcrumb_prefix,
                     "file_name": fname,
                     "rel_path": rel_path.replace(os.sep, "/"),
                     "type": _detect_script_type(fname),
                     "last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
                     "last_edited_epoch": mtime
                 })

     results.sort(key=lambda x: x.get("last_edited_epoch", 0.0), reverse=True)

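For reference, the breadcrumb naming that list_scripts() applies to each discovered file works like this (standalone sketch, the file name is invented):

import os

rel_path = os.path.join("Scripts", "Maintenance", "Clear-TempFiles.ps1")  # hypothetical entry
parts = rel_path.split(os.sep)
folder_parts, fname = parts[:-1], parts[-1]

breadcrumb_prefix = " > ".join(folder_parts) if folder_parts else ""
display_name = f"{breadcrumb_prefix} > {fname}" if breadcrumb_prefix else fname

print(display_name)  # Scripts > Maintenance > Clear-TempFiles.ps1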
@@ -870,7 +890,7 @@ def load_script():
     rel_path = request.args.get("path", "")
     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-    if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
         return jsonify({"error": "Script not found"}), 404
     try:
         with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
@@ -908,6 +928,8 @@ def save_script():
     if desired_ext:
         rel_path = base + desired_ext
         abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
+        if not _is_valid_scripts_relpath(rel_path):
+            return jsonify({"error": "Invalid path (must be under 'Scripts')"}), 400
     else:
         if not name:
             return jsonify({"error": "Missing name"}), 400
@@ -916,7 +938,10 @@ def save_script():
         if not ext:
             desired_ext = _ext_for_type(script_type) or ".txt"
             name = os.path.splitext(name)[0] + desired_ext
-        abs_path = os.path.abspath(os.path.join(scripts_root, os.path.basename(name)))
+        # Default top-level folder is Scripts only (Playbooks handled separately)
+        if (script_type or "").lower() == "ansible":
+            return jsonify({"error": "Ansible playbooks are managed separately from scripts."}), 400
+        abs_path = os.path.abspath(os.path.join(scripts_root, "Scripts", os.path.basename(name)))

     if not abs_path.startswith(scripts_root):
         return jsonify({"error": "Invalid path"}), 400
@@ -967,7 +992,7 @@ def move_script_file():
     new_abs = os.path.abspath(os.path.join(scripts_root, new_rel))
     if not old_abs.startswith(scripts_root) or not os.path.isfile(old_abs):
         return jsonify({"error": "File not found"}), 404
-    if not new_abs.startswith(scripts_root):
+    if (not new_abs.startswith(scripts_root)) or (not _is_valid_scripts_relpath(new_rel)):
         return jsonify({"error": "Invalid destination"}), 400
     os.makedirs(os.path.dirname(new_abs), exist_ok=True)
     try:
@@ -983,7 +1008,7 @@ def delete_script_file():
     rel_path = (data.get("path") or "").strip()
     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-    if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
         return jsonify({"error": "File not found"}), 404
     try:
         os.remove(abs_path)
@@ -997,6 +1022,10 @@ def scripts_create_folder():
     data = request.get_json(silent=True) or {}
     rel_path = (data.get("path") or "").strip()
     scripts_root = _scripts_root()
+    # If caller provided a path that does not include a valid top-level,
+    # default to creating under the "Scripts" top-level for convenience.
+    if not _is_valid_scripts_relpath(rel_path):
+        rel_path = os.path.join("Scripts", rel_path) if rel_path else "Scripts"
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
     if not abs_path.startswith(scripts_root):
         return jsonify({"error": "Invalid path"}), 400
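The defaulting rule added to scripts_create_folder() keeps folder creation working for callers that still send un-prefixed paths. A small sketch of the rewrite it performs (helper name and paths are illustrative):

import os

def default_to_scripts(rel_path: str) -> str:
    # Paths that do not start in the "Scripts" top-level are re-rooted under it;
    # an empty path means the "Scripts" folder itself.
    p = (rel_path or "").replace("\\", "/").lstrip("/")
    if p and p.split("/", 1)[0] == "Scripts":
        return rel_path
    return os.path.join("Scripts", rel_path) if rel_path else "Scripts"

print(default_to_scripts("Maintenance"))          # Scripts/Maintenance (Scripts\Maintenance on Windows)
print(default_to_scripts("Scripts/Maintenance"))  # unchanged
print(default_to_scripts(""))                     # Scripts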
@@ -1013,8 +1042,11 @@ def scripts_delete_folder():
     rel_path = (data.get("path") or "").strip()
     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-    if not abs_path.startswith(scripts_root) or not os.path.isdir(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isdir(abs_path)):
         return jsonify({"error": "Folder not found"}), 404
+    rel_norm = (rel_path or "").replace("\\", "/").strip("/")
+    if rel_norm in ("Scripts", "Ansible Playbooks"):
+        return jsonify({"error": "Cannot delete top-level folder"}), 400
     try:
         shutil.rmtree(abs_path)
         return jsonify({"status": "ok"})
@@ -1033,6 +1065,9 @@ def scripts_rename_folder():
         return jsonify({"error": "Folder not found"}), 404
     if not new_name:
         return jsonify({"error": "Invalid new_name"}), 400
+    rel_norm = (rel_path or "").replace("\\", "/").strip("/")
+    if rel_norm in ("Scripts", "Ansible Playbooks"):
+        return jsonify({"error": "Cannot rename top-level folder"}), 400
     new_abs = os.path.join(os.path.dirname(old_abs), new_name)
     try:
         os.rename(old_abs, new_abs)
@@ -2138,7 +2173,7 @@ def scripts_quick_run():

     scripts_root = _scripts_root()
     abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
-    if not abs_path.startswith(scripts_root) or not os.path.isfile(abs_path):
+    if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
         return jsonify({"error": "Script not found"}), 404

     script_type = _detect_script_type(abs_path)