mirror of
https://github.com/bunny-lab-io/Borealis.git
synced 2025-10-26 15:21:57 -06:00
Unified Assembly-Based API Endpoints
This commit is contained in:
@@ -527,10 +527,16 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state";
|
||||
}
|
||||
};
|
||||
try {
|
||||
await fetch("/api/storage/save_workflow", {
|
||||
const body = {
|
||||
island: 'workflows',
|
||||
kind: 'file',
|
||||
path: payload.path,
|
||||
content: payload.workflow
|
||||
};
|
||||
await fetch("/api/assembly/create", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(payload)
|
||||
body: JSON.stringify(body)
|
||||
});
|
||||
setTabs((prev) =>
|
||||
prev.map((t) => (t.id === activeTabId ? { ...t, tab_name: name } : t))
|
||||
@@ -611,7 +617,7 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state";
|
||||
if (workflow && workflow.rel_path) {
|
||||
const folder = workflow.rel_path.split("/").slice(0, -1).join("/");
|
||||
try {
|
||||
const resp = await fetch(`/api/storage/load_workflow?path=${encodeURIComponent(workflow.rel_path)}`);
|
||||
const resp = await fetch(`/api/assembly/load?island=workflows&path=${encodeURIComponent(workflow.rel_path)}`);
|
||||
if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
|
||||
const data = await resp.json();
|
||||
setTabs([{ id: newId, tab_name: data.tab_name || workflow.name || workflow.file_name || "Workflow", nodes: data.nodes || [], edges: data.edges || [], folderPath: folder }]);
|
||||
@@ -640,7 +646,7 @@ const LOCAL_STORAGE_KEY = "borealis_persistent_state";
|
||||
if (workflow && workflow.rel_path) {
|
||||
const folder = workflow.rel_path.split("/").slice(0, -1).join("/");
|
||||
try {
|
||||
const resp = await fetch(`/api/storage/load_workflow?path=${encodeURIComponent(workflow.rel_path)}`);
|
||||
const resp = await fetch(`/api/assembly/load?island=workflows&path=${encodeURIComponent(workflow.rel_path)}`);
|
||||
if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
|
||||
const data = await resp.json();
|
||||
setTabs([{ id: newId, tab_name: data.tab_name || workflow.name || workflow.file_name || "Workflow", nodes: data.nodes || [], edges: data.edges || [], folderPath: folder }]);
|
||||
|
||||
@@ -133,10 +133,10 @@ function WorkflowsIsland({ onOpenWorkflow }) {
|
||||
}
|
||||
const newPath = target.path ? `${target.path}/${dragNode.fileName}` : dragNode.fileName;
|
||||
try {
|
||||
await fetch("/api/storage/move_workflow", {
|
||||
await fetch("/api/assembly/move", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: dragNode.path, new_path: newPath })
|
||||
body: JSON.stringify({ island: 'workflows', kind: 'file', path: dragNode.path, new_path: newPath })
|
||||
});
|
||||
loadTree();
|
||||
} catch (err) {
|
||||
@@ -147,10 +147,10 @@ function WorkflowsIsland({ onOpenWorkflow }) {
|
||||
|
||||
const loadTree = useCallback(async () => {
|
||||
try {
|
||||
const resp = await fetch("/api/storage/load_workflows");
|
||||
const resp = await fetch(`/api/assembly/list?island=workflows`);
|
||||
if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
|
||||
const data = await resp.json();
|
||||
const { root, map } = buildWorkflowTree(data.workflows || [], data.folders || []);
|
||||
const { root, map } = buildWorkflowTree(data.items || [], data.folders || []);
|
||||
setTree(root);
|
||||
setNodeMap(map);
|
||||
} catch (err) {
|
||||
@@ -211,10 +211,10 @@ function WorkflowsIsland({ onOpenWorkflow }) {
|
||||
const saveRenameWorkflow = async () => {
|
||||
if (!selectedNode) return;
|
||||
try {
|
||||
await fetch("/api/storage/rename_workflow", {
|
||||
await fetch("/api/assembly/rename", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path, new_name: renameValue })
|
||||
body: JSON.stringify({ island: 'workflows', kind: 'file', path: selectedNode.path, new_name: renameValue })
|
||||
});
|
||||
loadTree();
|
||||
} catch (err) {
|
||||
@@ -226,18 +226,18 @@ function WorkflowsIsland({ onOpenWorkflow }) {
|
||||
const saveRenameFolder = async () => {
|
||||
try {
|
||||
if (folderDialogMode === "rename" && selectedNode) {
|
||||
await fetch("/api/storage/rename_folder", {
|
||||
await fetch("/api/assembly/rename", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path, new_name: renameValue })
|
||||
body: JSON.stringify({ island: 'workflows', kind: 'folder', path: selectedNode.path, new_name: renameValue })
|
||||
});
|
||||
} else {
|
||||
const basePath = selectedNode ? selectedNode.path : "";
|
||||
const newPath = basePath ? `${basePath}/${renameValue}` : renameValue;
|
||||
await fetch("/api/storage/create_folder", {
|
||||
await fetch("/api/assembly/create", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: newPath })
|
||||
body: JSON.stringify({ island: 'workflows', kind: 'folder', path: newPath })
|
||||
});
|
||||
}
|
||||
loadTree();
|
||||
@@ -258,16 +258,16 @@ function WorkflowsIsland({ onOpenWorkflow }) {
|
||||
if (!selectedNode) return;
|
||||
try {
|
||||
if (selectedNode.isFolder) {
|
||||
await fetch("/api/storage/delete_folder", {
|
||||
await fetch("/api/assembly/delete", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path })
|
||||
body: JSON.stringify({ island: 'workflows', kind: 'folder', path: selectedNode.path })
|
||||
});
|
||||
} else {
|
||||
await fetch("/api/storage/delete_workflow", {
|
||||
await fetch("/api/assembly/delete", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path })
|
||||
body: JSON.stringify({ island: 'workflows', kind: 'file', path: selectedNode.path })
|
||||
});
|
||||
}
|
||||
loadTree();
|
||||
@@ -464,12 +464,17 @@ function ScriptsLikeIsland({
|
||||
const apiRef = useTreeViewApiRef();
|
||||
const [dragNode, setDragNode] = useState(null);
|
||||
|
||||
const island = React.useMemo(() => {
|
||||
const b = String(baseApi || '').toLowerCase();
|
||||
return b.endsWith('/api/ansible') ? 'ansible' : 'scripts';
|
||||
}, [baseApi]);
|
||||
|
||||
const loadTree = useCallback(async () => {
|
||||
try {
|
||||
const resp = await fetch(`${baseApi}/list`);
|
||||
const resp = await fetch(`/api/assembly/list?island=${encodeURIComponent(island)}`);
|
||||
if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
|
||||
const data = await resp.json();
|
||||
const { root, map } = buildFileTree(rootLabel, data.scripts || data.items || [], data.folders || []);
|
||||
const { root, map } = buildFileTree(rootLabel, data.items || [], data.folders || []);
|
||||
setTree(root);
|
||||
setNodeMap(map);
|
||||
} catch (err) {
|
||||
@@ -477,7 +482,7 @@ function ScriptsLikeIsland({
|
||||
setTree([]);
|
||||
setNodeMap({});
|
||||
}
|
||||
}, [baseApi, title, rootLabel]);
|
||||
}, [island, title, rootLabel]);
|
||||
|
||||
useEffect(() => { loadTree(); }, [loadTree]);
|
||||
|
||||
@@ -497,10 +502,10 @@ function ScriptsLikeIsland({
|
||||
}
|
||||
const newPath = target.path ? `${target.path}/${dragNode.fileName}` : dragNode.fileName;
|
||||
try {
|
||||
await fetch(`${baseApi}/move_file`, {
|
||||
await fetch(`/api/assembly/move`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: dragNode.path, new_path: newPath })
|
||||
body: JSON.stringify({ island, kind: 'file', path: dragNode.path, new_path: newPath })
|
||||
});
|
||||
loadTree();
|
||||
} catch (err) {
|
||||
@@ -519,10 +524,13 @@ function ScriptsLikeIsland({
|
||||
|
||||
const saveRenameFile = async () => {
|
||||
try {
|
||||
const res = await fetch(`${baseApi}/rename_file`, {
|
||||
const payload = { island, kind: 'file', path: selectedNode.path, new_name: renameValue };
|
||||
// preserve extension for scripts when no extension provided
|
||||
if (selectedNode?.meta?.type) payload.type = selectedNode.meta.type;
|
||||
const res = await fetch(`/api/assembly/rename`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path, new_name: renameValue })
|
||||
body: JSON.stringify(payload)
|
||||
});
|
||||
const data = await res.json();
|
||||
if (!res.ok) throw new Error(data?.error || `HTTP ${res.status}`);
|
||||
@@ -537,18 +545,18 @@ function ScriptsLikeIsland({
|
||||
const saveRenameFolder = async () => {
|
||||
try {
|
||||
if (folderDialogMode === "rename" && selectedNode) {
|
||||
await fetch(`${baseApi}/rename_folder`, {
|
||||
await fetch(`/api/assembly/rename`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path, new_name: renameValue })
|
||||
body: JSON.stringify({ island, kind: 'folder', path: selectedNode.path, new_name: renameValue })
|
||||
});
|
||||
} else {
|
||||
const basePath = selectedNode ? selectedNode.path : "";
|
||||
const newPath = basePath ? `${basePath}/${renameValue}` : renameValue;
|
||||
await fetch(`${baseApi}/create_folder`, {
|
||||
await fetch(`/api/assembly/create`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: newPath })
|
||||
body: JSON.stringify({ island, kind: 'folder', path: newPath })
|
||||
});
|
||||
}
|
||||
setRenameFolderOpen(false);
|
||||
@@ -563,16 +571,16 @@ function ScriptsLikeIsland({
|
||||
if (!selectedNode) return;
|
||||
try {
|
||||
if (selectedNode.isFolder) {
|
||||
await fetch(`${baseApi}/delete_folder`, {
|
||||
await fetch(`/api/assembly/delete`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path })
|
||||
body: JSON.stringify({ island, kind: 'folder', path: selectedNode.path })
|
||||
});
|
||||
} else {
|
||||
await fetch(`${baseApi}/delete_file`, {
|
||||
await fetch(`/api/assembly/delete`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ path: selectedNode.path })
|
||||
body: JSON.stringify({ island, kind: 'file', path: selectedNode.path })
|
||||
});
|
||||
}
|
||||
setDeleteOpen(false);
|
||||
@@ -593,11 +601,11 @@ function ScriptsLikeIsland({
|
||||
else name += '.ps1';
|
||||
}
|
||||
const newPath = folder ? `${folder}/${name}` : name;
|
||||
// create empty file by saving blank content
|
||||
const res = await fetch(`${baseApi}/save`, {
|
||||
// create empty file via unified API
|
||||
const res = await fetch(`/api/assembly/create`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ name: newPath, content: "" })
|
||||
body: JSON.stringify({ island, kind: 'file', path: newPath, content: "", type: island === 'ansible' ? 'ansible' : 'powershell' })
|
||||
});
|
||||
if (!res.ok) {
|
||||
const data = await res.json().catch(() => ({}));
|
||||
|
||||
@@ -87,7 +87,6 @@ function NewItemDialog({ open, name, type, typeOptions, onChangeName, onChangeTy
|
||||
|
||||
export default function ScriptEditor({ mode = "scripts", initialPath = "", onConsumedInitialPath, onSaved }) {
|
||||
const isAnsible = mode === "ansible";
|
||||
const baseApi = isAnsible ? "/api/ansible" : "/api/scripts";
|
||||
const TYPE_OPTIONS = useMemo(() => (isAnsible ? TYPE_OPTIONS_ALL.filter(o => o.key === 'ansible') : TYPE_OPTIONS_ALL.filter(o => o.key !== 'ansible')), [isAnsible]);
|
||||
|
||||
const [currentPath, setCurrentPath] = useState("");
|
||||
@@ -128,18 +127,36 @@ export default function ScriptEditor({ mode = "scripts", initialPath = "", onCon
|
||||
setNewOpen(true);
|
||||
return;
|
||||
}
|
||||
const normalizedName = currentPath ? undefined : ensureExt(fileName, type);
|
||||
const payload = { path: currentPath || undefined, name: normalizedName, content: code, type };
|
||||
const island = isAnsible ? 'ansible' : 'scripts';
|
||||
const normalizedName = currentPath ? currentPath : ensureExt(fileName, type);
|
||||
try {
|
||||
const resp = await fetch(`${baseApi}/save`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) });
|
||||
const data = await resp.json();
|
||||
if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`);
|
||||
if (data.rel_path) {
|
||||
setCurrentPath(data.rel_path);
|
||||
const fname = data.rel_path.split('/').pop();
|
||||
setFileName(fname);
|
||||
setType(typeFromFilename(fname));
|
||||
// If we already have a path, edit; otherwise create
|
||||
if (currentPath) {
|
||||
const resp = await fetch(`/api/assembly/edit`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ island, path: currentPath, content: code })
|
||||
});
|
||||
if (!resp.ok) {
|
||||
const data = await resp.json().catch(() => ({}));
|
||||
throw new Error(data?.error || `HTTP ${resp.status}`);
|
||||
}
|
||||
onSaved && onSaved();
|
||||
} else {
|
||||
const resp = await fetch(`/api/assembly/create`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ island, kind: 'file', path: normalizedName, content: code, type })
|
||||
});
|
||||
const data = await resp.json();
|
||||
if (!resp.ok) throw new Error(data?.error || `HTTP ${resp.status}`);
|
||||
if (data.rel_path) {
|
||||
setCurrentPath(data.rel_path);
|
||||
const fname = data.rel_path.split('/').pop();
|
||||
setFileName(fname);
|
||||
setType(typeFromFilename(fname));
|
||||
onSaved && onSaved();
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to save:", err);
|
||||
@@ -148,8 +165,9 @@ export default function ScriptEditor({ mode = "scripts", initialPath = "", onCon
|
||||
|
||||
const saveRenameFile = async () => {
|
||||
try {
|
||||
const island = isAnsible ? 'ansible' : 'scripts';
|
||||
const finalName = ensureExt(renameValue, type);
|
||||
const res = await fetch(`${baseApi}/rename_file`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ path: currentPath, new_name: finalName, type }) });
|
||||
const res = await fetch(`/api/assembly/rename`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ island, kind: 'file', path: currentPath, new_name: finalName, type }) });
|
||||
const data = await res.json();
|
||||
if (!res.ok) throw new Error(data?.error || `HTTP ${res.status}`);
|
||||
setCurrentPath(data.rel_path || currentPath);
|
||||
@@ -201,4 +219,3 @@ export default function ScriptEditor({ mode = "scripts", initialPath = "", onCon
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -366,19 +366,19 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
|
||||
setAddCompOpen(true);
|
||||
try {
|
||||
// scripts
|
||||
const sResp = await fetch("/api/scripts/list");
|
||||
const sResp = await fetch("/api/assembly/list?island=scripts");
|
||||
if (sResp.ok) {
|
||||
const sData = await sResp.json();
|
||||
const { root, map } = buildScriptTree(sData.scripts || [], sData.folders || []);
|
||||
const { root, map } = buildScriptTree(sData.items || [], sData.folders || []);
|
||||
setScriptTree(root); setScriptMap(map);
|
||||
} else { setScriptTree([]); setScriptMap({}); }
|
||||
} catch { setScriptTree([]); setScriptMap({}); }
|
||||
try {
|
||||
// workflows
|
||||
const wResp = await fetch("/api/storage/load_workflows");
|
||||
const wResp = await fetch("/api/assembly/list?island=workflows");
|
||||
if (wResp.ok) {
|
||||
const wData = await wResp.json();
|
||||
const { root, map } = buildWorkflowTree(wData.workflows || [], wData.folders || []);
|
||||
const { root, map } = buildWorkflowTree(wData.items || [], wData.folders || []);
|
||||
setWorkflowTree(root); setWorkflowMap(map);
|
||||
} else { setWorkflowTree([]); setWorkflowMap({}); }
|
||||
} catch { setWorkflowTree([]); setWorkflowMap({}); }
|
||||
@@ -391,7 +391,8 @@ export default function CreateJob({ onCancel, onCreated, initialJob = null }) {
|
||||
if (compTab === "scripts" && node.script) {
|
||||
setComponents((prev) => [
|
||||
...prev,
|
||||
{ type: "script", path: node.path, name: node.fileName || node.label, description: node.path }
|
||||
// Store path relative to Assemblies root with 'Scripts/' prefix for scheduler compatibility
|
||||
{ type: "script", path: (node.path.startsWith('Scripts/') ? node.path : `Scripts/${node.path}`), name: node.fileName || node.label, description: node.path }
|
||||
]);
|
||||
setSelectedNodeId("");
|
||||
return true;
|
||||
|
||||
@@ -83,10 +83,10 @@ export default function QuickJob({ open, onClose, hostnames = [] }) {
|
||||
|
||||
const loadTree = useCallback(async () => {
|
||||
try {
|
||||
const resp = await fetch("/api/scripts/list");
|
||||
const resp = await fetch("/api/assembly/list?island=scripts");
|
||||
if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
|
||||
const data = await resp.json();
|
||||
const { root, map } = buildTree(data.scripts || [], data.folders || []);
|
||||
const { root, map } = buildTree(data.items || [], data.folders || []);
|
||||
setTree(root);
|
||||
setNodeMap(map);
|
||||
} catch (err) {
|
||||
@@ -140,10 +140,12 @@ export default function QuickJob({ open, onClose, hostnames = [] }) {
|
||||
setRunning(true);
|
||||
setError("");
|
||||
try {
|
||||
// quick_run expects a path relative to Assemblies root with 'Scripts/' prefix
|
||||
const script_path = selectedPath.startsWith('Scripts/') ? selectedPath : `Scripts/${selectedPath}`;
|
||||
const resp = await fetch("/api/scripts/quick_run", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ script_path: selectedPath, hostnames, run_mode: runAsCurrentUser ? "current_user" : "system" })
|
||||
body: JSON.stringify({ script_path, hostnames, run_mode: runAsCurrentUser ? "current_user" : "system" })
|
||||
});
|
||||
const data = await resp.json();
|
||||
if (!resp.ok) throw new Error(data.error || `HTTP ${resp.status}`);
|
||||
|
||||
@@ -15,7 +15,7 @@ import time
|
||||
import os # To Read Production ReactJS Server Folder
|
||||
import json # For reading workflow JSON files
|
||||
import shutil # For moving workflow files and folders
|
||||
from typing import List, Dict
|
||||
from typing import List, Dict, Tuple, Optional
|
||||
import sqlite3
|
||||
import io
|
||||
|
||||
@@ -505,99 +505,7 @@ def ocr_endpoint():
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
# New storage management endpoints
|
||||
|
||||
@app.route("/api/storage/move_workflow", methods=["POST"])
|
||||
def move_workflow():
|
||||
data = request.get_json(silent=True) or {}
|
||||
rel_path = (data.get("path") or "").strip()
|
||||
new_rel = (data.get("new_path") or "").strip()
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
new_abs = os.path.abspath(os.path.join(workflows_root, new_rel))
|
||||
if not old_abs.startswith(workflows_root) or not os.path.isfile(old_abs):
|
||||
return jsonify({"error": "Workflow not found"}), 404
|
||||
if not new_abs.startswith(workflows_root):
|
||||
return jsonify({"error": "Invalid destination"}), 400
|
||||
os.makedirs(os.path.dirname(new_abs), exist_ok=True)
|
||||
try:
|
||||
shutil.move(old_abs, new_abs)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/storage/delete_workflow", methods=["POST"])
|
||||
def delete_workflow():
|
||||
data = request.get_json(silent=True) or {}
|
||||
rel_path = (data.get("path") or "").strip()
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
if not abs_path.startswith(workflows_root) or not os.path.isfile(abs_path):
|
||||
return jsonify({"error": "Workflow not found"}), 404
|
||||
try:
|
||||
os.remove(abs_path)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/storage/delete_folder", methods=["POST"])
|
||||
def delete_folder():
|
||||
data = request.get_json(silent=True) or {}
|
||||
rel_path = (data.get("path") or "").strip()
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
if not abs_path.startswith(workflows_root) or not os.path.isdir(abs_path):
|
||||
return jsonify({"error": "Folder not found"}), 404
|
||||
try:
|
||||
shutil.rmtree(abs_path)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
@app.route("/api/storage/create_folder", methods=["POST"])
|
||||
def create_folder():
|
||||
data = request.get_json(silent=True) or {}
|
||||
rel_path = (data.get("path") or "").strip()
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
if not abs_path.startswith(workflows_root):
|
||||
return jsonify({"error": "Invalid path"}), 400
|
||||
try:
|
||||
os.makedirs(abs_path, exist_ok=True)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/storage/rename_folder", methods=["POST"])
|
||||
def rename_folder():
|
||||
data = request.get_json(silent=True) or {}
|
||||
rel_path = (data.get("path") or "").strip()
|
||||
new_name = (data.get("new_name") or "").strip()
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
if not old_abs.startswith(workflows_root) or not os.path.isdir(old_abs):
|
||||
return jsonify({"error": "Folder not found"}), 404
|
||||
if not new_name:
|
||||
return jsonify({"error": "Invalid new_name"}), 400
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), new_name)
|
||||
try:
|
||||
os.rename(old_abs, new_abs)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
# unified assembly endpoints supersede prior storage workflow endpoints
|
||||
|
||||
# ---------------------------------------------
|
||||
# Borealis Storage API Endpoints
|
||||
@@ -625,153 +533,381 @@ def _extract_tab_name(obj: Dict) -> str:
|
||||
return val.strip()
|
||||
return ""
|
||||
|
||||
@app.route("/api/storage/load_workflows", methods=["GET"])
|
||||
def load_workflows():
|
||||
"""
|
||||
Scan <ProjectRoot>/Assemblies/Workflows for *.json files and return a table-friendly list.
|
||||
"""
|
||||
# Resolve <ProjectRoot>/Assemblies/Workflows relative to this file at <ProjectRoot>/Data/server.py
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
# unified assembly endpoints provide listing instead
|
||||
|
||||
|
||||
# superseded by /api/assembly/load
|
||||
|
||||
|
||||
# superseded by /api/assembly/create and /api/assembly/edit
|
||||
|
||||
|
||||
# superseded by /api/assembly/rename
|
||||
|
||||
|
||||
# ---------------------------------------------
|
||||
# Unified Assembly API (Workflows, Scripts, Playbooks)
|
||||
# ---------------------------------------------
|
||||
|
||||
def _assemblies_root() -> str:
|
||||
return os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies")
|
||||
)
|
||||
results: List[Dict] = []
|
||||
folders: List[str] = []
|
||||
|
||||
if not os.path.isdir(workflows_root):
|
||||
return jsonify({
|
||||
"root": workflows_root,
|
||||
"workflows": [],
|
||||
"warning": "Workflows directory not found."
|
||||
}), 200
|
||||
|
||||
for root, dirs, files in os.walk(workflows_root):
|
||||
rel_root = os.path.relpath(root, workflows_root)
|
||||
if rel_root != ".":
|
||||
folders.append(rel_root.replace(os.sep, "/"))
|
||||
for fname in files:
|
||||
if not fname.lower().endswith(".json"):
|
||||
continue
|
||||
|
||||
full_path = os.path.join(root, fname)
|
||||
rel_path = os.path.relpath(full_path, workflows_root)
|
||||
|
||||
parts = rel_path.split(os.sep)
|
||||
folder_parts = parts[:-1]
|
||||
breadcrumb_prefix = " > ".join(folder_parts) if folder_parts else ""
|
||||
display_name = f"{breadcrumb_prefix} > {fname}" if breadcrumb_prefix else fname
|
||||
|
||||
obj = _safe_read_json(full_path)
|
||||
tab_name = _extract_tab_name(obj)
|
||||
|
||||
try:
|
||||
mtime = os.path.getmtime(full_path)
|
||||
except Exception:
|
||||
mtime = 0.0
|
||||
last_edited_str = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime))
|
||||
|
||||
results.append({
|
||||
"name": display_name,
|
||||
"breadcrumb_prefix": breadcrumb_prefix,
|
||||
"file_name": fname,
|
||||
"rel_path": rel_path.replace(os.sep, "/"),
|
||||
"tab_name": tab_name,
|
||||
"description": "",
|
||||
"category": "",
|
||||
"last_edited": last_edited_str,
|
||||
"last_edited_epoch": mtime
|
||||
})
|
||||
|
||||
results.sort(key=lambda x: x.get("last_edited_epoch", 0.0), reverse=True)
|
||||
|
||||
return jsonify({
|
||||
"root": workflows_root,
|
||||
"workflows": results,
|
||||
"folders": folders
|
||||
})
|
||||
|
||||
|
||||
@app.route("/api/storage/load_workflow", methods=["GET"])
|
||||
def load_workflow():
|
||||
"""Load a single workflow JSON by its relative path."""
|
||||
rel_path = request.args.get("path", "")
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
|
||||
if not abs_path.startswith(workflows_root) or not os.path.isfile(abs_path):
|
||||
return jsonify({"error": "Workflow not found"}), 404
|
||||
|
||||
obj = _safe_read_json(abs_path)
|
||||
return jsonify(obj)
|
||||
_ISLAND_DIR_MAP = {
|
||||
# normalized -> directory name
|
||||
"workflows": "Workflows",
|
||||
"workflow": "Workflows",
|
||||
"scripts": "Scripts",
|
||||
"script": "Scripts",
|
||||
"ansible": "Ansible_Playbooks",
|
||||
"ansible_playbooks": "Ansible_Playbooks",
|
||||
"ansible-playbooks": "Ansible_Playbooks",
|
||||
"playbooks": "Ansible_Playbooks",
|
||||
}
|
||||
|
||||
|
||||
@app.route("/api/storage/save_workflow", methods=["POST"])
|
||||
def save_workflow():
|
||||
def _normalize_relpath(p: str) -> str:
|
||||
return (p or "").replace("\\", "/").strip("/")
|
||||
|
||||
|
||||
def _resolve_island_root(island: str) -> Optional[str]:
|
||||
key = (island or "").strip().lower()
|
||||
sub = _ISLAND_DIR_MAP.get(key)
|
||||
if not sub:
|
||||
return None
|
||||
root = os.path.join(_assemblies_root(), sub)
|
||||
return os.path.abspath(root)
|
||||
|
||||
|
||||
def _resolve_assembly_path(island: str, rel_path: str) -> Tuple[str, str, str]:
|
||||
root = _resolve_island_root(island)
|
||||
if not root:
|
||||
raise ValueError("invalid island")
|
||||
rel_norm = _normalize_relpath(rel_path)
|
||||
abs_path = os.path.abspath(os.path.join(root, rel_norm))
|
||||
if not abs_path.startswith(root):
|
||||
raise ValueError("invalid path")
|
||||
return root, abs_path, rel_norm
|
||||
|
||||
|
||||
def _default_ext_for_island(island: str, item_type: str = "") -> str:
|
||||
isl = (island or "").lower().strip()
|
||||
if isl in ("workflows", "workflow"):
|
||||
return ".json"
|
||||
if isl in ("ansible", "ansible_playbooks", "ansible-playbooks", "playbooks"):
|
||||
return ".yml"
|
||||
# scripts: use hint or default to .ps1
|
||||
t = (item_type or "").lower().strip()
|
||||
if t == "bash":
|
||||
return ".sh"
|
||||
if t == "batch":
|
||||
return ".bat"
|
||||
if t == "powershell":
|
||||
return ".ps1"
|
||||
return ".ps1"
|
||||
|
||||
|
||||
@app.route("/api/assembly/create", methods=["POST"])
|
||||
def assembly_create():
|
||||
data = request.get_json(silent=True) or {}
|
||||
rel_path = (data.get("path") or "").strip()
|
||||
name = (data.get("name") or "").strip()
|
||||
workflow = data.get("workflow")
|
||||
if not isinstance(workflow, dict):
|
||||
return jsonify({"error": "Invalid payload"}), 400
|
||||
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
os.makedirs(workflows_root, exist_ok=True)
|
||||
|
||||
if rel_path:
|
||||
if not rel_path.lower().endswith(".json"):
|
||||
rel_path += ".json"
|
||||
abs_path = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
else:
|
||||
if not name:
|
||||
return jsonify({"error": "Invalid payload"}), 400
|
||||
if not name.lower().endswith(".json"):
|
||||
name += ".json"
|
||||
abs_path = os.path.abspath(os.path.join(workflows_root, os.path.basename(name)))
|
||||
|
||||
if not abs_path.startswith(workflows_root):
|
||||
return jsonify({"error": "Invalid path"}), 400
|
||||
|
||||
os.makedirs(os.path.dirname(abs_path), exist_ok=True)
|
||||
island = (data.get("island") or "").strip()
|
||||
kind = (data.get("kind") or "").strip().lower() # file | folder
|
||||
path = (data.get("path") or "").strip()
|
||||
content = data.get("content")
|
||||
item_type = (data.get("type") or "").strip().lower() # optional hint for scripts
|
||||
try:
|
||||
with open(abs_path, "w", encoding="utf-8") as fh:
|
||||
json.dump(workflow, fh, indent=2)
|
||||
return jsonify({"status": "ok"})
|
||||
root, abs_path, rel_norm = _resolve_assembly_path(island, path)
|
||||
if not rel_norm:
|
||||
return jsonify({"error": "path required"}), 400
|
||||
if kind == "folder":
|
||||
os.makedirs(abs_path, exist_ok=True)
|
||||
return jsonify({"status": "ok"})
|
||||
elif kind == "file":
|
||||
base, ext = os.path.splitext(abs_path)
|
||||
if not ext:
|
||||
abs_path = base + _default_ext_for_island(island, item_type)
|
||||
os.makedirs(os.path.dirname(abs_path), exist_ok=True)
|
||||
# Workflows expect JSON; others raw text
|
||||
if (island or "").lower() in ("workflows", "workflow"):
|
||||
obj = content
|
||||
if isinstance(obj, str):
|
||||
try:
|
||||
obj = json.loads(obj)
|
||||
except Exception:
|
||||
obj = {}
|
||||
if not isinstance(obj, dict):
|
||||
obj = {}
|
||||
# seed tab_name based on filename when empty
|
||||
base_name = os.path.splitext(os.path.basename(abs_path))[0]
|
||||
if "tab_name" not in obj:
|
||||
obj["tab_name"] = base_name
|
||||
with open(abs_path, "w", encoding="utf-8") as fh:
|
||||
json.dump(obj, fh, indent=2)
|
||||
else:
|
||||
with open(abs_path, "w", encoding="utf-8", newline="\n") as fh:
|
||||
fh.write(str(content or ""))
|
||||
rel_new = os.path.relpath(abs_path, root).replace(os.sep, "/")
|
||||
return jsonify({"status": "ok", "rel_path": rel_new})
|
||||
else:
|
||||
return jsonify({"error": "invalid kind"}), 400
|
||||
except ValueError as ve:
|
||||
return jsonify({"error": str(ve)}), 400
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/storage/rename_workflow", methods=["POST"])
|
||||
def rename_workflow():
|
||||
@app.route("/api/assembly/edit", methods=["POST"])
|
||||
def assembly_edit():
|
||||
data = request.get_json(silent=True) or {}
|
||||
rel_path = (data.get("path") or "").strip()
|
||||
new_name = (data.get("new_name") or "").strip()
|
||||
workflows_root = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), "..", "..", "Assemblies", "Workflows")
|
||||
)
|
||||
old_abs = os.path.abspath(os.path.join(workflows_root, rel_path))
|
||||
if not old_abs.startswith(workflows_root) or not os.path.isfile(old_abs):
|
||||
return jsonify({"error": "Workflow not found"}), 404
|
||||
if not new_name:
|
||||
return jsonify({"error": "Invalid new_name"}), 400
|
||||
if not new_name.lower().endswith(".json"):
|
||||
new_name += ".json"
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), os.path.basename(new_name))
|
||||
base_name = os.path.splitext(os.path.basename(new_abs))[0]
|
||||
island = (data.get("island") or "").strip()
|
||||
path = (data.get("path") or "").strip()
|
||||
content = data.get("content")
|
||||
try:
|
||||
root, abs_path, _ = _resolve_assembly_path(island, path)
|
||||
if not os.path.isfile(abs_path):
|
||||
return jsonify({"error": "file not found"}), 404
|
||||
if (island or "").lower() in ("workflows", "workflow"):
|
||||
obj = content
|
||||
if isinstance(obj, str):
|
||||
obj = json.loads(obj)
|
||||
if not isinstance(obj, dict):
|
||||
return jsonify({"error": "invalid content for workflow"}), 400
|
||||
with open(abs_path, "w", encoding="utf-8") as fh:
|
||||
json.dump(obj, fh, indent=2)
|
||||
else:
|
||||
with open(abs_path, "w", encoding="utf-8", newline="\n") as fh:
|
||||
fh.write(str(content or ""))
|
||||
return jsonify({"status": "ok"})
|
||||
except ValueError as ve:
|
||||
return jsonify({"error": str(ve)}), 400
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/assembly/rename", methods=["POST"])
|
||||
def assembly_rename():
|
||||
data = request.get_json(silent=True) or {}
|
||||
island = (data.get("island") or "").strip()
|
||||
kind = (data.get("kind") or "").strip().lower()
|
||||
path = (data.get("path") or "").strip()
|
||||
new_name = (data.get("new_name") or "").strip()
|
||||
item_type = (data.get("type") or "").strip().lower()
|
||||
if not new_name:
|
||||
return jsonify({"error": "new_name required"}), 400
|
||||
try:
|
||||
root, old_abs, _ = _resolve_assembly_path(island, path)
|
||||
if kind == "folder":
|
||||
if not os.path.isdir(old_abs):
|
||||
return jsonify({"error": "folder not found"}), 404
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), new_name)
|
||||
elif kind == "file":
|
||||
if not os.path.isfile(old_abs):
|
||||
return jsonify({"error": "file not found"}), 404
|
||||
base, ext = os.path.splitext(new_name)
|
||||
if not ext:
|
||||
new_name = base + _default_ext_for_island(island, item_type)
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), os.path.basename(new_name))
|
||||
else:
|
||||
return jsonify({"error": "invalid kind"}), 400
|
||||
|
||||
if not os.path.abspath(new_abs).startswith(root):
|
||||
return jsonify({"error": "invalid destination"}), 400
|
||||
|
||||
os.rename(old_abs, new_abs)
|
||||
obj = _safe_read_json(new_abs)
|
||||
for k in ["tabName", "tab_name", "name", "title"]:
|
||||
if k in obj:
|
||||
obj[k] = base_name
|
||||
if "tab_name" not in obj:
|
||||
obj["tab_name"] = base_name
|
||||
with open(new_abs, "w", encoding="utf-8") as fh:
|
||||
json.dump(obj, fh, indent=2)
|
||||
rel_new = os.path.relpath(new_abs, workflows_root).replace(os.sep, "/")
|
||||
|
||||
# If a workflow file is renamed, update internal name fields
|
||||
if kind == "file" and (island or "").lower() in ("workflows", "workflow"):
|
||||
try:
|
||||
obj = _safe_read_json(new_abs)
|
||||
base_name = os.path.splitext(os.path.basename(new_abs))[0]
|
||||
for k in ["tabName", "tab_name", "name", "title"]:
|
||||
if k in obj:
|
||||
obj[k] = base_name
|
||||
if "tab_name" not in obj:
|
||||
obj["tab_name"] = base_name
|
||||
with open(new_abs, "w", encoding="utf-8") as fh:
|
||||
json.dump(obj, fh, indent=2)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
rel_new = os.path.relpath(new_abs, root).replace(os.sep, "/")
|
||||
return jsonify({"status": "ok", "rel_path": rel_new})
|
||||
except ValueError as ve:
|
||||
return jsonify({"error": str(ve)}), 400
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/assembly/move", methods=["POST"])
|
||||
def assembly_move():
|
||||
data = request.get_json(silent=True) or {}
|
||||
island = (data.get("island") or "").strip()
|
||||
path = (data.get("path") or "").strip()
|
||||
new_path = (data.get("new_path") or "").strip()
|
||||
kind = (data.get("kind") or "").strip().lower() # optional; used for existence checks
|
||||
try:
|
||||
root, old_abs, _ = _resolve_assembly_path(island, path)
|
||||
_, new_abs, _ = _resolve_assembly_path(island, new_path)
|
||||
if kind == "folder":
|
||||
if not os.path.isdir(old_abs):
|
||||
return jsonify({"error": "folder not found"}), 404
|
||||
else:
|
||||
if not os.path.isfile(old_abs):
|
||||
return jsonify({"error": "file not found"}), 404
|
||||
os.makedirs(os.path.dirname(new_abs), exist_ok=True)
|
||||
shutil.move(old_abs, new_abs)
|
||||
return jsonify({"status": "ok"})
|
||||
except ValueError as ve:
|
||||
return jsonify({"error": str(ve)}), 400
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/assembly/delete", methods=["POST"])
|
||||
def assembly_delete():
|
||||
data = request.get_json(silent=True) or {}
|
||||
island = (data.get("island") or "").strip()
|
||||
kind = (data.get("kind") or "").strip().lower()
|
||||
path = (data.get("path") or "").strip()
|
||||
try:
|
||||
root, abs_path, rel_norm = _resolve_assembly_path(island, path)
|
||||
if not rel_norm:
|
||||
return jsonify({"error": "cannot delete root"}), 400
|
||||
if kind == "folder":
|
||||
if not os.path.isdir(abs_path):
|
||||
return jsonify({"error": "folder not found"}), 404
|
||||
shutil.rmtree(abs_path)
|
||||
elif kind == "file":
|
||||
if not os.path.isfile(abs_path):
|
||||
return jsonify({"error": "file not found"}), 404
|
||||
os.remove(abs_path)
|
||||
else:
|
||||
return jsonify({"error": "invalid kind"}), 400
|
||||
return jsonify({"status": "ok"})
|
||||
except ValueError as ve:
|
||||
return jsonify({"error": str(ve)}), 400
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/assembly/list", methods=["GET"])
|
||||
def assembly_list():
|
||||
"""List files and folders for a given island (workflows|scripts|ansible)."""
|
||||
island = (request.args.get("island") or "").strip()
|
||||
try:
|
||||
root = _resolve_island_root(island)
|
||||
if not root:
|
||||
return jsonify({"error": "invalid island"}), 400
|
||||
os.makedirs(root, exist_ok=True)
|
||||
|
||||
items: List[Dict] = []
|
||||
folders: List[str] = []
|
||||
|
||||
isl = (island or "").lower()
|
||||
if isl in ("workflows", "workflow"):
|
||||
exts = (".json",)
|
||||
for r, dirs, files in os.walk(root):
|
||||
rel_root = os.path.relpath(r, root)
|
||||
if rel_root != ".":
|
||||
folders.append(rel_root.replace(os.sep, "/"))
|
||||
for fname in files:
|
||||
if not fname.lower().endswith(exts):
|
||||
continue
|
||||
fp = os.path.join(r, fname)
|
||||
rel_path = os.path.relpath(fp, root).replace(os.sep, "/")
|
||||
try:
|
||||
mtime = os.path.getmtime(fp)
|
||||
except Exception:
|
||||
mtime = 0.0
|
||||
obj = _safe_read_json(fp)
|
||||
tab = _extract_tab_name(obj)
|
||||
items.append({
|
||||
"file_name": fname,
|
||||
"rel_path": rel_path,
|
||||
"type": "workflow",
|
||||
"tab_name": tab,
|
||||
"last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
|
||||
"last_edited_epoch": mtime
|
||||
})
|
||||
elif isl in ("scripts", "script"):
|
||||
exts = (".ps1", ".bat", ".sh")
|
||||
for r, dirs, files in os.walk(root):
|
||||
rel_root = os.path.relpath(r, root)
|
||||
if rel_root != ".":
|
||||
folders.append(rel_root.replace(os.sep, "/"))
|
||||
for fname in files:
|
||||
if not fname.lower().endswith(exts):
|
||||
continue
|
||||
fp = os.path.join(r, fname)
|
||||
rel_path = os.path.relpath(fp, root).replace(os.sep, "/")
|
||||
try:
|
||||
mtime = os.path.getmtime(fp)
|
||||
except Exception:
|
||||
mtime = 0.0
|
||||
items.append({
|
||||
"file_name": fname,
|
||||
"rel_path": rel_path,
|
||||
"type": _detect_script_type(fname),
|
||||
"last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
|
||||
"last_edited_epoch": mtime
|
||||
})
|
||||
else: # ansible
|
||||
exts = (".yml",)
|
||||
for r, dirs, files in os.walk(root):
|
||||
rel_root = os.path.relpath(r, root)
|
||||
if rel_root != ".":
|
||||
folders.append(rel_root.replace(os.sep, "/"))
|
||||
for fname in files:
|
||||
if not fname.lower().endswith(exts):
|
||||
continue
|
||||
fp = os.path.join(r, fname)
|
||||
rel_path = os.path.relpath(fp, root).replace(os.sep, "/")
|
||||
try:
|
||||
mtime = os.path.getmtime(fp)
|
||||
except Exception:
|
||||
mtime = 0.0
|
||||
items.append({
|
||||
"file_name": fname,
|
||||
"rel_path": rel_path,
|
||||
"type": "ansible",
|
||||
"last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
|
||||
"last_edited_epoch": mtime
|
||||
})
|
||||
|
||||
items.sort(key=lambda x: x.get("last_edited_epoch", 0.0), reverse=True)
|
||||
return jsonify({"root": root, "items": items, "folders": folders})
|
||||
except ValueError as ve:
|
||||
return jsonify({"error": str(ve)}), 400
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/assembly/load", methods=["GET"])
|
||||
def assembly_load():
|
||||
"""Load a file for a given island. Returns workflow JSON for workflows, and text content for others."""
|
||||
island = (request.args.get("island") or "").strip()
|
||||
rel_path = (request.args.get("path") or "").strip()
|
||||
try:
|
||||
root, abs_path, _ = _resolve_assembly_path(island, rel_path)
|
||||
if not os.path.isfile(abs_path):
|
||||
return jsonify({"error": "file not found"}), 404
|
||||
isl = (island or "").lower()
|
||||
if isl in ("workflows", "workflow"):
|
||||
obj = _safe_read_json(abs_path)
|
||||
return jsonify(obj)
|
||||
else:
|
||||
with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
|
||||
content = fh.read()
|
||||
return jsonify({
|
||||
"file_name": os.path.basename(abs_path),
|
||||
"rel_path": os.path.relpath(abs_path, root).replace(os.sep, "/"),
|
||||
"type": ("ansible" if isl.startswith("ansible") else _detect_script_type(abs_path)),
|
||||
"content": content
|
||||
})
|
||||
except ValueError as ve:
|
||||
return jsonify({"error": str(ve)}), 400
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
@@ -827,6 +963,9 @@ def _ext_for_type(script_type: str) -> str:
|
||||
return ""
|
||||
|
||||
|
||||
"""
|
||||
Legacy scripts endpoints removed in favor of unified assembly APIs.
|
||||
"""
|
||||
@app.route("/api/scripts/list", methods=["GET"])
|
||||
def list_scripts():
|
||||
"""Scan <ProjectRoot>/Assemblies/Scripts for script files and return list + folders."""
|
||||
@@ -878,11 +1017,7 @@ def list_scripts():
|
||||
|
||||
results.sort(key=lambda x: x.get("last_edited_epoch", 0.0), reverse=True)
|
||||
|
||||
return jsonify({
|
||||
"root": scripts_root,
|
||||
"scripts": results,
|
||||
"folders": folders
|
||||
})
|
||||
return jsonify({"error": "deprecated; use /api/assembly/list?island=scripts"}), 410
|
||||
|
||||
|
||||
@app.route("/api/scripts/load", methods=["GET"])
|
||||
@@ -895,12 +1030,7 @@ def load_script():
|
||||
try:
|
||||
with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
|
||||
content = fh.read()
|
||||
return jsonify({
|
||||
"file_name": os.path.basename(abs_path),
|
||||
"rel_path": os.path.relpath(abs_path, scripts_root).replace(os.sep, "/"),
|
||||
"type": _detect_script_type(abs_path),
|
||||
"content": content
|
||||
})
|
||||
return jsonify({"error": "deprecated; use /api/assembly/load?island=scripts&path=..."}), 410
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
@@ -947,13 +1077,7 @@ def save_script():
|
||||
return jsonify({"error": "Invalid path"}), 400
|
||||
|
||||
os.makedirs(os.path.dirname(abs_path), exist_ok=True)
|
||||
try:
|
||||
with open(abs_path, "w", encoding="utf-8", newline="\n") as fh:
|
||||
fh.write(str(content))
|
||||
rel_new = os.path.relpath(abs_path, scripts_root).replace(os.sep, "/")
|
||||
return jsonify({"status": "ok", "rel_path": rel_new})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/create or /api/assembly/edit"}), 410
|
||||
|
||||
|
||||
@app.route("/api/scripts/rename_file", methods=["POST"])
|
||||
@@ -974,12 +1098,7 @@ def rename_script_file():
|
||||
if desired_ext:
|
||||
new_name = os.path.splitext(new_name)[0] + desired_ext
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), os.path.basename(new_name))
|
||||
try:
|
||||
os.rename(old_abs, new_abs)
|
||||
rel_new = os.path.relpath(new_abs, scripts_root).replace(os.sep, "/")
|
||||
return jsonify({"status": "ok", "rel_path": rel_new})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/rename"}), 410
|
||||
|
||||
|
||||
@app.route("/api/scripts/move_file", methods=["POST"])
|
||||
@@ -995,11 +1114,7 @@ def move_script_file():
|
||||
if (not new_abs.startswith(scripts_root)) or (not _is_valid_scripts_relpath(new_rel)):
|
||||
return jsonify({"error": "Invalid destination"}), 400
|
||||
os.makedirs(os.path.dirname(new_abs), exist_ok=True)
|
||||
try:
|
||||
shutil.move(old_abs, new_abs)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/move"}), 410
|
||||
|
||||
|
||||
@app.route("/api/scripts/delete_file", methods=["POST"])
|
||||
@@ -1010,11 +1125,7 @@ def delete_script_file():
|
||||
abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
|
||||
if (not abs_path.startswith(scripts_root)) or (not _is_valid_scripts_relpath(rel_path)) or (not os.path.isfile(abs_path)):
|
||||
return jsonify({"error": "File not found"}), 404
|
||||
try:
|
||||
os.remove(abs_path)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/delete"}), 410
|
||||
|
||||
# ---------------------------------------------
|
||||
# Ansible Playbooks Storage API Endpoints
|
||||
@@ -1063,8 +1174,7 @@ def list_ansible():
|
||||
"last_edited": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(mtime)),
|
||||
"last_edited_epoch": mtime
|
||||
})
|
||||
results.sort(key=lambda x: x.get("last_edited_epoch", 0.0), reverse=True)
|
||||
return jsonify({ "root": root, "items": results, "folders": folders })
|
||||
return jsonify({"error": "deprecated; use /api/assembly/list?island=ansible"}), 410
|
||||
|
||||
|
||||
@app.route("/api/ansible/load", methods=["GET"])
|
||||
@@ -1077,12 +1187,7 @@ def load_ansible():
|
||||
try:
|
||||
with open(abs_path, "r", encoding="utf-8", errors="replace") as fh:
|
||||
content = fh.read()
|
||||
return jsonify({
|
||||
"file_name": os.path.basename(abs_path),
|
||||
"rel_path": os.path.relpath(abs_path, root).replace(os.sep, "/"),
|
||||
"type": "ansible",
|
||||
"content": content
|
||||
})
|
||||
return jsonify({"error": "deprecated; use /api/assembly/load?island=ansible&path=..."}), 410
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
@@ -1112,13 +1217,7 @@ def save_ansible():
|
||||
if not abs_path.startswith(root):
|
||||
return jsonify({"error": "Invalid path"}), 400
|
||||
os.makedirs(os.path.dirname(abs_path), exist_ok=True)
|
||||
try:
|
||||
with open(abs_path, "w", encoding="utf-8", newline="\n") as fh:
|
||||
fh.write(str(content))
|
||||
rel_new = os.path.relpath(abs_path, root).replace(os.sep, "/")
|
||||
return jsonify({"status": "ok", "rel_path": rel_new})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/create or /api/assembly/edit"}), 410
|
||||
|
||||
|
||||
@app.route("/api/ansible/rename_file", methods=["POST"])
|
||||
@@ -1135,12 +1234,7 @@ def rename_ansible_file():
|
||||
if not os.path.splitext(new_name)[1]:
|
||||
new_name = os.path.splitext(new_name)[0] + ".yml"
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), os.path.basename(new_name))
|
||||
try:
|
||||
os.rename(old_abs, new_abs)
|
||||
rel_new = os.path.relpath(new_abs, root).replace(os.sep, "/")
|
||||
return jsonify({"status": "ok", "rel_path": rel_new})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/rename"}), 410
|
||||
|
||||
|
||||
@app.route("/api/ansible/move_file", methods=["POST"])
|
||||
@@ -1156,11 +1250,7 @@ def move_ansible_file():
|
||||
if (not new_abs.startswith(root)) or (not _is_valid_ansible_relpath(new_rel)):
|
||||
return jsonify({"error": "Invalid destination"}), 400
|
||||
os.makedirs(os.path.dirname(new_abs), exist_ok=True)
|
||||
try:
|
||||
shutil.move(old_abs, new_abs)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/move"}), 410
|
||||
|
||||
|
||||
@app.route("/api/ansible/delete_file", methods=["POST"])
|
||||
@@ -1171,11 +1261,7 @@ def delete_ansible_file():
|
||||
abs_path = os.path.abspath(os.path.join(root, rel_path))
|
||||
if (not abs_path.startswith(root)) or (not _is_valid_ansible_relpath(rel_path)) or (not os.path.isfile(abs_path)):
|
||||
return jsonify({"error": "File not found"}), 404
|
||||
try:
|
||||
os.remove(abs_path)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/delete"}), 410
|
||||
|
||||
|
||||
@app.route("/api/ansible/create_folder", methods=["POST"])
|
||||
@@ -1187,11 +1273,7 @@ def ansible_create_folder():
|
||||
abs_path = os.path.abspath(os.path.join(root, rel_path))
|
||||
if not abs_path.startswith(root):
|
||||
return jsonify({"error": "Invalid path"}), 400
|
||||
try:
|
||||
os.makedirs(abs_path, exist_ok=True)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/create"}), 410
|
||||
|
||||
|
||||
@app.route("/api/ansible/delete_folder", methods=["POST"])
|
||||
@@ -1205,11 +1287,7 @@ def ansible_delete_folder():
|
||||
rel_norm = (rel_path or "").replace("\\", "/").strip("/")
|
||||
if rel_norm in ("",):
|
||||
return jsonify({"error": "Cannot delete top-level folder"}), 400
|
||||
try:
|
||||
shutil.rmtree(abs_path)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/delete"}), 410
|
||||
|
||||
|
||||
@app.route("/api/ansible/rename_folder", methods=["POST"])
|
||||
@@ -1227,11 +1305,7 @@ def ansible_rename_folder():
|
||||
if rel_norm in ("",):
|
||||
return jsonify({"error": "Cannot rename top-level folder"}), 400
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), new_name)
|
||||
try:
|
||||
os.rename(old_abs, new_abs)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/rename"}), 410
|
||||
|
||||
|
||||
@app.route("/api/scripts/create_folder", methods=["POST"])
|
||||
@@ -1246,11 +1320,7 @@ def scripts_create_folder():
|
||||
abs_path = os.path.abspath(os.path.join(scripts_root, rel_path))
|
||||
if not abs_path.startswith(scripts_root):
|
||||
return jsonify({"error": "Invalid path"}), 400
|
||||
try:
|
||||
os.makedirs(abs_path, exist_ok=True)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/create"}), 410
|
||||
|
||||
|
||||
@app.route("/api/scripts/delete_folder", methods=["POST"])
|
||||
@@ -1264,11 +1334,7 @@ def scripts_delete_folder():
|
||||
rel_norm = (rel_path or "").replace("\\", "/").strip("/")
|
||||
if rel_norm in ("Scripts", "Ansible Playbooks"):
|
||||
return jsonify({"error": "Cannot delete top-level folder"}), 400
|
||||
try:
|
||||
shutil.rmtree(abs_path)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/delete"}), 410
|
||||
|
||||
|
||||
@app.route("/api/scripts/rename_folder", methods=["POST"])
|
||||
@@ -1286,11 +1352,7 @@ def scripts_rename_folder():
|
||||
if rel_norm in ("Scripts", "Ansible Playbooks"):
|
||||
return jsonify({"error": "Cannot rename top-level folder"}), 400
|
||||
new_abs = os.path.join(os.path.dirname(old_abs), new_name)
|
||||
try:
|
||||
os.rename(old_abs, new_abs)
|
||||
return jsonify({"status": "ok"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
return jsonify({"error": "deprecated; use /api/assembly/rename"}), 410
|
||||
|
||||
# ---------------------------------------------
|
||||
# Borealis Agent API Endpoints
|
||||
|
||||
4
tmp_parse.py
Normal file
4
tmp_parse.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Smoke test: verify that Data/Server/server.py parses as valid Python."""
import ast

with open('Data/Server/server.py', 'r', encoding='utf-8') as f:
    # ast.parse raises SyntaxError (with location info) on invalid source.
    ast.parse(f.read())
print('OK')
|
||||
Reference in New Issue
Block a user