Removed ReactJS Code
Removed new code from the legacy codebase.
parent fce8b7c911
commit f27104036f
78
Data/Experiments/Transparent Nodes/QML/blueprint_grid.qml
Normal file
@@ -0,0 +1,78 @@
import QtQuick 2.15
import QtQuick.Controls 2.15
import QtQuick.Shapes 1.15
import QtQuick.Window 2.15

Item {
    id: root
    width: Screen.width
    height: Screen.height

    // Grid overlay is enabled at startup.
    property bool editMode: true

    // Blue gradient background (edges fading inward) with stops shifted inward.
    Rectangle {
        id: gradientBackground
        width: parent.width
        height: parent.height
        opacity: 0.5
        gradient: Gradient {
            // Shifted stops: outer stops moved to 0.1 and 0.9, inner stops to 0.4 and 0.6.
            GradientStop { position: 0.1; color: Qt.rgba(0, 100/255, 255/255, 0.5) }
            GradientStop { position: 0.4; color: Qt.rgba(0, 50/255, 180/255, 0.2) }
            GradientStop { position: 0.5; color: Qt.rgba(0, 0, 0, 0.0) }
            GradientStop { position: 0.6; color: Qt.rgba(0, 50/255, 180/255, 0.2) }
            GradientStop { position: 0.9; color: Qt.rgba(0, 100/255, 255/255, 0.5) }
        }
        visible: editMode // Only show the gradient in edit mode
    }

    // Top & Bottom fade remains unchanged.
    Rectangle {
        id: topBottomGradient
        width: parent.width
        height: parent.height
        opacity: 0.3
        gradient: Gradient {
            orientation: Gradient.Vertical
            GradientStop { position: 0.0; color: Qt.rgba(0, 100/255, 255/255, 0.4) }
            GradientStop { position: 0.3; color: Qt.rgba(0, 50/255, 180/255, 0.1) }
            GradientStop { position: 0.5; color: Qt.rgba(0, 0, 0, 0.0) }
            GradientStop { position: 0.7; color: Qt.rgba(0, 50/255, 180/255, 0.1) }
            GradientStop { position: 1.0; color: Qt.rgba(0, 100/255, 255/255, 0.4) }
        }
        visible: editMode
    }

    // Full-Screen Dynamic Grid with 10% increased transparency (grid lines at 0.3 opacity).
    Canvas {
        id: gridCanvas
        width: parent.width
        height: parent.height
        onPaint: {
            var ctx = getContext("2d");
            ctx.clearRect(0, 0, width, height);
            ctx.strokeStyle = "rgba(255, 255, 255, 0.3)"; // Reduced opacity from 0.4 to 0.3.
            ctx.lineWidth = 1;

            var step = 120; // Grid spacing remains unchanged.

            for (var x = 0; x < width; x += step) {
                ctx.beginPath();
                ctx.moveTo(x, 0);
                ctx.lineTo(x, height);
                ctx.stroke();
            }
            for (var y = 0; y < height; y += step) {
                ctx.beginPath();
                ctx.moveTo(0, y);
                ctx.lineTo(width, y);
                ctx.stroke();
            }
        }
        Component.onCompleted: requestPaint()
        onVisibleChanged: requestPaint()
        visible: editMode // Hide when edit mode is off.
    }
}
193
Data/Experiments/Transparent Nodes/blueprint_grid.py
Normal file
@@ -0,0 +1,193 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import sys
import pkgutil
import importlib
import inspect
import types
from PyQt5.QtWidgets import QApplication, QMainWindow, QPushButton, QWidget
from PyQt5.QtCore import Qt, QUrl, QTimer
from PyQt5.QtGui import QGuiApplication
from PyQt5.QtQuick import QQuickView

# OdenGraphQt Fix: Monkey-patch QUndoStack
import OdenGraphQt.base.graph as base_graph
from PyQt5 import QtWidgets
base_graph.QtGui.QUndoStack = QtWidgets.QUndoStack

import OdenGraphQt.base.commands as base_commands
_original_redo = base_commands.NodesRemovedCmd.redo
_original_undo = base_commands.NodesRemovedCmd.undo

def _patched_redo(self):
    try:
        _original_redo(self)
    except TypeError as e:
        if "unexpected type" in str(e) and hasattr(self, 'node'):
            node_ids = []
            if isinstance(self.node, list):
                node_ids = [getattr(n, 'id', str(n)) for n in self.node]
            else:
                node_ids = [getattr(self.node, 'id', str(self.node))]
            self.graph.nodes_deleted.emit(node_ids)
        else:
            raise

def _patched_undo(self):
    try:
        _original_undo(self)
    except TypeError as e:
        if "unexpected type" in str(e) and hasattr(self, 'node'):
            node_ids = []
            if isinstance(self.node, list):
                node_ids = [getattr(n, 'id', str(n)) for n in self.node]
            else:
                node_ids = [getattr(self.node, 'id', str(self.node))]
            self.graph.nodes_deleted.emit(node_ids)
        else:
            raise

base_commands.NodesRemovedCmd.redo = _patched_redo
base_commands.NodesRemovedCmd.undo = _patched_undo

# OdenGraphQt Transparent Viewer
from OdenGraphQt.widgets.viewer import NodeViewer

class TransparentViewer(NodeViewer):
    """A NodeViewer that does not paint anything in drawBackground() -> Fully transparent."""
    def drawBackground(self, painter, rect):
        pass  # Do nothing, ensuring transparency.

# NodeGraph & Node Import Helpers
from OdenGraphQt import NodeGraph, BaseNode

def import_nodes_from_folder(package_name):
    imported_nodes = []
    package = importlib.import_module(package_name)
    for loader, module_name, is_pkg in pkgutil.walk_packages(
            package.__path__, package.__name__ + "."):
        module = importlib.import_module(module_name)
        for name, obj in inspect.getmembers(module, inspect.isclass):
            if issubclass(obj, BaseNode) and obj.__module__ == module.__name__:
                imported_nodes.append(obj)
    return imported_nodes

def make_node_command(graph, node_type):
    def command():
        try:
            graph.create_node(node_type)
        except Exception as e:
            print(f"Error creating node of type {node_type}: {e}")
    return command

# Edit Mode Button
class EditButton(QPushButton):
    """A small, frameless button to toggle edit mode."""
    def __init__(self, parent=None):
        super().__init__("Toggle Edit Mode", parent)
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        # Dark gray background with white text.
        self.setStyleSheet("background-color: #444444; border: 1px solid black; color: white;")
        self.resize(140, 40)

# Main Overlay Window
class MainWindow(QMainWindow):
    """A frameless, transparent overlay with OdenGraphQt nodes & edit mode toggle."""
    def __init__(self):
        super().__init__()

        # Full-screen overlay
        app = QApplication.instance()
        screen_geo = app.primaryScreen().geometry()
        self.setGeometry(screen_geo)

        # Frameless, top-most, fully transparent
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        self.setAttribute(Qt.WA_TranslucentBackground, True)

        # QML Background
        self.qml_view = QQuickView()
        self.qml_view.setSource(QUrl("qml/background_grid.qml"))
        self.qml_view.setFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        self.qml_view.setClearBeforeRendering(True)
        self.qml_view.setColor(Qt.transparent)
        self.qml_view.show()

        # Save the QML root object for later property sync
        self.qml_root = self.qml_view.rootObject()

        # NodeGraph with TransparentViewer
        self.graph = NodeGraph(viewer=TransparentViewer())
        self.nodeGraphWidget = self.graph.widget
        self.nodeGraphWidget.setStyleSheet("background: transparent; border: none;")

        # Transparent central widget
        central = QWidget(self)
        central.setAttribute(Qt.WA_TranslucentBackground, True)
        self.setCentralWidget(central)

        self.nodeGraphWidget.setParent(central)
        self.nodeGraphWidget.setGeometry(central.rect())

        # Edit Mode Button (Python controlled)
        self.editButton = EditButton(self)
        self.editButton.move(10, 10)
        self.editButton.clicked.connect(self.toggleEditMode)
        self.isEditMode = True  # Set edit mode enabled by default

        # Ensure QML grid overlay is enabled at startup
        if self.qml_root:
            self.qml_root.setProperty("editMode", self.isEditMode)

        # Import custom nodes
        try:
            custom_nodes = import_nodes_from_folder('Nodes')
            for node_class in custom_nodes:
                self.graph.register_node(node_class)

            graph_menu = self.graph.get_context_menu('graph')
            for node_class in custom_nodes:
                node_type = f"{node_class.__identifier__}.{node_class.__name__}"
                node_name = node_class.NODE_NAME
                graph_menu.add_command(
                    f"Add {node_name}",
                    make_node_command(self.graph, node_type)
                )
        except Exception as e:
            print(f"Error setting up custom nodes: {e}")

        # Global update timer
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.global_update)
        self.timer.start(500)

        # Timer to ensure the button stays on top (hacky, but effective)
        self.raiseTimer = QTimer(self)
        self.raiseTimer.timeout.connect(self.editButton.raise_)
        self.raiseTimer.start(1000)  # Raise the button every 1 second

        self.show()
        self.nodeGraphWidget.setAttribute(Qt.WA_TransparentForMouseEvents, not self.isEditMode)

    def toggleEditMode(self):
        """Toggle edit mode (pass-through clicks vs interactive)."""
        self.isEditMode = not self.isEditMode
        self.nodeGraphWidget.setAttribute(Qt.WA_TransparentForMouseEvents, not self.isEditMode)
        # Button text remains constant.
        self.editButton.setText("Toggle Edit Mode")
        if self.qml_root:
            self.qml_root.setProperty("editMode", self.isEditMode)

    def global_update(self):
        """Update all nodes periodically."""
        for node in self.graph.all_nodes():
            if hasattr(node, "process_input"):
                node.process_input()

# Entry Point
if __name__ == '__main__':
    app = QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
160
Data/Experiments/Transparent Nodes/borealis_transparent.py
Normal file
@@ -0,0 +1,160 @@
import sys
import pkgutil
import importlib
import inspect
from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QVBoxLayout, QGraphicsView, QGraphicsScene, QGraphicsItem, QMenu
from PyQt5.QtCore import Qt, QTimer, QRectF, QPointF
from PyQt5.QtGui import QColor, QPainter, QPen, QBrush, QGradient, QLinearGradient
from PyQt5 import QtWidgets, QtCore, QtGui
from OdenGraphQt import NodeGraph, BaseNode

# --- Fix Missing QUndoStack in QtGui ---
import OdenGraphQt.base.graph as base_graph
base_graph.QtGui.QUndoStack = QtWidgets.QUndoStack  # Monkey-patch the missing QUndoStack

# --- Custom Graph Scene ---
class CustomGraphScene(QGraphicsScene):
    """
    Custom scene that draws a blueprint-style transparent grid with gradient shading.
    """
    def __init__(self, parent=None):
        super().__init__(parent)
        self.setBackgroundBrush(QtCore.Qt.transparent)
        self.grid_color = QtGui.QColor(100, 160, 160, 160)  # Blueprint grid color (10% more transparent)
        self.grid_size = 115

    def drawBackground(self, painter, rect):
        """
        Custom draw function to render a blueprint-style grid with gradient shading.
        """
        painter.save()
        painter.setRenderHint(QPainter.Antialiasing, False)
        painter.setBrush(QtCore.Qt.NoBrush)  # No background fill
        pen = QPen(self.grid_color, 0.5)

        left = int(rect.left()) - (int(rect.left()) % self.grid_size)
        top = int(rect.top()) - (int(rect.top()) % self.grid_size)

        # Draw vertical lines
        lines = []
        for x in range(left, int(rect.right()), self.grid_size):
            lines.append(QtCore.QLineF(x, rect.top(), x, rect.bottom()))

        # Draw horizontal lines
        for y in range(top, int(rect.bottom()), self.grid_size):
            lines.append(QtCore.QLineF(rect.left(), y, rect.right(), y))

        painter.setPen(pen)
        painter.drawLines(lines)

        # Draw gradient shading (top and bottom)
        gradient = QLinearGradient(QPointF(rect.left(), rect.top()), QPointF(rect.left(), rect.bottom()))
        gradient.setColorAt(0.0, QColor(0, 40, 100, 220))  # Darker blue at the top
        gradient.setColorAt(0.5, QColor(0, 0, 0, 0))        # Transparent in the middle
        gradient.setColorAt(1.0, QColor(0, 40, 100, 220))  # Darker blue at the bottom
        painter.fillRect(rect, QBrush(gradient))

        painter.restore()

# --- Node Management ---
def import_nodes_from_folder(package_name):
    imported_nodes = []
    package = importlib.import_module(package_name)
    for loader, module_name, is_pkg in pkgutil.walk_packages(package.__path__, package.__name__ + "."):
        module = importlib.import_module(module_name)
        for name, obj in inspect.getmembers(module, inspect.isclass):
            if issubclass(obj, BaseNode) and obj.__module__ == module.__name__:
                imported_nodes.append(obj)
    return imported_nodes

# --- Custom Graph View ---
class CustomGraphView(QGraphicsView):
    """
    Custom view for the graph that applies full transparency and handles right-click context menu.
    """
    def __init__(self, scene, graph, parent=None):
        super().__init__(scene, parent)
        self.graph = graph  # Reference to NodeGraph
        self.setRenderHints(QtGui.QPainter.Antialiasing | QtGui.QPainter.SmoothPixmapTransform)
        self.setViewportUpdateMode(QGraphicsView.FullViewportUpdate)
        self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.setStyleSheet("background: transparent; border: none;")
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)

        # Enable context menu on right-click
        self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.show_context_menu)

    def show_context_menu(self, position):
        """
        Displays the node creation context menu with dynamically loaded nodes.
        """
        menu = QMenu()
        for node_class in self.graph.registered_nodes():
            node_name = getattr(node_class, "NODE_NAME", node_class.__name__)
            menu.addAction(f"Create {node_name}", lambda nc=node_class: self.create_node(nc))
        menu.exec_(self.mapToGlobal(position))

    def create_node(self, node_class):
        """
        Creates a node instance of the given class in the NodeGraph.
        """
        try:
            node = self.graph.create_node(f"{node_class.__identifier__}.{node_class.__name__}")
            print(f"Created node: {node_class.__name__}")
        except Exception as e:
            print(f"Error creating node: {e}")

# --- Main Window ---
class MainWindow(QMainWindow):
    """A frameless, transparent overlay with a custom graph."""
    def __init__(self):
        super().__init__()

        # Full-screen overlay
        app = QApplication.instance()
        screen_geo = app.primaryScreen().geometry()
        self.setGeometry(screen_geo)

        # Frameless, top-most, fully transparent
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        self.setAttribute(Qt.WA_TranslucentBackground, True)

        # Transparent central widget
        central = QWidget(self)
        central.setAttribute(Qt.WA_TranslucentBackground, True)
        layout = QVBoxLayout(central)
        layout.setContentsMargins(0, 0, 0, 0)
        self.setCentralWidget(central)

        # Initialize NodeGraph
        self.graph = NodeGraph()

        # Load custom nodes
        custom_nodes = import_nodes_from_folder('Nodes')
        for node_class in custom_nodes:
            self.graph.register_node(node_class)

        # Initialize Custom Graph Scene & View
        self.scene = CustomGraphScene()
        self.view = CustomGraphView(self.scene, self.graph, self)
        layout.addWidget(self.view)

        # Global update timer
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.global_update)
        self.timer.start(500)

    def global_update(self):
        """Update all nodes periodically."""
        for node in self.graph.all_nodes():
            if hasattr(node, "process_input"):
                node.process_input()

# --- Entry Point ---
if __name__ == '__main__':
    app = QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
542
Data/Experiments/borealis_overlay.py
Normal file
@@ -0,0 +1,542 @@
#!/usr/bin/env python3

import sys
import time
import re
import numpy as np
import cv2
import pytesseract

try:
    import winsound
    HAS_WINSOUND = True
except ImportError:
    HAS_WINSOUND = False

from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtCore import Qt, QRect, QPoint, QTimer
from PyQt5.QtGui import QPainter, QPen, QColor, QFont
from PIL import Image, ImageGrab, ImageFilter

from rich.console import Console, Group
from rich.table import Table
from rich.progress import Progress, BarColumn, TextColumn
from rich.text import Text
from rich.live import Live

# =============================================================================
# Global Config
# =============================================================================

pytesseract.pytesseract.tesseract_cmd = r"C:\Program Files\Tesseract-OCR\tesseract.exe"

POLLING_RATE_MS = 500
MAX_DATA_POINTS = 8

# We still use these defaults for Region size.
DEFAULT_WIDTH = 180
DEFAULT_HEIGHT = 130
HANDLE_SIZE = 8
LABEL_HEIGHT = 20

GREEN_HEADER_STYLE = "bold green"

BEEP_INTERVAL_SECONDS = 1.0  # Only beep once every 1 second

# STATUS BAR AUTO-LOCATOR LOGIC (WILL BE BUILT-OUT TO BE MORE ROBUST LATER)
TEMPLATE_PATH = "G:\\Nextcloud\\Projects\\Scripting\\bars_template.png"  # Path to your bars template file
MATCH_THRESHOLD = 0.4  # The correlation threshold to consider a "good" match

# =============================================================================
# Helper Functions
# =============================================================================

def beep_hp_warning():
    """
    Only beep if enough time has elapsed since the last beep (BEEP_INTERVAL_SECONDS).
    """
    current_time = time.time()
    if (beep_hp_warning.last_beep_time is None or
            (current_time - beep_hp_warning.last_beep_time >= BEEP_INTERVAL_SECONDS)):

        beep_hp_warning.last_beep_time = current_time
        if HAS_WINSOUND:
            # frequency=376 Hz, duration=100 ms
            winsound.Beep(376, 100)
        else:
            # Attempt terminal bell
            print('\a', end='')

beep_hp_warning.last_beep_time = None


def locate_bars_opencv(template_path, threshold=MATCH_THRESHOLD):
    """
    Attempt to locate the bars via OpenCV template matching:
      1) Grab the full screen using PIL.ImageGrab.
      2) Convert to NumPy array in BGR format for cv2.
      3) Load template from `template_path`.
      4) Use cv2.matchTemplate to find the best match location.
      5) If max correlation > threshold, return (x, y, w, h).
      6) Else return None.
    """
    # 1) Capture full screen
    screenshot_pil = ImageGrab.grab()
    screenshot_np = np.array(screenshot_pil)  # shape (H, W, 4) possibly
    # Convert RGBA or RGB to BGR
    screenshot_bgr = cv2.cvtColor(screenshot_np, cv2.COLOR_RGB2BGR)

    # 2) Load template from file
    template_bgr = cv2.imread(template_path, cv2.IMREAD_COLOR)
    if template_bgr is None:
        print(f"[WARN] Could not load template file: {template_path}")
        return None

    # 3) Template matching
    result = cv2.matchTemplate(screenshot_bgr, template_bgr, cv2.TM_CCOEFF_NORMED)

    # 4) Find best match
    min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(result)
    # template width/height
    th, tw, _ = template_bgr.shape

    if max_val >= threshold:
        # max_loc is top-left corner of the best match
        found_x, found_y = max_loc
        return (found_x, found_y, tw, th)
    else:
        return None


def format_duration(seconds):
    if seconds is None:
        return "???"
    seconds = int(seconds)
    hours = seconds // 3600
    leftover = seconds % 3600
    mins = leftover // 60
    secs = leftover % 60
    if hours > 0:
        return f"{hours}h {mins}m {secs}s"
    else:
        return f"{mins}m {secs}s"


def sanitize_experience_string(raw_text):
    text_no_percent = raw_text.replace('%', '')
    text_no_spaces = text_no_percent.replace(' ', '')
    cleaned = re.sub(r'[^0-9\.]', '', text_no_spaces)
    match = re.search(r'\d+(?:\.\d+)?', cleaned)
    if not match:
        return None
    val = float(match.group(0))
    if val < 0:
        val = 0
    elif val > 100:
        val = 100
    return round(val, 4)


def format_experience_value(value):
    if value < 0:
        value = 0
    elif value > 100:
        value = 100
    float_4 = round(value, 4)
    raw_str = f"{float_4:.4f}"
    int_part, dec_part = raw_str.split('.')
    if int_part == "100":
        pass
    elif len(int_part) == 1 and int_part != "0":
        int_part = "0" + int_part
    elif int_part == "0":
        int_part = "00"
    return f"{int_part}.{dec_part}"

# -----------------------------------------------------------------------------
# Region Class
# -----------------------------------------------------------------------------
class Region:
    """
    Defines a draggable/resizable screen region for OCR capture.
    """
    def __init__(self, x, y, label="Region", color=QColor(0, 0, 255)):
        self.x = x
        self.y = y
        self.w = DEFAULT_WIDTH
        self.h = DEFAULT_HEIGHT
        self.label = label
        self.color = color
        self.visible = True
        self.data = ""

    def rect(self):
        return QRect(self.x, self.y, self.w, self.h)

    def label_rect(self):
        return QRect(self.x, self.y - LABEL_HEIGHT, self.w, LABEL_HEIGHT)

    def resize_handles(self):
        return [
            QRect(self.x - HANDLE_SIZE // 2, self.y - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE),
            QRect(self.x + self.w - HANDLE_SIZE // 2, self.y - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE),
            QRect(self.x - HANDLE_SIZE // 2, self.y + self.h - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE),
            QRect(self.x + self.w - HANDLE_SIZE // 2, self.y + self.h - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE),
        ]

# -----------------------------------------------------------------------------
# OverlayCanvas Class
# -----------------------------------------------------------------------------
class OverlayCanvas(QWidget):
    """
    Renders the overlay & handles region dragging/resizing.
    """
    def __init__(self, regions, parent=None):
        super().__init__(parent)
        self.regions = regions
        self.edit_mode = True
        self.selected_region = None
        self.selected_handle = None
        self.drag_offset = QPoint()

    def paintEvent(self, event):
        painter = QPainter(self)
        for region in self.regions:
            if region.visible:
                pen = QPen(region.color)
                pen.setWidth(3)
                painter.setPen(pen)
                painter.drawRect(region.x, region.y, region.w, region.h)

                painter.setFont(QFont("Arial", 12, QFont.Bold))
                painter.setPen(region.color)
                painter.drawText(region.x, region.y - 5, region.label)

                if self.edit_mode:
                    for handle in region.resize_handles():
                        painter.fillRect(handle, region.color)

    def mousePressEvent(self, event):
        if not self.edit_mode:
            return
        if event.button() == Qt.LeftButton:
            for region in reversed(self.regions):
                for i, handle in enumerate(region.resize_handles()):
                    if handle.contains(event.pos()):
                        self.selected_region = region
                        self.selected_handle = i
                        return
                if region.label_rect().contains(event.pos()):
                    self.selected_region = region
                    self.selected_handle = None
                    self.drag_offset = event.pos() - QPoint(region.x, region.y)
                    return
                if region.rect().contains(event.pos()):
                    self.selected_region = region
                    self.selected_handle = None
                    self.drag_offset = event.pos() - QPoint(region.x, region.y)
                    return

    def mouseMoveEvent(self, event):
        if not self.edit_mode or self.selected_region is None:
            return

        if self.selected_handle is None:
            self.selected_region.x = event.x() - self.drag_offset.x()
            self.selected_region.y = event.y() - self.drag_offset.y()
        else:
            sr = self.selected_region
            if self.selected_handle == 0:  # top-left
                sr.w += sr.x - event.x()
                sr.h += sr.y - event.y()
                sr.x = event.x()
                sr.y = event.y()
            elif self.selected_handle == 1:  # top-right
                sr.w = event.x() - sr.x
                sr.h += sr.y - event.y()
                sr.y = event.y()
            elif self.selected_handle == 2:  # bottom-left
                sr.w += sr.x - event.x()
                sr.h = event.y() - sr.y
                sr.x = event.x()
            elif self.selected_handle == 3:  # bottom-right
                sr.w = event.x() - sr.x
                sr.h = event.y() - sr.y

            sr.w = max(sr.w, 10)
            sr.h = max(sr.h, 10)

        self.update()

    def mouseReleaseEvent(self, event):
        if not self.edit_mode:
            return
        if event.button() == Qt.LeftButton:
            self.selected_region = None
            self.selected_handle = None

# -----------------------------------------------------------------------------
# BorealisOverlay Class
# -----------------------------------------------------------------------------
class BorealisOverlay(QWidget):
    """
    Single Region Overlay for Player Stats (HP/MP/FP/EXP) with:
      - Automatic location via OpenCV template matching at startup
      - OCR scanning
      - Low-HP beep
      - Rich Live updates in terminal
    """
    def __init__(self, live=None):
        super().__init__()
        screen_geo = QApplication.primaryScreen().geometry()
        self.setGeometry(screen_geo)
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        self.setAttribute(Qt.WA_TranslucentBackground, True)

        # Try to find the bars automatically
        # If found => use that location, else default
        initial_x, initial_y = 250, 50
        region_w, region_h = DEFAULT_WIDTH, DEFAULT_HEIGHT

        match_result = locate_bars_opencv(TEMPLATE_PATH, MATCH_THRESHOLD)
        if match_result is not None:
            found_x, found_y, w, h = match_result
            print(f"Character Status Located at {found_x}, {found_y} with confidence >= {MATCH_THRESHOLD}.")
            initial_x, initial_y = found_x, found_y
            # Optionally override region size with template size
            region_w, region_h = w, h
        else:
            print("Could not auto-locate the character status page. Set your theme to Masquerade and Interface Scale to 140%, and browser zoom level to 110%. Using default region.")

        region = Region(initial_x, initial_y, label="Character Status")
        region.w = region_w
        region.h = region_h
        self.regions = [region]

        self.canvas = OverlayCanvas(self.regions, self)
        self.canvas.setGeometry(self.rect())

        # Tesseract
        self.engine = pytesseract

        # Keep history of EXP data
        self.points = []

        self.live = live

        # Timer for periodic OCR scanning
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.collect_ocr_data)
        self.timer.start(POLLING_RATE_MS)

    def set_live(self, live):
        self.live = live

    def collect_ocr_data(self):
        for region in self.regions:
            if region.visible:
                screenshot = ImageGrab.grab(
                    bbox=(region.x, region.y, region.x + region.w, region.y + region.h)
                )
                processed = self.preprocess_image(screenshot)
                text = pytesseract.image_to_string(processed, config='--psm 4 --oem 1')
                region.data = text.strip()

        if self.live is not None:
            renderable = self.build_renderable()
            self.live.update(renderable)

    def preprocess_image(self, image):
        gray = image.convert("L")
        scaled = gray.resize((gray.width * 3, gray.height * 3))
        thresh = scaled.point(lambda p: p > 200 and 255)
        return thresh.filter(ImageFilter.MedianFilter(3))

    def parse_all_stats(self, raw_text):
        raw_lines = raw_text.splitlines()
        lines = [l.strip() for l in raw_lines if l.strip()]
        stats_dict = {
            "hp": (0, 1),
            "mp": (0, 1),
            "fp": (0, 1),
            "exp": None
        }
        if len(lines) < 4:
            return stats_dict

        hp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[0])
        if hp_match:
            stats_dict["hp"] = (int(hp_match.group(1)), int(hp_match.group(2)))

        mp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[1])
        if mp_match:
            stats_dict["mp"] = (int(mp_match.group(1)), int(mp_match.group(2)))

        fp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[2])
        if fp_match:
            stats_dict["fp"] = (int(fp_match.group(1)), int(fp_match.group(2)))

        exp_val = sanitize_experience_string(lines[3])
        stats_dict["exp"] = exp_val
        return stats_dict

    def update_points(self, new_val):
        now = time.time()
        if self.points:
            _, last_v = self.points[-1]
            if abs(new_val - last_v) < 1e-6:
                return
            if new_val < last_v:
                self.points.clear()
        self.points.append((now, new_val))
        if len(self.points) > MAX_DATA_POINTS:
            self.points.pop(0)

    def compute_time_to_100(self):
        n = len(self.points)
        if n < 2:
            return None
        first_t, first_v = self.points[0]
        last_t, last_v = self.points[-1]
        diff_v = last_v - first_v
        if diff_v <= 0:
            return None

        steps = n - 1
        total_time = last_t - first_t
        if total_time <= 0:
            return None

        avg_change = diff_v / steps
        remain = 100.0 - last_v
        if remain <= 0:
            return None

        avg_time = total_time / steps
        rate_per_s = avg_change / avg_time if avg_time > 0 else 0
        if rate_per_s <= 0:
            return None

        return int(remain / rate_per_s)

    def build_renderable(self):
        raw_text = self.regions[0].data
        stats = self.parse_all_stats(raw_text)
        hp_cur, hp_max = stats["hp"]
        mp_cur, mp_max = stats["mp"]
        fp_cur, fp_max = stats["fp"]
        exp_val = stats["exp"]

        # HP beep logic
        if hp_max > 0:
            hp_ratio = hp_cur / hp_max
            if 0 < hp_ratio <= 0.40:
                beep_hp_warning()

        if exp_val is not None:
            self.update_points(exp_val)
        current_exp = self.points[-1][1] if self.points else 0.0

        # Title
        title_text = Text("Project Borealis\n", style="bold white")
        subtitle_text = Text("Flyff Information Overlay\n\n", style="dim")

        # HP / MP / FP bars
        bar_progress = Progress(
            "{task.description}",
            BarColumn(bar_width=30),
            TextColumn(" {task.completed}/{task.total} ({task.percentage:>5.2f}%)"),
            transient=False,
            auto_refresh=False
        )
        bar_progress.add_task("[bold red]HP[/bold red]", total=hp_max, completed=hp_cur,
                              style="red", complete_style="red")
        bar_progress.add_task("[bold blue]MP[/bold blue]", total=mp_max, completed=mp_cur,
                              style="blue", complete_style="blue")
        bar_progress.add_task("[bold green]FP[/bold green]", total=fp_max, completed=fp_cur,
                              style="green", complete_style="green")
        bar_progress.refresh()

        # Historical EXP table
        table = Table(show_header=True, header_style=GREEN_HEADER_STYLE, style=None)
        table.add_column("Historical EXP", justify="center", style="green")
        table.add_column("Time Since Last Kill", justify="center", style="green")
        table.add_column("Average EXP Per Kill", justify="center", style="green")
        table.add_column("Average Time Between Kills", justify="center", style="green")

        n = len(self.points)
        if n == 0:
            table.add_row("N/A", "N/A", "N/A", "N/A")
        elif n == 1:
            _, v0 = self.points[0]
            exp_str = f"[green]{format_experience_value(v0)}%[/green]"
            table.add_row(exp_str, "N/A", "N/A", "N/A")
        else:
            for i in range(1, n):
                t_cur, v_cur = self.points[i]
                t_prev, v_prev = self.points[i - 1]
                delta_v = v_cur - v_prev
                delta_str = f"{delta_v:+.4f}%"
                exp_main = format_experience_value(v_cur)
                exp_str = f"[green]{exp_main}%[/green] [dim]({delta_str})[/dim]"

                delta_t = t_cur - t_prev
                t_since_str = f"{delta_t:.1f}s"

                diff_v = v_cur - self.points[0][1]
                steps = i
                avg_exp_str = f"{diff_v/steps:.4f}%"

                total_time = t_cur - self.points[0][0]
                avg_kill_time = total_time / steps
                avg_time_str = f"{avg_kill_time:.1f}s"

                table.add_row(exp_str, t_since_str, avg_exp_str, avg_time_str)

        # Predicted Time to Level
        secs_left = self.compute_time_to_100()
        time_str = format_duration(secs_left)

        time_bar = Progress(
            TextColumn("[bold white]Predicted Time to Level:[/bold white] "),
            BarColumn(bar_width=30, complete_style="magenta"),
            TextColumn(" [green]{task.percentage:>5.2f}%[/green] "),
            TextColumn(f"[magenta]{time_str}[/magenta] until 100%", justify="right"),
            transient=False,
            auto_refresh=False
        )
        time_bar.add_task("", total=100, completed=current_exp)
        time_bar.refresh()

        return Group(
            title_text,
            subtitle_text,
            bar_progress,
            table,
            time_bar
        )

# -----------------------------------------------------------------------------
# main
# -----------------------------------------------------------------------------
def main():
    """
    1) Attempt to locate HP/MP/FP/Exp bars using OpenCV template matching.
    2) Position overlay region accordingly if found, else default.
    3) Start PyQt, periodically OCR the region, update Rich Live in terminal.
    """
    app = QApplication(sys.argv)
    window = BorealisOverlay()
    window.setWindowTitle("Project Borealis Overlay (HP/MP/FP/EXP)")
    window.show()

    console = Console()

    with Live(console=console, refresh_per_second=4) as live:
        window.set_live(live)
        exit_code = app.exec_()

    sys.exit(exit_code)

if __name__ == "__main__":
    main()
80
Data/Experiments/flowpipe.py
Normal file
@@ -0,0 +1,80 @@
from flask import Flask, jsonify
from flowpipe.node import Node
from flowpipe.graph import Graph
from flowpipe.plug import InputPlug, OutputPlug

app = Flask(__name__)

# ===========================
# Define Custom Nodes
# ===========================

class MultiplyNode(Node):
    """Multiplies an input value by a factor"""
    factor = InputPlug()
    value = InputPlug()
    result = OutputPlug()

    def compute(self):
        self.result.value = self.value.value * self.factor.value


class AddNode(Node):
    """Adds two input values"""
    input1 = InputPlug()
    input2 = InputPlug()
    sum = OutputPlug()

    def compute(self):
        self.sum.value = self.input1.value + self.input2.value


class OutputNode(Node):
    """Outputs the final result"""
    input_value = InputPlug()
    output_value = OutputPlug()

    def compute(self):
        self.output_value.value = self.input_value.value


# ===========================
# Define Graph Workflow
# ===========================

def create_workflow():
    """Creates a sample workflow using nodes"""
    graph = Graph(name="Sample Workflow")

    # Create nodes
    multiply = MultiplyNode(name="Multiplier", graph=graph)
    add = AddNode(name="Adder", graph=graph)
    output = OutputNode(name="Output", graph=graph)

    # Connect nodes
    multiply.result.connect(add.input1)   # Multiply output -> Add input1
    add.sum.connect(output.input_value)   # Add output -> Output node

    # Set static input values
    multiply.factor.value = 2
    multiply.value.value = 5  # 5 * 2 = 10
    add.input2.value = 3      # 10 + 3 = 13

    return graph


@app.route('/run-workflow', methods=['GET'])
def run_workflow():
    """Runs the defined node-based workflow"""
    graph = create_workflow()
    graph.evaluate()  # Execute the graph

    # Extract the final result from the output node
    output_node = graph.nodes["Output"]
    result = output_node.output_value.value

    return jsonify({"workflow_result": result})


if __name__ == '__main__':
    app.run(debug=True)
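A minimal client sketch for the workflow endpoint above (illustration only, not part of the commit). It assumes the Flask app is running locally on Flask's default port 5000 and that the third-party requests package is installed; the expected payload follows from the static inputs wired up in create_workflow() (5 * 2 = 10, then 10 + 3 = 13).

# Hypothetical usage sketch: query the /run-workflow route defined above.
# Assumes the Flask app is running locally on the default port 5000.
import requests

resp = requests.get("http://127.0.0.1:5000/run-workflow")
print(resp.json())  # expected: {"workflow_result": 13}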
98
Data/Experiments/gui_elements.py
Normal file
@@ -0,0 +1,98 @@
# example_qt_interface.py
import sys
from PySide6.QtCore import Qt
from PySide6.QtGui import QAction, QIcon
from PySide6.QtWidgets import (
    QApplication, QMainWindow, QWidget, QVBoxLayout,
    QLabel, QMenuBar, QToolBar, QSplitter, QListWidget,
    QTextEdit, QStatusBar, QFileDialog, QPushButton
)


class MainWindow(QMainWindow):
    def __init__(self):
        super().__init__()

        self.setWindowTitle("Example Qt Interface")

        # Create and set up the menu bar.
        menu_bar = QMenuBar(self)
        self.setMenuBar(menu_bar)

        # File menu.
        file_menu = menu_bar.addMenu("File")

        # Create some actions to populate the File menu.
        open_action = QAction("Open", self)
        open_action.triggered.connect(self.open_file)
        file_menu.addAction(open_action)

        save_action = QAction("Save", self)
        save_action.triggered.connect(self.save_file)
        file_menu.addAction(save_action)

        exit_action = QAction("Exit", self)
        exit_action.triggered.connect(self.close)
        file_menu.addAction(exit_action)

        # Create a toolbar and add some actions.
        tool_bar = QToolBar("Main Toolbar", self)
        tool_bar.addAction(open_action)
        tool_bar.addAction(save_action)
        self.addToolBar(Qt.TopToolBarArea, tool_bar)

        # Set up a status bar at the bottom.
        self.setStatusBar(QStatusBar(self))
        self.statusBar().showMessage("Ready")

        # Create your central widget area.
        central_widget = QWidget()
        self.setCentralWidget(central_widget)
        layout = QVBoxLayout(central_widget)

        # A splitter as an example container that can hold multiple widgets side-by-side.
        splitter = QSplitter()

        # Left side: a simple list widget.
        self.list_widget = QListWidget()
        self.list_widget.addItem("Item A")
        self.list_widget.addItem("Item B")
        self.list_widget.addItem("Item C")
        splitter.addWidget(self.list_widget)

        # Right side: a text edit widget.
        self.text_edit = QTextEdit()
        self.text_edit.setPlainText("Type here...")
        splitter.addWidget(self.text_edit)

        layout.addWidget(splitter)

        # Example button in the central widget area.
        example_button = QPushButton("Click Me")
        example_button.clicked.connect(self.on_button_clicked)
        layout.addWidget(example_button)

    def open_file(self):
        file_name, _ = QFileDialog.getOpenFileName(self, "Open File", "", "All Files (*.*)")
        if file_name:
            self.statusBar().showMessage(f"Opened: {file_name}")

    def save_file(self):
        file_name, _ = QFileDialog.getSaveFileName(self, "Save File", "", "All Files (*.*)")
        if file_name:
            self.statusBar().showMessage(f"Saved: {file_name}")

    def on_button_clicked(self):
        self.statusBar().showMessage("Button clicked!")


def main():
    app = QApplication(sys.argv)
    window = MainWindow()
    window.resize(800, 600)
    window.show()
    sys.exit(app.exec())


if __name__ == "__main__":
    main()
398
Data/Modules/data_collector.py
Normal file
@ -0,0 +1,398 @@
|
||||
# Modules/data_collector.py
|
||||
|
||||
import threading
|
||||
import time
|
||||
import re
|
||||
import sys
|
||||
import numpy as np
|
||||
import cv2
|
||||
import concurrent.futures
|
||||
|
||||
# Vision-related Imports
|
||||
import pytesseract
|
||||
import easyocr
|
||||
import torch
|
||||
|
||||
from PIL import Image, ImageGrab, ImageFilter
|
||||
|
||||
from PyQt5.QtWidgets import QApplication, QWidget
|
||||
from PyQt5.QtCore import QRect, QPoint, Qt, QMutex, QTimer
|
||||
from PyQt5.QtGui import QPainter, QPen, QColor, QFont
|
||||
|
||||
# Initialize EasyOCR with CUDA support
|
||||
reader_cpu = None
|
||||
reader_gpu = None
|
||||
|
||||
def initialize_ocr_engines():
|
||||
global reader_cpu, reader_gpu
|
||||
reader_cpu = easyocr.Reader(['en'], gpu=False)
|
||||
reader_gpu = easyocr.Reader(['en'], gpu=True if torch.cuda.is_available() else False)
|
||||
|
||||
pytesseract.pytesseract.tesseract_cmd = r"C:\\Program Files\\Tesseract-OCR\\tesseract.exe"
|
||||
|
||||
DEFAULT_WIDTH = 180
|
||||
DEFAULT_HEIGHT = 130
|
||||
HANDLE_SIZE = 5
|
||||
LABEL_HEIGHT = 20
|
||||
|
||||
collector_mutex = QMutex()
|
||||
regions = {}
|
||||
|
||||
app_instance = None
|
||||
|
||||
def _ensure_qapplication():
|
||||
"""
|
||||
Ensures that QApplication is initialized before creating widgets.
|
||||
Must be called from the main thread.
|
||||
"""
|
||||
global app_instance
|
||||
if app_instance is None:
|
||||
app_instance = QApplication(sys.argv) # Start in main thread
|
||||
|
||||
def capture_region_as_image(region_id):
|
||||
collector_mutex.lock()
|
||||
if region_id not in regions:
|
||||
collector_mutex.unlock()
|
||||
return None
|
||||
x, y, w, h = regions[region_id]['bbox'][:]
|
||||
collector_mutex.unlock()
|
||||
screenshot = ImageGrab.grab(bbox=(x, y, x + w, y + h))
|
||||
return screenshot
|
||||
|
||||
def create_ocr_region(region_id, x=250, y=50, w=DEFAULT_WIDTH, h=DEFAULT_HEIGHT, color=(255, 255, 0), thickness=2):
|
||||
"""
|
||||
Creates an OCR region with a visible, resizable box on the screen.
|
||||
Allows setting custom color (RGB) and line thickness.
|
||||
"""
|
||||
_ensure_qapplication()
|
||||
|
||||
collector_mutex.lock()
|
||||
if region_id in regions:
|
||||
collector_mutex.unlock()
|
||||
return
|
||||
regions[region_id] = {
|
||||
'bbox': [x, y, w, h],
|
||||
'raw_text': "",
|
||||
'widget': OCRRegionWidget(x, y, w, h, region_id, color, thickness)
|
||||
}
|
||||
collector_mutex.unlock()
|
||||
|
||||
def get_raw_text(region_id):
|
||||
collector_mutex.lock()
|
||||
if region_id not in regions:
|
||||
collector_mutex.unlock()
|
||||
return ""
|
||||
text = regions[region_id]['raw_text']
|
||||
collector_mutex.unlock()
|
||||
return text
|
||||
|
||||
def start_collector():
|
||||
initialize_ocr_engines()
|
||||
t = threading.Thread(target=_update_ocr_loop, daemon=True)
|
||||
t.start()
|
||||
|
||||
def _update_ocr_loop():
|
||||
while True:
|
||||
collector_mutex.lock()
|
||||
region_ids = list(regions.keys())
|
||||
collector_mutex.unlock()
|
||||
|
||||
for rid in region_ids:
|
||||
collector_mutex.lock()
|
||||
bbox = regions[rid]['bbox'][:]
|
||||
collector_mutex.unlock()
|
||||
|
||||
x, y, w, h = bbox
|
||||
screenshot = ImageGrab.grab(bbox=(x, y, x + w, y + h))
|
||||
processed = _preprocess_image(screenshot)
|
||||
raw_text = pytesseract.image_to_string(processed, config='--psm 6 --oem 1')
|
||||
|
||||
collector_mutex.lock()
|
||||
if rid in regions:
|
||||
regions[rid]['raw_text'] = raw_text
|
||||
collector_mutex.unlock()
|
||||
|
||||
time.sleep(0.7)
|
||||
|
||||
def _preprocess_image(image):
|
||||
gray = image.convert("L")
|
||||
scaled = gray.resize((gray.width * 3, gray.height * 3))
|
||||
thresh = scaled.point(lambda p: 255 if p > 200 else 0)
|
||||
return thresh.filter(ImageFilter.MedianFilter(3))
|
||||
|
||||
|
||||
def find_word_positions(region_id, word, offset_x=0, offset_y=0, margin=5, ocr_engine="CPU", num_slices=1):
|
||||
"""
|
||||
Uses user-defined horizontal slices and threading for faster inference.
|
||||
"""
|
||||
collector_mutex.lock()
|
||||
if region_id not in regions:
|
||||
collector_mutex.unlock()
|
||||
return []
|
||||
|
||||
bbox = regions[region_id]['bbox']
|
||||
collector_mutex.unlock()
|
||||
|
||||
x, y, w, h = bbox
|
||||
left, top, right, bottom = x, y, x + w, y + h
|
||||
|
||||
if right <= left or bottom <= top:
|
||||
print(f"[ERROR] Invalid OCR region bounds: {bbox}")
|
||||
return []
|
||||
|
||||
try:
|
||||
image = ImageGrab.grab(bbox=(left, top, right, bottom))
|
||||
orig_width, orig_height = image.size
|
||||
|
||||
word_positions = []
|
||||
|
||||
# Ensure number of slices does not exceed image height
|
||||
num_slices = min(num_slices, orig_height)
|
||||
strip_height = max(1, orig_height // num_slices)
|
||||
|
||||
def process_strip(strip_id):
|
||||
strip_y = strip_id * strip_height
|
||||
strip = image.crop((0, strip_y, orig_width, strip_y + strip_height))
|
||||
|
||||
strip_np = np.array(strip)
|
||||
|
||||
detected_positions = []
|
||||
if ocr_engine == "CPU":
|
||||
ocr_data = pytesseract.image_to_data(strip, config='--psm 6 --oem 1', output_type=pytesseract.Output.DICT)
|
||||
|
||||
for i in range(len(ocr_data['text'])):
|
||||
if re.search(rf"\b{word}\b", ocr_data['text'][i], re.IGNORECASE):
|
||||
x_scaled = int(ocr_data['left'][i])
|
||||
y_scaled = int(ocr_data['top'][i]) + strip_y
|
||||
w_scaled = int(ocr_data['width'][i])
|
||||
h_scaled = int(ocr_data['height'][i])
|
||||
|
||||
detected_positions.append((x_scaled + offset_x, y_scaled + offset_y, w_scaled + (margin * 2), h_scaled + (margin * 2)))
|
||||
|
||||
else:
|
||||
results = reader_gpu.readtext(strip_np)
|
||||
for (bbox, text, _) in results:
|
||||
if re.search(rf"\b{word}\b", text, re.IGNORECASE):
|
||||
(x_min, y_min), (x_max, y_max) = bbox[0], bbox[2]
|
||||
|
||||
x_scaled = int(x_min)
|
||||
y_scaled = int(y_min) + strip_y
|
||||
w_scaled = int(x_max - x_min)
|
||||
h_scaled = int(y_max - y_min)
|
||||
|
||||
detected_positions.append((x_scaled + offset_x, y_scaled + offset_y, w_scaled + (margin * 2), h_scaled + (margin * 2)))
|
||||
|
||||
return detected_positions
|
||||
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=num_slices) as executor:
|
||||
strip_results = list(executor.map(process_strip, range(num_slices)))
|
||||
|
||||
for strip_result in strip_results:
|
||||
word_positions.extend(strip_result)
|
||||
|
||||
return word_positions
|
||||
|
||||
except Exception as e:
|
||||
print(f"[ERROR] Failed to capture OCR region: {e}")
|
||||
return []
|
||||
|
||||
def draw_identification_boxes(region_id, positions, color=(0, 0, 255), thickness=2):
|
||||
"""
|
||||
Draws non-interactive rectangles at specified positions within the given OCR region.
|
||||
Uses a separate rendering thread to prevent blocking OCR processing.
|
||||
"""
|
||||
collector_mutex.lock()
|
||||
if region_id in regions and 'widget' in regions[region_id]:
|
||||
widget = regions[region_id]['widget']
|
||||
widget.update_draw_positions(positions, color, thickness)
|
||||
collector_mutex.unlock()
|
||||
|
||||
def update_region_slices(region_id, num_slices):
|
||||
"""
|
||||
Updates the number of visual slices in the OCR region.
|
||||
"""
|
||||
collector_mutex.lock()
|
||||
if region_id in regions and 'widget' in regions[region_id]:
|
||||
widget = regions[region_id]['widget']
|
||||
widget.set_num_slices(num_slices)
|
||||
collector_mutex.unlock()
|
||||
|
||||
class OCRRegionWidget(QWidget):
|
||||
def __init__(self, x, y, w, h, region_id, color, thickness):
|
||||
super().__init__()
|
||||
|
||||
self.setGeometry(x, y, w, h)
|
||||
self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint | Qt.Tool)
|
||||
self.setAttribute(Qt.WA_TranslucentBackground, True)
|
||||
self.setAttribute(Qt.WA_TransparentForMouseEvents, False)
|
||||
|
||||
self.region_id = region_id
|
||||
self.box_color = QColor(*color)
|
||||
self.line_thickness = thickness
|
||||
self.draw_positions = []
|
||||
self.previous_positions = [] # This prevents redundant redraws
|
||||
self.num_slices = 1 # Ensures slice count is initialized
|
||||
|
||||
# --- Initialization for interactive handles ---
|
||||
self.selected_handle = None # Tracks which handle is being dragged/resized
|
||||
self.drag_offset = None # Tracks the offset for moving the widget
|
||||
|
||||
self.show()
|
||||
|
||||
def paintEvent(self, event):
|
||||
painter = QPainter(self)
|
||||
pen = QPen(self.box_color)
|
||||
pen.setWidth(self.line_thickness)
|
||||
painter.setPen(pen)
|
||||
|
||||
# Draw main rectangle
|
||||
painter.drawRect(0, 0, self.width(), self.height())
|
||||
|
||||
# Draw detected word overlays
|
||||
for x, y, w, h in self.draw_positions:
|
||||
painter.drawRect(x, y, w, h)
|
||||
|
||||
# Draw faint slice division lines
|
||||
if self.num_slices > 1:
|
||||
strip_height = self.height() // self.num_slices
|
||||
pen.setColor(QColor(150, 150, 150, 100)) # Light gray, semi-transparent
|
||||
pen.setWidth(1)
|
||||
painter.setPen(pen)
|
||||
|
||||
for i in range(1, self.num_slices): # Do not draw the last one at the bottom
|
||||
painter.drawLine(0, i * strip_height, self.width(), i * strip_height)
|
||||
|
||||
# --- Draw interactive handles (grabbers) with reduced opacity (15%) ---
|
||||
# 15% opacity of 255 is approximately 38
|
||||
handle_color = QColor(0, 0, 0, 50)
|
||||
for handle in self._resize_handles():
|
||||
painter.fillRect(handle, handle_color)
|
||||
painter.drawRect(handle) # Optional: draw a border around the handle
|
||||
|
||||
def set_draw_positions(self, positions, color, thickness):
|
||||
"""
|
||||
Updates the overlay positions and visual settings.
|
||||
"""
|
||||
self.draw_positions = positions
|
||||
self.box_color = QColor(*color)
|
||||
self.line_thickness = thickness
|
||||
self.update()
|
||||
|
||||
def update_draw_positions(self, positions, color, thickness):
|
||||
"""
|
||||
Updates the overlay positions and redraws only if the positions have changed.
|
||||
This prevents unnecessary flickering.
|
||||
"""
|
||||
if positions == self.previous_positions:
|
||||
return # No change, do not update
|
||||
|
||||
self.previous_positions = positions # Store last known positions
|
||||
self.draw_positions = positions
|
||||
self.box_color = QColor(*color)
|
||||
self.line_thickness = thickness
|
||||
self.update() # Redraw only if needed
|
||||
|
||||
def set_num_slices(self, num_slices):
|
||||
"""
|
||||
Updates the number of horizontal slices for visualization.
|
||||
"""
|
||||
self.num_slices = num_slices
|
||||
self.update()
|
||||
|
||||
def _resize_handles(self):
|
||||
"""
|
||||
Returns a list of QRect objects representing the interactive handles:
|
||||
- Index 0: Top-left (resize)
|
||||
- Index 1: Top-right (resize)
|
||||
- Index 2: Bottom-left (resize)
|
||||
        - Index 3: Bottom-right (resize)
        - Index 4: Top-center (dragger)
        """
        w, h = self.width(), self.height()
        handles = [
            QRect(0, 0, HANDLE_SIZE, HANDLE_SIZE),                             # Top-left
            QRect(w - HANDLE_SIZE, 0, HANDLE_SIZE, HANDLE_SIZE),               # Top-right
            QRect(0, h - HANDLE_SIZE, HANDLE_SIZE, HANDLE_SIZE),               # Bottom-left
            QRect(w - HANDLE_SIZE, h - HANDLE_SIZE, HANDLE_SIZE, HANDLE_SIZE)  # Bottom-right
        ]
        # Top-center handle: centered along the top edge
        top_center_x = (w - HANDLE_SIZE) // 2
        top_center = QRect(top_center_x, 0, HANDLE_SIZE, HANDLE_SIZE)
        handles.append(top_center)
        return handles

    def mousePressEvent(self, event):
        if event.button() == Qt.LeftButton:
            # Check if any handle (including the new top-center) is clicked
            for i, handle in enumerate(self._resize_handles()):
                if handle.contains(event.pos()):
                    self.selected_handle = i
                    # For the top-center handle (index 4), initialize drag offset for moving
                    if i == 4:
                        self.drag_offset = event.pos()
                    return
            # If no handle is clicked, allow dragging by clicking anywhere in the widget
            self.drag_offset = event.pos()

    def mouseMoveEvent(self, event):
        if self.selected_handle is not None:
            if self.selected_handle == 4:
                # --- Top-center handle dragging ---
                new_x = event.globalX() - self.drag_offset.x()
                new_y = event.globalY() - self.drag_offset.y()
                self.move(new_x, new_y)
                collector_mutex.lock()
                if self.region_id in regions:
                    regions[self.region_id]["bbox"] = [new_x, new_y, self.width(), self.height()]
                collector_mutex.unlock()
                self.update()
            else:
                # --- Resizing logic for corner handles ---
                if self.selected_handle == 0:    # Top-left
                    new_w = self.width() + (self.x() - event.globalX())
                    new_h = self.height() + (self.y() - event.globalY())
                    new_x = event.globalX()
                    new_y = event.globalY()
                elif self.selected_handle == 1:  # Top-right
                    new_w = event.globalX() - self.x()
                    new_h = self.height() + (self.y() - event.globalY())
                    new_x = self.x()
                    new_y = event.globalY()
                elif self.selected_handle == 2:  # Bottom-left
                    new_w = self.width() + (self.x() - event.globalX())
                    new_h = event.globalY() - self.y()
                    new_x = event.globalX()
                    new_y = self.y()
                elif self.selected_handle == 3:  # Bottom-right
                    new_w = event.globalX() - self.x()
                    new_h = event.globalY() - self.y()
                    new_x = self.x()
                    new_y = self.y()

                if new_w < 20:
                    new_w = 20
                if new_h < 20:
                    new_h = 20

                self.setGeometry(new_x, new_y, new_w, new_h)
                collector_mutex.lock()
                if self.region_id in regions:
                    regions[self.region_id]["bbox"] = [self.x(), self.y(), self.width(), self.height()]
                collector_mutex.unlock()
                self.update()
        elif self.drag_offset:
            # --- General widget dragging (if no handle was clicked) ---
            new_x = event.globalX() - self.drag_offset.x()
            new_y = event.globalY() - self.drag_offset.y()
            self.move(new_x, new_y)
            collector_mutex.lock()
            if self.region_id in regions:
                regions[self.region_id]["bbox"] = [new_x, new_y, self.width(), self.height()]
            collector_mutex.unlock()

    def mouseReleaseEvent(self, event):
        """
        Resets the drag/resize state once the mouse button is released.
        """
        self.selected_handle = None
        self.drag_offset = None
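For reference, the corner-resize branch above reduces to one piece of coordinate math: the dragged corner follows the global cursor, the opposite corner stays pinned, and the result is clamped to a 20 px minimum. A minimal standalone sketch of that math, using plain tuples instead of the widget's Qt types (resized_geometry and MIN_SIZE are illustrative names, not part of the real module):

# Hypothetical standalone sketch of the corner-resize math used above.
# "geom" is (x, y, w, h) in screen coordinates, "cursor" is the global mouse position.
MIN_SIZE = 20  # the handlers above clamp width/height to 20 px

def resized_geometry(handle_index, geom, cursor):
    x, y, w, h = geom
    cx, cy = cursor
    if handle_index == 0:      # Top-left: bottom-right corner stays fixed
        new_x, new_y = cx, cy
        new_w, new_h = w + (x - cx), h + (y - cy)
    elif handle_index == 1:    # Top-right
        new_x, new_y = x, cy
        new_w, new_h = cx - x, h + (y - cy)
    elif handle_index == 2:    # Bottom-left
        new_x, new_y = cx, y
        new_w, new_h = w + (x - cx), cy - y
    else:                      # Bottom-right
        new_x, new_y = x, y
        new_w, new_h = cx - x, cy - y
    return new_x, new_y, max(new_w, MIN_SIZE), max(new_h, MIN_SIZE)

# Dragging the top-left handle of a 100x100 box at (50, 50) to (40, 30) grows the box
# while keeping the bottom-right corner pinned at (150, 150):
print(resized_geometry(0, (50, 50, 100, 100), (40, 30)))  # (40, 30, 110, 120)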
156
Data/Modules/data_manager.py
Normal file
@ -0,0 +1,156 @@
import threading
import time
import base64
from flask import Flask, jsonify, Response
from PyQt5.QtCore import QMutex

# Global datastore for character metrics
data_store = {
    "hp_current": 0,
    "hp_total": 0,
    "mp_current": 0,
    "mp_total": 0,
    "fp_current": 0,
    "fp_total": 0,
    "exp": 0.0
}

# Mutex for thread safety
data_mutex = QMutex()

# Flag to ensure only one character status collector node exists
character_status_collector_exists = False

# A place to store the screenshot in base64
status_screenshot_base64 = ""

# Flask Application
app = Flask(__name__)

@app.route('/data')
def data_api():
    """
    Returns the current character metrics as JSON.
    """
    return jsonify(get_data())

@app.route('/exp')
def exp_api():
    """
    Returns the EXP data.
    """
    return jsonify({"exp": get_data()["exp"]})

@app.route('/hp')
def hp_api():
    """
    Returns the HP data.
    """
    return jsonify({
        "hp_current": get_data()["hp_current"],
        "hp_total": get_data()["hp_total"]
    })

@app.route('/mp')
def mp_api():
    """
    Returns the MP data.
    """
    return jsonify({
        "mp_current": get_data()["mp_current"],
        "mp_total": get_data()["mp_total"]
    })

@app.route('/fp')
def fp_api():
    """
    Returns the FP data.
    """
    return jsonify({
        "fp_current": get_data()["fp_current"],
        "fp_total": get_data()["fp_total"]
    })

@app.route('/flyff/status')
def status_screenshot():
    """
    Returns an HTML page that displays the stored screenshot and
    automatically refreshes it every second without requiring a manual page reload.
    """
    html = """
    <html>
    <head>
        <title>Borealis - Live Status</title>
        <script>
            // Reload the <img> every second
            setInterval(function(){
                var img = document.getElementById('status_img');
                img.src = '/flyff/status_rawdata?random=' + Math.random();
            }, 1000);
        </script>
    </head>
    <body>
        <img id="status_img" src="/flyff/status_rawdata" />
    </body>
    </html>
    """
    return html

@app.route('/flyff/status_rawdata')
def status_screenshot_data():
    """
    Serves the raw PNG bytes (decoded from base64) used by the <img> on /flyff/status.
    """
    data_mutex.lock()
    encoded = status_screenshot_base64
    data_mutex.unlock()

    if not encoded:
        # No image captured yet, return HTTP 204 "No Content"
        return "", 204

    raw_img = base64.b64decode(encoded)
    return Response(raw_img, mimetype='image/png')

def start_api_server():
    """
    Starts the Flask API server in a separate daemon thread.
    """
    def run():
        app.run(host="0.0.0.0", port=5000)  # Allows external connections
    t = threading.Thread(target=run, daemon=True)
    t.start()

def get_data():
    """
    Return a copy of the global data_store.
    """
    data_mutex.lock()
    data_copy = data_store.copy()
    data_mutex.unlock()
    return data_copy

def set_data(key, value):
    """
    Set a single metric in the global data_store.
    """
    data_mutex.lock()
    data_store[key] = value
    data_mutex.unlock()

def set_data_bulk(metrics_dict):
    """
    Update multiple metrics in the global data_store at once.
    """
    data_mutex.lock()
    data_store.update(metrics_dict)
    data_mutex.unlock()

def set_status_screenshot(encoded_str):
    """
    Called by the OCR node to store the base64-encoded screenshot data.
    """
    global status_screenshot_base64
    data_mutex.lock()
    status_screenshot_base64 = encoded_str
    data_mutex.unlock()
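With start_api_server() running, any external tool can read these endpoints over plain HTTP on port 5000. A hedged usage sketch of a client polling the JSON routes defined above (the loop, the two-second interval, and the poll_once name are illustrative, not part of data_manager.py):

import time
import requests

def poll_once(base_url="http://127.0.0.1:5000"):
    # /data returns every metric; /hp returns just the HP pair.
    metrics = requests.get(f"{base_url}/data", timeout=1).json()
    hp = requests.get(f"{base_url}/hp", timeout=1).json()
    print(f"HP {hp['hp_current']}/{hp['hp_total']}  EXP {metrics['exp']}%")

if __name__ == "__main__":
    while True:
        try:
            poll_once()
        except requests.RequestException as exc:
            print(f"[WARN] API not reachable yet: {exc}")
        time.sleep(2)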
38
Data/Nodes/Experimental/blueprint_node.py
Normal file
@ -0,0 +1,38 @@
#!/usr/bin/env python3

from OdenGraphQt import BaseNode
from Qt import QtCore

class BlueprintNode(BaseNode):
    """
    A placeholder node used to preview placement before spawning
    the real node. It has a distinct color and minimal UI.
    """
    __identifier__ = 'bunny-lab.io.blueprint'
    NODE_NAME = 'Blueprint Node'

    def __init__(self):
        super(BlueprintNode, self).__init__()
        # Display a name so the user sees "Click to Place Node"
        self.set_name("Click to Place Node")

        # Give it a bluish color + white text, for visibility
        self.set_color(60, 120, 220)  # R, G, B
        self.view.text_color = (255, 255, 255, 200)
        self.view.border_color = (255, 255, 255, 180)

        # Make it slightly transparent if desired (alpha=150)
        self.view._bg_color = (60, 120, 220, 150)

        # Remove any default inputs/outputs (make it minimal)
        for port in self.input_ports() + self.output_ports():
            self.model.delete_port(port.name(), port.port_type)

        # Store the "actual node" we want to spawn
        self.create_property("actual_node_type", "", widget_type=0)

    def process_input(self):
        """
        We do nothing here; it is purely a placeholder node.
        """
        pass
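The "actual_node_type" property is what a graph-side click handler would read when replacing the placeholder with the real node. A hedged sketch of that swap, assuming OdenGraphQt keeps NodeGraphQt's NodeGraph.create_node() / delete_node() API (spawn_real_node is an illustrative helper name):

def spawn_real_node(graph, blueprint_node):
    # Read the node type the blueprint was configured to represent.
    node_type = blueprint_node.get_property("actual_node_type")
    if not node_type:
        return None
    pos = blueprint_node.pos()          # reuse the placeholder's position
    graph.delete_node(blueprint_node)   # remove the preview
    return graph.create_node(node_type, pos=pos)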
BIN
Data/Nodes/Flyff/Resources/bars_template.png
Normal file
Binary file not shown.
Size: 5.6 KiB
50
Data/Nodes/Flyff/flyff_EXP_current.py
Normal file
@ -0,0 +1,50 @@
#!/usr/bin/env python3
"""
Flyff EXP Node (Final Combined Version)
  - Pulls the EXP value directly from data_manager.py
  - Outputs only the "exp" value as a string
  - Uses color (48, 116, 143) for its output port
  - Displays "exp" in a text field labeled "Value"
  - Retrieves the port with self.outputs().get('value')
"""

import time
import traceback
from OdenGraphQt import BaseNode
from Modules import data_manager  # Importing data_manager from Modules

class FlyffEXPCurrentNode(BaseNode):
    __identifier__ = 'bunny-lab.io.flyff_exp_current_node'
    NODE_NAME = 'Flyff - EXP'

    def __init__(self):
        super(FlyffEXPCurrentNode, self).__init__()

        # 1) Text input property named "value" for UI display
        self.add_text_input('value', 'Value', text='N/A')

        # 2) Output port also named "value"
        self.add_output('value', color=(48, 116, 143))

        self.set_name("Flyff - EXP")

    def process_input(self):
        try:
            new_value = data_manager.get_data().get("exp", "N/A")
            new_value_str = str(new_value)
            self.set_property('value', new_value_str)
            self.transmit_data(new_value_str)
        except Exception as e:
            tb = traceback.format_exc()
            print(f"[ERROR] Exception in FlyffEXPCurrentNode: {e}\nTraceback:\n{tb}")

    def transmit_data(self, data):
        output_port = self.outputs().get('value')
        if output_port and output_port.connected_ports():
            for connected_port in output_port.connected_ports():
                connected_node = connected_port.node()
                if hasattr(connected_node, 'receive_data'):
                    try:
                        connected_node.receive_data(data, source_port_name='value')
                    except Exception as e:
                        print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
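The transmit_data() call above only requires that downstream nodes expose a receive_data(data, source_port_name=None) method. A hedged sketch of a minimal consumer on the other end of the 'value' port (ExpPrinterNode is an illustrative example, not an existing node in this commit):

from OdenGraphQt import BaseNode

class ExpPrinterNode(BaseNode):
    __identifier__ = 'bunny-lab.io.exp_printer_node'
    NODE_NAME = 'EXP Printer'

    def __init__(self):
        super(ExpPrinterNode, self).__init__()
        self.add_input('exp')
        self.add_text_input('value', 'Value', text='N/A')

    def receive_data(self, data, source_port_name=None):
        # 'data' arrives as the string produced by FlyffEXPCurrentNode.
        self.set_property('value', str(data))
        print(f"[EXP] {data}% (from port {source_port_name})")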
93
Data/Nodes/Flyff/flyff_FP_current.py
Normal file
@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff FP Current Node (Final Combined Version)
|
||||
- Polls the API at http://127.0.0.1:5000/data
|
||||
- Outputs only the "fp_current" value as a string
|
||||
- Uses color (36, 116, 32) for its output port
|
||||
- Displays "fp_current" in a text field labeled "Value"
|
||||
- Retrieves the port with self.outputs().get('value')
|
||||
"""
|
||||
|
||||
import time
|
||||
import requests
|
||||
import traceback
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
class FlyffFPCurrentNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.flyff_fp_current_node'
|
||||
NODE_NAME = 'Flyff - FP Current'
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffFPCurrentNode, self).__init__()
|
||||
|
||||
# 1) Text input property named "value" for UI display
|
||||
self.add_text_input('value', 'Value', text='N/A')
|
||||
|
||||
# 2) Output port also named "value"
|
||||
self.add_output('value', color=(36, 116, 32))
|
||||
|
||||
self._api_down = True
|
||||
self._last_api_attempt = 0.0
|
||||
self._retry_interval = 5.0
|
||||
self._last_error_printed = 0.0
|
||||
|
||||
self.set_name("Flyff - FP Current (API Disconnected)")
|
||||
|
||||
def process_input(self):
|
||||
current_time = time.time()
|
||||
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
|
||||
return
|
||||
|
||||
self._last_api_attempt = current_time
|
||||
|
||||
try:
|
||||
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
|
||||
status_code = response.status_code
|
||||
print(f"[DEBUG] FlyffFPCurrentNode: HTTP Status Code = {status_code}")
|
||||
|
||||
if status_code == 200:
|
||||
try:
|
||||
data = response.json() or {}
|
||||
except ValueError:
|
||||
data = {}
|
||||
|
||||
if isinstance(data, list):
|
||||
data = {}
|
||||
|
||||
self._api_down = False
|
||||
self.set_name("Flyff - FP Current (API Connected)")
|
||||
|
||||
new_value = data.get("fp_current", "N/A")
|
||||
print(f"[DEBUG] FlyffFPCurrentNode: fp_current = {new_value}")
|
||||
|
||||
new_value_str = str(new_value)
|
||||
self.set_property('value', new_value_str)
|
||||
self.transmit_data(new_value_str)
|
||||
|
||||
else:
|
||||
self._handle_api_error(f"HTTP {status_code} from FlyffFPCurrentNode")
|
||||
self._api_down = True
|
||||
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
self._handle_api_error(f"Exception in FlyffFPCurrentNode: {e}\nTraceback:\n{tb}")
|
||||
self._api_down = True
|
||||
|
||||
def transmit_data(self, data):
|
||||
output_port = self.outputs().get('value')
|
||||
if output_port and output_port.connected_ports():
|
||||
for connected_port in output_port.connected_ports():
|
||||
connected_node = connected_port.node()
|
||||
if hasattr(connected_node, 'receive_data'):
|
||||
try:
|
||||
connected_node.receive_data(data, source_port_name='value')
|
||||
except Exception as e:
|
||||
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
|
||||
|
||||
def _handle_api_error(self, msg):
|
||||
current_time = time.time()
|
||||
if (current_time - self._last_error_printed) >= self._retry_interval:
|
||||
print(f"[ERROR] {msg}")
|
||||
self._last_error_printed = current_time
|
||||
|
||||
self.set_name("Flyff - FP Current (API Disconnected)")
|
93
Data/Nodes/Flyff/flyff_FP_total.py
Normal file
@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff FP Total Node (Final Combined Version)
|
||||
- Polls the API at http://127.0.0.1:5000/data
|
||||
- Outputs only the "fp_total" value as a string
|
||||
- Uses color (36, 116, 32) for its output port
|
||||
- Displays "fp_total" in a text field labeled "Value"
|
||||
- Retrieves the port with self.outputs().get('value')
|
||||
"""
|
||||
|
||||
import time
|
||||
import requests
|
||||
import traceback
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
class FlyffFPTotalNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.flyff_fp_total_node'
|
||||
NODE_NAME = 'Flyff - FP Total'
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffFPTotalNode, self).__init__()
|
||||
|
||||
# 1) Text input property named "value" for UI display
|
||||
self.add_text_input('value', 'Value', text='N/A')
|
||||
|
||||
# 2) Output port also named "value"
|
||||
self.add_output('value', color=(36, 116, 32))
|
||||
|
||||
self._api_down = True
|
||||
self._last_api_attempt = 0.0
|
||||
self._retry_interval = 5.0
|
||||
self._last_error_printed = 0.0
|
||||
|
||||
self.set_name("Flyff - FP Total (API Disconnected)")
|
||||
|
||||
def process_input(self):
|
||||
current_time = time.time()
|
||||
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
|
||||
return
|
||||
|
||||
self._last_api_attempt = current_time
|
||||
|
||||
try:
|
||||
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
|
||||
status_code = response.status_code
|
||||
print(f"[DEBUG] FlyffFPTotalNode: HTTP Status Code = {status_code}")
|
||||
|
||||
if status_code == 200:
|
||||
try:
|
||||
data = response.json() or {}
|
||||
except ValueError:
|
||||
data = {}
|
||||
|
||||
if isinstance(data, list):
|
||||
data = {}
|
||||
|
||||
self._api_down = False
|
||||
self.set_name("Flyff - FP Total (API Connected)")
|
||||
|
||||
new_value = data.get("fp_total", "N/A")
|
||||
print(f"[DEBUG] FlyffFPTotalNode: fp_total = {new_value}")
|
||||
|
||||
new_value_str = str(new_value)
|
||||
self.set_property('value', new_value_str)
|
||||
self.transmit_data(new_value_str)
|
||||
|
||||
else:
|
||||
self._handle_api_error(f"HTTP {status_code} from FlyffFPTotalNode")
|
||||
self._api_down = True
|
||||
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
self._handle_api_error(f"Exception in FlyffFPTotalNode: {e}\nTraceback:\n{tb}")
|
||||
self._api_down = True
|
||||
|
||||
def transmit_data(self, data):
|
||||
output_port = self.outputs().get('value')
|
||||
if output_port and output_port.connected_ports():
|
||||
for connected_port in output_port.connected_ports():
|
||||
connected_node = connected_port.node()
|
||||
if hasattr(connected_node, 'receive_data'):
|
||||
try:
|
||||
connected_node.receive_data(data, source_port_name='value')
|
||||
except Exception as e:
|
||||
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
|
||||
|
||||
def _handle_api_error(self, msg):
|
||||
current_time = time.time()
|
||||
if (current_time - self._last_error_printed) >= self._retry_interval:
|
||||
print(f"[ERROR] {msg}")
|
||||
self._last_error_printed = current_time
|
||||
|
||||
self.set_name("Flyff - FP Total (API Disconnected)")
|
112
Data/Nodes/Flyff/flyff_HP_current.py
Normal file
@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff HP Current Node (Final Combined Version)
|
||||
- Polls the API at http://127.0.0.1:5000/data
|
||||
- Outputs only the "hp_current" value as a string
|
||||
- Uses color (126, 36, 57) for its output port
|
||||
- Displays "hp_current" in a text field labeled "Value"
|
||||
- Avoids "list indices must be integers" by retrieving the port with self.outputs().get('value')
|
||||
"""
|
||||
|
||||
import time
|
||||
import requests
|
||||
import traceback
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
class FlyffHPCurrentNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.flyff_hp_current_node'
|
||||
NODE_NAME = 'Flyff - HP Current'
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffHPCurrentNode, self).__init__()
|
||||
|
||||
# 1) Add a text input property named "value" for UI display
|
||||
self.add_text_input('value', 'Value', text='N/A')
|
||||
|
||||
# 2) Add an output port also named "value"
|
||||
self.add_output('value', color=(126, 36, 57))
|
||||
|
||||
# Start in "disconnected" state
|
||||
self._api_down = True
|
||||
self._last_api_attempt = 0.0
|
||||
self._retry_interval = 5.0
|
||||
self._last_error_printed = 0.0
|
||||
|
||||
# Default node title
|
||||
self.set_name("Flyff - HP Current (API Disconnected)")
|
||||
|
||||
def process_input(self):
|
||||
"""
|
||||
Called periodically by the global timer in borealis.py
|
||||
"""
|
||||
current_time = time.time()
|
||||
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
|
||||
return
|
||||
|
||||
self._last_api_attempt = current_time
|
||||
|
||||
try:
|
||||
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
|
||||
status_code = response.status_code
|
||||
print(f"[DEBUG] FlyffHPCurrentNode: HTTP Status Code = {status_code}")
|
||||
|
||||
if status_code == 200:
|
||||
# Attempt to parse JSON
|
||||
try:
|
||||
data = response.json() or {}
|
||||
except ValueError:
|
||||
data = {}
|
||||
|
||||
# If data is a list, ignore or convert to {}
|
||||
if isinstance(data, list):
|
||||
data = {}
|
||||
|
||||
# Mark node as connected
|
||||
self._api_down = False
|
||||
self.set_name("Flyff - HP Current (API Connected)")
|
||||
|
||||
# Retrieve hp_current (default "N/A" if missing)
|
||||
new_value = data.get("hp_current", "N/A")
|
||||
print(f"[DEBUG] FlyffHPCurrentNode: hp_current = {new_value}")
|
||||
|
||||
# Convert to string
|
||||
new_value_str = str(new_value)
|
||||
|
||||
# 3) Update the text input property so the user sees it
|
||||
self.set_property('value', new_value_str)
|
||||
|
||||
# 4) Transmit to downstream nodes
|
||||
self.transmit_data(new_value_str)
|
||||
|
||||
else:
|
||||
# Non-200 => disconnected
|
||||
self._handle_api_error(f"HTTP {status_code} from FlyffHPCurrentNode")
|
||||
self._api_down = True
|
||||
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
self._handle_api_error(f"Exception in FlyffHPCurrentNode: {e}\nTraceback:\n{tb}")
|
||||
self._api_down = True
|
||||
|
||||
def transmit_data(self, data):
|
||||
"""
|
||||
Sends 'data' to any connected node via the "value" port.
|
||||
(Uses self.outputs().get('value') instead of self.output('value'))
|
||||
"""
|
||||
output_port = self.outputs().get('value')
|
||||
if output_port and output_port.connected_ports():
|
||||
for connected_port in output_port.connected_ports():
|
||||
connected_node = connected_port.node()
|
||||
if hasattr(connected_node, 'receive_data'):
|
||||
try:
|
||||
connected_node.receive_data(data, source_port_name='value')
|
||||
except Exception as e:
|
||||
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
|
||||
|
||||
def _handle_api_error(self, msg):
|
||||
current_time = time.time()
|
||||
if (current_time - self._last_error_printed) >= self._retry_interval:
|
||||
print(f"[ERROR] {msg}")
|
||||
self._last_error_printed = current_time
|
||||
|
||||
self.set_name("Flyff - HP Current (API Disconnected)")
|
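All of the API-polling stat nodes (HP/MP/FP, current and total) share the same poll-and-back-off pattern shown above: skip requests while the API is marked down and the retry interval has not elapsed, otherwise fetch, tolerate odd payload shapes, and flip the connected/disconnected state. A hedged standalone restatement of that pattern (fetch_metric and the module-level state are illustrative; the real nodes keep this state per node instance):

import time
import requests

RETRY_INTERVAL = 5.0
_last_attempt = 0.0
_api_down = True

def fetch_metric(key, url="http://127.0.0.1:5000/data"):
    global _last_attempt, _api_down
    now = time.time()
    if _api_down and (now - _last_attempt) < RETRY_INTERVAL:
        return None                      # still backing off
    _last_attempt = now
    try:
        response = requests.get(url, timeout=1)
        if response.status_code != 200:
            _api_down = True
            return None
        data = response.json() or {}
        if isinstance(data, list):       # tolerate unexpected payload shapes
            data = {}
        _api_down = False
        return str(data.get(key, "N/A"))
    except requests.RequestException:
        _api_down = True
        return None

print(fetch_metric("hp_current"))  # e.g. "1250", or None while the API is down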
93
Data/Nodes/Flyff/flyff_HP_total.py
Normal file
@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff HP Total Node (Final Combined Version)
|
||||
- Polls the API at http://127.0.0.1:5000/data
|
||||
- Outputs only the "hp_total" value as a string
|
||||
- Uses color (126, 36, 57) for its output port
|
||||
- Displays "hp_total" in a text field labeled "Value"
|
||||
- Retrieves the port with self.outputs().get('value')
|
||||
"""
|
||||
|
||||
import time
|
||||
import requests
|
||||
import traceback
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
class FlyffHPTotalNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.flyff_hp_total_node'
|
||||
NODE_NAME = 'Flyff - HP Total'
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffHPTotalNode, self).__init__()
|
||||
|
||||
# 1) Text input property named "value" for UI display
|
||||
self.add_text_input('value', 'Value', text='N/A')
|
||||
|
||||
# 2) Output port also named "value"
|
||||
self.add_output('value', color=(126, 36, 57))
|
||||
|
||||
self._api_down = True
|
||||
self._last_api_attempt = 0.0
|
||||
self._retry_interval = 5.0
|
||||
self._last_error_printed = 0.0
|
||||
|
||||
self.set_name("Flyff - HP Total (API Disconnected)")
|
||||
|
||||
def process_input(self):
|
||||
current_time = time.time()
|
||||
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
|
||||
return
|
||||
|
||||
self._last_api_attempt = current_time
|
||||
|
||||
try:
|
||||
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
|
||||
status_code = response.status_code
|
||||
print(f"[DEBUG] FlyffHPTotalNode: HTTP Status Code = {status_code}")
|
||||
|
||||
if status_code == 200:
|
||||
try:
|
||||
data = response.json() or {}
|
||||
except ValueError:
|
||||
data = {}
|
||||
|
||||
if isinstance(data, list):
|
||||
data = {}
|
||||
|
||||
self._api_down = False
|
||||
self.set_name("Flyff - HP Total (API Connected)")
|
||||
|
||||
new_value = data.get("hp_total", "N/A")
|
||||
print(f"[DEBUG] FlyffHPTotalNode: hp_total = {new_value}")
|
||||
|
||||
new_value_str = str(new_value)
|
||||
self.set_property('value', new_value_str)
|
||||
self.transmit_data(new_value_str)
|
||||
|
||||
else:
|
||||
self._handle_api_error(f"HTTP {status_code} from FlyffHPTotalNode")
|
||||
self._api_down = True
|
||||
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
self._handle_api_error(f"Exception in FlyffHPTotalNode: {e}\nTraceback:\n{tb}")
|
||||
self._api_down = True
|
||||
|
||||
def transmit_data(self, data):
|
||||
output_port = self.outputs().get('value')
|
||||
if output_port and output_port.connected_ports():
|
||||
for connected_port in output_port.connected_ports():
|
||||
connected_node = connected_port.node()
|
||||
if hasattr(connected_node, 'receive_data'):
|
||||
try:
|
||||
connected_node.receive_data(data, source_port_name='value')
|
||||
except Exception as e:
|
||||
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
|
||||
|
||||
def _handle_api_error(self, msg):
|
||||
current_time = time.time()
|
||||
if (current_time - self._last_error_printed) >= self._retry_interval:
|
||||
print(f"[ERROR] {msg}")
|
||||
self._last_error_printed = current_time
|
||||
|
||||
self.set_name("Flyff - HP Total (API Disconnected)")
|
93
Data/Nodes/Flyff/flyff_MP_current.py
Normal file
@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff MP Current Node (Final Combined Version)
|
||||
- Polls the API at http://127.0.0.1:5000/data
|
||||
- Outputs only the "mp_current" value as a string
|
||||
- Uses color (35, 89, 144) for its output port
|
||||
- Displays "mp_current" in a text field labeled "Value"
|
||||
- Retrieves the port with self.outputs().get('value')
|
||||
"""
|
||||
|
||||
import time
|
||||
import requests
|
||||
import traceback
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
class FlyffMPCurrentNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.flyff_mp_current_node'
|
||||
NODE_NAME = 'Flyff - MP Current'
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffMPCurrentNode, self).__init__()
|
||||
|
||||
# 1) Text input property named "value" for UI display
|
||||
self.add_text_input('value', 'Value', text='N/A')
|
||||
|
||||
# 2) Output port also named "value"
|
||||
self.add_output('value', color=(35, 89, 144))
|
||||
|
||||
self._api_down = True
|
||||
self._last_api_attempt = 0.0
|
||||
self._retry_interval = 5.0
|
||||
self._last_error_printed = 0.0
|
||||
|
||||
self.set_name("Flyff - MP Current (API Disconnected)")
|
||||
|
||||
def process_input(self):
|
||||
current_time = time.time()
|
||||
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
|
||||
return
|
||||
|
||||
self._last_api_attempt = current_time
|
||||
|
||||
try:
|
||||
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
|
||||
status_code = response.status_code
|
||||
print(f"[DEBUG] FlyffMPCurrentNode: HTTP Status Code = {status_code}")
|
||||
|
||||
if status_code == 200:
|
||||
try:
|
||||
data = response.json() or {}
|
||||
except ValueError:
|
||||
data = {}
|
||||
|
||||
if isinstance(data, list):
|
||||
data = {}
|
||||
|
||||
self._api_down = False
|
||||
self.set_name("Flyff - MP Current (API Connected)")
|
||||
|
||||
new_value = data.get("mp_current", "N/A")
|
||||
print(f"[DEBUG] FlyffMPCurrentNode: mp_current = {new_value}")
|
||||
|
||||
new_value_str = str(new_value)
|
||||
self.set_property('value', new_value_str)
|
||||
self.transmit_data(new_value_str)
|
||||
|
||||
else:
|
||||
self._handle_api_error(f"HTTP {status_code} from FlyffMPCurrentNode")
|
||||
self._api_down = True
|
||||
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
self._handle_api_error(f"Exception in FlyffMPCurrentNode: {e}\nTraceback:\n{tb}")
|
||||
self._api_down = True
|
||||
|
||||
def transmit_data(self, data):
|
||||
output_port = self.outputs().get('value')
|
||||
if output_port and output_port.connected_ports():
|
||||
for connected_port in output_port.connected_ports():
|
||||
connected_node = connected_port.node()
|
||||
if hasattr(connected_node, 'receive_data'):
|
||||
try:
|
||||
connected_node.receive_data(data, source_port_name='value')
|
||||
except Exception as e:
|
||||
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
|
||||
|
||||
def _handle_api_error(self, msg):
|
||||
current_time = time.time()
|
||||
if (current_time - self._last_error_printed) >= self._retry_interval:
|
||||
print(f"[ERROR] {msg}")
|
||||
self._last_error_printed = current_time
|
||||
|
||||
self.set_name("Flyff - MP Current (API Disconnected)")
|
93
Data/Nodes/Flyff/flyff_MP_total.py
Normal file
@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff MP Total Node (Final Combined Version)
|
||||
- Polls the API at http://127.0.0.1:5000/data
|
||||
- Outputs only the "mp_total" value as a string
|
||||
- Uses color (35, 89, 144) for its output port
|
||||
- Displays "mp_total" in a text field labeled "Value"
|
||||
- Retrieves the port with self.outputs().get('value')
|
||||
"""
|
||||
|
||||
import time
|
||||
import requests
|
||||
import traceback
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
class FlyffMPTotalNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.flyff_mp_total_node'
|
||||
NODE_NAME = 'Flyff - MP Total'
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffMPTotalNode, self).__init__()
|
||||
|
||||
# 1) Text input property named "value" for UI display
|
||||
self.add_text_input('value', 'Value', text='N/A')
|
||||
|
||||
# 2) Output port also named "value"
|
||||
self.add_output('value', color=(35, 89, 144))
|
||||
|
||||
self._api_down = True
|
||||
self._last_api_attempt = 0.0
|
||||
self._retry_interval = 5.0
|
||||
self._last_error_printed = 0.0
|
||||
|
||||
self.set_name("Flyff - MP Total (API Disconnected)")
|
||||
|
||||
def process_input(self):
|
||||
current_time = time.time()
|
||||
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
|
||||
return
|
||||
|
||||
self._last_api_attempt = current_time
|
||||
|
||||
try:
|
||||
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
|
||||
status_code = response.status_code
|
||||
print(f"[DEBUG] FlyffMPTotalNode: HTTP Status Code = {status_code}")
|
||||
|
||||
if status_code == 200:
|
||||
try:
|
||||
data = response.json() or {}
|
||||
except ValueError:
|
||||
data = {}
|
||||
|
||||
if isinstance(data, list):
|
||||
data = {}
|
||||
|
||||
self._api_down = False
|
||||
self.set_name("Flyff - MP Total (API Connected)")
|
||||
|
||||
new_value = data.get("mp_total", "N/A")
|
||||
print(f"[DEBUG] FlyffMPTotalNode: mp_total = {new_value}")
|
||||
|
||||
new_value_str = str(new_value)
|
||||
self.set_property('value', new_value_str)
|
||||
self.transmit_data(new_value_str)
|
||||
|
||||
else:
|
||||
self._handle_api_error(f"HTTP {status_code} from FlyffMPTotalNode")
|
||||
self._api_down = True
|
||||
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
self._handle_api_error(f"Exception in FlyffMPTotalNode: {e}\nTraceback:\n{tb}")
|
||||
self._api_down = True
|
||||
|
||||
def transmit_data(self, data):
|
||||
output_port = self.outputs().get('value')
|
||||
if output_port and output_port.connected_ports():
|
||||
for connected_port in output_port.connected_ports():
|
||||
connected_node = connected_port.node()
|
||||
if hasattr(connected_node, 'receive_data'):
|
||||
try:
|
||||
connected_node.receive_data(data, source_port_name='value')
|
||||
except Exception as e:
|
||||
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
|
||||
|
||||
def _handle_api_error(self, msg):
|
||||
current_time = time.time()
|
||||
if (current_time - self._last_error_printed) >= self._retry_interval:
|
||||
print(f"[ERROR] {msg}")
|
||||
self._last_error_printed = current_time
|
||||
|
||||
self.set_name("Flyff - MP Total (API Disconnected)")
|
129
Data/Nodes/Flyff/flyff_character_status_node.py
Normal file
@ -0,0 +1,129 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff Character Status Node:
|
||||
- Creates an OCR region in data_collector.
|
||||
- Periodically captures a screenshot and updates Flask.
|
||||
- If OCR is enabled, it extracts character status and updates the data_manager.
|
||||
"""
|
||||
|
||||
import re
|
||||
import base64
|
||||
from io import BytesIO
|
||||
|
||||
from OdenGraphQt import BaseNode
|
||||
from PyQt5.QtWidgets import QMessageBox
|
||||
from PyQt5.QtCore import QTimer
|
||||
|
||||
# Import the existing modules
|
||||
from Modules import data_manager, data_collector
|
||||
|
||||
class FlyffCharacterStatusNode(BaseNode):
|
||||
__identifier__ = "bunny-lab.io.flyff_character_status_node"
|
||||
NODE_NAME = "Flyff - Character Status"
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffCharacterStatusNode, self).__init__()
|
||||
|
||||
if data_manager.character_status_collector_exists:
|
||||
QMessageBox.critical(None, "Error", "Only one Flyff Character Status Collector node is allowed.")
|
||||
raise Exception("Duplicate Character Status Node.")
|
||||
data_manager.character_status_collector_exists = True
|
||||
|
||||
# Add the Data Collection dropdown menu
|
||||
self.add_combo_menu("data_collection", "Data Collection", items=["Disabled", "Enabled"])
|
||||
self.set_property("data_collection", "Disabled") # Default to Disabled
|
||||
|
||||
self.add_text_input("hp", "HP", text="HP: 0/0")
|
||||
self.add_text_input("mp", "MP", text="MP: 0/0")
|
||||
self.add_text_input("fp", "FP", text="FP: 0/0")
|
||||
self.add_text_input("exp", "EXP", text="EXP: 0%")
|
||||
|
||||
self.region_id = "character_status"
|
||||
data_collector.create_ocr_region(
|
||||
self.region_id, x=250, y=50, w=180, h=130,
|
||||
color=(255, 255, 0), thickness=2
|
||||
)
|
||||
|
||||
data_collector.start_collector()
|
||||
self.set_name("Flyff - Character Status")
|
||||
|
||||
# Set up a timer to periodically update character stats
|
||||
self.timer = QTimer()
|
||||
self.timer.timeout.connect(self.process_input)
|
||||
self.timer.start(1000) # Update every second
|
||||
|
||||
def parse_character_stats(self, raw_text):
|
||||
"""
|
||||
Extract HP, MP, FP, EXP from the raw OCR text lines.
|
||||
"""
|
||||
lines = [l.strip() for l in raw_text.splitlines() if l.strip()]
|
||||
hp_current, hp_total = 0, 0
|
||||
mp_current, mp_total = 0, 0
|
||||
fp_current, fp_total = 0, 0
|
||||
exp_value = 0.0
|
||||
|
||||
if len(lines) >= 4:
|
||||
# line 1: HP
|
||||
hp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[0])
|
||||
if hp_match:
|
||||
hp_current = int(hp_match.group(1))
|
||||
hp_total = int(hp_match.group(2))
|
||||
|
||||
# line 2: MP
|
||||
mp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[1])
|
||||
if mp_match:
|
||||
mp_current = int(mp_match.group(1))
|
||||
mp_total = int(mp_match.group(2))
|
||||
|
||||
# line 3: FP
|
||||
fp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[2])
|
||||
if fp_match:
|
||||
fp_current = int(fp_match.group(1))
|
||||
fp_total = int(fp_match.group(2))
|
||||
|
||||
# line 4: EXP
|
||||
exp_match = re.search(r"(\d+(?:\.\d+)?)", lines[3])
|
||||
if exp_match:
|
||||
val = float(exp_match.group(1))
|
||||
if val < 0: val = 0
|
||||
if val > 100: val = 100
|
||||
exp_value = val
|
||||
|
||||
return hp_current, hp_total, mp_current, mp_total, fp_current, fp_total, exp_value
|
||||
|
||||
def process_input(self):
|
||||
"""
|
||||
Called periodically to capture a screenshot and update character status (if enabled).
|
||||
"""
|
||||
# Always capture the screenshot, regardless of OCR status
|
||||
screenshot_img = data_collector.capture_region_as_image(self.region_id)
|
||||
if screenshot_img:
|
||||
buf = BytesIO()
|
||||
screenshot_img.save(buf, format='PNG')
|
||||
image_b64 = base64.b64encode(buf.getvalue()).decode('utf-8')
|
||||
data_manager.set_status_screenshot(image_b64)
|
||||
|
||||
# If OCR is disabled, return early (skip OCR processing)
|
||||
if self.get_property("data_collection") == "Disabled":
|
||||
return
|
||||
|
||||
# Process OCR if enabled
|
||||
raw_text = data_collector.get_raw_text(self.region_id)
|
||||
hp_c, hp_t, mp_c, mp_t, fp_c, fp_t, exp_v = self.parse_character_stats(raw_text)
|
||||
|
||||
# Update data_manager with parsed values
|
||||
data_manager.set_data_bulk({
|
||||
"hp_current": hp_c,
|
||||
"hp_total": hp_t,
|
||||
"mp_current": mp_c,
|
||||
"mp_total": mp_t,
|
||||
"fp_current": fp_c,
|
||||
"fp_total": fp_t,
|
||||
"exp": exp_v
|
||||
})
|
||||
|
||||
# Update the node's UI text fields
|
||||
self.set_property("hp", f"HP: {hp_c}/{hp_t}")
|
||||
self.set_property("mp", f"MP: {mp_c}/{mp_t}")
|
||||
self.set_property("fp", f"FP: {fp_c}/{fp_t}")
|
||||
self.set_property("exp", f"EXP: {exp_v}%")
|
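parse_character_stats() expects the OCR region to yield four lines: three "current/total" pairs (HP, MP, FP) followed by an EXP percentage. A hedged illustration of what those regexes extract, using invented sample text:

import re

# Invented OCR output for the example; real text comes from data_collector.get_raw_text().
sample = """1250 / 1800
640/900
310 / 450
42.7%"""

lines = [l.strip() for l in sample.splitlines() if l.strip()]
hp = re.search(r"(\d+)\s*/\s*(\d+)", lines[0])
mp = re.search(r"(\d+)\s*/\s*(\d+)", lines[1])
fp = re.search(r"(\d+)\s*/\s*(\d+)", lines[2])
exp = re.search(r"(\d+(?:\.\d+)?)", lines[3])

print(hp.groups(), mp.groups(), fp.groups(), exp.group(1))
# ('1250', '1800') ('640', '900') ('310', '450') '42.7'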
141
Data/Nodes/Flyff/flyff_leveling_predictor_node.py
Normal file
@ -0,0 +1,141 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Flyff - Leveling Predictor Node:
|
||||
- Tracks the last N changes in EXP values.
|
||||
- Calculates the average change rate and time intervals.
|
||||
- Predicts the estimated time to reach level 100.
|
||||
"""
|
||||
|
||||
import time
|
||||
import numpy as np
|
||||
from OdenGraphQt import BaseNode
|
||||
from PyQt5.QtCore import QTimer
|
||||
from Modules import data_manager
|
||||
|
||||
class FlyffLevelingPredictorNode(BaseNode):
|
||||
__identifier__ = "bunny-lab.io.flyff_leveling_predictor_node"
|
||||
NODE_NAME = "Flyff - Leveling Predictor"
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffLevelingPredictorNode, self).__init__()
|
||||
|
||||
# Input port for EXP values
|
||||
self.add_input("exp", "EXP")
|
||||
|
||||
# User-defined number of changes to track
|
||||
self.add_text_input("exp_track_count", "# of EXP Changes to Track", text="7")
|
||||
|
||||
# Output widgets
|
||||
self.add_text_input("time_to_level", "Time to Level", text="Calculating...")
|
||||
self.add_text_input("time_between_kills", "Time Between Kills", text="N/A")
|
||||
self.add_text_input("exp_per_kill", "EXP Per Kill", text="N/A")
|
||||
|
||||
# Internal tracking lists
|
||||
self.exp_history = []
|
||||
self.time_intervals = []
|
||||
self.last_exp_value = None
|
||||
self.last_update_time = None
|
||||
|
||||
# Timer to periodically process EXP changes
|
||||
self.timer = QTimer()
|
||||
self.timer.timeout.connect(self.process_exp_change)
|
||||
self.timer.start(1000) # Check for updates every second
|
||||
|
||||
def reset_tracking_arrays(self):
|
||||
"""
|
||||
Resets the EXP history and time interval arrays when a level-up is detected.
|
||||
"""
|
||||
self.exp_history.clear()
|
||||
self.time_intervals.clear()
|
||||
self.last_exp_value = None
|
||||
self.last_update_time = None
|
||||
|
||||
def process_exp_change(self):
|
||||
"""
|
||||
Monitors changes in EXP values and calculates various statistics.
|
||||
"""
|
||||
exp_value = data_manager.get_data().get("exp", None)
|
||||
if exp_value is None:
|
||||
return
|
||||
|
||||
exp_track_count = self.get_property("exp_track_count")
|
||||
try:
|
||||
exp_track_count = int(exp_track_count)
|
||||
except ValueError:
|
||||
exp_track_count = 7 # Default to 7 if invalid input
|
||||
|
||||
# Reset if EXP value decreases (indicating a level-up)
|
||||
if self.last_exp_value is not None and exp_value < self.last_exp_value:
|
||||
self.reset_tracking_arrays()
|
||||
|
||||
if self.last_exp_value is not None and exp_value != self.last_exp_value:
|
||||
current_time = time.time()
|
||||
|
||||
# Store EXP change history
|
||||
self.exp_history.append(exp_value)
|
||||
if len(self.exp_history) > exp_track_count:
|
||||
self.exp_history.pop(0)
|
||||
|
||||
# Store time intervals
|
||||
if self.last_update_time is not None:
|
||||
interval = current_time - self.last_update_time
|
||||
self.time_intervals.append(interval)
|
||||
if len(self.time_intervals) > exp_track_count:
|
||||
self.time_intervals.pop(0)
|
||||
|
||||
# Perform calculations
|
||||
self.calculate_time_to_level()
|
||||
self.calculate_additional_metrics()
|
||||
|
||||
# Update last tracking values
|
||||
self.last_update_time = current_time
|
||||
|
||||
self.last_exp_value = exp_value
|
||||
|
||||
def calculate_time_to_level(self):
|
||||
"""
|
||||
Calculates the estimated time to reach level 100 based on EXP change history.
|
||||
"""
|
||||
if len(self.exp_history) < 2 or len(self.time_intervals) < 1:
|
||||
self.set_property("time_to_level", "Insufficient data")
|
||||
return
|
||||
|
||||
exp_deltas = np.diff(self.exp_history) # Compute EXP change per interval
|
||||
avg_exp_change = np.mean(exp_deltas) if len(exp_deltas) > 0 else 0
|
||||
avg_time_change = np.mean(self.time_intervals)
|
||||
|
||||
if avg_exp_change <= 0:
|
||||
self.set_property("time_to_level", "Not gaining EXP")
|
||||
return
|
||||
|
||||
current_exp = self.exp_history[-1]
|
||||
remaining_exp = 100.0 - current_exp # Distance to level 100
|
||||
|
||||
estimated_time = (remaining_exp / avg_exp_change) * avg_time_change
|
||||
|
||||
# Convert estimated time into hours, minutes, and seconds
|
||||
hours = int(estimated_time // 3600)
|
||||
minutes = int((estimated_time % 3600) // 60)
|
||||
seconds = int(estimated_time % 60)
|
||||
|
||||
time_str = f"{hours}h {minutes}m {seconds}s"
|
||||
self.set_property("time_to_level", time_str)
|
||||
|
||||
def calculate_additional_metrics(self):
|
||||
"""
|
||||
Calculates and updates the "Time Between Kills" and "EXP Per Kill".
|
||||
"""
|
||||
if len(self.time_intervals) > 0:
|
||||
avg_time_between_kills = np.mean(self.time_intervals)
|
||||
minutes = int(avg_time_between_kills // 60)
|
||||
seconds = int(avg_time_between_kills % 60)
|
||||
self.set_property("time_between_kills", f"{minutes}m {seconds}s")
|
||||
else:
|
||||
self.set_property("time_between_kills", "N/A")
|
||||
|
||||
if len(self.exp_history) > 1:
|
||||
exp_deltas = np.diff(self.exp_history)
|
||||
avg_exp_per_kill = np.mean(exp_deltas) if len(exp_deltas) > 0 else 0
|
||||
self.set_property("exp_per_kill", f"{avg_exp_per_kill:.2f}%")
|
||||
else:
|
||||
self.set_property("exp_per_kill", "N/A")
|
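The estimate in calculate_time_to_level() is: average EXP gained per kill (via np.diff on the history), average seconds per kill, then remaining_exp / avg_gain * avg_interval seconds until 100%. A hedged worked example with invented samples:

import numpy as np

exp_history = [40.0, 40.8, 41.5, 42.3]   # % values after each kill (invented)
time_intervals = [30.0, 28.0, 32.0]      # seconds between those kills (invented)

avg_gain = np.mean(np.diff(exp_history))  # ~0.77 % per kill
avg_interval = np.mean(time_intervals)    # 30.0 s per kill
remaining = 100.0 - exp_history[-1]       # 57.7 %
eta_seconds = (remaining / avg_gain) * avg_interval

print(f"{int(eta_seconds // 3600)}h {int(eta_seconds % 3600 // 60)}m")  # roughly "0h 37m"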
134
Data/Nodes/Flyff/flyff_low_health_alert_node.py
Normal file
@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Standardized Flyff Low Health Alert Node:
|
||||
- Monitors an input value (1 = health alert, 0 = normal).
|
||||
- Displays a visual alert and plays a sound if enabled.
|
||||
- Uses a global update timer for processing.
|
||||
- Automatically processes float, int, and string values.
|
||||
"""
|
||||
|
||||
import time
|
||||
from OdenGraphQt import BaseNode
|
||||
from Qt import QtCore, QtWidgets, QtGui
|
||||
|
||||
try:
|
||||
import winsound
|
||||
HAS_WINSOUND = True
|
||||
except ImportError:
|
||||
winsound = None
|
||||
HAS_WINSOUND = False
|
||||
|
||||
class OverlayCanvas(QtWidgets.QWidget):
|
||||
"""
|
||||
UI overlay for displaying a red warning box, which can be repositioned by dragging.
|
||||
"""
|
||||
def __init__(self, parent=None):
|
||||
super().__init__(parent)
|
||||
screen_geo = QtWidgets.QApplication.primaryScreen().geometry()
|
||||
self.setGeometry(screen_geo)
|
||||
self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.WindowStaysOnTopHint)
|
||||
self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
|
||||
self.setVisible(False)
|
||||
self.helper_LowHealthAlert = QtCore.QRect(250, 300, 900, 35)
|
||||
self.dragging = False
|
||||
self.drag_offset = None
|
||||
|
||||
def paintEvent(self, event):
|
||||
if not self.isVisible():
|
||||
return
|
||||
painter = QtGui.QPainter(self)
|
||||
painter.setPen(QtCore.Qt.NoPen)
|
||||
painter.setBrush(QtGui.QColor(255, 0, 0))
|
||||
painter.drawRect(self.helper_LowHealthAlert)
|
||||
font = QtGui.QFont("Arial", 14, QtGui.QFont.Bold)
|
||||
painter.setFont(font)
|
||||
painter.setPen(QtGui.QColor(255, 255, 255))
|
||||
text_x = self.helper_LowHealthAlert.center().x() - 50
|
||||
text_y = self.helper_LowHealthAlert.center().y() + 5
|
||||
painter.drawText(text_x, text_y, "LOW HEALTH")
|
||||
|
||||
def toggle_alert(self, state):
|
||||
self.setVisible(state == 1)
|
||||
self.update()
|
||||
|
||||
def mousePressEvent(self, event):
|
||||
if event.button() == QtCore.Qt.LeftButton:
|
||||
if self.helper_LowHealthAlert.contains(event.pos()):
|
||||
self.dragging = True
|
||||
self.drag_offset = event.pos() - self.helper_LowHealthAlert.topLeft()
|
||||
super().mousePressEvent(event)
|
||||
|
||||
def mouseMoveEvent(self, event):
|
||||
if self.dragging:
|
||||
new_top_left = event.pos() - self.drag_offset
|
||||
self.helper_LowHealthAlert.moveTo(new_top_left)
|
||||
self.update()
|
||||
super().mouseMoveEvent(event)
|
||||
|
||||
def mouseReleaseEvent(self, event):
|
||||
if event.button() == QtCore.Qt.LeftButton:
|
||||
self.dragging = False
|
||||
super().mouseReleaseEvent(event)
|
||||
|
||||
class FlyffLowHealthAlertNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.flyff_low_health_alert_node'
|
||||
NODE_NAME = 'Flyff - Low Health Alert'
|
||||
|
||||
overlay_instance = None
|
||||
last_beep_time = 0
|
||||
BEEP_INTERVAL_SECONDS = 2
|
||||
|
||||
def __init__(self):
|
||||
super(FlyffLowHealthAlertNode, self).__init__()
|
||||
self.add_checkbox('cb_1', '', 'Sound Alert', True)
|
||||
self.add_checkbox('cb_2', '', 'Visual Alert', True)
|
||||
self.add_input('Toggle (1 = On | 0 = Off)', color=(200, 100, 0))
|
||||
self.add_text_input('value', 'Current Value', text='0')
|
||||
self.add_combo_menu('beep_interval', 'Beep Interval', items=["0.5s", "1.0s", "2.0s"])
|
||||
|
||||
if not FlyffLowHealthAlertNode.overlay_instance:
|
||||
FlyffLowHealthAlertNode.overlay_instance = OverlayCanvas()
|
||||
FlyffLowHealthAlertNode.overlay_instance.show()
|
||||
|
||||
def process_input(self):
|
||||
input_port = self.input(0)
|
||||
value = input_port.connected_ports()[0].node().get_property('value') if input_port.connected_ports() else "0"
|
||||
self.receive_data(value)
|
||||
|
||||
def receive_data(self, data, source_port_name=None):
|
||||
try:
|
||||
if isinstance(data, str):
|
||||
data = float(data) if '.' in data else int(data)
|
||||
if isinstance(data, (float, int)):
|
||||
data = 1 if data > 1 else 0 if data <= 0 else int(data)
|
||||
else:
|
||||
data = 0
|
||||
except ValueError:
|
||||
data = 0
|
||||
|
||||
self.set_property('value', str(data))
|
||||
if self.get_property('cb_2'):
|
||||
FlyffLowHealthAlertNode.overlay_instance.toggle_alert(data)
|
||||
self.handle_beep(data)
|
||||
|
||||
def handle_beep(self, input_value):
|
||||
# Update beep interval from the dropdown property
|
||||
interval_str = self.get_property('beep_interval')
|
||||
if interval_str.endswith("s"):
|
||||
interval_seconds = float(interval_str[:-1])
|
||||
else:
|
||||
interval_seconds = float(interval_str)
|
||||
self.BEEP_INTERVAL_SECONDS = interval_seconds
|
||||
|
||||
if input_value == 1 and self.get_property('cb_1'):
|
||||
current_time = time.time()
|
||||
if (current_time - FlyffLowHealthAlertNode.last_beep_time) >= self.BEEP_INTERVAL_SECONDS:
|
||||
FlyffLowHealthAlertNode.last_beep_time = current_time
|
||||
self.play_beep()
|
||||
|
||||
def play_beep(self):
|
||||
if HAS_WINSOUND:
|
||||
winsound.Beep(376, 100)
|
||||
else:
|
||||
print('\a', end='')
|
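receive_data() above coerces whatever arrives on the input port (string, float, or int) into a strict 0-or-1 alert flag before driving the overlay and the beep. A hedged restatement of that normalization as a standalone function (normalize_alert is an illustrative name, not part of the real node):

def normalize_alert(data):
    try:
        if isinstance(data, str):
            data = float(data) if '.' in data else int(data)
        if isinstance(data, (float, int)):
            return 1 if data > 1 else 0 if data <= 0 else int(data)
        return 0
    except ValueError:
        return 0

print([normalize_alert(v) for v in ("1", "0.0", "3.5", "oops", -2)])  # [1, 0, 1, 0, 0]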
103
Data/Nodes/Flyff/flyff_mob_identification_overlay.py
Normal file
@ -0,0 +1,103 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Identification Overlay Node:
|
||||
- Users can configure threads/slices for parallel processing.
|
||||
"""
|
||||
|
||||
import re
|
||||
from OdenGraphQt import BaseNode
|
||||
from PyQt5.QtCore import QTimer
|
||||
from PyQt5.QtGui import QColor
|
||||
from Modules import data_collector
|
||||
|
||||
|
||||
class IdentificationOverlayNode(BaseNode):
|
||||
__identifier__ = "bunny-lab.io.identification_overlay_node"
|
||||
NODE_NAME = "Identification Overlay"
|
||||
|
||||
def __init__(self):
|
||||
super(IdentificationOverlayNode, self).__init__()
|
||||
|
||||
# User-configurable options
|
||||
self.add_text_input("search_term", "Search Term", text="Aibatt")
|
||||
self.add_text_input("offset_value", "Offset Value (X,Y)", text="0,0") # X,Y Offset
|
||||
self.add_text_input("margin", "Margin", text="5") # Box Margin
|
||||
self.add_text_input("polling_freq", "Polling Frequency (ms)", text="500") # Polling Rate
|
||||
self.add_combo_menu("ocr_engine", "Type", items=["CPU", "GPU"])
|
||||
self.set_property("ocr_engine", "CPU") # Default to CPU mode
|
||||
|
||||
# Custom overlay options
|
||||
self.add_text_input("overlay_color", "Overlay Color (RGB)", text="0,0,255") # Default blue
|
||||
self.add_text_input("thickness", "Line Thickness", text="2") # Default 2px
|
||||
self.add_text_input("threads_slices", "Threads / Slices", text="8") # Default 8 threads/slices
|
||||
|
||||
self.region_id = "identification_overlay"
|
||||
data_collector.create_ocr_region(self.region_id, x=250, y=50, w=300, h=200, color=(0, 0, 255), thickness=2)
|
||||
|
||||
data_collector.start_collector()
|
||||
self.set_name("Identification Overlay")
|
||||
|
||||
# Timer for updating overlays
|
||||
self.timer = QTimer()
|
||||
self.timer.timeout.connect(self.update_overlay)
|
||||
|
||||
# Set initial polling frequency
|
||||
self.update_polling_frequency()
|
||||
|
||||
def update_polling_frequency(self):
|
||||
polling_text = self.get_property("polling_freq")
|
||||
try:
|
||||
polling_interval = max(50, int(polling_text))
|
||||
except ValueError:
|
||||
polling_interval = 500
|
||||
|
||||
self.timer.start(polling_interval)
|
||||
|
||||
def update_overlay(self):
|
||||
search_term = self.get_property("search_term")
|
||||
offset_text = self.get_property("offset_value")
|
||||
margin_text = self.get_property("margin")
|
||||
ocr_engine = self.get_property("ocr_engine")
|
||||
threads_slices_text = self.get_property("threads_slices")
|
||||
|
||||
self.update_polling_frequency()
|
||||
|
||||
try:
|
||||
offset_x, offset_y = map(int, offset_text.split(","))
|
||||
except ValueError:
|
||||
offset_x, offset_y = 0, 0
|
||||
|
||||
try:
|
||||
margin = int(margin_text)
|
||||
except ValueError:
|
||||
margin = 5
|
||||
|
||||
color_text = self.get_property("overlay_color")
|
||||
try:
|
||||
color = tuple(map(int, color_text.split(",")))
|
||||
except ValueError:
|
||||
color = (0, 0, 255)
|
||||
|
||||
thickness_text = self.get_property("thickness")
|
||||
try:
|
||||
thickness = max(1, int(thickness_text))
|
||||
except ValueError:
|
||||
thickness = 2
|
||||
|
||||
try:
|
||||
num_slices = max(1, int(threads_slices_text)) # Ensure at least 1 slice
|
||||
except ValueError:
|
||||
num_slices = 1
|
||||
|
||||
if not search_term:
|
||||
return
|
||||
|
||||
detected_positions = data_collector.find_word_positions(
|
||||
self.region_id, search_term, offset_x, offset_y, margin, ocr_engine, num_slices
|
||||
)
|
||||
|
||||
# Ensure slice count is updated visually in the region widget
|
||||
data_collector.update_region_slices(self.region_id, num_slices)
|
||||
|
||||
data_collector.draw_identification_boxes(self.region_id, detected_positions, color=color, thickness=thickness)
|
||||
|
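Several of the node's free-text properties above are parsed with fallbacks: "X,Y" offsets, an "R,G,B" colour, and numeric margin/thickness values that revert to defaults on bad input. A hedged helper showing the same parse-with-default idea (parse_int_tuple is an illustrative name, not part of the node):

def parse_int_tuple(text, default):
    try:
        values = tuple(int(part) for part in text.split(","))
        return values if len(values) == len(default) else default
    except (ValueError, AttributeError):
        return default

print(parse_int_tuple("10,-5", (0, 0)))        # (10, -5)
print(parse_int_tuple("0,0,255", (0, 0, 255))) # (0, 0, 255)
print(parse_int_tuple("garbage", (0, 0)))      # falls back to (0, 0)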
49
Data/Nodes/General Purpose/array_node.py
Normal file
@ -0,0 +1,49 @@
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
class ArrayNode(BaseNode):
|
||||
"""
|
||||
Array Node:
|
||||
- Inputs: 'in' (value to store), 'ArraySize' (defines maximum length)
|
||||
- Output: 'Array' (the current array as a string)
|
||||
- Stores incoming values in an array with a size defined by ArraySize.
|
||||
- Updates are now handled via a global update timer.
|
||||
"""
|
||||
__identifier__ = 'bunny-lab.io.array_node'
|
||||
NODE_NAME = 'Array'
|
||||
|
||||
def __init__(self):
|
||||
super(ArrayNode, self).__init__()
|
||||
self.values = {} # Ensure values is a dictionary.
|
||||
self.add_input('in')
|
||||
self.add_input('ArraySize')
|
||||
self.add_output('Array')
|
||||
self.array = []
|
||||
self.value = "[]" # Output as a string.
|
||||
self.array_size = 10 # Default array size.
|
||||
self.set_name("Array: []")
|
||||
|
||||
def process_input(self):
|
||||
# Get array size from 'ArraySize' input if available.
|
||||
size_port = self.input('ArraySize')
|
||||
connected_size = size_port.connected_ports() if size_port is not None else []
|
||||
if connected_size:
|
||||
connected_port = connected_size[0]
|
||||
parent_node = connected_port.node()
|
||||
try:
|
||||
self.array_size = int(float(getattr(parent_node, 'value', 10)))
|
||||
except (ValueError, TypeError):
|
||||
self.array_size = 10
|
||||
|
||||
# Get new value from 'in' input if available.
|
||||
in_port = self.input('in')
|
||||
connected_in = in_port.connected_ports() if in_port is not None else []
|
||||
if connected_in:
|
||||
connected_port = connected_in[0]
|
||||
parent_node = connected_port.node()
|
||||
new_value = getattr(parent_node, 'value', None)
|
||||
if new_value is not None:
|
||||
self.array.append(new_value)
|
||||
while len(self.array) > self.array_size:
|
||||
self.array.pop(0)
|
||||
self.value = str(self.array)
|
||||
self.set_name(f"Array: {self.value}")
|
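The Array node behaves as a fixed-size ring buffer: each new input is appended and the oldest entries are popped until the list fits ArraySize. A hedged illustration of that trimming rule with invented values:

array, array_size = [], 3

for sample in (10, 20, 30, 40, 50):
    array.append(sample)
    while len(array) > array_size:
        array.pop(0)

print(array)  # [30, 40, 50] -- only the most recent ArraySize values remain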
122
Data/Nodes/General Purpose/comparison_node.py
Normal file
@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Standardized Comparison Node:
|
||||
- Compares two input values using a selected operator (==, !=, >, <, >=, <=).
|
||||
- Outputs a result of 1 (True) or 0 (False).
|
||||
- Uses a global update timer for processing.
|
||||
- Supports an additional 'Input Type' dropdown to choose between 'Number' and 'String'.
|
||||
"""
|
||||
|
||||
from OdenGraphQt import BaseNode
|
||||
from Qt import QtCore
|
||||
|
||||
class ComparisonNode(BaseNode):
|
||||
__identifier__ = 'bunny-lab.io.comparison_node'
|
||||
NODE_NAME = 'Comparison Node'
|
||||
|
||||
def __init__(self):
|
||||
super(ComparisonNode, self).__init__()
|
||||
self.add_input('A')
|
||||
self.add_input('B')
|
||||
self.add_output('Result')
|
||||
|
||||
# Add the Input Type dropdown first.
|
||||
self.add_combo_menu('input_type', 'Input Type', items=['Number', 'String'])
|
||||
self.add_combo_menu('operator', 'Operator', items=[
|
||||
'Equal (==)', 'Not Equal (!=)', 'Greater Than (>)',
|
||||
'Less Than (<)', 'Greater Than or Equal (>=)', 'Less Than or Equal (<=)'
|
||||
])
|
||||
# Replace calc_result with a standardized "value" text input.
|
||||
self.add_text_input('value', 'Value', text='0')
|
||||
self.value = 0
|
||||
self.set_name("Comparison Node")
|
||||
self.processing = False # Guard for process_input
|
||||
|
||||
# Set default properties explicitly
|
||||
self.set_property('input_type', 'Number')
|
||||
self.set_property('operator', 'Equal (==)')
|
||||
|
||||
def process_input(self):
|
||||
if self.processing:
|
||||
return
|
||||
self.processing = True
|
||||
|
||||
# Retrieve input values; if no connection or None, default to "0"
|
||||
input_a = self.input(0)
|
||||
input_b = self.input(1)
|
||||
a_raw = (input_a.connected_ports()[0].node().get_property('value')
|
||||
if input_a.connected_ports() else "0")
|
||||
b_raw = (input_b.connected_ports()[0].node().get_property('value')
|
||||
if input_b.connected_ports() else "0")
|
||||
a_raw = a_raw if a_raw is not None else "0"
|
||||
b_raw = b_raw if b_raw is not None else "0"
|
||||
|
||||
# Get input type property
|
||||
input_type = self.get_property('input_type')
|
||||
|
||||
# Convert values based on input type
|
||||
if input_type == 'Number':
|
||||
try:
|
||||
a_val = float(a_raw)
|
||||
except (ValueError, TypeError):
|
||||
a_val = 0.0
|
||||
try:
|
||||
b_val = float(b_raw)
|
||||
except (ValueError, TypeError):
|
||||
b_val = 0.0
|
||||
elif input_type == 'String':
|
||||
a_val = str(a_raw)
|
||||
b_val = str(b_raw)
|
||||
else:
|
||||
try:
|
||||
a_val = float(a_raw)
|
||||
except (ValueError, TypeError):
|
||||
a_val = 0.0
|
||||
try:
|
||||
b_val = float(b_raw)
|
||||
except (ValueError, TypeError):
|
||||
b_val = 0.0
|
||||
|
||||
operator = self.get_property('operator')
|
||||
|
||||
# Perform the comparison
|
||||
result = {
|
||||
'Equal (==)': a_val == b_val,
|
||||
'Not Equal (!=)': a_val != b_val,
|
||||
'Greater Than (>)': a_val > b_val,
|
||||
'Less Than (<)': a_val < b_val,
|
||||
'Greater Than or Equal (>=)': a_val >= b_val,
|
||||
'Less Than or Equal (<=)': a_val <= b_val
|
||||
}.get(operator, False)
|
||||
|
||||
new_value = 1 if result else 0
|
||||
self.value = new_value
|
||||
self.set_property('value', str(self.value))
|
||||
self.transmit_data(self.value)
|
||||
|
||||
self.processing = False
|
||||
|
||||
def on_input_connected(self, input_port, output_port):
|
||||
pass
|
||||
|
||||
def on_input_disconnected(self, input_port, output_port):
|
||||
pass
|
||||
|
||||
def property_changed(self, property_name):
|
||||
pass
|
||||
|
||||
def receive_data(self, data, source_port_name=None):
|
||||
pass
|
||||
|
||||
def transmit_data(self, data):
|
||||
output_port = self.output(0)
|
||||
if output_port and output_port.connected_ports():
|
||||
for connected_port in output_port.connected_ports():
|
||||
connected_node = connected_port.node()
|
||||
if hasattr(connected_node, 'receive_data'):
|
||||
try:
|
||||
data_int = int(data)
|
||||
connected_node.receive_data(data_int, source_port_name='Result')
|
||||
except ValueError:
|
||||
pass
|
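The comparison itself is a dictionary dispatch: the selected operator label maps to one boolean expression, and the boolean collapses to 1 or 0 for downstream nodes. A hedged standalone view of that dispatch (compare is an illustrative wrapper, not part of the node):

def compare(a_val, b_val, operator):
    result = {
        'Equal (==)': a_val == b_val,
        'Not Equal (!=)': a_val != b_val,
        'Greater Than (>)': a_val > b_val,
        'Less Than (<)': a_val < b_val,
        'Greater Than or Equal (>=)': a_val >= b_val,
        'Less Than or Equal (<=)': a_val <= b_val,
    }.get(operator, False)
    return 1 if result else 0

print(compare(3.0, 2.0, 'Greater Than (>)'))  # 1
print(compare("abc", "abd", 'Equal (==)'))    # 0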
72
Data/Nodes/General Purpose/data_node.py
Normal file
@ -0,0 +1,72 @@
#!/usr/bin/env python3

"""
Standardized Data Node:
- Accepts and transmits values consistently.
- Updates its value based on a global update timer.
"""

from OdenGraphQt import BaseNode
from Qt import QtCore

class DataNode(BaseNode):
    __identifier__ = 'bunny-lab.io.data_node'
    NODE_NAME = 'Data Node'

    def __init__(self):
        super(DataNode, self).__init__()
        self.add_input('Input')
        self.add_output('Output')
        self.add_text_input('value', 'Value', text='')
        self.process_widget_event()
        self.set_name("Data Node")
        # Removed self-contained update timer; global timer now drives updates.

    def post_create(self):
        text_widget = self.get_widget('value')
        if text_widget is not None:
            try:
                # Removed textChanged signal connection; global timer will call process_input.
                pass
            except Exception as e:
                print("Error connecting textChanged signal:", e)

    def process_widget_event(self, event=None):
        current_text = self.get_property('value')
        self.value = current_text
        self.transmit_data(current_text)

    def property_changed(self, property_name):
        if property_name == 'value':
            # Immediate update removed; relying on global timer.
            pass

    def process_input(self):
        input_port = self.input(0)
        output_port = self.output(0)
        if input_port.connected_ports():
            input_value = input_port.connected_ports()[0].node().get_property('value')
            self.set_property('value', input_value)
            self.transmit_data(input_value)
        elif output_port.connected_ports():
            self.transmit_data(self.get_property('value'))

    def on_input_connected(self, input_port, output_port):
        # Removed immediate update; global timer handles updates.
        pass

    def on_input_disconnected(self, input_port, output_port):
        # Removed immediate update; global timer handles updates.
        pass

    def receive_data(self, data, source_port_name=None):
        self.set_property('value', str(data))
        self.transmit_data(data)

    def transmit_data(self, data):
        output_port = self.output(0)
        if output_port and output_port.connected_ports():
            for connected_port in output_port.connected_ports():
                connected_node = connected_port.node()
                if hasattr(connected_node, 'receive_data'):
                    connected_node.receive_data(data, source_port_name="Output")
109
Data/Nodes/General Purpose/math_operation_node.py
Normal file
@@ -0,0 +1,109 @@
#!/usr/bin/env python3

"""
Standardized Math Operation Node:
- Performs mathematical operations (+, -, *, /, avg) on two inputs.
- Outputs the computed result.
- Uses a global update timer for processing (defined in borealis.py).
- Ensures it always has a "value" property that the Comparison Node can read.
"""

from OdenGraphQt import BaseNode
from Qt import QtCore

class MathOperationNode(BaseNode):
    __identifier__ = 'bunny-lab.io.math_node'
    NODE_NAME = 'Math Operation'

    def __init__(self):
        super(MathOperationNode, self).__init__()
        self.add_input('A')
        self.add_input('B')
        self.add_output('Result')

        # Drop-down to choose which operation we do:
        self.add_combo_menu('operator', 'Operator', items=[
            'Add', 'Subtract', 'Multiply', 'Divide', 'Average'
        ])

        # A text field for showing the result to the user:
        self.add_text_input('calc_result', 'Result', text='0')

        # IMPORTANT: define a "value" property that the Comparison Node can read.
        # We do not necessarily need a text input for it, but adding it ensures
        # it becomes an official property recognized by OdenGraphQt.
        self.add_text_input('value', 'Internal Value', text='0')

        # Keep a Python-side float of the current computed result:
        self.value = 0

        # Give the node a nice name:
        self.set_name("Math Operation")

        # Removed self-contained timer; global timer calls process_input().

    def process_input(self):
        # Attempt to read "value" from both inputs:
        input_a = self.input(0)
        input_b = self.input(1)
        a_raw = input_a.connected_ports()[0].node().get_property('value') if input_a.connected_ports() else "0"
        b_raw = input_b.connected_ports()[0].node().get_property('value') if input_b.connected_ports() else "0"

        try:
            a_val = float(a_raw)
        except (ValueError, TypeError):
            a_val = 0.0
        try:
            b_val = float(b_raw)
        except (ValueError, TypeError):
            b_val = 0.0

        operator = self.get_property('operator')
        if operator == 'Add':
            result = a_val + b_val
        elif operator == 'Subtract':
            result = a_val - b_val
        elif operator == 'Multiply':
            result = a_val * b_val
        elif operator == 'Divide':
            result = a_val / b_val if b_val != 0 else 0.0
        elif operator == 'Average':
            result = (a_val + b_val) / 2.0
        else:
            result = 0.0

        # If the computed result changed, update our internal properties and transmit.
        if self.value != result:
            self.value = result

            # Update the two text fields so the user sees the numeric result:
            self.set_property('calc_result', str(result))
            self.set_property('value', str(result))  # <= This is the critical step

            # Let downstream nodes know there's new data:
            self.transmit_data(result)

    def on_input_connected(self, input_port, output_port):
        pass

    def on_input_disconnected(self, input_port, output_port):
        pass

    def property_changed(self, property_name):
        pass

    def receive_data(self, data, source_port_name=None):
        pass

    def transmit_data(self, data):
        output_port = self.output(0)
        if output_port and output_port.connected_ports():
            for connected_port in output_port.connected_ports():
                connected_node = connected_port.node()
                if hasattr(connected_node, 'receive_data'):
                    try:
                        # Attempt to convert to int if possible, else float
                        data_int = int(data)
                        connected_node.receive_data(data_int, source_port_name='Result')
                    except ValueError:
                        connected_node.receive_data(data, source_port_name='Result')
161
Data/Nodes/Organization/backdrop_node.py
Normal file
@@ -0,0 +1,161 @@
#!/usr/bin/env python3

from Qt import QtWidgets, QtGui, QtCore
from OdenGraphQt import BaseNode
from OdenGraphQt.constants import NodePropWidgetEnum
from OdenGraphQt.qgraphics.node_backdrop import BackdropNodeItem

class BackdropNode(BaseNode):
    """
    Backdrop Node:
    - Allows grouping or annotating other nodes by resizing a large rectangle.
    - Title is set by double-clicking in the title area.
    """

    __identifier__ = 'bunny-lab.io.backdrop'
    NODE_NAME = 'Backdrop'

    def __init__(self):
        # Use BackdropNodeItem for the specialized QGraphicsItem.
        super(BackdropNode, self).__init__(qgraphics_item=BackdropNodeItem)

        # Default color (teal).
        self.model.color = (5, 129, 138, 255)

        # Set default title without prompting:
        self.set_name("Double-Click to Add Name to Backdrop")

        # Multi-line text property for storing the backdrop text.
        self.create_property(
            'backdrop_text',
            '',
            widget_type=NodePropWidgetEnum.QTEXT_EDIT.value,
            tab='Backdrop'
        )

        # Override the view's double-click event to allow editing the title.
        original_double_click = self.view.mouseDoubleClickEvent

        def new_double_click_event(event):
            # Assume the title is in the top 30 pixels of the node.
            if event.pos().y() < 30:
                new_title, ok = QtWidgets.QInputDialog.getText(
                    None, "Edit Title", "Enter new backdrop title:", text=self.name()
                )
                if ok and new_title:
                    self.set_name(new_title)
                    self.view.update()  # force immediate update of the node title
            else:
                if original_double_click:
                    original_double_click(event)

        self.view.mouseDoubleClickEvent = new_double_click_event

    # --------------------------------------------------------------------------
    # Resizing / Geometry
    # --------------------------------------------------------------------------
    def on_backdrop_updated(self, update_prop, value=None):
        """
        Triggered when the user resizes or double-clicks the backdrop sizer handle.
        """
        if not self.graph:
            return

        if update_prop == 'sizer_mouse_release':
            # User finished dragging the resize handle
            self.view.prepareGeometryChange()
            self.graph.begin_undo(f'resized "{self.name()}"')
            self.set_property('width', value['width'])
            self.set_property('height', value['height'])
            self.set_pos(*value['pos'])
            self.graph.end_undo()
            self.view.update()

        elif update_prop == 'sizer_double_clicked':
            # User double-clicked the resize handle (auto-resize)
            self.view.prepareGeometryChange()
            self.graph.begin_undo(f'"{self.name()}" auto resize')
            self.set_property('width', value['width'])
            self.set_property('height', value['height'])
            self.set_pos(*value['pos'])
            self.graph.end_undo()
            self.view.update()

    def auto_size(self):
        """
        Auto-resize the backdrop to fit around intersecting nodes.
        """
        if not self.graph:
            return
        self.view.prepareGeometryChange()
        self.graph.begin_undo(f'"{self.name()}" auto resize')
        size = self.view.calc_backdrop_size()
        self.set_property('width', size['width'])
        self.set_property('height', size['height'])
        self.set_pos(*size['pos'])
        self.graph.end_undo()
        self.view.update()

    def wrap_nodes(self, nodes):
        """
        Fit the backdrop around the specified nodes.
        """
        if not self.graph or not nodes:
            return
        self.view.prepareGeometryChange()
        self.graph.begin_undo(f'"{self.name()}" wrap nodes')
        size = self.view.calc_backdrop_size([n.view for n in nodes])
        self.set_property('width', size['width'])
        self.set_property('height', size['height'])
        self.set_pos(*size['pos'])
        self.graph.end_undo()
        self.view.update()

    def nodes(self):
        """
        Return a list of nodes wrapped by this backdrop.
        """
        node_ids = [n.id for n in self.view.get_nodes()]
        return [self.graph.get_node_by_id(nid) for nid in node_ids]

    def set_text(self, text=''):
        """
        Set the multi-line text in the backdrop.
        """
        self.set_property('backdrop_text', text)

    def text(self):
        """
        Return the text content in the backdrop.
        """
        return self.get_property('backdrop_text')

    def set_size(self, width, height):
        """
        Manually set the backdrop size.
        """
        if self.graph:
            self.view.prepareGeometryChange()
            self.graph.begin_undo('backdrop size')
            self.set_property('width', width)
            self.set_property('height', height)
            self.graph.end_undo()
            self.view.update()
        else:
            self.view.width, self.view.height = width, height
            self.model.width, self.model.height = width, height

    def size(self):
        """
        Return (width, height) of the backdrop.
        """
        self.model.width = self.view.width
        self.model.height = self.view.height
        return self.model.width, self.model.height

    # No ports for a backdrop:
    def inputs(self):
        return

    def outputs(self):
        return
3
Data/Nodes/Reporting/Export_to_CSV.py
Normal file
@@ -0,0 +1,3 @@
# HIGH-LEVEL OVERVIEW
# - This node takes an input source and either replaces or appends the data fed into it in a CSV file on disk.
# - There will be a checkbox to allow the user to change the behavior (Replace / Append).
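Since this file is only a planning stub, here is a minimal hedged sketch of what such a node could look like, reusing the BaseNode patterns from data_node.py in this same commit. The class name, the 'file_path' and 'append_mode' property names, and the add_checkbox call (assumed to mirror NodeGraphQt's API in OdenGraphQt) are illustrative assumptions, not part of the commit.

#!/usr/bin/env python3
# Hypothetical sketch only; property names and checkbox API are assumptions.
import csv

from OdenGraphQt import BaseNode

class ExportToCSVNode(BaseNode):
    __identifier__ = 'bunny-lab.io.export_to_csv_node'  # hypothetical identifier
    NODE_NAME = 'Export to CSV'

    def __init__(self):
        super(ExportToCSVNode, self).__init__()
        self.add_input('Data')
        self.add_text_input('file_path', 'CSV Path', text='output.csv')
        # Checkbox toggles between appending rows and replacing the file.
        self.add_checkbox('append_mode', 'Append instead of Replace', state=True)

    def receive_data(self, data, source_port_name=None):
        # Append or overwrite depending on the checkbox state.
        mode = 'a' if self.get_property('append_mode') else 'w'
        with open(self.get_property('file_path'), mode, newline='') as fh:
            csv.writer(fh).writerow([data])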
4
Data/Nodes/Reporting/Export_to_Image.py
Normal file
@@ -0,0 +1,4 @@
# HIGH-LEVEL OVERVIEW
# - This node takes an input source and dumps the data to disk in one of several image formats chosen from a dropdown menu.
# - The ability to view image-processing results displayed within the node would be an interesting bonus.
# - Could be used to show the life cycle of an image-processing pipeline.
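As with the CSV exporter, a minimal hedged sketch follows. It assumes the incoming data is a numpy image array and that Pillow is available; the class name, property names, and format list are placeholders rather than decisions made in this commit.

#!/usr/bin/env python3
# Hypothetical sketch only; assumes numpy-array input and a Pillow dependency.
import numpy as np
from PIL import Image

from OdenGraphQt import BaseNode

class ExportToImageNode(BaseNode):
    __identifier__ = 'bunny-lab.io.export_to_image_node'  # hypothetical identifier
    NODE_NAME = 'Export to Image'

    def __init__(self):
        super(ExportToImageNode, self).__init__()
        self.add_input('Image')
        self.add_text_input('file_path', 'Output Path (no extension)', text='capture')
        self.add_combo_menu('image_format', 'Format', items=['PNG', 'JPEG', 'BMP'])

    def receive_data(self, data, source_port_name=None):
        # Expect an (H x W x channels) array and save it in the selected format.
        fmt = self.get_property('image_format')
        image = Image.fromarray(np.asarray(data, dtype=np.uint8))
        image.save(f"{self.get_property('file_path')}.{fmt.lower()}", format=fmt)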
0
Data/Nodes/__init__.py
Normal file
@@ -1,43 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<meta name="theme-color" content="#000000" />
|
||||
<meta
|
||||
name="Borealis"
|
||||
content="Workflow Automation Tool"
|
||||
/>
|
||||
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
|
||||
<!--
|
||||
manifest.json provides metadata used when your web app is installed on a
|
||||
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
|
||||
-->
|
||||
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
|
||||
<!--
|
||||
Notice the use of %PUBLIC_URL% in the tags above.
|
||||
It will be replaced with the URL of the `public` folder during the build.
|
||||
Only files inside the `public` folder can be referenced from the HTML.
|
||||
|
||||
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
|
||||
work correctly both with client-side routing and a non-root public URL.
|
||||
Learn how to configure a non-root public URL by running `npm run build`.
|
||||
-->
|
||||
<title>Borealis</title>
|
||||
</head>
|
||||
<body>
|
||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||
<div id="root"></div>
|
||||
<!--
|
||||
This HTML file is a template.
|
||||
If you open it directly in the browser, you will see an empty page.
|
||||
|
||||
You can add webfonts, meta tags, or analytics to this file.
|
||||
The build step will place the bundled scripts into the <body> tag.
|
||||
|
||||
To begin the development, run `npm start` or `yarn start`.
|
||||
To create a production bundle, use `npm run build` or `yarn build`.
|
||||
-->
|
||||
</body>
|
||||
</html>
|
@ -1,139 +0,0 @@
|
||||
import React from "react";
|
||||
import FlowEditor from "./components/FlowEditor";
|
||||
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
|
||||
import {
|
||||
AppBar,
|
||||
Toolbar,
|
||||
Typography,
|
||||
Box,
|
||||
Menu,
|
||||
MenuItem,
|
||||
Button,
|
||||
CssBaseline,
|
||||
ThemeProvider,
|
||||
createTheme
|
||||
} from "@mui/material";
|
||||
|
||||
const darkTheme = createTheme({
|
||||
palette: {
|
||||
mode: "dark",
|
||||
background: {
|
||||
default: "#121212",
|
||||
paper: "#1e1e1e"
|
||||
},
|
||||
text: {
|
||||
primary: "#ffffff"
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
export default function App() {
|
||||
const [workflowsAnchorEl, setWorkflowsAnchorEl] = React.useState(null);
|
||||
const [aboutAnchorEl, setAboutAnchorEl] = React.useState(null);
|
||||
|
||||
const handleWorkflowsMenuOpen = (event) => {
|
||||
setWorkflowsAnchorEl(event.currentTarget);
|
||||
};
|
||||
|
||||
const handleAboutMenuOpen = (event) => {
|
||||
setAboutAnchorEl(event.currentTarget);
|
||||
};
|
||||
|
||||
const handleWorkflowsMenuClose = () => {
|
||||
setWorkflowsAnchorEl(null);
|
||||
};
|
||||
|
||||
const handleAboutMenuClose = () => {
|
||||
setAboutAnchorEl(null);
|
||||
};
|
||||
|
||||
return (
|
||||
<ThemeProvider theme={darkTheme}>
|
||||
<CssBaseline />
|
||||
{/*
|
||||
Main container that:
|
||||
- fills 100% viewport height
|
||||
- organizes content with flexbox (vertical)
|
||||
*/}
|
||||
<Box display="flex" flexDirection="column" height="100vh">
|
||||
{/* --- TOP BAR --- */}
|
||||
<AppBar position="static" sx={{ bgcolor: "#092c44" }}>
|
||||
<Toolbar>
|
||||
<Typography variant="h6" sx={{ flexGrow: 1 }}>
|
||||
Borealis - Workflow Automation Tool
|
||||
</Typography>
|
||||
|
||||
{/* Workflows Menu */}
|
||||
<Button
|
||||
color="inherit"
|
||||
onClick={handleWorkflowsMenuOpen}
|
||||
endIcon={<KeyboardArrowDownIcon />}
|
||||
>
|
||||
Workflows
|
||||
</Button>
|
||||
<Menu
|
||||
anchorEl={workflowsAnchorEl}
|
||||
open={Boolean(workflowsAnchorEl)}
|
||||
onClose={handleWorkflowsMenuClose}
|
||||
>
|
||||
<MenuItem onClick={handleWorkflowsMenuClose}>Save Workflow</MenuItem>
|
||||
<MenuItem onClick={handleWorkflowsMenuClose}>Load Workflow</MenuItem>
|
||||
<MenuItem onClick={handleWorkflowsMenuClose}>Close Workflow</MenuItem>
|
||||
</Menu>
|
||||
|
||||
{/* About Menu */}
|
||||
<Button
|
||||
color="inherit"
|
||||
onClick={handleAboutMenuOpen}
|
||||
endIcon={<KeyboardArrowDownIcon />}
|
||||
>
|
||||
About
|
||||
</Button>
|
||||
<Menu
|
||||
anchorEl={aboutAnchorEl}
|
||||
open={Boolean(aboutAnchorEl)}
|
||||
onClose={handleAboutMenuClose}
|
||||
>
|
||||
<MenuItem onClick={handleAboutMenuClose}>Gitea Project</MenuItem>
|
||||
<MenuItem onClick={handleAboutMenuClose}>Credits</MenuItem>
|
||||
</Menu>
|
||||
</Toolbar>
|
||||
</AppBar>
|
||||
|
||||
{/* --- REACT FLOW EDITOR --- */}
|
||||
{/*
|
||||
flexGrow={1} ⇒ This box expands to fill remaining vertical space
|
||||
overflow="hidden" ⇒ No scroll bars, so React Flow does internal panning
|
||||
mt: 1 ⇒ Add top margin so the gradient starts closer to the AppBar.
|
||||
*/}
|
||||
<Box flexGrow={1} overflow="hidden" sx={{ mt: 0 }}>
|
||||
<FlowEditor
|
||||
updateNodeCount={(count) => {
|
||||
document.getElementById("nodeCount").innerText = count;
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
|
||||
{/* --- STATUS BAR at BOTTOM --- */}
|
||||
<Box
|
||||
component="footer"
|
||||
sx={{
|
||||
bgcolor: "#1e1e1e",
|
||||
color: "white",
|
||||
px: 2,
|
||||
py: 1,
|
||||
textAlign: "left"
|
||||
}}
|
||||
>
|
||||
<b>Nodes</b>: <span id="nodeCount">0</span> | <b>Update Rate</b>: 500ms | <b>Flask API Server:</b>{" "}
|
||||
<a
|
||||
href="http://127.0.0.1:5000/api/nodes"
|
||||
style={{ color: "#3c78b4" }}
|
||||
>
|
||||
http://127.0.0.1:5000/data/api/nodes
|
||||
</a>
|
||||
</Box>
|
||||
</Box>
|
||||
</ThemeProvider>
|
||||
);
|
||||
}
|
@ -1,23 +0,0 @@
/* FlowEditor background container */
.flow-editor-container {
    position: relative;
    width: 100vw;
    height: 100vh;
}

/* Blue Gradient Overlay */
.flow-editor-container::before {
    content: "";
    position: absolute;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    pointer-events: none; /* Ensures grid and nodes remain fully interactive */
    background: linear-gradient( to bottom, rgba(9, 44, 68, 0.9) 0%, /* Deep blue at the top */
        rgba(30, 30, 30, 0) 45%, /* Fade out towards center */
        rgba(30, 30, 30, 0) 75%, /* No gradient in the middle */
        rgba(9, 44, 68, 0.7) 100% /* Deep blue at the bottom */
    );
    z-index: -1; /* Ensures it stays behind the React Flow elements */
}
@ -1,68 +0,0 @@
|
||||
import React, { useState, useEffect, useCallback } from "react";
|
||||
import ReactFlow, {
|
||||
addEdge,
|
||||
Controls,
|
||||
Background,
|
||||
} from "reactflow";
|
||||
import "reactflow/dist/style.css";
|
||||
import "./FlowEditor.css";
|
||||
|
||||
const fetchNodes = async () => {
|
||||
const response = await fetch("/api/workflow");
|
||||
return response.json();
|
||||
};
|
||||
|
||||
const saveWorkflow = async (workflow) => {
|
||||
await fetch("/api/workflow", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(workflow),
|
||||
});
|
||||
};
|
||||
|
||||
export default function FlowEditor() {
|
||||
const [elements, setElements] = useState([]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchNodes().then((data) => {
|
||||
// Data should contain nodes and edges arrays
|
||||
const newElements = [...data.nodes, ...data.edges];
|
||||
setElements(newElements);
|
||||
});
|
||||
}, []);
|
||||
|
||||
const onConnect = useCallback(
|
||||
(params) => {
|
||||
const newEdge = { id: `e${params.source}-${params.target}`, ...params };
|
||||
setElements((els) => [...els, newEdge]);
|
||||
|
||||
// Separate nodes/edges for saving:
|
||||
const nodes = elements.filter((el) => el.type);
|
||||
const edges = elements.filter((el) => !el.type);
|
||||
|
||||
saveWorkflow({
|
||||
nodes,
|
||||
edges: [...edges, newEdge],
|
||||
});
|
||||
},
|
||||
[elements]
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="flow-editor-container">
|
||||
<ReactFlow
|
||||
proOptions={{ hideAttribution: true }} // Remove the React Flow watermark
|
||||
elements={elements}
|
||||
onConnect={onConnect}
|
||||
>
|
||||
<Controls />
|
||||
<Background
|
||||
variant="lines"
|
||||
gap={100}
|
||||
size={1}
|
||||
color="rgba(255, 255, 255, 0.2)" // White grid lines at 20% opacity
|
||||
/>
|
||||
</ReactFlow>
|
||||
</div>
|
||||
);
|
||||
}
|
379
Data/Workflows/Flyff/Flyff - Low Health Alert.json
Normal file
@@ -0,0 +1,379 @@
|
||||
{
|
||||
"graph":{
|
||||
"layout_direction":0,
|
||||
"acyclic":true,
|
||||
"pipe_collision":false,
|
||||
"pipe_slicing":true,
|
||||
"pipe_style":1,
|
||||
"accept_connection_types":{},
|
||||
"reject_connection_types":{}
|
||||
},
|
||||
"nodes":{
|
||||
"0x2697e9777d0":{
|
||||
"type_":"bunny-lab.io.flyff_character_status_node.FlyffCharacterStatusNode",
|
||||
"icon":null,
|
||||
"name":"Flyff - Character Status",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":278.0,
|
||||
"height":200.20000000000002,
|
||||
"pos":[
|
||||
-162.4474451079301,
|
||||
412.29351565404465
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"hp":"HP: 0/0",
|
||||
"mp":"MP: 0/0",
|
||||
"fp":"FP: 0/0",
|
||||
"exp":"EXP: 0.0%"
|
||||
}
|
||||
},
|
||||
"0x2697f589250":{
|
||||
"type_":"bunny-lab.io.data_node.DataNode",
|
||||
"icon":null,
|
||||
"name":"Data Node",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":269.0,
|
||||
"height":74.2,
|
||||
"pos":[
|
||||
-46.54926789642434,
|
||||
276.44565220121416
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"value":"0.40"
|
||||
}
|
||||
},
|
||||
"0x2697eeb2960":{
|
||||
"type_":"bunny-lab.io.math_node.MathOperationNode",
|
||||
"icon":null,
|
||||
"name":"Math Operation",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":269.0,
|
||||
"height":162.4,
|
||||
"pos":[
|
||||
263.14586137366473,
|
||||
175.74723593547986
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"operator":"Multiply",
|
||||
"calc_result":"0.0",
|
||||
"value":"0.0"
|
||||
}
|
||||
},
|
||||
"0x2697ea1b560":{
|
||||
"type_":"bunny-lab.io.flyff_hp_current_node.FlyffHPCurrentNode",
|
||||
"icon":null,
|
||||
"name":"Flyff - HP Current (API Connected)",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":378.0,
|
||||
"height":74.2,
|
||||
"pos":[
|
||||
188.09704170391905,
|
||||
29.44953683243171
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"value":"0"
|
||||
}
|
||||
},
|
||||
"0x2697f589be0":{
|
||||
"type_":"bunny-lab.io.flyff_hp_total_node.FlyffHPTotalNode",
|
||||
"icon":null,
|
||||
"name":"Flyff - HP Total (API Connected)",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":364.0,
|
||||
"height":74.2,
|
||||
"pos":[
|
||||
-138.69781863016254,
|
||||
175.74723593547975
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"value":"0"
|
||||
}
|
||||
},
|
||||
"0x2697eb0e8d0":{
|
||||
"type_":"bunny-lab.io.backdrop.BackdropNode",
|
||||
"icon":null,
|
||||
"name":"Calculate 40% of Total HP",
|
||||
"color":[
|
||||
5,
|
||||
129,
|
||||
138,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":728.2402137175101,
|
||||
"height":257.0476243986018,
|
||||
"pos":[
|
||||
-164.34741522615138,
|
||||
125.39802780261283
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"backdrop_text":""
|
||||
}
|
||||
},
|
||||
"0x2697e856d20":{
|
||||
"type_":"bunny-lab.io.comparison_node.ComparisonNode",
|
||||
"icon":null,
|
||||
"name":"Comparison Node",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":322.0,
|
||||
"height":166.6,
|
||||
"pos":[
|
||||
625.0901688948422,
|
||||
218.49656359546154
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"input_type":"Number",
|
||||
"operator":"Less Than or Equal (<=)",
|
||||
"value":"1"
|
||||
}
|
||||
},
|
||||
"0x2697eeb1100":{
|
||||
"type_":"bunny-lab.io.flyff_low_health_alert_node.FlyffLowHealthAlertNode",
|
||||
"icon":null,
|
||||
"name":"Flyff - Low Health Alert",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":324.0,
|
||||
"height":181.3,
|
||||
"pos":[
|
||||
630.7900792495066,
|
||||
585.1907964121928
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"cb_1":true,
|
||||
"cb_2":true,
|
||||
"value":"1",
|
||||
"beep_interval":"1.0s"
|
||||
}
|
||||
}
|
||||
},
|
||||
"connections":[
|
||||
{
|
||||
"out":[
|
||||
"0x2697f589250",
|
||||
"Output"
|
||||
],
|
||||
"in":[
|
||||
"0x2697eeb2960",
|
||||
"B"
|
||||
]
|
||||
},
|
||||
{
|
||||
"in":[
|
||||
"0x2697eeb2960",
|
||||
"A"
|
||||
],
|
||||
"out":[
|
||||
"0x2697f589be0",
|
||||
"value"
|
||||
]
|
||||
},
|
||||
{
|
||||
"out":[
|
||||
"0x2697eeb2960",
|
||||
"Result"
|
||||
],
|
||||
"in":[
|
||||
"0x2697e856d20",
|
||||
"B"
|
||||
]
|
||||
},
|
||||
{
|
||||
"out":[
|
||||
"0x2697ea1b560",
|
||||
"value"
|
||||
],
|
||||
"in":[
|
||||
"0x2697e856d20",
|
||||
"A"
|
||||
]
|
||||
},
|
||||
{
|
||||
"out":[
|
||||
"0x2697e856d20",
|
||||
"Result"
|
||||
],
|
||||
"in":[
|
||||
"0x2697eeb1100",
|
||||
"Toggle (1 = On | 0 = Off)"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
183
Data/Workflows/Flyff/Flyff EXP Predictor.json
Normal file
@@ -0,0 +1,183 @@
|
||||
{
|
||||
"graph":{
|
||||
"layout_direction":0,
|
||||
"acyclic":true,
|
||||
"pipe_collision":false,
|
||||
"pipe_slicing":true,
|
||||
"pipe_style":1,
|
||||
"accept_connection_types":{},
|
||||
"reject_connection_types":{}
|
||||
},
|
||||
"nodes":{
|
||||
"0x191410fec90":{
|
||||
"type_":"bunny-lab.io.flyff_character_status_node.FlyffCharacterStatusNode",
|
||||
"icon":null,
|
||||
"name":"Flyff - Character Status",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":278.0,
|
||||
"height":200.20000000000002,
|
||||
"pos":[
|
||||
-234.47843187544638,
|
||||
171.50740184739476
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"hp":"HP: 5848/5848",
|
||||
"mp":"MP: 955/555",
|
||||
"fp":"FP: 0/0",
|
||||
"exp":"EXP: 49.0%"
|
||||
}
|
||||
},
|
||||
"0x19173496de0":{
|
||||
"type_":"bunny-lab.io.flyff_exp_current_node.FlyffEXPCurrentNode",
|
||||
"icon":null,
|
||||
"name":"Flyff - EXP (API Connected)",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":339.0,
|
||||
"height":74.2,
|
||||
"pos":[
|
||||
-237.34556433027646,
|
||||
77.62806051403777
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"value":"49.0"
|
||||
}
|
||||
},
|
||||
"0x191735ae690":{
|
||||
"type_":"bunny-lab.io.flyff_leveling_predictor_node.FlyffLevelingPredictorNode",
|
||||
"icon":null,
|
||||
"name":"Flyff - Leveling Predictor",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":324.0,
|
||||
"height":200.20000000000002,
|
||||
"pos":[
|
||||
170.42482250783007,
|
||||
77.62806051403777
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"exp_track_count":"7",
|
||||
"time_to_level":"Insufficient data",
|
||||
"time_between_kills":"N/A",
|
||||
"exp_per_kill":"N/A"
|
||||
}
|
||||
},
|
||||
"0x191735ae9c0":{
|
||||
"type_":"bunny-lab.io.backdrop.BackdropNode",
|
||||
"icon":null,
|
||||
"name":"Track EXP Changes Over Time to Predict Leveling Up",
|
||||
"color":[
|
||||
5,
|
||||
129,
|
||||
138,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":777.8842478973615,
|
||||
"height":380.82117975084645,
|
||||
"pos":[
|
||||
-264.113861059255,
|
||||
23.199190498448075
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"backdrop_text":""
|
||||
}
|
||||
}
|
||||
},
|
||||
"connections":[
|
||||
{
|
||||
"out":[
|
||||
"0x19173496de0",
|
||||
"value"
|
||||
],
|
||||
"in":[
|
||||
"0x191735ae690",
|
||||
"exp"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
101
Data/Workflows/Testing/Basic_Data_Node_Connection.json
Normal file
@@ -0,0 +1,101 @@
|
||||
{
|
||||
"graph":{
|
||||
"layout_direction":0,
|
||||
"acyclic":true,
|
||||
"pipe_collision":false,
|
||||
"pipe_slicing":true,
|
||||
"pipe_style":1,
|
||||
"accept_connection_types":{},
|
||||
"reject_connection_types":{}
|
||||
},
|
||||
"nodes":{
|
||||
"0x1ad82a5c620":{
|
||||
"type_":"bunny-lab.io.data_node.DataNode",
|
||||
"icon":null,
|
||||
"name":"Data Node",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":269.0,
|
||||
"height":74.2,
|
||||
"pos":[
|
||||
-93.6890385514249,
|
||||
181.13214119942148
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"value":"57"
|
||||
}
|
||||
},
|
||||
"0x1ad82a5cef0":{
|
||||
"type_":"bunny-lab.io.data_node.DataNode",
|
||||
"icon":null,
|
||||
"name":"Data Node 1",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":269.0,
|
||||
"height":74.2,
|
||||
"pos":[
|
||||
361.37200584121035,
|
||||
287.313051557703
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"value":"57"
|
||||
}
|
||||
}
|
||||
},
|
||||
"connections":[
|
||||
{
|
||||
"out":[
|
||||
"0x1ad82a5c620",
|
||||
"Output"
|
||||
],
|
||||
"in":[
|
||||
"0x1ad82a5cef0",
|
||||
"Input"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
57
Data/Workflows/Testing/Identification_Overlay.json
Normal file
@@ -0,0 +1,57 @@
|
||||
{
|
||||
"graph":{
|
||||
"layout_direction":0,
|
||||
"acyclic":true,
|
||||
"pipe_collision":false,
|
||||
"pipe_slicing":true,
|
||||
"pipe_style":1,
|
||||
"accept_connection_types":{},
|
||||
"reject_connection_types":{}
|
||||
},
|
||||
"nodes":{
|
||||
"0x20c129abb30":{
|
||||
"type_":"bunny-lab.io.identification_overlay_node.IdentificationOverlayNode",
|
||||
"icon":null,
|
||||
"name":"Identification Overlay",
|
||||
"color":[
|
||||
13,
|
||||
18,
|
||||
23,
|
||||
255
|
||||
],
|
||||
"border_color":[
|
||||
74,
|
||||
84,
|
||||
85,
|
||||
255
|
||||
],
|
||||
"text_color":[
|
||||
255,
|
||||
255,
|
||||
255,
|
||||
180
|
||||
],
|
||||
"disabled":false,
|
||||
"selected":false,
|
||||
"visible":true,
|
||||
"width":271.0,
|
||||
"height":330.40000000000003,
|
||||
"pos":[
|
||||
44.64929777820301,
|
||||
256.49596595988965
|
||||
],
|
||||
"layout_direction":0,
|
||||
"port_deletion_allowed":false,
|
||||
"subgraph_session":{},
|
||||
"custom":{
|
||||
"search_term":"Aibatt",
|
||||
"offset_value":"-10,-10",
|
||||
"margin":"10",
|
||||
"polling_freq":"50",
|
||||
"ocr_engine":"GPU",
|
||||
"overlay_color":"255,255,255",
|
||||
"thickness":"5"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
440
Data/borealis.py
Normal file
@@ -0,0 +1,440 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import pkgutil
|
||||
import importlib
|
||||
import inspect
|
||||
import os
|
||||
|
||||
from Qt import QtWidgets, QtCore, QtGui
|
||||
|
||||
# -------------------------------------------------------#
|
||||
# MONKEY PATCHES - MODIFICATIONS TO OdenGraphQT BEHAVIOR #
|
||||
# -------------------------------------------------------#
|
||||
|
||||
# PATCH: Override the color of interconnection pipes between nodes
|
||||
try:
|
||||
from OdenGraphQt.qgraphics.pipe import PipeItem
|
||||
from OdenGraphQt.qgraphics.node_base import NodeItem
|
||||
from qtpy.QtGui import QPen, QColor
|
||||
from qtpy import QtCore
|
||||
|
||||
# If you want the original paint logic, capture it first:
|
||||
_orig_paint_pipe = PipeItem.paint
|
||||
_orig_paint_node = NodeItem.paint
|
||||
|
||||
# Custom pipe painting function
|
||||
def _custom_paint_pipe(self, painter, option, widget=None):
|
||||
painter.save()
|
||||
my_pen = QPen(QColor(0, 161, 115, 255)) # Match desired RGBA
|
||||
my_pen.setWidthF(2.0)
|
||||
painter.setPen(my_pen)
|
||||
_orig_paint_pipe(self, painter, option, widget)
|
||||
painter.restore()
|
||||
|
||||
# Custom node painting function
|
||||
def _custom_paint_node(self, painter, option, widget=None):
|
||||
painter.save()
|
||||
_orig_paint_node(self, painter, option, widget) # Call original method
|
||||
if self.isSelected():
|
||||
pen = QPen(QColor(0, 161, 115, 255)) # Set selected border color
|
||||
pen.setWidth(3)
|
||||
painter.setPen(pen)
|
||||
painter.drawRect(self.boundingRect())
|
||||
painter.restore()
|
||||
|
||||
# Apply the patches
|
||||
PipeItem.paint = _custom_paint_pipe
|
||||
NodeItem.paint = _custom_paint_node
|
||||
|
||||
except ImportError as e:
|
||||
print(f"WARNING: Could not patch PipeItem or NodeItem: {e}")
|
||||
except Exception as e:
|
||||
print(f"Patch for PipeItem or NodeItem override failed: {e}")
|
||||
|
||||
## PATCH: Fix "module 'qtpy.QtGui' has no attribute 'QUndoStack'" (KEEP AROUND FOR LEGACY DOCUMENTATION)
|
||||
#try:
|
||||
# from qtpy.QtWidgets import QUndoStack
|
||||
# import qtpy
|
||||
# qtpy.QtGui.QUndoStack = QUndoStack
|
||||
#except ImportError:
|
||||
# print("WARNING: Could not monkey-patch QUndoStack.")
|
||||
|
||||
# PATCH: Fix "'BackdropNodeItem' object has no attribute 'widgets'" by giving BackdropNodeItem a trivial widgets dictionary.
|
||||
try:
|
||||
from OdenGraphQt.nodes.backdrop_node import BackdropNodeItem
|
||||
if not hasattr(BackdropNodeItem, "widgets"):
|
||||
BackdropNodeItem.widgets = {}
|
||||
except ImportError:
|
||||
print("WARNING: Could not monkey-patch BackdropNodeItem to add `widgets`.")
|
||||
|
||||
# PATCH: BEGIN ROBUST PATCH FOR QGraphicsScene.setSelectionArea
|
||||
_original_setSelectionArea = QtWidgets.QGraphicsScene.setSelectionArea
|
||||
|
||||
def _patched_setSelectionArea(self, *args, **kwargs):
|
||||
"""
|
||||
A robust patch that handles various call signatures for QGraphicsScene.setSelectionArea().
|
||||
"""
|
||||
try:
|
||||
return _original_setSelectionArea(self, *args, **kwargs)
|
||||
except TypeError:
|
||||
if not args:
|
||||
raise
|
||||
painterPath = args[0]
|
||||
selection_op = QtCore.Qt.ReplaceSelection
|
||||
selection_mode = QtCore.Qt.IntersectsItemShape
|
||||
transform = QtGui.QTransform()
|
||||
return _original_setSelectionArea(self, painterPath, selection_op, selection_mode, transform)
|
||||
|
||||
QtWidgets.QGraphicsScene.setSelectionArea = _patched_setSelectionArea
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------- #
|
||||
|
||||
# Import data_manager so we can start the Flask server
|
||||
from Modules import data_manager
|
||||
|
||||
from OdenGraphQt import NodeGraph, BaseNode
|
||||
from OdenGraphQt.widgets.dialogs import FileDialog
|
||||
|
||||
def import_nodes_from_folder(package_name):
|
||||
"""
|
||||
Recursively import all modules from the given package.
|
||||
Returns a dictionary where keys are subfolder names, and values are lists of BaseNode subclasses.
|
||||
"""
|
||||
nodes_by_category = {}
|
||||
package = importlib.import_module(package_name)
|
||||
package_path = package.__path__[0]
|
||||
|
||||
for root, _, files in os.walk(package_path):
|
||||
rel_path = os.path.relpath(root, package_path).replace(os.sep, '.')
|
||||
module_prefix = f"{package_name}.{rel_path}" if rel_path != '.' else package_name
|
||||
category_name = os.path.basename(root)
|
||||
|
||||
for file in files:
|
||||
if file.endswith(".py") and file != "__init__.py":
|
||||
module_name = f"{module_prefix}.{file[:-3]}"
|
||||
try:
|
||||
module = importlib.import_module(module_name)
|
||||
for name, obj in inspect.getmembers(module, inspect.isclass):
|
||||
if issubclass(obj, BaseNode) and obj.__module__ == module.__name__:
|
||||
if category_name not in nodes_by_category:
|
||||
nodes_by_category[category_name] = []
|
||||
nodes_by_category[category_name].append(obj)
|
||||
except Exception as e:
|
||||
print(f"Failed to import {module_name}: {e}")
|
||||
|
||||
return nodes_by_category
|
||||
|
||||
|
||||
def make_node_command(graph, node_type_str):
|
||||
"""
|
||||
Return a function that creates a node of the given type at the current cursor position.
|
||||
Ensures that only one FlyffCharacterStatusNode exists.
|
||||
"""
|
||||
def real_create():
|
||||
if node_type_str.startswith("bunny-lab.io.flyff_character_status_node"):
|
||||
for node in graph.all_nodes():
|
||||
if node.__class__.__name__ == "FlyffCharacterStatusNode":
|
||||
QtWidgets.QMessageBox.critical(
|
||||
None,
|
||||
"Error",
|
||||
"Only one Flyff Character Status Collector node is allowed."
|
||||
)
|
||||
return
|
||||
try:
|
||||
pos = graph.cursor_pos()
|
||||
graph.create_node(node_type_str, pos=pos)
|
||||
except Exception as e:
|
||||
QtWidgets.QMessageBox.critical(None, "Error", str(e))
|
||||
|
||||
def command():
|
||||
if QtWidgets.QApplication.instance():
|
||||
real_create()
|
||||
else:
|
||||
QtCore.QTimer.singleShot(0, real_create)
|
||||
|
||||
return command
|
||||
|
||||
def ensure_workflows_folder():
|
||||
"""
|
||||
Ensures a 'Workflows' subfolder exists.
|
||||
"""
|
||||
if not os.path.exists("Workflows"):
|
||||
os.makedirs("Workflows")
|
||||
|
||||
def close_workflow(graph: NodeGraph):
|
||||
"""
|
||||
Closes the current workflow (removes all nodes and connections).
|
||||
"""
|
||||
graph.clear_session()
|
||||
|
||||
def save_workflow(graph: NodeGraph):
|
||||
"""
|
||||
Saves the current workflow (including custom names, positions, wires, etc.) into a JSON file
|
||||
in the 'Workflows' subfolder.
|
||||
"""
|
||||
ensure_workflows_folder()
|
||||
file_filter = "JSON Files (*.json);;All Files (*.*)"
|
||||
dlg = FileDialog.getSaveFileName(None, "Save Workflow", os.path.join("Workflows", ""), file_filter)
|
||||
file_path = dlg[0]
|
||||
if not file_path:
|
||||
return # User canceled
|
||||
|
||||
if not file_path.lower().endswith(".json"):
|
||||
file_path += ".json"
|
||||
|
||||
try:
|
||||
graph.save_session(file_path)
|
||||
print(f"Workflow saved to {file_path}")
|
||||
except Exception as e:
|
||||
QtWidgets.QMessageBox.critical(None, "Error Saving Workflow", str(e))
|
||||
|
||||
def load_workflow(graph: NodeGraph):
|
||||
"""
|
||||
Loads a workflow (including node values, connections, positions, etc.) from a specified JSON file
|
||||
and centers it within the graph.
|
||||
"""
|
||||
ensure_workflows_folder()
|
||||
file_filter = "JSON Files (*.json);;All Files (*.*)"
|
||||
dlg = FileDialog.getOpenFileName(None, "Load Workflow", os.path.join("Workflows", ""), file_filter)
|
||||
file_path = dlg[0]
|
||||
if not file_path:
|
||||
return # User canceled
|
||||
|
||||
try:
|
||||
graph.load_session(file_path)
|
||||
print(f"Workflow loaded from {file_path}")
|
||||
|
||||
# Center the workflow within the graph
|
||||
nodes = graph.all_nodes()
|
||||
if nodes:
|
||||
graph.center_on(nodes)
|
||||
else:
|
||||
print("No nodes found in the loaded workflow.")
|
||||
|
||||
except Exception as e:
|
||||
QtWidgets.QMessageBox.critical(None, "Error Loading Workflow", str(e))
|
||||
|
||||
if __name__ == "__main__":
|
||||
app = QtWidgets.QApplication([])
|
||||
|
||||
# Start Flask API Server
|
||||
data_manager.start_api_server()
|
||||
|
||||
# Create the NodeGraph
|
||||
graph = NodeGraph()
|
||||
graph.widget.setWindowTitle("Borealis - Workflow Automation Tool")
|
||||
|
||||
# Dynamically import custom node classes from the 'Nodes' package.
|
||||
custom_nodes_by_category = import_nodes_from_folder("Nodes")
|
||||
|
||||
# Register each node in its category
|
||||
for category, node_classes in custom_nodes_by_category.items():
|
||||
for node_class in node_classes:
|
||||
graph.register_node(node_class)
|
||||
|
||||
# Recursively apply the stylesheet to all submenus
|
||||
def apply_styles_to_submenus(menu):
|
||||
""" Recursively applies the stylesheet to all submenus in the menu. """
|
||||
menu.setStyleSheet(menu_stylesheet)
|
||||
for action in menu.actions():
|
||||
if action.menu(): # Check if action has a submenu
|
||||
apply_styles_to_submenus(action.menu())
|
||||
|
||||
# Override the Color of the Context Menu to Blue
|
||||
menu_stylesheet = """
|
||||
QMenu {
|
||||
background-color: rgb(30, 30, 30);
|
||||
border: 1px solid rgba(200, 200, 200, 60);
|
||||
}
|
||||
QMenu::item {
|
||||
padding: 5px 18px 2px;
|
||||
background-color: transparent;
|
||||
}
|
||||
QMenu::item:selected {
|
||||
color: rgb(255, 255, 255);
|
||||
background-color: rgba(60, 120, 180, 150);
|
||||
}
|
||||
QMenu::separator {
|
||||
height: 1px;
|
||||
background: rgba(255, 255, 255, 50);
|
||||
margin: 4px 8px;
|
||||
}
|
||||
"""
|
||||
|
||||
# Create categorized context menu
|
||||
graph_context_menu = graph.get_context_menu("graph")
|
||||
add_node_menu = graph_context_menu.add_menu("Add Node")
|
||||
|
||||
for category, node_classes in custom_nodes_by_category.items():
|
||||
category_menu = add_node_menu.add_menu(category) # Create submenu
|
||||
category_menu.qmenu.setStyleSheet(menu_stylesheet) # Apply to submenu
|
||||
|
||||
for node_class in node_classes:
|
||||
node_type = f"{node_class.__identifier__}.{node_class.__name__}"
|
||||
node_name = node_class.NODE_NAME
|
||||
category_menu.add_command(f"{node_name}", make_node_command(graph, node_type))
|
||||
|
||||
# Ensure styles are propagated across all dynamically created submenus
|
||||
apply_styles_to_submenus(graph_context_menu.qmenu)
|
||||
|
||||
# Add a "Remove Selected Node" command
|
||||
graph_context_menu.add_command(
|
||||
"Remove Selected Node",
|
||||
lambda: [graph.remove_node(node) for node in graph.selected_nodes()] if graph.selected_nodes() else None
|
||||
)
|
||||
|
||||
# ------------------------------#
|
||||
# WRAPPER: QMainWindow Integration with Additional UI Elements
|
||||
# ------------------------------#
|
||||
# SECTION: Enhanced Graph Wrapper for QMainWindow
|
||||
# This section wraps the NodeGraph widget in a QMainWindow with:
|
||||
# - A menu bar at the top (named "Workflows" menu)
|
||||
# - A status bar at the bottom
|
||||
# - A central QSplitter dividing the window horizontally:
|
||||
# * Left side (2/3): the NodeGraph widget
|
||||
# * Right side (1/3): an empty text box for future use
|
||||
_original_show = graph.widget.show # Save original method
|
||||
|
||||
def _wrapped_show():
|
||||
"""
|
||||
Wrap the NodeGraph widget inside a QMainWindow with a "Workflows" menu,
|
||||
a status bar, and a central splitter for layout.
|
||||
"""
|
||||
# Create a new QMainWindow instance
|
||||
main_window = QtWidgets.QMainWindow()
|
||||
|
||||
# Create a menu bar and add a "Workflows" menu
|
||||
menu_bar = main_window.menuBar()
|
||||
workflows_menu = menu_bar.addMenu("Workflows")
|
||||
|
||||
# Add "Open" action
|
||||
open_action = QtWidgets.QAction("Open", main_window)
|
||||
open_action.triggered.connect(lambda: load_workflow(graph))
|
||||
workflows_menu.addAction(open_action)
|
||||
|
||||
# Add "Save" action
|
||||
save_action = QtWidgets.QAction("Save", main_window)
|
||||
save_action.triggered.connect(lambda: save_workflow(graph))
|
||||
workflows_menu.addAction(save_action)
|
||||
|
||||
# Add "Close" action
|
||||
close_action = QtWidgets.QAction("Close", main_window)
|
||||
close_action.triggered.connect(lambda: close_workflow(graph))
|
||||
workflows_menu.addAction(close_action)
|
||||
|
||||
# Create and set a blank status bar at the bottom.
|
||||
main_window.setStatusBar(QtWidgets.QStatusBar())
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# SECTION: Status Bar Enhancement - Dynamic Status Display
|
||||
# Add a QLabel to the status bar that shows:
|
||||
# - The number of nodes in the graph.
|
||||
# - A fixed update rate (500ms).
|
||||
# - A clickable hyperlink to the Flask API server.
|
||||
status_bar = main_window.statusBar()
|
||||
|
||||
status_label = QtWidgets.QLabel()
|
||||
status_label.setTextFormat(QtCore.Qt.RichText) # Enable rich text for clickable links.
|
||||
status_label.setStyleSheet("color: white;") # Set default text color to white.
|
||||
status_label.setOpenExternalLinks(True) # Allow hyperlinks to be clickable.
|
||||
status_bar.setSizeGripEnabled(False) # Disable resizing via the size grip.
|
||||
status_bar.addWidget(status_label)
|
||||
status_bar.setStyleSheet("""
|
||||
QStatusBar::item {
|
||||
border: none; /* remove the line around items */
|
||||
}
|
||||
""")
|
||||
|
||||
def update_status():
|
||||
node_count = len(graph.all_nodes())
|
||||
api_link = (
|
||||
'<a href="http://127.0.0.1:5000/data" '
|
||||
'style="color: rgb(60, 120, 180); text-decoration: none;">'
|
||||
'http://127.0.0.1:5000/data</a>'
|
||||
)
|
||||
status_label.setText(
|
||||
f'Nodes: {node_count} | Update Rate: 500ms | Flask API Server: {api_link}'
|
||||
)
|
||||
|
||||
# Create the timer, pass the main_window as parent, and store the reference.
|
||||
status_timer = QtCore.QTimer(main_window)
|
||||
status_timer.timeout.connect(update_status)
|
||||
status_timer.start(500)
|
||||
|
||||
main_window._status_timer = status_timer # Keep a reference so it's not GCed
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
# Create a QSplitter for horizontal division.
|
||||
splitter = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
|
||||
|
||||
# SECTION: Left Pane - Graph Widget
|
||||
splitter.addWidget(graph.widget)
|
||||
|
||||
# SECTION: Right Pane - Empty Text Box
|
||||
text_edit = QtWidgets.QTextEdit()
|
||||
splitter.addWidget(text_edit)
|
||||
|
||||
# Set stretch factors
|
||||
splitter.setStretchFactor(0, 2) # Split of Left Side
|
||||
splitter.setStretchFactor(1, 3) # Split of Right Side
|
||||
|
||||
# Reduce the Size of the Splitter Handle
|
||||
splitter.setHandleWidth(1)
|
||||
splitter.setStyleSheet("""
|
||||
QSplitter::handle {
|
||||
background: none;
|
||||
}
|
||||
""")
|
||||
|
||||
# Set the splitter as the central widget of the main window.
|
||||
main_window.setCentralWidget(splitter)
|
||||
|
||||
# Transfer the window title from the graph widget to the main window.
|
||||
main_window.setWindowTitle(graph.widget.windowTitle())
|
||||
# Resize the main window using the size set for the graph widget.
|
||||
main_window.resize(graph.widget.size())
|
||||
|
||||
# Store a reference to the main window to prevent it from being garbage collected.
|
||||
graph.widget._main_window = main_window
|
||||
# Show the main window instead of the standalone graph widget.
|
||||
main_window.show()
|
||||
|
||||
# Monkey-patch the show method of the graph widget.
|
||||
graph.widget.show = _wrapped_show
|
||||
|
||||
# Grid styling changes
|
||||
graph.set_background_color(20, 20, 20) # Dark gray
|
||||
graph.set_grid_color(60, 60, 60) # Gray grid lines
|
||||
|
||||
# Add gradient background
|
||||
scene = graph.scene()
|
||||
gradient = QtGui.QLinearGradient(0, 0, 0, 1)
|
||||
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
|
||||
gradient.setColorAt(0.0, QtGui.QColor(9, 44, 68))
|
||||
gradient.setColorAt(0.3, QtGui.QColor(30, 30, 30))
|
||||
gradient.setColorAt(0.7, QtGui.QColor(30, 30, 30))
|
||||
gradient.setColorAt(1.0, QtGui.QColor(9, 44, 68))
|
||||
scene.setBackgroundBrush(QtGui.QBrush(gradient))
|
||||
|
||||
# Resize and show the graph widget (which now triggers the QMainWindow wrapper)
|
||||
graph.widget.resize(1600, 900)
|
||||
graph.widget.show()
|
||||
|
||||
graph_context_menu.qmenu.setStyleSheet(menu_stylesheet)
|
||||
|
||||
# Global update function
|
||||
def global_update():
|
||||
for node in graph.all_nodes():
|
||||
if hasattr(node, "process_input"):
|
||||
try:
|
||||
node.process_input()
|
||||
except Exception as e:
|
||||
print("Error updating node", node, e)
|
||||
|
||||
timer = QtCore.QTimer()
|
||||
timer.timeout.connect(global_update)
|
||||
timer.start(500)
|
||||
|
||||
sys.exit(app.exec_())
|
131
Data/server.py
@@ -1,131 +0,0 @@
|
||||
from flask import Flask, send_from_directory, jsonify, request, abort
|
||||
import os
|
||||
import importlib
|
||||
import inspect
|
||||
import uuid
|
||||
from OdenGraphQt import BaseNode
|
||||
|
||||
# Determine the absolute path for the React build folder
|
||||
build_folder = os.path.join(os.getcwd(), "web-interface", "build")
|
||||
if not os.path.exists(build_folder):
|
||||
print("WARNING: web-interface build folder not found. Please build your React app.")
|
||||
|
||||
app = Flask(__name__, static_folder=build_folder, static_url_path="/")
|
||||
|
||||
# Directory where nodes are stored
|
||||
NODES_PACKAGE = "Nodes"
|
||||
|
||||
# In-memory workflow storage
|
||||
workflow_data = {
|
||||
"nodes": [],
|
||||
"edges": [] # Store connections separately
|
||||
}
|
||||
|
||||
def import_nodes_from_folder(package_name):
|
||||
"""Dynamically import node classes from the given package and list them."""
|
||||
nodes_by_category = {}
|
||||
package = importlib.import_module(package_name)
|
||||
package_path = package.__path__[0]
|
||||
|
||||
for root, _, files in os.walk(package_path):
|
||||
rel_path = os.path.relpath(root, package_path).replace(os.sep, ".")
|
||||
module_prefix = f"{package_name}.{rel_path}" if rel_path != "." else package_name
|
||||
category_name = os.path.basename(root)
|
||||
|
||||
for file in files:
|
||||
if file.endswith(".py") and file != "__init__.py":
|
||||
module_name = f"{module_prefix}.{file[:-3]}"
|
||||
try:
|
||||
module = importlib.import_module(module_name)
|
||||
for name, obj in inspect.getmembers(module, inspect.isclass):
|
||||
if issubclass(obj, BaseNode) and obj.__module__ == module.__name__:
|
||||
if category_name not in nodes_by_category:
|
||||
nodes_by_category[category_name] = []
|
||||
nodes_by_category[category_name].append(obj.NODE_NAME)
|
||||
except Exception as e:
|
||||
print(f"Failed to import {module_name}: {e}")
|
||||
|
||||
return nodes_by_category
|

@app.route("/")
def serve_frontend():
    """Serve the React app."""
    index_path = os.path.join(build_folder, "index.html")
    if os.path.exists(index_path):
        return send_from_directory(app.static_folder, "index.html")
    return "<h1>Borealis React App Code Not Found</h1><p>Please re-deploy Borealis Workflow Automation Tool</p>", 404

@app.route("/api/nodes", methods=["GET"])
def get_available_nodes():
    """Return available node types."""
    nodes = import_nodes_from_folder(NODES_PACKAGE)
    return jsonify(nodes)

@app.route("/api/workflow", methods=["GET", "POST"])
def handle_workflow():
    """Retrieve or update the workflow."""
    global workflow_data
    if request.method == "GET":
        return jsonify(workflow_data)
    elif request.method == "POST":
        data = request.get_json()
        if not data:
            abort(400, "Invalid workflow data")
        workflow_data = data
        return jsonify({"status": "success", "workflow": workflow_data})

@app.route("/api/node", methods=["POST"])
def create_node():
    """Create a new node with a unique UUID."""
    data = request.get_json()
    if not data or "nodeType" not in data:
        abort(400, "Invalid node data")

    node_id = str(uuid.uuid4())  # Generate a unique ID
    node = {
        "id": node_id,
        "type": data["nodeType"],
        "position": data.get("position", {"x": 100, "y": 100}),
        "properties": data.get("properties", {})
    }
    workflow_data["nodes"].append(node)
    return jsonify({"status": "success", "node": node})

@app.route("/api/node/<string:node_id>", methods=["PUT", "DELETE"])
def modify_node(node_id):
    """Update or delete a node."""
    global workflow_data
    if request.method == "PUT":
        data = request.get_json()
        for node in workflow_data["nodes"]:
            if node["id"] == node_id:
                node["position"] = data.get("position", node["position"])
                node["properties"] = data.get("properties", node["properties"])
                return jsonify({"status": "success", "node": node})
        abort(404, "Node not found")

    elif request.method == "DELETE":
        workflow_data["nodes"] = [n for n in workflow_data["nodes"] if n["id"] != node_id]
        return jsonify({"status": "success", "deletedNode": node_id})

@app.route("/api/edge", methods=["POST"])
def create_edge():
    """Create a new connection (edge) between nodes."""
    data = request.get_json()
    if not data or "source" not in data or "target" not in data:
        abort(400, "Invalid edge data")

    edge_id = str(uuid.uuid4())
    edge = {"id": edge_id, "source": data["source"], "target": data["target"]}
    workflow_data["edges"].append(edge)
    return jsonify({"status": "success", "edge": edge})

@app.route("/api/edge/<string:edge_id>", methods=["DELETE"])
def delete_edge(edge_id):
    """Delete an edge by ID."""
    global workflow_data
    workflow_data["edges"] = [e for e in workflow_data["edges"] if e["id"] != edge_id]
    return jsonify({"status": "success", "deletedEdge": edge_id})

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000, debug=False)
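To make the REST surface above concrete, here is a hedged client-side sketch. It assumes the server is reachable on localhost:5000 and that the requests package is installed; the node type names and positions are made up for illustration and are not part of the commit.

# Illustrative client sketch only (not part of this commit).
import requests

BASE = "http://localhost:5000/api"

# Discover node types, create two nodes, and connect them with an edge.
print(requests.get(f"{BASE}/nodes").json())
src = requests.post(f"{BASE}/node", json={"nodeType": "ExampleSource"}).json()["node"]
dst = requests.post(f"{BASE}/node", json={"nodeType": "ExampleSink",
                                          "position": {"x": 300, "y": 100}}).json()["node"]
edge = requests.post(f"{BASE}/edge", json={"source": src["id"], "target": dst["id"]}).json()["edge"]

# Inspect the stored workflow, then remove the edge again.
print(requests.get(f"{BASE}/workflow").json())
requests.delete(f"{BASE}/edge/{edge['id']}")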
50
Launch-Borealis-Legacy.ps1
Normal file
@ -0,0 +1,50 @@
# Bootstrap Borealis Virtual Python Environment
# Run Script: "Set-ExecutionPolicy Unrestricted -Scope Process; .\Launch-Borealis-Legacy.ps1"

# Define paths
$venvPath = "Borealis-Workflow-Automation-Tool"
$dataSource = "Data"
$dataDestination = "$venvPath\Borealis"

# Check if virtual environment exists
if (!(Test-Path "$venvPath\Scripts\Activate")) {
    Write-Output "Creating virtual environment '$venvPath'..."
    python -m venv $venvPath
}

# Ensure the Data folder exists before copying
if (Test-Path $dataSource) {
    Write-Output "Copying Data folder into virtual environment..."

    # Remove old data if it exists
    if (Test-Path $dataDestination) {
        Remove-Item -Recurse -Force $dataDestination
    }

    # Create the Borealis directory inside the virtual environment
    New-Item -Path $dataDestination -ItemType Directory -Force | Out-Null

    # Copy Data into the virtual environment under Borealis
    Copy-Item -Path "$dataSource\*" -Destination $dataDestination -Recurse
} else {
    Write-Output "Warning: Data folder not found, skipping copy."
}

# Activate virtual environment
Write-Output "Activating virtual environment..."
. "$venvPath\Scripts\Activate"

# Install dependencies
if (Test-Path "requirements.txt") {
    Write-Output "Installing dependencies..."
    pip install -q -r requirements.txt
} else {
    Write-Output "No requirements.txt found, skipping installation."
}

# Run the main script from inside the copied Data folder
Write-Output "Starting Borealis Workflow Automation Tool..."
python "$dataDestination\borealis.py"

# Deactivate after execution
deactivate
@ -1,149 +0,0 @@
# Start_Windows - WebServer.ps1
# Run this script with:
# Set-ExecutionPolicy Unrestricted -Scope Process; .\Start_Windows -WebServer.ps1

# ---------------------- Initialization & Visuals ----------------------
$symbols = @{
    Success = [char]0x2705
    Running = [char]0x23F3
    Fail    = [char]0x274C
    Info    = [char]0x2139
}

function Write-ProgressStep {
    param (
        [string]$Message,
        [string]$Status = $symbols["Info"] # Ensure proper lookup
    )
    Write-Host "`r$Status $Message... " -NoNewline
}

function Run-Step {
    param (
        [string]$Message,
        [scriptblock]$Script
    )
    Write-ProgressStep -Message $Message -Status "$($symbols.Running)"
    try {
        & $Script
        if ($LASTEXITCODE -eq 0 -or $?) {
            Write-Host "`r$($symbols.Success) $Message " # Fix symbol lookup
        } else {
            throw "Non-zero exit code"
        }
    } catch {
        Write-Host "`r$($symbols.Fail) $Message - Failed: $_ " -ForegroundColor Red
        exit 1
    }
}

Clear-Host
Write-Host "Deploying Borealis - Workflow Automation Tool..." -ForegroundColor Green
Write-Host "===================================================================================="

# ---------------------- Node.js Check ----------------------
if (-not (Get-Command node -ErrorAction SilentlyContinue)) {
    Write-Host "`r$($symbols.Fail) Node.js is not installed. Please install Node.js and try again." -ForegroundColor Red
    exit 1
}

# ---------------------- Path Definitions ----------------------
$venvFolder = "Borealis-Workflow-Automation-Tool"
$dataSource = "Data"
$dataDestination = "$venvFolder\Borealis"
$customUIPath = "$dataSource\WebUI"
$webUIDestination = "$venvFolder\web-interface"

# ---------------------- Create Python Virtual Environment ----------------------
Run-Step "Create Virtual Python Environment" {
    if (!(Test-Path "$venvFolder\Scripts\Activate")) {
        python -m venv $venvFolder | Out-Null
    }
}

# ---------------------- Copy Server Data ----------------------
Run-Step "Copy Borealis Server Data into Virtual Python Environment" {
    if (Test-Path $dataSource) {
        if (Test-Path $dataDestination) {
            Remove-Item -Recurse -Force $dataDestination | Out-Null
        }
        New-Item -Path $dataDestination -ItemType Directory -Force | Out-Null
        Copy-Item -Path "$dataSource\*" -Destination $dataDestination -Recurse
    } else {
        Write-Host "`r$($symbols.Info) Warning: Data folder not found, skipping copy." -ForegroundColor Yellow
    }
}

# ---------------------- React UI Deployment ----------------------
Run-Step "Create a new ReactJS App in $webUIDestination" {
    if (-not (Test-Path $webUIDestination)) {
        npx create-react-app $webUIDestination | Out-Null
    }
}

Run-Step "Overwrite ReactJS App Files with Borealis ReactJS Files" {
    if (Test-Path $customUIPath) {
        Copy-Item -Path "$customUIPath\*" -Destination $webUIDestination -Recurse -Force
    } else {
        Write-Host "`r$($symbols.Info) No custom UI found, using default React app." -ForegroundColor Yellow
    }
}

Run-Step "Remove Existing ReactJS Build Folder (If Exists)" {
    if (Test-Path "$webUIDestination\build") {
        Remove-Item -Path "$webUIDestination\build" -Recurse -Force
    }
}

# ---------------------- Activate Python Virtual Environment ----------------------
Run-Step "Activate Virtual Python Environment" {
    . "$venvFolder\Scripts\Activate"
}

# ---------------------- Install Python Dependencies ----------------------
Run-Step "Install Python Dependencies into Virtual Python Environment" {
    if (Test-Path "requirements.txt") {
        pip install -q -r requirements.txt 2>&1 | Out-Null
    } else {
        Write-Host "`r$($symbols.Info) No requirements.txt found, skipping Python packages." -ForegroundColor Yellow
    }
}

# ---------------------- Build React App ----------------------
Run-Step "Install NPM into ReactJS App" {
    $packageJsonPath = Join-Path $webUIDestination "package.json"
    if (Test-Path $packageJsonPath) {
        Push-Location $webUIDestination
        $env:npm_config_loglevel = "silent"
        npm install --silent --no-fund --audit=false 2>&1 | Out-Null
        Pop-Location
    }
}

Run-Step "Install React Flow into ReactJS App" {
    Push-Location $webUIDestination
    npm install reactflow --no-fund --audit=false | Out-Null
    Pop-Location
}

Run-Step "Install Material UI Libraries into ReactJS App" {
    Push-Location $webUIDestination
    $env:npm_config_loglevel = "silent" # Force NPM to be completely silent
    npm install --silent @mui/material @mui/icons-material @emotion/react @emotion/styled --no-fund --audit=false 2>&1 | Out-Null
    Pop-Location
}

Run-Step "Build ReactJS App" {
    Push-Location $webUIDestination
    npm run build | Out-Null
    Pop-Location
}

# ---------------------- Launch Flask Server ----------------------
Push-Location $venvFolder
Write-Host "`nLaunching Borealis..." -ForegroundColor Green
Write-Host "===================================================================================="
Write-Host "$($symbols.Running) Starting the Python Flask server..." -NoNewline
python "Borealis\server.py"
Write-Host "`r$($symbols.Success) Borealis Launched Successfully!"
Pop-Location
@ -1,185 +0,0 @@
#!/usr/bin/env bash
# --------------------------------------------------------------------
# Deploying Borealis - Workflow Automation Tool
#
# This script deploys the Borealis Workflow Automation Tool by:
#   - Detecting the Linux distro and installing required system dependencies.
#   - Creating a Python virtual environment.
#   - Copying server data.
#   - Setting up a React UI application.
#   - Installing Python and Node dependencies.
#   - Building the React app.
#   - Launching the Flask server.
#
# Usage:
#   chmod +x deploy_borealis.sh
#   ./deploy_borealis.sh
# --------------------------------------------------------------------

# ---------------------- Initialization & Visuals ----------------------
GREEN="\033[0;32m"
YELLOW="\033[1;33m"
RED="\033[0;31m"
RESET="\033[0m"
CHECKMARK="✅"
HOURGLASS="⏳"
CROSSMARK="❌"
INFO="ℹ️"

# Function to run a step with progress visuals and error checking
run_step() {
    local message="$1"
    shift
    echo -ne "${HOURGLASS} ${message}... "
    if "$@"; then
        echo -e "\r${CHECKMARK} ${message}"
    else
        echo -e "\r${CROSSMARK} ${message} - Failed${RESET}"
        exit 1
    fi
}

echo -e "${GREEN}Deploying Borealis - Workflow Automation Tool...${RESET}"
echo "===================================================================================="

# ---------------------- Detect Linux Distribution ----------------------
detect_distro() {
    # This function detects the Linux distribution by sourcing /etc/os-release.
    if [ -f /etc/os-release ]; then
        . /etc/os-release
        DISTRO_ID=$ID
    else
        DISTRO_ID="unknown"
    fi
    echo -e "${INFO} Detected OS: ${DISTRO_ID}"
}
detect_distro

# ---------------------- Install System Dependencies ----------------------
install_core_dependencies() {
    # Install required packages based on detected Linux distribution.
    case "$DISTRO_ID" in
        ubuntu|debian)
            sudo apt update -qq
            sudo apt install -y python3 python3-venv python3-pip nodejs npm git curl
            ;;
        rhel|centos|fedora|rocky)
            # For Fedora and similar distributions, the venv module is built-in so we omit python3-venv.
            sudo dnf install -y python3 python3-pip nodejs npm git curl
            ;;
        arch)
            sudo pacman -Sy --noconfirm python python-venv python-pip nodejs npm git curl
            ;;
        *)
            echo -e "${RED}${CROSSMARK} Unsupported Linux distribution: ${DISTRO_ID}${RESET}"
            exit 1
            ;;
    esac
}
run_step "Install System Dependencies" install_core_dependencies

# ---------------------- Path Setup ----------------------
# Variables and path definitions
venvFolder="Borealis-Workflow-Automation-Tool"
dataSource="Data"
dataDestination="${venvFolder}/Borealis"
customUIPath="${dataSource}/WebUI"
webUIDestination="${venvFolder}/web-interface"

# ---------------------- Create Python Virtual Environment ----------------------
run_step "Create Virtual Python Environment" bash -c "
    # Check if virtual environment already exists; if not, create one.
    if [ ! -f '${venvFolder}/bin/activate' ]; then
        python3 -m venv '${venvFolder}'
    fi
"

# ---------------------- Copy Borealis Data ----------------------
run_step "Copy Borealis Server Data into Virtual Python Environment" bash -c "
    # If the Data folder exists, remove any existing server data folder and copy fresh data.
    if [ -d \"$dataSource\" ]; then
        rm -rf \"$dataDestination\"
        mkdir -p \"$dataDestination\"
        cp -r \"$dataSource/\"* \"$dataDestination\"
    else
        echo -e \"\r${INFO} Warning: Data folder not found, skipping copy.${RESET}\"
    fi
    true
"

# ---------------------- React UI Setup ----------------------
run_step "Create a new ReactJS App in ${webUIDestination}" bash -c "
    # Create a React app if the destination folder does not exist.
    if [ ! -d \"$webUIDestination\" ]; then
        # Set CI=true and add --loglevel=error to suppress funding and audit messages
        CI=true npx create-react-app \"$webUIDestination\" --silent --use-npm --loglevel=error
    fi
"

run_step "Overwrite React App with Custom Files" bash -c "
    # If custom UI files exist, copy them into the React app folder.
    if [ -d \"$customUIPath\" ]; then
        cp -r \"$customUIPath/\"* \"$webUIDestination\"
    else
        echo -e \"\r${INFO} No custom UI found, using default React app.${RESET}\"
    fi
    true
"

run_step "Remove Existing React Build (if any)" bash -c "
    # Remove the build folder if it exists to ensure a fresh build.
    if [ -d \"$webUIDestination/build\" ]; then
        rm -rf \"$webUIDestination/build\"
    fi
    true
"

# ---------------------- Activate Python Virtual Environment ----------------------
# Activate the Python virtual environment for subsequent commands.
source "${venvFolder}/bin/activate"

# ---------------------- Install Python Dependencies ----------------------
run_step "Install Python Dependencies into Virtual Python Environment" bash -c "
    # Install Python packages if a requirements.txt file is present.
    if [ -f \"requirements.txt\" ]; then
        pip install -q -r requirements.txt
    else
        echo -e \"\r${INFO} No requirements.txt found, skipping Python packages.${RESET}\"
    fi
    true
"

# ---------------------- Install Node Dependencies & Build React UI ----------------------
run_step "Install React App Dependencies" bash -c "
    # Install npm dependencies if package.json exists.
    if [ -f \"$webUIDestination/package.json\" ]; then
        cd \"$webUIDestination\"
        # Add --loglevel=error to suppress npm's funding and audit messages
        npm install --silent --no-fund --audit=false --loglevel=error
        cd -
    fi
"

run_step "Install React Flow and UI Libraries" bash -c "
    # Install additional React libraries.
    cd \"$webUIDestination\"
    npm install reactflow --silent --no-fund --audit=false --loglevel=error
    npm install --silent @mui/material @mui/icons-material @emotion/react @emotion/styled --no-fund --audit=false --loglevel=error
    cd -
"

run_step "Build React App" bash -c "
    # Build the React app to create production-ready files.
    cd \"$webUIDestination\"
    npm run build --silent --loglevel=error
    cd -
"

# ---------------------- Launch Flask Server ----------------------
cd "${venvFolder}"
echo -e "\n${GREEN}Launching Borealis...${RESET}"
echo "===================================================================================="
echo -ne "${HOURGLASS} Starting Flask server... "
python3 Borealis/server.py
echo -e "\r${CHECKMARK} Borealis Launched Successfully!"