Restructured the project and implemented virtual Python environments to isolate the application. Also added launch scripts.

This commit is contained in:
2025-02-27 19:38:25 -07:00
parent a2c0080662
commit 17b99ca836
76 changed files with 264 additions and 5 deletions

View File

@@ -0,0 +1,339 @@
# Modules/data_collector.py
import threading
import time
import re
import sys
import numpy as np
import cv2
import concurrent.futures
# Vision-related Imports
import pytesseract
import easyocr
import torch
from PIL import Image, ImageGrab, ImageFilter
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtCore import QRect, QPoint, Qt, QMutex, QTimer
from PyQt5.QtGui import QPainter, QPen, QColor, QFont
# Initialize EasyOCR readers: a CPU reader is always available, the GPU reader uses CUDA only when present
reader_cpu = easyocr.Reader(['en'], gpu=False)
reader_gpu = easyocr.Reader(['en'], gpu=torch.cuda.is_available())
pytesseract.pytesseract.tesseract_cmd = r"C:\Program Files\Tesseract-OCR\tesseract.exe"
DEFAULT_WIDTH = 180
DEFAULT_HEIGHT = 130
HANDLE_SIZE = 8
LABEL_HEIGHT = 20
collector_mutex = QMutex()
regions = {}
app_instance = None
def _ensure_qapplication():
"""
Ensures that QApplication is initialized before creating widgets.
Must be called from the main thread.
"""
global app_instance
if app_instance is None:
app_instance = QApplication(sys.argv) # Start in main thread
def create_ocr_region(region_id, x=250, y=50, w=DEFAULT_WIDTH, h=DEFAULT_HEIGHT, color=(255, 255, 0), thickness=2):
"""
Creates an OCR region with a visible, resizable box on the screen.
Allows setting custom color (RGB) and line thickness.
"""
_ensure_qapplication()
collector_mutex.lock()
if region_id in regions:
collector_mutex.unlock()
return
regions[region_id] = {
'bbox': [x, y, w, h],
'raw_text': "",
'widget': OCRRegionWidget(x, y, w, h, region_id, color, thickness)
}
collector_mutex.unlock()
def get_raw_text(region_id):
collector_mutex.lock()
if region_id not in regions:
collector_mutex.unlock()
return ""
text = regions[region_id]['raw_text']
collector_mutex.unlock()
return text
def start_collector():
t = threading.Thread(target=_update_ocr_loop, daemon=True)
t.start()
def _update_ocr_loop():
while True:
collector_mutex.lock()
region_ids = list(regions.keys())
collector_mutex.unlock()
for rid in region_ids:
collector_mutex.lock()
bbox = regions[rid]['bbox'][:]
collector_mutex.unlock()
x, y, w, h = bbox
screenshot = ImageGrab.grab(bbox=(x, y, x + w, y + h))
processed = _preprocess_image(screenshot)
raw_text = pytesseract.image_to_string(processed, config='--psm 6 --oem 1')
collector_mutex.lock()
if rid in regions:
regions[rid]['raw_text'] = raw_text
collector_mutex.unlock()
time.sleep(0.7)
def _preprocess_image(image):
gray = image.convert("L")
scaled = gray.resize((gray.width * 3, gray.height * 3))
thresh = scaled.point(lambda p: 255 if p > 200 else 0)
return thresh.filter(ImageFilter.MedianFilter(3))
def find_word_positions(region_id, word, offset_x=0, offset_y=0, margin=5, ocr_engine="CPU", num_slices=1):
"""
Uses user-defined horizontal slices and threading for faster inference.
"""
collector_mutex.lock()
if region_id not in regions:
collector_mutex.unlock()
return []
bbox = regions[region_id]['bbox']
collector_mutex.unlock()
x, y, w, h = bbox
left, top, right, bottom = x, y, x + w, y + h
if right <= left or bottom <= top:
print(f"[ERROR] Invalid OCR region bounds: {bbox}")
return []
try:
image = ImageGrab.grab(bbox=(left, top, right, bottom))
orig_width, orig_height = image.size
word_positions = []
# Ensure number of slices does not exceed image height
num_slices = min(num_slices, orig_height)
strip_height = max(1, orig_height // num_slices)
def process_strip(strip_id):
strip_y = strip_id * strip_height
strip = image.crop((0, strip_y, orig_width, strip_y + strip_height))
strip_np = np.array(strip)
detected_positions = []
if ocr_engine == "CPU":
ocr_data = pytesseract.image_to_data(strip, config='--psm 6 --oem 1', output_type=pytesseract.Output.DICT)
for i in range(len(ocr_data['text'])):
if re.search(rf"\b{word}\b", ocr_data['text'][i], re.IGNORECASE):
x_scaled = int(ocr_data['left'][i])
y_scaled = int(ocr_data['top'][i]) + strip_y
w_scaled = int(ocr_data['width'][i])
h_scaled = int(ocr_data['height'][i])
detected_positions.append((x_scaled + offset_x, y_scaled + offset_y, w_scaled + (margin * 2), h_scaled + (margin * 2)))
else:
results = reader_gpu.readtext(strip_np)
for (bbox, text, _) in results:
if re.search(rf"\b{word}\b", text, re.IGNORECASE):
(x_min, y_min), (x_max, y_max) = bbox[0], bbox[2]
x_scaled = int(x_min)
y_scaled = int(y_min) + strip_y
w_scaled = int(x_max - x_min)
h_scaled = int(y_max - y_min)
detected_positions.append((x_scaled + offset_x, y_scaled + offset_y, w_scaled + (margin * 2), h_scaled + (margin * 2)))
return detected_positions
with concurrent.futures.ThreadPoolExecutor(max_workers=num_slices) as executor:
strip_results = list(executor.map(process_strip, range(num_slices)))
for strip_result in strip_results:
word_positions.extend(strip_result)
return word_positions
except Exception as e:
print(f"[ERROR] Failed to capture OCR region: {e}")
return []
def draw_identification_boxes(region_id, positions, color=(0, 0, 255), thickness=2):
"""
Draws non-interactive rectangles at specified positions within the given OCR region.
Uses a separate rendering thread to prevent blocking OCR processing.
"""
collector_mutex.lock()
if region_id in regions and 'widget' in regions[region_id]:
widget = regions[region_id]['widget']
widget.update_draw_positions(positions, color, thickness)
collector_mutex.unlock()
def update_region_slices(region_id, num_slices):
"""
Updates the number of visual slices in the OCR region.
"""
collector_mutex.lock()
if region_id in regions and 'widget' in regions[region_id]:
widget = regions[region_id]['widget']
widget.set_num_slices(num_slices)
collector_mutex.unlock()
class OCRRegionWidget(QWidget):
def __init__(self, x, y, w, h, region_id, color, thickness):
super().__init__()
self.setGeometry(x, y, w, h)
self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint | Qt.Tool)
self.setAttribute(Qt.WA_TranslucentBackground, True)
self.setAttribute(Qt.WA_TransparentForMouseEvents, False)
self.region_id = region_id
self.box_color = QColor(*color)
self.line_thickness = thickness
        self.draw_positions = []
        self.previous_positions = []  # Cache of the last drawn positions; prevents redundant redraws
        self.num_slices = 1  # Slice count is initialized before the first paint
        self.selected_handle = None  # No resize handle grabbed until a mouse press selects one
        self.drag_offset = None  # No drag in progress until the user clicks inside the region
        self.show()
def paintEvent(self, event):
painter = QPainter(self)
pen = QPen(self.box_color)
pen.setWidth(self.line_thickness)
painter.setPen(pen)
# Draw main rectangle
painter.drawRect(0, 0, self.width(), self.height())
# Draw detected word overlays
for x, y, w, h in self.draw_positions:
painter.drawRect(x, y, w, h)
# Draw faint slice division lines
if self.num_slices > 1:
strip_height = self.height() // self.num_slices
pen.setColor(QColor(150, 150, 150, 100)) # Light gray, semi-transparent
pen.setWidth(1)
painter.setPen(pen)
for i in range(1, self.num_slices): # Do not draw the last one at the bottom
painter.drawLine(0, i * strip_height, self.width(), i * strip_height)
def set_draw_positions(self, positions, color, thickness):
"""
Updates the overlay positions and visual settings.
"""
self.draw_positions = positions
self.box_color = QColor(*color)
self.line_thickness = thickness
self.update()
def update_draw_positions(self, positions, color, thickness):
"""
Updates the overlay positions and redraws only if the positions have changed.
This prevents unnecessary flickering.
"""
if positions == self.previous_positions:
return # No change, do not update
self.previous_positions = positions # Store last known positions
self.draw_positions = positions
self.box_color = QColor(*color)
self.line_thickness = thickness
self.update() # Redraw only if needed
def set_num_slices(self, num_slices):
"""
Updates the number of horizontal slices for visualization.
"""
self.num_slices = num_slices
self.update()
def _resize_handles(self):
w, h = self.width(), self.height()
return [
QRect(0, 0, HANDLE_SIZE, HANDLE_SIZE), # Top-left
QRect(w - HANDLE_SIZE, h - HANDLE_SIZE, HANDLE_SIZE, HANDLE_SIZE) # Bottom-right
]
    def mousePressEvent(self, event):
        if event.button() == Qt.LeftButton:
            self.selected_handle = None  # Reset so a click on the body moves the region instead of resizing it
            for i, handle in enumerate(self._resize_handles()):
                if handle.contains(event.pos()):
                    self.selected_handle = i
                    return
            self.drag_offset = event.pos()
def mouseMoveEvent(self, event):
if self.selected_handle is not None:
w, h = self.width(), self.height()
if self.selected_handle == 0: # Top-left
new_w = w + (self.x() - event.globalX())
new_h = h + (self.y() - event.globalY())
new_x = event.globalX()
new_y = event.globalY()
if new_w < 20:
new_w = 20
if new_h < 20:
new_h = 20
self.setGeometry(new_x, new_y, new_w, new_h)
elif self.selected_handle == 1: # Bottom-right
new_w = event.globalX() - self.x()
new_h = event.globalY() - self.y()
if new_w < 20:
new_w = 20
if new_h < 20:
new_h = 20
self.setGeometry(self.x(), self.y(), new_w, new_h)
collector_mutex.lock()
if self.region_id in regions:
regions[self.region_id]["bbox"] = [self.x(), self.y(), self.width(), self.height()]
collector_mutex.unlock()
self.update()
elif self.drag_offset:
new_x = event.globalX() - self.drag_offset.x()
new_y = event.globalY() - self.drag_offset.y()
self.move(new_x, new_y)
collector_mutex.lock()
if self.region_id in regions:
regions[self.region_id]["bbox"] = [new_x, new_y, self.width(), self.height()]
collector_mutex.unlock()
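A minimal usage sketch of this module (illustrative only; the region id and coordinates are made up, and the real entry point is presumably borealis.py, which owns the Qt event loop — this sketch reuses the module-level app_instance that create_ocr_region() sets up):

# example_ocr_usage.py (hypothetical helper script)
from PyQt5.QtCore import QTimer
from Modules import data_collector

# Create a visible, draggable OCR region and start the background OCR loop.
data_collector.create_ocr_region("demo_region", x=100, y=100, w=200, h=80)
data_collector.start_collector()

# Poll the collected text once per second from the Qt main thread.
poller = QTimer()
poller.timeout.connect(lambda: print(data_collector.get_raw_text("demo_region")))
poller.start(1000)

# create_ocr_region() already constructed the QApplication; run its event loop.
data_collector.app_instance.exec_()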

View File

@@ -0,0 +1,103 @@
# Modules/data_manager.py
import threading
import time
from flask import Flask, jsonify
from PyQt5.QtCore import QMutex
# Global datastore for character metrics
data_store = {
"hp_current": 0,
"hp_total": 0,
"mp_current": 0,
"mp_total": 0,
"fp_current": 0,
"fp_total": 0,
"exp": 0.0
}
# Mutex for thread safety
data_mutex = QMutex()
# Flag to ensure only one character status collector node exists
character_status_collector_exists = False
# Flask Application
app = Flask(__name__)
@app.route('/data')
def data_api():
"""
Returns the current character metrics as JSON.
"""
return jsonify(get_data())
@app.route('/exp')
def exp_api():
"""
Returns the EXP data.
"""
return jsonify({"exp": get_data()["exp"]})
@app.route('/hp')
def hp_api():
"""
Returns the HP data.
"""
return jsonify({
"hp_current": get_data()["hp_current"],
"hp_total": get_data()["hp_total"]
})
@app.route('/mp')
def mp_api():
"""
Returns the MP data.
"""
return jsonify({
"mp_current": get_data()["mp_current"],
"mp_total": get_data()["mp_total"]
})
@app.route('/fp')
def fp_api():
"""
Returns the FP data.
"""
return jsonify({
"fp_current": get_data()["fp_current"],
"fp_total": get_data()["fp_total"]
})
def start_api_server():
"""
Starts the Flask API server in a separate daemon thread.
"""
def run():
app.run(host="0.0.0.0", port=5000) # Allows external connections
t = threading.Thread(target=run, daemon=True)
t.start()
def get_data():
"""
Return a copy of the global data_store.
"""
data_mutex.lock()
data_copy = data_store.copy()
data_mutex.unlock()
return data_copy
def set_data(key, value):
"""
Set a single metric in the global data_store.
"""
data_mutex.lock()
data_store[key] = value
data_mutex.unlock()
def set_data_bulk(metrics_dict):
"""
Update multiple metrics in the global data_store at once.
"""
data_mutex.lock()
data_store.update(metrics_dict)
data_mutex.unlock()
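For reference, a minimal client sketch against the endpoints defined above (assumes start_api_server() has been called and the default port 5000 is reachable; the script name is hypothetical):

# check_metrics.py (hypothetical)
import requests

resp = requests.get("http://127.0.0.1:5000/data", timeout=1)
if resp.status_code == 200:
    metrics = resp.json()
    print(f"HP {metrics['hp_current']}/{metrics['hp_total']}, EXP {metrics['exp']}%")
else:
    print(f"API returned HTTP {resp.status_code}")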

View File

@@ -0,0 +1,38 @@
#!/usr/bin/env python3
from OdenGraphQt import BaseNode
from Qt import QtCore
class BlueprintNode(BaseNode):
"""
A placeholder node used to preview placement before spawning
the real node. It has a distinct color and minimal UI.
"""
__identifier__ = 'bunny-lab.io.blueprint'
NODE_NAME = 'Blueprint Node'
def __init__(self):
super(BlueprintNode, self).__init__()
# Display a name so the user sees "Click to Place Node"
self.set_name("Click to Place Node")
# Give it a bluish color + white text, for visibility
self.set_color(60, 120, 220) # R, G, B
self.view.text_color = (255, 255, 255, 200)
self.view.border_color = (255, 255, 255, 180)
# Make it slightly transparent if desired (alpha=150)
self.view._bg_color = (60, 120, 220, 150)
# Remove any default inputs/outputs (make it minimal)
for port in self.input_ports() + self.output_ports():
self.model.delete_port(port.name(), port.port_type)
# Store the "actual node" we want to spawn
self.create_property("actual_node_type", "", widget_type=0)
def process_input(self):
"""
We do nothing here; it is purely a placeholder node.
"""
pass

Binary file not shown (new image added, 5.6 KiB)

View File

@@ -0,0 +1,50 @@
#!/usr/bin/env python3
"""
Flyff EXP Node (Final Combined Version)
- Pulls the EXP value directly from data_manager.py
- Outputs only the "exp" value as a string
- Uses color (48, 116, 143) for its output port
- Displays "exp" in a text field labeled "Value"
- Retrieves the port with self.outputs().get('value')
"""
import time
import traceback
from OdenGraphQt import BaseNode
from Modules import data_manager # Importing data_manager from Modules
class FlyffEXPCurrentNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_exp_current_node'
NODE_NAME = 'Flyff - EXP'
def __init__(self):
super(FlyffEXPCurrentNode, self).__init__()
# 1) Text input property named "value" for UI display
self.add_text_input('value', 'Value', text='N/A')
# 2) Output port also named "value"
self.add_output('value', color=(48, 116, 143))
self.set_name("Flyff - EXP")
def process_input(self):
try:
new_value = data_manager.get_data().get("exp", "N/A")
new_value_str = str(new_value)
self.set_property('value', new_value_str)
self.transmit_data(new_value_str)
except Exception as e:
tb = traceback.format_exc()
print(f"[ERROR] Exception in FlyffEXPCurrentNode: {e}\nTraceback:\n{tb}")
def transmit_data(self, data):
output_port = self.outputs().get('value')
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
connected_node.receive_data(data, source_port_name='value')
except Exception as e:
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
Flyff FP Current Node (Final Combined Version)
- Polls the API at http://127.0.0.1:5000/data
- Outputs only the "fp_current" value as a string
- Uses color (36, 116, 32) for its output port
- Displays "fp_current" in a text field labeled "Value"
- Retrieves the port with self.outputs().get('value')
"""
import time
import requests
import traceback
from OdenGraphQt import BaseNode
class FlyffFPCurrentNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_fp_current_node'
NODE_NAME = 'Flyff - FP Current'
def __init__(self):
super(FlyffFPCurrentNode, self).__init__()
# 1) Text input property named "value" for UI display
self.add_text_input('value', 'Value', text='N/A')
# 2) Output port also named "value"
self.add_output('value', color=(36, 116, 32))
self._api_down = True
self._last_api_attempt = 0.0
self._retry_interval = 5.0
self._last_error_printed = 0.0
self.set_name("Flyff - FP Current (API Disconnected)")
def process_input(self):
current_time = time.time()
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
return
self._last_api_attempt = current_time
try:
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
status_code = response.status_code
print(f"[DEBUG] FlyffFPCurrentNode: HTTP Status Code = {status_code}")
if status_code == 200:
try:
data = response.json() or {}
except ValueError:
data = {}
if isinstance(data, list):
data = {}
self._api_down = False
self.set_name("Flyff - FP Current (API Connected)")
new_value = data.get("fp_current", "N/A")
print(f"[DEBUG] FlyffFPCurrentNode: fp_current = {new_value}")
new_value_str = str(new_value)
self.set_property('value', new_value_str)
self.transmit_data(new_value_str)
else:
self._handle_api_error(f"HTTP {status_code} from FlyffFPCurrentNode")
self._api_down = True
except Exception as e:
tb = traceback.format_exc()
self._handle_api_error(f"Exception in FlyffFPCurrentNode: {e}\nTraceback:\n{tb}")
self._api_down = True
def transmit_data(self, data):
output_port = self.outputs().get('value')
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
connected_node.receive_data(data, source_port_name='value')
except Exception as e:
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
def _handle_api_error(self, msg):
current_time = time.time()
if (current_time - self._last_error_printed) >= self._retry_interval:
print(f"[ERROR] {msg}")
self._last_error_printed = current_time
self.set_name("Flyff - FP Current (API Disconnected)")

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
Flyff FP Total Node (Final Combined Version)
- Polls the API at http://127.0.0.1:5000/data
- Outputs only the "fp_total" value as a string
- Uses color (36, 116, 32) for its output port
- Displays "fp_total" in a text field labeled "Value"
- Retrieves the port with self.outputs().get('value')
"""
import time
import requests
import traceback
from OdenGraphQt import BaseNode
class FlyffFPTotalNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_fp_total_node'
NODE_NAME = 'Flyff - FP Total'
def __init__(self):
super(FlyffFPTotalNode, self).__init__()
# 1) Text input property named "value" for UI display
self.add_text_input('value', 'Value', text='N/A')
# 2) Output port also named "value"
self.add_output('value', color=(36, 116, 32))
self._api_down = True
self._last_api_attempt = 0.0
self._retry_interval = 5.0
self._last_error_printed = 0.0
self.set_name("Flyff - FP Total (API Disconnected)")
def process_input(self):
current_time = time.time()
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
return
self._last_api_attempt = current_time
try:
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
status_code = response.status_code
print(f"[DEBUG] FlyffFPTotalNode: HTTP Status Code = {status_code}")
if status_code == 200:
try:
data = response.json() or {}
except ValueError:
data = {}
if isinstance(data, list):
data = {}
self._api_down = False
self.set_name("Flyff - FP Total (API Connected)")
new_value = data.get("fp_total", "N/A")
print(f"[DEBUG] FlyffFPTotalNode: fp_total = {new_value}")
new_value_str = str(new_value)
self.set_property('value', new_value_str)
self.transmit_data(new_value_str)
else:
self._handle_api_error(f"HTTP {status_code} from FlyffFPTotalNode")
self._api_down = True
except Exception as e:
tb = traceback.format_exc()
self._handle_api_error(f"Exception in FlyffFPTotalNode: {e}\nTraceback:\n{tb}")
self._api_down = True
def transmit_data(self, data):
output_port = self.outputs().get('value')
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
connected_node.receive_data(data, source_port_name='value')
except Exception as e:
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
def _handle_api_error(self, msg):
current_time = time.time()
if (current_time - self._last_error_printed) >= self._retry_interval:
print(f"[ERROR] {msg}")
self._last_error_printed = current_time
self.set_name("Flyff - FP Total (API Disconnected)")

View File

@@ -0,0 +1,112 @@
#!/usr/bin/env python3
"""
Flyff HP Current Node (Final Combined Version)
- Polls the API at http://127.0.0.1:5000/data
- Outputs only the "hp_current" value as a string
- Uses color (126, 36, 57) for its output port
- Displays "hp_current" in a text field labeled "Value"
- Avoids "list indices must be integers" by retrieving the port with self.outputs().get('value')
"""
import time
import requests
import traceback
from OdenGraphQt import BaseNode
class FlyffHPCurrentNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_hp_current_node'
NODE_NAME = 'Flyff - HP Current'
def __init__(self):
super(FlyffHPCurrentNode, self).__init__()
# 1) Add a text input property named "value" for UI display
self.add_text_input('value', 'Value', text='N/A')
# 2) Add an output port also named "value"
self.add_output('value', color=(126, 36, 57))
# Start in "disconnected" state
self._api_down = True
self._last_api_attempt = 0.0
self._retry_interval = 5.0
self._last_error_printed = 0.0
# Default node title
self.set_name("Flyff - HP Current (API Disconnected)")
def process_input(self):
"""
Called periodically by the global timer in borealis.py
"""
current_time = time.time()
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
return
self._last_api_attempt = current_time
try:
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
status_code = response.status_code
print(f"[DEBUG] FlyffHPCurrentNode: HTTP Status Code = {status_code}")
if status_code == 200:
# Attempt to parse JSON
try:
data = response.json() or {}
except ValueError:
data = {}
# If data is a list, ignore or convert to {}
if isinstance(data, list):
data = {}
# Mark node as connected
self._api_down = False
self.set_name("Flyff - HP Current (API Connected)")
# Retrieve hp_current (default "N/A" if missing)
new_value = data.get("hp_current", "N/A")
print(f"[DEBUG] FlyffHPCurrentNode: hp_current = {new_value}")
# Convert to string
new_value_str = str(new_value)
# 3) Update the text input property so the user sees it
self.set_property('value', new_value_str)
# 4) Transmit to downstream nodes
self.transmit_data(new_value_str)
else:
# Non-200 => disconnected
self._handle_api_error(f"HTTP {status_code} from FlyffHPCurrentNode")
self._api_down = True
except Exception as e:
tb = traceback.format_exc()
self._handle_api_error(f"Exception in FlyffHPCurrentNode: {e}\nTraceback:\n{tb}")
self._api_down = True
def transmit_data(self, data):
"""
Sends 'data' to any connected node via the "value" port.
(Uses self.outputs().get('value') instead of self.output('value'))
"""
output_port = self.outputs().get('value')
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
connected_node.receive_data(data, source_port_name='value')
except Exception as e:
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
def _handle_api_error(self, msg):
current_time = time.time()
if (current_time - self._last_error_printed) >= self._retry_interval:
print(f"[ERROR] {msg}")
self._last_error_printed = current_time
self.set_name("Flyff - HP Current (API Disconnected)")

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
Flyff HP Total Node (Final Combined Version)
- Polls the API at http://127.0.0.1:5000/data
- Outputs only the "hp_total" value as a string
- Uses color (126, 36, 57) for its output port
- Displays "hp_total" in a text field labeled "Value"
- Retrieves the port with self.outputs().get('value')
"""
import time
import requests
import traceback
from OdenGraphQt import BaseNode
class FlyffHPTotalNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_hp_total_node'
NODE_NAME = 'Flyff - HP Total'
def __init__(self):
super(FlyffHPTotalNode, self).__init__()
# 1) Text input property named "value" for UI display
self.add_text_input('value', 'Value', text='N/A')
# 2) Output port also named "value"
self.add_output('value', color=(126, 36, 57))
self._api_down = True
self._last_api_attempt = 0.0
self._retry_interval = 5.0
self._last_error_printed = 0.0
self.set_name("Flyff - HP Total (API Disconnected)")
def process_input(self):
current_time = time.time()
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
return
self._last_api_attempt = current_time
try:
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
status_code = response.status_code
print(f"[DEBUG] FlyffHPTotalNode: HTTP Status Code = {status_code}")
if status_code == 200:
try:
data = response.json() or {}
except ValueError:
data = {}
if isinstance(data, list):
data = {}
self._api_down = False
self.set_name("Flyff - HP Total (API Connected)")
new_value = data.get("hp_total", "N/A")
print(f"[DEBUG] FlyffHPTotalNode: hp_total = {new_value}")
new_value_str = str(new_value)
self.set_property('value', new_value_str)
self.transmit_data(new_value_str)
else:
self._handle_api_error(f"HTTP {status_code} from FlyffHPTotalNode")
self._api_down = True
except Exception as e:
tb = traceback.format_exc()
self._handle_api_error(f"Exception in FlyffHPTotalNode: {e}\nTraceback:\n{tb}")
self._api_down = True
def transmit_data(self, data):
output_port = self.outputs().get('value')
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
connected_node.receive_data(data, source_port_name='value')
except Exception as e:
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
def _handle_api_error(self, msg):
current_time = time.time()
if (current_time - self._last_error_printed) >= self._retry_interval:
print(f"[ERROR] {msg}")
self._last_error_printed = current_time
self.set_name("Flyff - HP Total (API Disconnected)")

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
Flyff MP Current Node (Final Combined Version)
- Polls the API at http://127.0.0.1:5000/data
- Outputs only the "mp_current" value as a string
- Uses color (35, 89, 144) for its output port
- Displays "mp_current" in a text field labeled "Value"
- Retrieves the port with self.outputs().get('value')
"""
import time
import requests
import traceback
from OdenGraphQt import BaseNode
class FlyffMPCurrentNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_mp_current_node'
NODE_NAME = 'Flyff - MP Current'
def __init__(self):
super(FlyffMPCurrentNode, self).__init__()
# 1) Text input property named "value" for UI display
self.add_text_input('value', 'Value', text='N/A')
# 2) Output port also named "value"
self.add_output('value', color=(35, 89, 144))
self._api_down = True
self._last_api_attempt = 0.0
self._retry_interval = 5.0
self._last_error_printed = 0.0
self.set_name("Flyff - MP Current (API Disconnected)")
def process_input(self):
current_time = time.time()
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
return
self._last_api_attempt = current_time
try:
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
status_code = response.status_code
print(f"[DEBUG] FlyffMPCurrentNode: HTTP Status Code = {status_code}")
if status_code == 200:
try:
data = response.json() or {}
except ValueError:
data = {}
if isinstance(data, list):
data = {}
self._api_down = False
self.set_name("Flyff - MP Current (API Connected)")
new_value = data.get("mp_current", "N/A")
print(f"[DEBUG] FlyffMPCurrentNode: mp_current = {new_value}")
new_value_str = str(new_value)
self.set_property('value', new_value_str)
self.transmit_data(new_value_str)
else:
self._handle_api_error(f"HTTP {status_code} from FlyffMPCurrentNode")
self._api_down = True
except Exception as e:
tb = traceback.format_exc()
self._handle_api_error(f"Exception in FlyffMPCurrentNode: {e}\nTraceback:\n{tb}")
self._api_down = True
def transmit_data(self, data):
output_port = self.outputs().get('value')
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
connected_node.receive_data(data, source_port_name='value')
except Exception as e:
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
def _handle_api_error(self, msg):
current_time = time.time()
if (current_time - self._last_error_printed) >= self._retry_interval:
print(f"[ERROR] {msg}")
self._last_error_printed = current_time
self.set_name("Flyff - MP Current (API Disconnected)")

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
Flyff MP Total Node (Final Combined Version)
- Polls the API at http://127.0.0.1:5000/data
- Outputs only the "mp_total" value as a string
- Uses color (35, 89, 144) for its output port
- Displays "mp_total" in a text field labeled "Value"
- Retrieves the port with self.outputs().get('value')
"""
import time
import requests
import traceback
from OdenGraphQt import BaseNode
class FlyffMPTotalNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_mp_total_node'
NODE_NAME = 'Flyff - MP Total'
def __init__(self):
super(FlyffMPTotalNode, self).__init__()
# 1) Text input property named "value" for UI display
self.add_text_input('value', 'Value', text='N/A')
# 2) Output port also named "value"
self.add_output('value', color=(35, 89, 144))
self._api_down = True
self._last_api_attempt = 0.0
self._retry_interval = 5.0
self._last_error_printed = 0.0
self.set_name("Flyff - MP Total (API Disconnected)")
def process_input(self):
current_time = time.time()
if self._api_down and (current_time - self._last_api_attempt < self._retry_interval):
return
self._last_api_attempt = current_time
try:
response = requests.get("http://127.0.0.1:5000/data", timeout=1)
status_code = response.status_code
print(f"[DEBUG] FlyffMPTotalNode: HTTP Status Code = {status_code}")
if status_code == 200:
try:
data = response.json() or {}
except ValueError:
data = {}
if isinstance(data, list):
data = {}
self._api_down = False
self.set_name("Flyff - MP Total (API Connected)")
new_value = data.get("mp_total", "N/A")
print(f"[DEBUG] FlyffMPTotalNode: mp_total = {new_value}")
new_value_str = str(new_value)
self.set_property('value', new_value_str)
self.transmit_data(new_value_str)
else:
self._handle_api_error(f"HTTP {status_code} from FlyffMPTotalNode")
self._api_down = True
except Exception as e:
tb = traceback.format_exc()
self._handle_api_error(f"Exception in FlyffMPTotalNode: {e}\nTraceback:\n{tb}")
self._api_down = True
def transmit_data(self, data):
output_port = self.outputs().get('value')
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
connected_node.receive_data(data, source_port_name='value')
except Exception as e:
print(f"[ERROR] Error transmitting data to {connected_node}: {e}")
def _handle_api_error(self, msg):
current_time = time.time()
if (current_time - self._last_error_printed) >= self._retry_interval:
print(f"[ERROR] {msg}")
self._last_error_printed = current_time
self.set_name("Flyff - MP Total (API Disconnected)")

View File

@@ -0,0 +1,105 @@
#!/usr/bin/env python3
"""
Flyff Character Status Node:
- Creates an OCR region in data_collector.
- Periodically grabs raw text from that region and updates status.
"""
import re
from OdenGraphQt import BaseNode
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtCore import QTimer # Corrected import
from Modules import data_manager, data_collector
class FlyffCharacterStatusNode(BaseNode):
__identifier__ = "bunny-lab.io.flyff_character_status_node"
NODE_NAME = "Flyff - Character Status"
def __init__(self):
super(FlyffCharacterStatusNode, self).__init__()
if data_manager.character_status_collector_exists:
QMessageBox.critical(None, "Error", "Only one Flyff Character Status Collector node is allowed.")
raise Exception("Duplicate Character Status Node.")
data_manager.character_status_collector_exists = True
self.add_text_input("hp", "HP", text="HP: 0/0")
self.add_text_input("mp", "MP", text="MP: 0/0")
self.add_text_input("fp", "FP", text="FP: 0/0")
self.add_text_input("exp", "EXP", text="EXP: 0%")
self.region_id = "character_status"
data_collector.create_ocr_region(self.region_id, x=250, y=50, w=180, h=130, color=(255, 255, 0), thickness=2)
data_collector.start_collector()
self.set_name("Flyff - Character Status")
# Set up a timer to periodically update character stats
self.timer = QTimer()
self.timer.timeout.connect(self.process_input)
self.timer.start(1000) # Update every second
def parse_character_stats(self, raw_text):
"""
Extract HP, MP, FP, EXP from the raw OCR text lines.
"""
lines = [l.strip() for l in raw_text.splitlines() if l.strip()]
hp_current, hp_total = 0, 0
mp_current, mp_total = 0, 0
fp_current, fp_total = 0, 0
exp_value = 0.0
if len(lines) >= 4:
# line 1: HP
hp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[0])
if hp_match:
hp_current = int(hp_match.group(1))
hp_total = int(hp_match.group(2))
# line 2: MP
mp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[1])
if mp_match:
mp_current = int(mp_match.group(1))
mp_total = int(mp_match.group(2))
# line 3: FP
fp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[2])
if fp_match:
fp_current = int(fp_match.group(1))
fp_total = int(fp_match.group(2))
# line 4: EXP
exp_match = re.search(r"(\d+(?:\.\d+)?)", lines[3])
if exp_match:
val = float(exp_match.group(1))
if val < 0: val = 0
if val > 100: val = 100
exp_value = val
return hp_current, hp_total, mp_current, mp_total, fp_current, fp_total, exp_value
def process_input(self):
"""
Called periodically to update character status from OCR.
"""
raw_text = data_collector.get_raw_text(self.region_id)
# print("Raw OCR Text:", raw_text) # Debugging OCR text reading
hp_c, hp_t, mp_c, mp_t, fp_c, fp_t, exp_v = self.parse_character_stats(raw_text)
# Update the data manager with the parsed values
data_manager.set_data_bulk({
"hp_current": hp_c,
"hp_total": hp_t,
"mp_current": mp_c,
"mp_total": mp_t,
"fp_current": fp_c,
"fp_total": fp_t,
"exp": exp_v
})
# Update the node's UI text fields
self.set_property("hp", f"HP: {hp_c}/{hp_t}")
self.set_property("mp", f"MP: {mp_c}/{mp_t}")
self.set_property("fp", f"FP: {fp_c}/{fp_t}")
self.set_property("exp", f"EXP: {exp_v}%")

View File

@@ -0,0 +1,141 @@
#!/usr/bin/env python3
"""
Flyff - Leveling Predictor Node:
- Tracks the last N changes in EXP values.
- Calculates the average change rate and time intervals.
- Predicts the estimated time to reach level 100.
"""
import time
import numpy as np
from OdenGraphQt import BaseNode
from PyQt5.QtCore import QTimer
from Modules import data_manager
class FlyffLevelingPredictorNode(BaseNode):
__identifier__ = "bunny-lab.io.flyff_leveling_predictor_node"
NODE_NAME = "Flyff - Leveling Predictor"
def __init__(self):
super(FlyffLevelingPredictorNode, self).__init__()
# Input port for EXP values
self.add_input("exp", "EXP")
# User-defined number of changes to track
self.add_text_input("exp_track_count", "# of EXP Changes to Track", text="7")
# Output widgets
self.add_text_input("time_to_level", "Time to Level", text="Calculating...")
self.add_text_input("time_between_kills", "Time Between Kills", text="N/A")
self.add_text_input("exp_per_kill", "EXP Per Kill", text="N/A")
# Internal tracking lists
self.exp_history = []
self.time_intervals = []
self.last_exp_value = None
self.last_update_time = None
# Timer to periodically process EXP changes
self.timer = QTimer()
self.timer.timeout.connect(self.process_exp_change)
self.timer.start(1000) # Check for updates every second
def reset_tracking_arrays(self):
"""
Resets the EXP history and time interval arrays when a level-up is detected.
"""
self.exp_history.clear()
self.time_intervals.clear()
self.last_exp_value = None
self.last_update_time = None
def process_exp_change(self):
"""
Monitors changes in EXP values and calculates various statistics.
"""
exp_value = data_manager.get_data().get("exp", None)
if exp_value is None:
return
exp_track_count = self.get_property("exp_track_count")
try:
exp_track_count = int(exp_track_count)
except ValueError:
exp_track_count = 7 # Default to 7 if invalid input
# Reset if EXP value decreases (indicating a level-up)
if self.last_exp_value is not None and exp_value < self.last_exp_value:
self.reset_tracking_arrays()
if self.last_exp_value is not None and exp_value != self.last_exp_value:
current_time = time.time()
# Store EXP change history
self.exp_history.append(exp_value)
if len(self.exp_history) > exp_track_count:
self.exp_history.pop(0)
# Store time intervals
if self.last_update_time is not None:
interval = current_time - self.last_update_time
self.time_intervals.append(interval)
if len(self.time_intervals) > exp_track_count:
self.time_intervals.pop(0)
# Perform calculations
self.calculate_time_to_level()
self.calculate_additional_metrics()
# Update last tracking values
self.last_update_time = current_time
self.last_exp_value = exp_value
def calculate_time_to_level(self):
"""
Calculates the estimated time to reach level 100 based on EXP change history.
"""
if len(self.exp_history) < 2 or len(self.time_intervals) < 1:
self.set_property("time_to_level", "Insufficient data")
return
exp_deltas = np.diff(self.exp_history) # Compute EXP change per interval
avg_exp_change = np.mean(exp_deltas) if len(exp_deltas) > 0 else 0
avg_time_change = np.mean(self.time_intervals)
if avg_exp_change <= 0:
self.set_property("time_to_level", "Not gaining EXP")
return
current_exp = self.exp_history[-1]
remaining_exp = 100.0 - current_exp # Distance to level 100
estimated_time = (remaining_exp / avg_exp_change) * avg_time_change
# Convert estimated time into hours, minutes, and seconds
hours = int(estimated_time // 3600)
minutes = int((estimated_time % 3600) // 60)
seconds = int(estimated_time % 60)
time_str = f"{hours}h {minutes}m {seconds}s"
self.set_property("time_to_level", time_str)
def calculate_additional_metrics(self):
"""
Calculates and updates the "Time Between Kills" and "EXP Per Kill".
"""
if len(self.time_intervals) > 0:
avg_time_between_kills = np.mean(self.time_intervals)
minutes = int(avg_time_between_kills // 60)
seconds = int(avg_time_between_kills % 60)
self.set_property("time_between_kills", f"{minutes}m {seconds}s")
else:
self.set_property("time_between_kills", "N/A")
if len(self.exp_history) > 1:
exp_deltas = np.diff(self.exp_history)
avg_exp_per_kill = np.mean(exp_deltas) if len(exp_deltas) > 0 else 0
self.set_property("exp_per_kill", f"{avg_exp_per_kill:.2f}%")
else:
self.set_property("exp_per_kill", "N/A")

View File

@@ -0,0 +1,134 @@
#!/usr/bin/env python3
"""
Standardized Flyff Low Health Alert Node:
- Monitors an input value (1 = health alert, 0 = normal).
- Displays a visual alert and plays a sound if enabled.
- Uses a global update timer for processing.
- Automatically processes float, int, and string values.
"""
import time
from OdenGraphQt import BaseNode
from Qt import QtCore, QtWidgets, QtGui
try:
import winsound
HAS_WINSOUND = True
except ImportError:
winsound = None
HAS_WINSOUND = False
class OverlayCanvas(QtWidgets.QWidget):
"""
UI overlay for displaying a red warning box, which can be repositioned by dragging.
"""
def __init__(self, parent=None):
super().__init__(parent)
screen_geo = QtWidgets.QApplication.primaryScreen().geometry()
self.setGeometry(screen_geo)
self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.WindowStaysOnTopHint)
self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
self.setVisible(False)
self.helper_LowHealthAlert = QtCore.QRect(250, 300, 900, 35)
self.dragging = False
self.drag_offset = None
def paintEvent(self, event):
if not self.isVisible():
return
painter = QtGui.QPainter(self)
painter.setPen(QtCore.Qt.NoPen)
painter.setBrush(QtGui.QColor(255, 0, 0))
painter.drawRect(self.helper_LowHealthAlert)
font = QtGui.QFont("Arial", 14, QtGui.QFont.Bold)
painter.setFont(font)
painter.setPen(QtGui.QColor(255, 255, 255))
text_x = self.helper_LowHealthAlert.center().x() - 50
text_y = self.helper_LowHealthAlert.center().y() + 5
painter.drawText(text_x, text_y, "LOW HEALTH")
def toggle_alert(self, state):
self.setVisible(state == 1)
self.update()
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
if self.helper_LowHealthAlert.contains(event.pos()):
self.dragging = True
self.drag_offset = event.pos() - self.helper_LowHealthAlert.topLeft()
super().mousePressEvent(event)
def mouseMoveEvent(self, event):
if self.dragging:
new_top_left = event.pos() - self.drag_offset
self.helper_LowHealthAlert.moveTo(new_top_left)
self.update()
super().mouseMoveEvent(event)
def mouseReleaseEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self.dragging = False
super().mouseReleaseEvent(event)
class FlyffLowHealthAlertNode(BaseNode):
__identifier__ = 'bunny-lab.io.flyff_low_health_alert_node'
NODE_NAME = 'Flyff - Low Health Alert'
overlay_instance = None
last_beep_time = 0
BEEP_INTERVAL_SECONDS = 2
def __init__(self):
super(FlyffLowHealthAlertNode, self).__init__()
self.add_checkbox('cb_1', '', 'Sound Alert', True)
self.add_checkbox('cb_2', '', 'Visual Alert', True)
self.add_input('Toggle (1 = On | 0 = Off)', color=(200, 100, 0))
self.add_text_input('value', 'Current Value', text='0')
self.add_combo_menu('beep_interval', 'Beep Interval', items=["0.5s", "1.0s", "2.0s"])
if not FlyffLowHealthAlertNode.overlay_instance:
FlyffLowHealthAlertNode.overlay_instance = OverlayCanvas()
FlyffLowHealthAlertNode.overlay_instance.show()
def process_input(self):
input_port = self.input(0)
value = input_port.connected_ports()[0].node().get_property('value') if input_port.connected_ports() else "0"
self.receive_data(value)
def receive_data(self, data, source_port_name=None):
try:
if isinstance(data, str):
data = float(data) if '.' in data else int(data)
if isinstance(data, (float, int)):
data = 1 if data > 1 else 0 if data <= 0 else int(data)
else:
data = 0
except ValueError:
data = 0
self.set_property('value', str(data))
if self.get_property('cb_2'):
FlyffLowHealthAlertNode.overlay_instance.toggle_alert(data)
self.handle_beep(data)
def handle_beep(self, input_value):
# Update beep interval from the dropdown property
interval_str = self.get_property('beep_interval')
if interval_str.endswith("s"):
interval_seconds = float(interval_str[:-1])
else:
interval_seconds = float(interval_str)
self.BEEP_INTERVAL_SECONDS = interval_seconds
if input_value == 1 and self.get_property('cb_1'):
current_time = time.time()
if (current_time - FlyffLowHealthAlertNode.last_beep_time) >= self.BEEP_INTERVAL_SECONDS:
FlyffLowHealthAlertNode.last_beep_time = current_time
self.play_beep()
def play_beep(self):
if HAS_WINSOUND:
winsound.Beep(376, 100)
else:
print('\a', end='')

View File

@@ -0,0 +1,49 @@
from OdenGraphQt import BaseNode
class ArrayNode(BaseNode):
"""
Array Node:
- Inputs: 'in' (value to store), 'ArraySize' (defines maximum length)
- Output: 'Array' (the current array as a string)
- Stores incoming values in an array with a size defined by ArraySize.
- Updates are now handled via a global update timer.
"""
__identifier__ = 'bunny-lab.io.array_node'
NODE_NAME = 'Array'
def __init__(self):
super(ArrayNode, self).__init__()
self.values = {} # Ensure values is a dictionary.
self.add_input('in')
self.add_input('ArraySize')
self.add_output('Array')
self.array = []
self.value = "[]" # Output as a string.
self.array_size = 10 # Default array size.
self.set_name("Array: []")
def process_input(self):
# Get array size from 'ArraySize' input if available.
size_port = self.input('ArraySize')
connected_size = size_port.connected_ports() if size_port is not None else []
if connected_size:
connected_port = connected_size[0]
parent_node = connected_port.node()
try:
self.array_size = int(float(getattr(parent_node, 'value', 10)))
except (ValueError, TypeError):
self.array_size = 10
# Get new value from 'in' input if available.
in_port = self.input('in')
connected_in = in_port.connected_ports() if in_port is not None else []
if connected_in:
connected_port = connected_in[0]
parent_node = connected_port.node()
new_value = getattr(parent_node, 'value', None)
if new_value is not None:
self.array.append(new_value)
while len(self.array) > self.array_size:
self.array.pop(0)
self.value = str(self.array)
self.set_name(f"Array: {self.value}")

View File

@@ -0,0 +1,122 @@
#!/usr/bin/env python3
"""
Standardized Comparison Node:
- Compares two input values using a selected operator (==, !=, >, <, >=, <=).
- Outputs a result of 1 (True) or 0 (False).
- Uses a global update timer for processing.
- Supports an additional 'Input Type' dropdown to choose between 'Number' and 'String'.
"""
from OdenGraphQt import BaseNode
from Qt import QtCore
class ComparisonNode(BaseNode):
__identifier__ = 'bunny-lab.io.comparison_node'
NODE_NAME = 'Comparison Node'
def __init__(self):
super(ComparisonNode, self).__init__()
self.add_input('A')
self.add_input('B')
self.add_output('Result')
# Add the Input Type dropdown first.
self.add_combo_menu('input_type', 'Input Type', items=['Number', 'String'])
self.add_combo_menu('operator', 'Operator', items=[
'Equal (==)', 'Not Equal (!=)', 'Greater Than (>)',
'Less Than (<)', 'Greater Than or Equal (>=)', 'Less Than or Equal (<=)'
])
# Replace calc_result with a standardized "value" text input.
self.add_text_input('value', 'Value', text='0')
self.value = 0
self.set_name("Comparison Node")
self.processing = False # Guard for process_input
# Set default properties explicitly
self.set_property('input_type', 'Number')
self.set_property('operator', 'Equal (==)')
def process_input(self):
if self.processing:
return
self.processing = True
# Retrieve input values; if no connection or None, default to "0"
input_a = self.input(0)
input_b = self.input(1)
a_raw = (input_a.connected_ports()[0].node().get_property('value')
if input_a.connected_ports() else "0")
b_raw = (input_b.connected_ports()[0].node().get_property('value')
if input_b.connected_ports() else "0")
a_raw = a_raw if a_raw is not None else "0"
b_raw = b_raw if b_raw is not None else "0"
# Get input type property
input_type = self.get_property('input_type')
# Convert values based on input type
if input_type == 'Number':
try:
a_val = float(a_raw)
except (ValueError, TypeError):
a_val = 0.0
try:
b_val = float(b_raw)
except (ValueError, TypeError):
b_val = 0.0
elif input_type == 'String':
a_val = str(a_raw)
b_val = str(b_raw)
else:
try:
a_val = float(a_raw)
except (ValueError, TypeError):
a_val = 0.0
try:
b_val = float(b_raw)
except (ValueError, TypeError):
b_val = 0.0
operator = self.get_property('operator')
# Perform the comparison
result = {
'Equal (==)': a_val == b_val,
'Not Equal (!=)': a_val != b_val,
'Greater Than (>)': a_val > b_val,
'Less Than (<)': a_val < b_val,
'Greater Than or Equal (>=)': a_val >= b_val,
'Less Than or Equal (<=)': a_val <= b_val
}.get(operator, False)
new_value = 1 if result else 0
self.value = new_value
self.set_property('value', str(self.value))
self.transmit_data(self.value)
self.processing = False
def on_input_connected(self, input_port, output_port):
pass
def on_input_disconnected(self, input_port, output_port):
pass
def property_changed(self, property_name):
pass
def receive_data(self, data, source_port_name=None):
pass
def transmit_data(self, data):
output_port = self.output(0)
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
data_int = int(data)
connected_node.receive_data(data_int, source_port_name='Result')
except ValueError:
pass

View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python3
"""
Standardized Data Node:
- Accepts and transmits values consistently.
- Updates its value based on a global update timer.
"""
from OdenGraphQt import BaseNode
from Qt import QtCore
class DataNode(BaseNode):
__identifier__ = 'bunny-lab.io.data_node'
NODE_NAME = 'Data Node'
def __init__(self):
super(DataNode, self).__init__()
self.add_input('Input')
self.add_output('Output')
self.add_text_input('value', 'Value', text='')
self.process_widget_event()
self.set_name("Data Node")
# Removed self-contained update timer; global timer now drives updates.
def post_create(self):
text_widget = self.get_widget('value')
if text_widget is not None:
try:
# Removed textChanged signal connection; global timer will call process_input.
pass
except Exception as e:
print("Error connecting textChanged signal:", e)
def process_widget_event(self, event=None):
current_text = self.get_property('value')
self.value = current_text
self.transmit_data(current_text)
def property_changed(self, property_name):
if property_name == 'value':
# Immediate update removed; relying on global timer.
pass
def process_input(self):
input_port = self.input(0)
output_port = self.output(0)
if input_port.connected_ports():
input_value = input_port.connected_ports()[0].node().get_property('value')
self.set_property('value', input_value)
self.transmit_data(input_value)
elif output_port.connected_ports():
self.transmit_data(self.get_property('value'))
def on_input_connected(self, input_port, output_port):
# Removed immediate update; global timer handles updates.
pass
def on_input_disconnected(self, input_port, output_port):
# Removed immediate update; global timer handles updates.
pass
def receive_data(self, data, source_port_name=None):
self.set_property('value', str(data))
self.transmit_data(data)
def transmit_data(self, data):
output_port = self.output(0)
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
connected_node.receive_data(data, source_port_name="Output")

View File

@@ -0,0 +1,103 @@
#!/usr/bin/env python3
"""
Identification Overlay Node:
- Users can configure threads/slices for parallel processing.
"""
import re
from OdenGraphQt import BaseNode
from PyQt5.QtCore import QTimer
from PyQt5.QtGui import QColor
from Modules import data_collector
class IdentificationOverlayNode(BaseNode):
__identifier__ = "bunny-lab.io.identification_overlay_node"
NODE_NAME = "Identification Overlay"
def __init__(self):
super(IdentificationOverlayNode, self).__init__()
# User-configurable options
self.add_text_input("search_term", "Search Term", text="Aibatt")
self.add_text_input("offset_value", "Offset Value (X,Y)", text="0,0") # X,Y Offset
self.add_text_input("margin", "Margin", text="5") # Box Margin
self.add_text_input("polling_freq", "Polling Frequency (ms)", text="500") # Polling Rate
self.add_combo_menu("ocr_engine", "Type", items=["CPU", "GPU"])
self.set_property("ocr_engine", "CPU") # Default to CPU mode
# Custom overlay options
self.add_text_input("overlay_color", "Overlay Color (RGB)", text="0,0,255") # Default blue
self.add_text_input("thickness", "Line Thickness", text="2") # Default 2px
self.add_text_input("threads_slices", "Threads / Slices", text="8") # Default 8 threads/slices
self.region_id = "identification_overlay"
data_collector.create_ocr_region(self.region_id, x=250, y=50, w=300, h=200, color=(0, 0, 255), thickness=2)
data_collector.start_collector()
self.set_name("Identification Overlay")
# Timer for updating overlays
self.timer = QTimer()
self.timer.timeout.connect(self.update_overlay)
# Set initial polling frequency
self.update_polling_frequency()
def update_polling_frequency(self):
polling_text = self.get_property("polling_freq")
try:
polling_interval = max(50, int(polling_text))
except ValueError:
polling_interval = 500
self.timer.start(polling_interval)
def update_overlay(self):
search_term = self.get_property("search_term")
offset_text = self.get_property("offset_value")
margin_text = self.get_property("margin")
ocr_engine = self.get_property("ocr_engine")
threads_slices_text = self.get_property("threads_slices")
self.update_polling_frequency()
try:
offset_x, offset_y = map(int, offset_text.split(","))
except ValueError:
offset_x, offset_y = 0, 0
try:
margin = int(margin_text)
except ValueError:
margin = 5
color_text = self.get_property("overlay_color")
try:
color = tuple(map(int, color_text.split(",")))
except ValueError:
color = (0, 0, 255)
thickness_text = self.get_property("thickness")
try:
thickness = max(1, int(thickness_text))
except ValueError:
thickness = 2
try:
num_slices = max(1, int(threads_slices_text)) # Ensure at least 1 slice
except ValueError:
num_slices = 1
if not search_term:
return
detected_positions = data_collector.find_word_positions(
self.region_id, search_term, offset_x, offset_y, margin, ocr_engine, num_slices
)
# Ensure slice count is updated visually in the region widget
data_collector.update_region_slices(self.region_id, num_slices)
data_collector.draw_identification_boxes(self.region_id, detected_positions, color=color, thickness=thickness)
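For clarity, the offset and margin entered in this node are applied inside data_collector.find_word_positions(); a quick illustration of how one detected box is transformed (made-up numbers):

x, y, w, h = 120, 40, 64, 18            # raw box reported by OCR for the matched word
offset_x, offset_y, margin = 10, -5, 5  # from "Offset Value (X,Y)" = "10,-5" and "Margin" = "5"
print((x + offset_x, y + offset_y, w + margin * 2, h + margin * 2))
# -> (130, 35, 74, 28)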

View File

@@ -0,0 +1,109 @@
#!/usr/bin/env python3
"""
Standardized Math Operation Node:
- Performs mathematical operations (+, -, *, /, avg) on two inputs.
- Outputs the computed result.
- Uses a global update timer for processing (defined in borealis.py).
- Ensures it always has a "value" property that the Comparison Node can read.
"""
from OdenGraphQt import BaseNode
from Qt import QtCore
class MathOperationNode(BaseNode):
__identifier__ = 'bunny-lab.io.math_node'
NODE_NAME = 'Math Operation'
def __init__(self):
super(MathOperationNode, self).__init__()
self.add_input('A')
self.add_input('B')
self.add_output('Result')
# Drop-down to choose which operation we do:
self.add_combo_menu('operator', 'Operator', items=[
'Add', 'Subtract', 'Multiply', 'Divide', 'Average'
])
# A text field for showing the result to the user:
self.add_text_input('calc_result', 'Result', text='0')
# IMPORTANT: define a "value" property that the Comparison Node can read
# We do not necessarily need a text input for it, but adding it ensures
# it becomes an official property recognized by OdenGraphQt.
self.add_text_input('value', 'Internal Value', text='0')
# Keep a Python-side float of the current computed result:
self.value = 0
# Give the node a nice name:
self.set_name("Math Operation")
# Removed self-contained timer; global timer calls process_input().
def process_input(self):
# Attempt to read "value" from both inputs:
input_a = self.input(0)
input_b = self.input(1)
a_raw = input_a.connected_ports()[0].node().get_property('value') if input_a.connected_ports() else "0"
b_raw = input_b.connected_ports()[0].node().get_property('value') if input_b.connected_ports() else "0"
try:
a_val = float(a_raw)
except (ValueError, TypeError):
a_val = 0.0
try:
b_val = float(b_raw)
except (ValueError, TypeError):
b_val = 0.0
operator = self.get_property('operator')
if operator == 'Add':
result = a_val + b_val
elif operator == 'Subtract':
result = a_val - b_val
elif operator == 'Multiply':
result = a_val * b_val
elif operator == 'Divide':
result = a_val / b_val if b_val != 0 else 0.0
elif operator == 'Average':
result = (a_val + b_val) / 2.0
else:
result = 0.0
# If the computed result changed, update our internal properties and transmit
if self.value != result:
self.value = result
# Update the two text fields so the user sees the numeric result:
self.set_property('calc_result', str(result))
self.set_property('value', str(result)) # <= This is the critical step
# Let downstream nodes know there's new data:
self.transmit_data(result)
def on_input_connected(self, input_port, output_port):
pass
def on_input_disconnected(self, input_port, output_port):
pass
def property_changed(self, property_name):
pass
def receive_data(self, data, source_port_name=None):
pass
def transmit_data(self, data):
output_port = self.output(0)
if output_port and output_port.connected_ports():
for connected_port in output_port.connected_ports():
connected_node = connected_port.node()
if hasattr(connected_node, 'receive_data'):
try:
# Attempt to convert to int if possible, else float
data_int = int(data)
connected_node.receive_data(data_int, source_port_name='Result')
except ValueError:
connected_node.receive_data(data, source_port_name='Result')

View File

@@ -0,0 +1,161 @@
#!/usr/bin/env python3
from Qt import QtWidgets, QtGui, QtCore
from OdenGraphQt import BaseNode
from OdenGraphQt.constants import NodePropWidgetEnum
from OdenGraphQt.qgraphics.node_backdrop import BackdropNodeItem
class BackdropNode(BaseNode):
"""
Backdrop Node:
- Allows grouping or annotating other nodes by resizing a large rectangle.
- Title is set by double-clicking in the title area.
"""
__identifier__ = 'bunny-lab.io.backdrop'
NODE_NAME = 'Backdrop'
def __init__(self):
# Use BackdropNodeItem for the specialized QGraphicsItem.
super(BackdropNode, self).__init__(qgraphics_item=BackdropNodeItem)
# Default color (teal).
self.model.color = (5, 129, 138, 255)
# Set default title without prompting:
self.set_name("Double-Click to Add Name to Backdrop")
# Multi-line text property for storing the backdrop text.
self.create_property(
'backdrop_text',
'',
widget_type=NodePropWidgetEnum.QTEXT_EDIT.value,
tab='Backdrop'
)
# Override the view's double-click event to allow editing the title.
original_double_click = self.view.mouseDoubleClickEvent
def new_double_click_event(event):
# Assume the title is in the top 30 pixels of the node.
if event.pos().y() < 30:
new_title, ok = QtWidgets.QInputDialog.getText(
None, "Edit Title", "Enter new backdrop title:", text=self.name()
)
if ok and new_title:
self.set_name(new_title)
self.view.update() # force immediate update of the node title
else:
if original_double_click:
original_double_click(event)
self.view.mouseDoubleClickEvent = new_double_click_event
# --------------------------------------------------------------------------
# Resizing / Geometry
# --------------------------------------------------------------------------
def on_backdrop_updated(self, update_prop, value=None):
"""
Triggered when the user resizes or double-clicks the backdrop sizer handle.
"""
if not self.graph:
return
if update_prop == 'sizer_mouse_release':
# User finished dragging the resize handle
self.view.prepareGeometryChange()
self.graph.begin_undo(f'resized "{self.name()}"')
self.set_property('width', value['width'])
self.set_property('height', value['height'])
self.set_pos(*value['pos'])
self.graph.end_undo()
self.view.update()
elif update_prop == 'sizer_double_clicked':
# User double-clicked the resize handle (auto-resize)
self.view.prepareGeometryChange()
self.graph.begin_undo(f'"{self.name()}" auto resize')
self.set_property('width', value['width'])
self.set_property('height', value['height'])
self.set_pos(*value['pos'])
self.graph.end_undo()
self.view.update()
def auto_size(self):
"""
Auto-resize the backdrop to fit around intersecting nodes.
"""
if not self.graph:
return
self.view.prepareGeometryChange()
self.graph.begin_undo(f'"{self.name()}" auto resize')
size = self.view.calc_backdrop_size()
self.set_property('width', size['width'])
self.set_property('height', size['height'])
self.set_pos(*size['pos'])
self.graph.end_undo()
self.view.update()
def wrap_nodes(self, nodes):
"""
Fit the backdrop around the specified nodes.
"""
if not self.graph or not nodes:
return
self.view.prepareGeometryChange()
self.graph.begin_undo(f'"{self.name()}" wrap nodes')
size = self.view.calc_backdrop_size([n.view for n in nodes])
self.set_property('width', size['width'])
self.set_property('height', size['height'])
self.set_pos(*size['pos'])
self.graph.end_undo()
self.view.update()
def nodes(self):
"""
Return a list of nodes wrapped by this backdrop.
"""
node_ids = [n.id for n in self.view.get_nodes()]
return [self.graph.get_node_by_id(nid) for nid in node_ids]
def set_text(self, text=''):
"""
Set the multi-line text in the backdrop.
"""
self.set_property('backdrop_text', text)
def text(self):
"""
Return the text content in the backdrop.
"""
return self.get_property('backdrop_text')
def set_size(self, width, height):
"""
Manually set the backdrop size.
"""
if self.graph:
self.view.prepareGeometryChange()
self.graph.begin_undo('backdrop size')
self.set_property('width', width)
self.set_property('height', height)
self.graph.end_undo()
self.view.update()
else:
self.view.width, self.view.height = width, height
self.model.width, self.model.height = width, height
def size(self):
"""
Return (width, height) of the backdrop.
"""
self.model.width = self.view.width
self.model.height = self.view.height
return self.model.width, self.model.height
# No ports for a backdrop:
def inputs(self):
return
def outputs(self):
return

View File

@@ -0,0 +1,3 @@
# HIGH-LEVEL OVERVIEW
# - This node takes an input source and either replaces or appends the incoming data in a CSV file on disk.
# - A checkbox will let the user switch between the two behaviors (Replace / Append).
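
# ------------------------------------------------------------------------------
# Rough sketch (illustrative only, not implemented in this commit): one way the
# node described above could be built with the OdenGraphQt conventions used by
# the other nodes in this commit. The class, port, and property names are
# assumptions, and add_checkbox()/add_text_input() are assumed to follow the
# widget-helper pattern seen elsewhere in the node set.
# ------------------------------------------------------------------------------
import csv

from OdenGraphQt import BaseNode


class CSVExportNode(BaseNode):
    __identifier__ = 'bunny-lab.io.csv_export_node'
    NODE_NAME = 'CSV Export (Sketch)'

    def __init__(self):
        super(CSVExportNode, self).__init__()
        self.add_input('Input')
        self.add_text_input('file_path', 'CSV File Path', text='output.csv')
        # Checked = append to the existing file, unchecked = replace it each time.
        self.add_checkbox('append_mode', 'Append Instead of Replace', state=False)

    def receive_data(self, data, source_port_name=None):
        mode = 'a' if self.get_property('append_mode') else 'w'
        with open(self.get_property('file_path'), mode, newline='') as f:
            csv.writer(f).writerow([data])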

View File

@@ -0,0 +1,4 @@
# HIGH-LEVEL OVERVIEW
# - This node takes an input source and writes the data to disk in an image format chosen from a dropdown menu.
# - Displaying the image processing results within the node would be an interesting bonus.
# - Could be used to show the life cycle of an image processing pipeline.
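
# ------------------------------------------------------------------------------
# Rough sketch (illustrative only, not implemented in this commit): one way the
# node described above could look, assuming upstream vision nodes hand over a
# numpy uint8 array and Pillow (already used by the project) writes it to disk.
# The class, port, and property names are assumptions, and add_combo_menu() is
# assumed to follow the widget-helper pattern seen elsewhere in the node set.
# ------------------------------------------------------------------------------
import numpy as np
from PIL import Image

from OdenGraphQt import BaseNode


class ImageExportNode(BaseNode):
    __identifier__ = 'bunny-lab.io.image_export_node'
    NODE_NAME = 'Image Export (Sketch)'

    def __init__(self):
        super(ImageExportNode, self).__init__()
        self.add_input('Image')
        self.add_text_input('file_path', 'Output Path (without extension)', text='capture')
        self.add_combo_menu('format', 'Image Format', items=['PNG', 'JPEG', 'BMP'])

    def receive_data(self, data, source_port_name=None):
        # Only write when the upstream node actually delivered image pixels.
        if isinstance(data, np.ndarray):
            fmt = self.get_property('format')
            out_path = f"{self.get_property('file_path')}.{fmt.lower()}"
            Image.fromarray(data).save(out_path, format=fmt)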

0
Data/Nodes/__init__.py Normal file
View File

View File

@@ -0,0 +1,379 @@
{
"graph":{
"layout_direction":0,
"acyclic":true,
"pipe_collision":false,
"pipe_slicing":true,
"pipe_style":1,
"accept_connection_types":{},
"reject_connection_types":{}
},
"nodes":{
"0x2697e9777d0":{
"type_":"bunny-lab.io.flyff_character_status_node.FlyffCharacterStatusNode",
"icon":null,
"name":"Flyff - Character Status",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":278.0,
"height":200.20000000000002,
"pos":[
-162.4474451079301,
412.29351565404465
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"hp":"HP: 0/0",
"mp":"MP: 0/0",
"fp":"FP: 0/0",
"exp":"EXP: 0.0%"
}
},
"0x2697f589250":{
"type_":"bunny-lab.io.data_node.DataNode",
"icon":null,
"name":"Data Node",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":269.0,
"height":74.2,
"pos":[
-46.54926789642434,
276.44565220121416
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"value":"0.40"
}
},
"0x2697eeb2960":{
"type_":"bunny-lab.io.math_node.MathOperationNode",
"icon":null,
"name":"Math Operation",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":269.0,
"height":162.4,
"pos":[
263.14586137366473,
175.74723593547986
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"operator":"Multiply",
"calc_result":"0.0",
"value":"0.0"
}
},
"0x2697ea1b560":{
"type_":"bunny-lab.io.flyff_hp_current_node.FlyffHPCurrentNode",
"icon":null,
"name":"Flyff - HP Current (API Connected)",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":378.0,
"height":74.2,
"pos":[
188.09704170391905,
29.44953683243171
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"value":"0"
}
},
"0x2697f589be0":{
"type_":"bunny-lab.io.flyff_hp_total_node.FlyffHPTotalNode",
"icon":null,
"name":"Flyff - HP Total (API Connected)",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":364.0,
"height":74.2,
"pos":[
-138.69781863016254,
175.74723593547975
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"value":"0"
}
},
"0x2697eb0e8d0":{
"type_":"bunny-lab.io.backdrop.BackdropNode",
"icon":null,
"name":"Calculate 40% of Total HP",
"color":[
5,
129,
138,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":728.2402137175101,
"height":257.0476243986018,
"pos":[
-164.34741522615138,
125.39802780261283
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"backdrop_text":""
}
},
"0x2697e856d20":{
"type_":"bunny-lab.io.comparison_node.ComparisonNode",
"icon":null,
"name":"Comparison Node",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":322.0,
"height":166.6,
"pos":[
625.0901688948422,
218.49656359546154
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"input_type":"Number",
"operator":"Less Than or Equal (<=)",
"value":"1"
}
},
"0x2697eeb1100":{
"type_":"bunny-lab.io.flyff_low_health_alert_node.FlyffLowHealthAlertNode",
"icon":null,
"name":"Flyff - Low Health Alert",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":324.0,
"height":181.3,
"pos":[
630.7900792495066,
585.1907964121928
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"cb_1":true,
"cb_2":true,
"value":"1",
"beep_interval":"1.0s"
}
}
},
"connections":[
{
"out":[
"0x2697f589250",
"Output"
],
"in":[
"0x2697eeb2960",
"B"
]
},
{
"in":[
"0x2697eeb2960",
"A"
],
"out":[
"0x2697f589be0",
"value"
]
},
{
"out":[
"0x2697eeb2960",
"Result"
],
"in":[
"0x2697e856d20",
"B"
]
},
{
"out":[
"0x2697ea1b560",
"value"
],
"in":[
"0x2697e856d20",
"A"
]
},
{
"out":[
"0x2697e856d20",
"Result"
],
"in":[
"0x2697eeb1100",
"Toggle (1 = On | 0 = Off)"
]
}
]
}

View File

@@ -0,0 +1,183 @@
{
"graph":{
"layout_direction":0,
"acyclic":true,
"pipe_collision":false,
"pipe_slicing":true,
"pipe_style":1,
"accept_connection_types":{},
"reject_connection_types":{}
},
"nodes":{
"0x191410fec90":{
"type_":"bunny-lab.io.flyff_character_status_node.FlyffCharacterStatusNode",
"icon":null,
"name":"Flyff - Character Status",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":278.0,
"height":200.20000000000002,
"pos":[
-234.47843187544638,
171.50740184739476
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"hp":"HP: 5848/5848",
"mp":"MP: 955/555",
"fp":"FP: 0/0",
"exp":"EXP: 49.0%"
}
},
"0x19173496de0":{
"type_":"bunny-lab.io.flyff_exp_current_node.FlyffEXPCurrentNode",
"icon":null,
"name":"Flyff - EXP (API Connected)",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":339.0,
"height":74.2,
"pos":[
-237.34556433027646,
77.62806051403777
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"value":"49.0"
}
},
"0x191735ae690":{
"type_":"bunny-lab.io.flyff_leveling_predictor_node.FlyffLevelingPredictorNode",
"icon":null,
"name":"Flyff - Leveling Predictor",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":324.0,
"height":200.20000000000002,
"pos":[
170.42482250783007,
77.62806051403777
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"exp_track_count":"7",
"time_to_level":"Insufficient data",
"time_between_kills":"N/A",
"exp_per_kill":"N/A"
}
},
"0x191735ae9c0":{
"type_":"bunny-lab.io.backdrop.BackdropNode",
"icon":null,
"name":"Track EXP Changes Over Time to Predict Leveling Up",
"color":[
5,
129,
138,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":777.8842478973615,
"height":380.82117975084645,
"pos":[
-264.113861059255,
23.199190498448075
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"backdrop_text":""
}
}
},
"connections":[
{
"out":[
"0x19173496de0",
"value"
],
"in":[
"0x191735ae690",
"exp"
]
}
]
}

View File

@@ -0,0 +1,101 @@
{
"graph":{
"layout_direction":0,
"acyclic":true,
"pipe_collision":false,
"pipe_slicing":true,
"pipe_style":1,
"accept_connection_types":{},
"reject_connection_types":{}
},
"nodes":{
"0x1ad82a5c620":{
"type_":"bunny-lab.io.data_node.DataNode",
"icon":null,
"name":"Data Node",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":269.0,
"height":74.2,
"pos":[
-93.6890385514249,
181.13214119942148
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"value":"57"
}
},
"0x1ad82a5cef0":{
"type_":"bunny-lab.io.data_node.DataNode",
"icon":null,
"name":"Data Node 1",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":269.0,
"height":74.2,
"pos":[
361.37200584121035,
287.313051557703
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"value":"57"
}
}
},
"connections":[
{
"out":[
"0x1ad82a5c620",
"Output"
],
"in":[
"0x1ad82a5cef0",
"Input"
]
}
]
}

View File

@@ -0,0 +1,57 @@
{
"graph":{
"layout_direction":0,
"acyclic":true,
"pipe_collision":false,
"pipe_slicing":true,
"pipe_style":1,
"accept_connection_types":{},
"reject_connection_types":{}
},
"nodes":{
"0x20c129abb30":{
"type_":"bunny-lab.io.identification_overlay_node.IdentificationOverlayNode",
"icon":null,
"name":"Identification Overlay",
"color":[
13,
18,
23,
255
],
"border_color":[
74,
84,
85,
255
],
"text_color":[
255,
255,
255,
180
],
"disabled":false,
"selected":false,
"visible":true,
"width":271.0,
"height":330.40000000000003,
"pos":[
44.64929777820301,
256.49596595988965
],
"layout_direction":0,
"port_deletion_allowed":false,
"subgraph_session":{},
"custom":{
"search_term":"Aibatt",
"offset_value":"-10,-10",
"margin":"10",
"polling_freq":"50",
"ocr_engine":"GPU",
"overlay_color":"255,255,255",
"thickness":"5"
}
}
}
}

440
Data/borealis.py Normal file
View File

@@ -0,0 +1,440 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import pkgutil
import importlib
import inspect
import os
from Qt import QtWidgets, QtCore, QtGui
# -------------------------------------------------------#
# MONKEY PATCHES - MODIFICATIONS TO OdenGraphQT BEHAVIOR #
# -------------------------------------------------------#
# PATCH: Override the color of interconnection pipes between nodes
try:
from OdenGraphQt.qgraphics.pipe import PipeItem
from OdenGraphQt.qgraphics.node_base import NodeItem
from qtpy.QtGui import QPen, QColor
from qtpy import QtCore
# If you want the original paint logic, capture it first:
_orig_paint_pipe = PipeItem.paint
_orig_paint_node = NodeItem.paint
# Custom pipe painting function
def _custom_paint_pipe(self, painter, option, widget=None):
painter.save()
my_pen = QPen(QColor(0, 161, 115, 255)) # Match desired RGBA
my_pen.setWidthF(2.0)
painter.setPen(my_pen)
_orig_paint_pipe(self, painter, option, widget)
painter.restore()
# Custom node painting function
def _custom_paint_node(self, painter, option, widget=None):
painter.save()
_orig_paint_node(self, painter, option, widget) # Call original method
if self.isSelected():
pen = QPen(QColor(0, 161, 115, 255)) # Set selected border color
pen.setWidth(3)
painter.setPen(pen)
painter.drawRect(self.boundingRect())
painter.restore()
# Apply the patches
PipeItem.paint = _custom_paint_pipe
NodeItem.paint = _custom_paint_node
except ImportError as e:
print(f"WARNING: Could not patch PipeItem or NodeItem: {e}")
except Exception as e:
print(f"Patch for PipeItem or NodeItem override failed: {e}")
## PATCH: Fix "module 'qtpy.QtGui' has no attribute 'QUndoStack'" (KEEP AROUND FOR LEGACY DOCUMENTATION)
#try:
# from qtpy.QtWidgets import QUndoStack
# import qtpy
# qtpy.QtGui.QUndoStack = QUndoStack
#except ImportError:
# print("WARNING: Could not monkey-patch QUndoStack.")
# PATCH: Fix "'BackdropNodeItem' object has no attribute 'widgets'" by giving BackdropNodeItem a trivial widgets dictionary.
try:
from OdenGraphQt.nodes.backdrop_node import BackdropNodeItem
if not hasattr(BackdropNodeItem, "widgets"):
BackdropNodeItem.widgets = {}
except ImportError:
print("WARNING: Could not monkey-patch BackdropNodeItem to add `widgets`.")
# PATCH: BEGIN ROBUST PATCH FOR QGraphicsScene.setSelectionArea
_original_setSelectionArea = QtWidgets.QGraphicsScene.setSelectionArea
def _patched_setSelectionArea(self, *args, **kwargs):
"""
A robust patch that handles various call signatures for QGraphicsScene.setSelectionArea().
"""
try:
return _original_setSelectionArea(self, *args, **kwargs)
except TypeError:
if not args:
raise
painterPath = args[0]
selection_op = QtCore.Qt.ReplaceSelection
selection_mode = QtCore.Qt.IntersectsItemShape
transform = QtGui.QTransform()
return _original_setSelectionArea(self, painterPath, selection_op, selection_mode, transform)
QtWidgets.QGraphicsScene.setSelectionArea = _patched_setSelectionArea
# ----------------------------------------------------------------------------------------------------- #
# Import data_manager so we can start the Flask server
from Modules import data_manager
from OdenGraphQt import NodeGraph, BaseNode
from OdenGraphQt.widgets.dialogs import FileDialog
def import_nodes_from_folder(package_name):
"""
Recursively import all modules from the given package.
Returns a dictionary where keys are subfolder names, and values are lists of BaseNode subclasses.
"""
nodes_by_category = {}
package = importlib.import_module(package_name)
package_path = package.__path__[0]
for root, _, files in os.walk(package_path):
rel_path = os.path.relpath(root, package_path).replace(os.sep, '.')
module_prefix = f"{package_name}.{rel_path}" if rel_path != '.' else package_name
category_name = os.path.basename(root)
for file in files:
if file.endswith(".py") and file != "__init__.py":
module_name = f"{module_prefix}.{file[:-3]}"
try:
module = importlib.import_module(module_name)
for name, obj in inspect.getmembers(module, inspect.isclass):
if issubclass(obj, BaseNode) and obj.__module__ == module.__name__:
if category_name not in nodes_by_category:
nodes_by_category[category_name] = []
nodes_by_category[category_name].append(obj)
except Exception as e:
print(f"Failed to import {module_name}: {e}")
return nodes_by_category
def make_node_command(graph, node_type_str):
"""
Return a function that creates a node of the given type at the current cursor position.
Ensures that only one FlyffCharacterStatusNode exists.
"""
def real_create():
if node_type_str.startswith("bunny-lab.io.flyff_character_status_node"):
for node in graph.all_nodes():
if node.__class__.__name__ == "FlyffCharacterStatusNode":
QtWidgets.QMessageBox.critical(
None,
"Error",
"Only one Flyff Character Status Collector node is allowed."
)
return
try:
pos = graph.cursor_pos()
graph.create_node(node_type_str, pos=pos)
except Exception as e:
QtWidgets.QMessageBox.critical(None, "Error", str(e))
def command():
if QtWidgets.QApplication.instance():
real_create()
else:
QtCore.QTimer.singleShot(0, real_create)
return command
def ensure_workflows_folder():
"""
Ensures a 'Workflows' subfolder exists.
"""
if not os.path.exists("Workflows"):
os.makedirs("Workflows")
def close_workflow(graph: NodeGraph):
"""
Closes the current workflow (removes all nodes and connections).
"""
graph.clear_session()
def save_workflow(graph: NodeGraph):
"""
Saves the current workflow (including custom names, positions, wires, etc.) into a JSON file
in the 'Workflows' subfolder.
"""
ensure_workflows_folder()
file_filter = "JSON Files (*.json);;All Files (*.*)"
dlg = FileDialog.getSaveFileName(None, "Save Workflow", os.path.join("Workflows", ""), file_filter)
file_path = dlg[0]
if not file_path:
return # User canceled
if not file_path.lower().endswith(".json"):
file_path += ".json"
try:
graph.save_session(file_path)
print(f"Workflow saved to {file_path}")
except Exception as e:
QtWidgets.QMessageBox.critical(None, "Error Saving Workflow", str(e))
def load_workflow(graph: NodeGraph):
"""
Loads a workflow (including node values, connections, positions, etc.) from a specified JSON file
and centers it within the graph.
"""
ensure_workflows_folder()
file_filter = "JSON Files (*.json);;All Files (*.*)"
dlg = FileDialog.getOpenFileName(None, "Load Workflow", os.path.join("Workflows", ""), file_filter)
file_path = dlg[0]
if not file_path:
return # User canceled
try:
graph.load_session(file_path)
print(f"Workflow loaded from {file_path}")
# Center the workflow within the graph
nodes = graph.all_nodes()
if nodes:
graph.center_on(nodes)
else:
print("No nodes found in the loaded workflow.")
except Exception as e:
QtWidgets.QMessageBox.critical(None, "Error Loading Workflow", str(e))
if __name__ == "__main__":
app = QtWidgets.QApplication([])
# Start Flask API Server
data_manager.start_api_server()
# Create the NodeGraph
graph = NodeGraph()
graph.widget.setWindowTitle("Borealis - Workflow Automation Tool")
# Dynamically import custom node classes from the 'Nodes' package.
custom_nodes_by_category = import_nodes_from_folder("Nodes")
# Register each node in its category
for category, node_classes in custom_nodes_by_category.items():
for node_class in node_classes:
graph.register_node(node_class)
# Recursively apply the stylesheet to all submenus
def apply_styles_to_submenus(menu):
""" Recursively applies the stylesheet to all submenus in the menu. """
menu.setStyleSheet(menu_stylesheet)
for action in menu.actions():
if action.menu(): # Check if action has a submenu
apply_styles_to_submenus(action.menu())
# Override the Color of the Context Menu to Blue
menu_stylesheet = """
QMenu {
background-color: rgb(30, 30, 30);
border: 1px solid rgba(200, 200, 200, 60);
}
QMenu::item {
padding: 5px 18px 2px;
background-color: transparent;
}
QMenu::item:selected {
color: rgb(255, 255, 255);
background-color: rgba(60, 120, 180, 150);
}
QMenu::separator {
height: 1px;
background: rgba(255, 255, 255, 50);
margin: 4px 8px;
}
"""
# Create categorized context menu
graph_context_menu = graph.get_context_menu("graph")
add_node_menu = graph_context_menu.add_menu("Add Node")
for category, node_classes in custom_nodes_by_category.items():
category_menu = add_node_menu.add_menu(category) # Create submenu
category_menu.qmenu.setStyleSheet(menu_stylesheet) # Apply to submenu
for node_class in node_classes:
node_type = f"{node_class.__identifier__}.{node_class.__name__}"
node_name = node_class.NODE_NAME
category_menu.add_command(f"{node_name}", make_node_command(graph, node_type))
# Ensure styles are propagated across all dynamically created submenus
apply_styles_to_submenus(graph_context_menu.qmenu)
# Add a "Remove Selected Node" command
graph_context_menu.add_command(
"Remove Selected Node",
lambda: [graph.remove_node(node) for node in graph.selected_nodes()] if graph.selected_nodes() else None
)
# ------------------------------#
# WRAPPER: QMainWindow Integration with Additional UI Elements
# ------------------------------#
# SECTION: Enhanced Graph Wrapper for QMainWindow
# This section wraps the NodeGraph widget in a QMainWindow with:
# - A menu bar at the top (named "Workflows" menu)
# - A status bar at the bottom
    # - A central QSplitter dividing the window horizontally:
    #   * Left side: the NodeGraph widget
    #   * Right side: an empty text box for future use
    #   (relative widths come from the splitter stretch factors set below)
_original_show = graph.widget.show # Save original method
def _wrapped_show():
"""
Wrap the NodeGraph widget inside a QMainWindow with a "Workflows" menu,
a status bar, and a central splitter for layout.
"""
# Create a new QMainWindow instance
main_window = QtWidgets.QMainWindow()
# Create a menu bar and add a "Workflows" menu
menu_bar = main_window.menuBar()
workflows_menu = menu_bar.addMenu("Workflows")
# Add "Open" action
open_action = QtWidgets.QAction("Open", main_window)
open_action.triggered.connect(lambda: load_workflow(graph))
workflows_menu.addAction(open_action)
# Add "Save" action
save_action = QtWidgets.QAction("Save", main_window)
save_action.triggered.connect(lambda: save_workflow(graph))
workflows_menu.addAction(save_action)
# Add "Close" action
close_action = QtWidgets.QAction("Close", main_window)
close_action.triggered.connect(lambda: close_workflow(graph))
workflows_menu.addAction(close_action)
# Create and set a blank status bar at the bottom.
main_window.setStatusBar(QtWidgets.QStatusBar())
# ---------------------------------------------------------------------
# SECTION: Status Bar Enhancement - Dynamic Status Display
# Add a QLabel to the status bar that shows:
# - The number of nodes in the graph.
# - A fixed update rate (500ms).
# - A clickable hyperlink to the Flask API server.
status_bar = main_window.statusBar()
status_label = QtWidgets.QLabel()
status_label.setTextFormat(QtCore.Qt.RichText) # Enable rich text for clickable links.
status_label.setStyleSheet("color: white;") # Set default text color to white.
status_label.setOpenExternalLinks(True) # Allow hyperlinks to be clickable.
status_bar.setSizeGripEnabled(False) # Disable resizing via the size grip.
status_bar.addWidget(status_label)
status_bar.setStyleSheet("""
QStatusBar::item {
border: none; /* remove the line around items */
}
""")
def update_status():
node_count = len(graph.all_nodes())
api_link = (
'<a href="http://127.0.0.1:5000/data" '
'style="color: rgb(60, 120, 180); text-decoration: none;">'
'http://127.0.0.1:5000/data</a>'
)
status_label.setText(
f'Nodes: {node_count} | Update Rate: 500ms | Flask API Server: {api_link}'
)
# Create the timer, pass the main_window as parent, and store the reference.
status_timer = QtCore.QTimer(main_window)
status_timer.timeout.connect(update_status)
status_timer.start(500)
main_window._status_timer = status_timer # Keep a reference so it's not GCed
# ---------------------------------------------------------------------
# Create a QSplitter for horizontal division.
splitter = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
# SECTION: Left Pane - Graph Widget
splitter.addWidget(graph.widget)
# SECTION: Right Pane - Empty Text Box
text_edit = QtWidgets.QTextEdit()
splitter.addWidget(text_edit)
# Set stretch factors
splitter.setStretchFactor(0, 2) # Split of Left Side
splitter.setStretchFactor(1, 3) # Split of Right Side
# Reduce the Size of the Splitter Handle
splitter.setHandleWidth(1)
splitter.setStyleSheet("""
QSplitter::handle {
background: none;
}
""")
# Set the splitter as the central widget of the main window.
main_window.setCentralWidget(splitter)
# Transfer the window title from the graph widget to the main window.
main_window.setWindowTitle(graph.widget.windowTitle())
# Resize the main window using the size set for the graph widget.
main_window.resize(graph.widget.size())
# Store a reference to the main window to prevent it from being garbage collected.
graph.widget._main_window = main_window
# Show the main window instead of the standalone graph widget.
main_window.show()
# Monkey-patch the show method of the graph widget.
graph.widget.show = _wrapped_show
# Grid styling changes
graph.set_background_color(20, 20, 20) # Dark gray
graph.set_grid_color(60, 60, 60) # Gray grid lines
# Add gradient background
scene = graph.scene()
gradient = QtGui.QLinearGradient(0, 0, 0, 1)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(9, 44, 68))
gradient.setColorAt(0.3, QtGui.QColor(30, 30, 30))
gradient.setColorAt(0.7, QtGui.QColor(30, 30, 30))
gradient.setColorAt(1.0, QtGui.QColor(9, 44, 68))
scene.setBackgroundBrush(QtGui.QBrush(gradient))
# Resize and show the graph widget (which now triggers the QMainWindow wrapper)
graph.widget.resize(1600, 900)
graph.widget.show()
graph_context_menu.qmenu.setStyleSheet(menu_stylesheet)
# Global update function
def global_update():
for node in graph.all_nodes():
if hasattr(node, "process_input"):
try:
node.process_input()
except Exception as e:
print("Error updating node", node, e)
timer = QtCore.QTimer()
timer.timeout.connect(global_update)
timer.start(500)
sys.exit(app.exec_())