diff --git a/Modules/__pycache__/data_collector.cpython-312.pyc b/Modules/__pycache__/data_collector.cpython-312.pyc
index 2cfb315..d93ef58 100644
Binary files a/Modules/__pycache__/data_collector.cpython-312.pyc and b/Modules/__pycache__/data_collector.cpython-312.pyc differ
diff --git a/Modules/data_collector.py b/Modules/data_collector.py
index bc2e29e..9aa1598 100644
--- a/Modules/data_collector.py
+++ b/Modules/data_collector.py
@@ -31,7 +31,6 @@ def _ensure_qapplication():
     """
     global app_instance
     if QApplication.instance() is None:
-        print("Starting QApplication in a separate thread.")
         app_instance = QApplication(sys.argv)
         threading.Thread(target=app_instance.exec_, daemon=True).start()
 
@@ -39,7 +38,6 @@ def create_ocr_region(region_id, x=250, y=50, w=DEFAULT_WIDTH, h=DEFAULT_HEIGHT)
     """
     Creates an OCR region with a visible, resizable box on the screen.
     """
-    print(f"Creating OCR Region: {region_id} at ({x}, {y}, {w}, {h})")
     _ensure_qapplication()  # Ensure QApplication is running first
 
@@ -88,7 +86,7 @@ def _update_ocr_loop():
            regions[rid]['raw_text'] = raw_text
            collector_mutex.unlock()
-            print(f"OCR Text for {rid}: {raw_text}")
+#            print(f"OCR Text for {rid}: {raw_text}")  # SHOW RAW OCR OUTPUT IN TERMINAL FOR DEBUGGING
 
        time.sleep(0.7)
 
@@ -111,8 +109,6 @@ class OCRRegionWidget(QWidget):
        self.selected_handle = None
        self.region_id = region_id
 
-        print(f"OCR Region Widget Created at {x}, {y}, {w}, {h}")
-
        self.show()
 
    def paintEvent(self, event):
diff --git a/Nodes/Flyff/__pycache__/flyff_EXP_current.cpython-312.pyc b/Nodes/Flyff/__pycache__/flyff_EXP_current.cpython-312.pyc
index 84bb711..9a01ad2 100644
Binary files a/Nodes/Flyff/__pycache__/flyff_EXP_current.cpython-312.pyc and b/Nodes/Flyff/__pycache__/flyff_EXP_current.cpython-312.pyc differ
diff --git a/Nodes/Flyff/__pycache__/flyff_character_status_node.cpython-312.pyc b/Nodes/Flyff/__pycache__/flyff_character_status_node.cpython-312.pyc
index 3192abd..d98ebf3 100644
Binary files a/Nodes/Flyff/__pycache__/flyff_character_status_node.cpython-312.pyc and b/Nodes/Flyff/__pycache__/flyff_character_status_node.cpython-312.pyc differ
diff --git a/Nodes/Flyff/__pycache__/flyff_leveling_predictor_node.cpython-312.pyc b/Nodes/Flyff/__pycache__/flyff_leveling_predictor_node.cpython-312.pyc
new file mode 100644
index 0000000..c4f5299
Binary files /dev/null and b/Nodes/Flyff/__pycache__/flyff_leveling_predictor_node.cpython-312.pyc differ
diff --git a/Nodes/Flyff/flyff_EXP_current.py b/Nodes/Flyff/flyff_EXP_current.py
index 1ab4591..22ec090 100644
--- a/Nodes/Flyff/flyff_EXP_current.py
+++ b/Nodes/Flyff/flyff_EXP_current.py
@@ -43,7 +43,6 @@ class FlyffEXPCurrentNode(BaseNode):
        try:
            response = requests.get("http://127.0.0.1:5000/data", timeout=1)
            status_code = response.status_code
-            print(f"[DEBUG] FlyffEXPCurrentNode: HTTP Status Code = {status_code}")
 
            if status_code == 200:
                try:
@@ -58,7 +57,6 @@ class FlyffEXPCurrentNode(BaseNode):
                    self.set_name("Flyff - EXP (API Connected)")
 
                    new_value = data.get("exp", "N/A")
-                    print(f"[DEBUG] FlyffEXPCurrentNode: exp = {new_value}")
                    new_value_str = str(new_value)
 
                    self.set_property('value', new_value_str)
diff --git a/Nodes/Flyff/flyff_character_status_node.py b/Nodes/Flyff/flyff_character_status_node.py
index 3a2b5ef..4498261 100644
--- a/Nodes/Flyff/flyff_character_status_node.py
+++ b/Nodes/Flyff/flyff_character_status_node.py
@@ -50,8 +50,6 @@ class FlyffCharacterStatusNode(BaseNode):
        exp_value = 0.0
 
        if len(lines) >= 4:
-            print("Processing OCR Lines:", lines)  # Debugging output
-
            # line 1: HP
            hp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[0])
            if hp_match:
@@ -85,7 +83,7 @@ class FlyffCharacterStatusNode(BaseNode):
        Called periodically to update character status from OCR.
        """
        raw_text = data_collector.get_raw_text(self.region_id)
-        print("Raw OCR Text:", raw_text)  # Debugging OCR text reading
+#        print("Raw OCR Text:", raw_text)  # Debugging OCR text reading
 
        hp_c, hp_t, mp_c, mp_t, fp_c, fp_t, exp_v = self.parse_character_stats(raw_text)
 
diff --git a/Nodes/Flyff/flyff_leveling_predictor_node.py b/Nodes/Flyff/flyff_leveling_predictor_node.py
new file mode 100644
index 0000000..43e56a2
--- /dev/null
+++ b/Nodes/Flyff/flyff_leveling_predictor_node.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python3
+"""
+Flyff - Leveling Predictor Node:
+- Tracks the last N changes in EXP values.
+- Calculates the average change rate and time intervals.
+- Predicts the estimated time to reach level 100.
+"""
+
+import time
+import numpy as np
+from OdenGraphQt import BaseNode
+from PyQt5.QtCore import QTimer
+from Modules import data_manager
+
+class FlyffLevelingPredictorNode(BaseNode):
+    __identifier__ = "bunny-lab.io.flyff_leveling_predictor_node"
+    NODE_NAME = "Flyff - Leveling Predictor"
+
+    def __init__(self):
+        super(FlyffLevelingPredictorNode, self).__init__()
+
+        # Input port for EXP values
+        self.add_input("exp", "EXP")
+
+        # User-defined number of changes to track
+        self.add_text_input("exp_track_count", "# of EXP Changes to Track", text="7")
+
+        # Output widgets
+        self.add_text_input("time_to_level", "Time to Level", text="Calculating...")
+        self.add_text_input("time_between_kills", "Time Between Kills", text="N/A")
+        self.add_text_input("exp_per_kill", "EXP Per Kill", text="N/A")
+
+        # Internal tracking lists
+        self.exp_history = []
+        self.time_intervals = []
+        self.last_exp_value = None
+        self.last_update_time = None
+
+        # Timer to periodically process EXP changes
+        self.timer = QTimer()
+        self.timer.timeout.connect(self.process_exp_change)
+        self.timer.start(1000)  # Check for updates every second
+
+    def reset_tracking_arrays(self):
+        """
+        Resets the EXP history and time interval arrays when a level-up is detected.
+        """
+        self.exp_history.clear()
+        self.time_intervals.clear()
+        self.last_exp_value = None
+        self.last_update_time = None
+
+    def process_exp_change(self):
+        """
+        Monitors changes in EXP values and calculates various statistics.
+        """
+        exp_value = data_manager.get_data().get("exp", None)
+        if exp_value is None:
+            return
+
+        exp_track_count = self.get_property("exp_track_count")
+        try:
+            exp_track_count = int(exp_track_count)
+        except ValueError:
+            exp_track_count = 7  # Default to 7 if invalid input
+
+        # Reset if EXP value decreases (indicating a level-up)
+        if self.last_exp_value is not None and exp_value < self.last_exp_value:
+            self.reset_tracking_arrays()
+
+        if self.last_exp_value is not None and exp_value != self.last_exp_value:
+            current_time = time.time()
+
+            # Store EXP change history
+            self.exp_history.append(exp_value)
+            if len(self.exp_history) > exp_track_count:
+                self.exp_history.pop(0)
+
+            # Store time intervals
+            if self.last_update_time is not None:
+                interval = current_time - self.last_update_time
+                self.time_intervals.append(interval)
+                if len(self.time_intervals) > exp_track_count:
+                    self.time_intervals.pop(0)
+
+            # Perform calculations
+            self.calculate_time_to_level()
+            self.calculate_additional_metrics()
+
+            # Update last tracking values
+            self.last_update_time = current_time
+
+        self.last_exp_value = exp_value
+
+    def calculate_time_to_level(self):
+        """
+        Calculates the estimated time to reach level 100 based on EXP change history.
+        """
+        if len(self.exp_history) < 2 or len(self.time_intervals) < 1:
+            self.set_property("time_to_level", "Insufficient data")
+            return
+
+        exp_deltas = np.diff(self.exp_history)  # Compute EXP change per interval
+        avg_exp_change = np.mean(exp_deltas) if len(exp_deltas) > 0 else 0
+        avg_time_change = np.mean(self.time_intervals)
+
+        if avg_exp_change <= 0:
+            self.set_property("time_to_level", "Not gaining EXP")
+            return
+
+        current_exp = self.exp_history[-1]
+        remaining_exp = 100.0 - current_exp  # Distance to level 100
+
+        estimated_time = (remaining_exp / avg_exp_change) * avg_time_change
+
+        # Convert estimated time into hours, minutes, and seconds
+        hours = int(estimated_time // 3600)
+        minutes = int((estimated_time % 3600) // 60)
+        seconds = int(estimated_time % 60)
+
+        time_str = f"{hours}h {minutes}m {seconds}s"
+        self.set_property("time_to_level", time_str)
+
+    def calculate_additional_metrics(self):
+        """
+        Calculates and updates the "Time Between Kills" and "EXP Per Kill".
+        """
+        if len(self.time_intervals) > 0:
+            avg_time_between_kills = np.mean(self.time_intervals)
+            minutes = int(avg_time_between_kills // 60)
+            seconds = int(avg_time_between_kills % 60)
+            self.set_property("time_between_kills", f"{minutes}m {seconds}s")
+        else:
+            self.set_property("time_between_kills", "N/A")
+
+        if len(self.exp_history) > 1:
+            exp_deltas = np.diff(self.exp_history)
+            avg_exp_per_kill = np.mean(exp_deltas) if len(exp_deltas) > 0 else 0
+            self.set_property("exp_per_kill", f"{avg_exp_per_kill:.2f}%")
+        else:
+            self.set_property("exp_per_kill", "N/A")
diff --git a/borealis.py b/borealis.py
index dba6bee..f60d6c9 100644
--- a/borealis.py
+++ b/borealis.py
@@ -150,7 +150,7 @@ if __name__ == "__main__":
    # Create the NodeGraph controller
    # (the monkey-patch ensures NodeGraph won't crash if it tries QtGui.QUndoStack(self))
    graph = NodeGraph()
-    graph.widget.setWindowTitle("Project Borealis - Flyff Information Overlay")
+    graph.widget.setWindowTitle("Project Borealis - Workflow Automation System")
 
    # Dynamically import custom node classes from the 'Nodes' package.
    custom_nodes = import_nodes_from_folder("Nodes")