From 981d5cb57321ca044b250708477e374141f52955 Mon Sep 17 00:00:00 2001 From: Nicole Rappe Date: Wed, 26 Feb 2025 01:15:25 -0700 Subject: [PATCH] Added Identification Overlay Node --- Experiments/borealis_overlay.py | 542 ++++++++++++++++++ .../data_collector.cpython-312.pyc | Bin 9878 -> 13762 bytes Modules/data_collector.py | 127 +++- .../identification_overlay.cpython-312.pyc | Bin 0 -> 154 bytes .../identification_overlay.cpython-312.pyc | Bin 0 -> 3245 bytes .../General Purpose/identification_overlay.py | 66 +++ 6 files changed, 720 insertions(+), 15 deletions(-) create mode 100644 Experiments/borealis_overlay.py create mode 100644 Nodes/Flyff/__pycache__/identification_overlay.cpython-312.pyc create mode 100644 Nodes/General Purpose/__pycache__/identification_overlay.cpython-312.pyc create mode 100644 Nodes/General Purpose/identification_overlay.py diff --git a/Experiments/borealis_overlay.py b/Experiments/borealis_overlay.py new file mode 100644 index 0000000..74b9d8f --- /dev/null +++ b/Experiments/borealis_overlay.py @@ -0,0 +1,542 @@ +#!/usr/bin/env python3 + +import sys +import time +import re +import numpy as np +import cv2 +import pytesseract + +try: + import winsound + HAS_WINSOUND = True +except ImportError: + HAS_WINSOUND = False + +from PyQt5.QtWidgets import QApplication, QWidget +from PyQt5.QtCore import Qt, QRect, QPoint, QTimer +from PyQt5.QtGui import QPainter, QPen, QColor, QFont +from PIL import Image, ImageGrab, ImageFilter + +from rich.console import Console, Group +from rich.table import Table +from rich.progress import Progress, BarColumn, TextColumn +from rich.text import Text +from rich.live import Live + +# ============================================================================= +# Global Config +# ============================================================================= + +pytesseract.pytesseract.tesseract_cmd = r"C:\Program Files\Tesseract-OCR\tesseract.exe" + +POLLING_RATE_MS = 500 +MAX_DATA_POINTS = 8 + +# We still use these defaults for Region size. +DEFAULT_WIDTH = 180 +DEFAULT_HEIGHT = 130 +HANDLE_SIZE = 8 +LABEL_HEIGHT = 20 + +GREEN_HEADER_STYLE = "bold green" + +BEEP_INTERVAL_SECONDS = 1.0 # Only beep once every 1 second + +# STATUS BAR AUTO-LOCATOR LOGIC (WILL BE BUILT-OUT TO BE MORE ROBUST LATER) +TEMPLATE_PATH = "G:\\Nextcloud\\Projects\\Scripting\\bars_template.png" # Path to your bars template file +MATCH_THRESHOLD = 0.4 # The correlation threshold to consider a "good" match + +# ============================================================================= +# Helper Functions +# ============================================================================= + +def beep_hp_warning(): + """ + Only beep if enough time has elapsed since the last beep (BEEP_INTERVAL_SECONDS). + """ + current_time = time.time() + if (beep_hp_warning.last_beep_time is None or + (current_time - beep_hp_warning.last_beep_time >= BEEP_INTERVAL_SECONDS)): + + beep_hp_warning.last_beep_time = current_time + if HAS_WINSOUND: + # frequency=376 Hz, duration=100 ms + winsound.Beep(376, 100) + else: + # Attempt terminal bell + print('\a', end='') + +beep_hp_warning.last_beep_time = None + + +def locate_bars_opencv(template_path, threshold=MATCH_THRESHOLD): + """ + Attempt to locate the bars via OpenCV template matching: + 1) Grab the full screen using PIL.ImageGrab. + 2) Convert to NumPy array in BGR format for cv2. + 3) Load template from `template_path`. + 4) Use cv2.matchTemplate to find the best match location. + 5) If max correlation > threshold, return (x, y, w, h). 
+ 6) Else return None. + """ + # 1) Capture full screen + screenshot_pil = ImageGrab.grab() + screenshot_np = np.array(screenshot_pil) # shape (H, W, 4) possibly + # Convert RGBA or RGB to BGR + screenshot_bgr = cv2.cvtColor(screenshot_np, cv2.COLOR_RGB2BGR) + + # 2) Load template from file + template_bgr = cv2.imread(template_path, cv2.IMREAD_COLOR) + if template_bgr is None: + print(f"[WARN] Could not load template file: {template_path}") + return None + + # 3) Template matching + result = cv2.matchTemplate(screenshot_bgr, template_bgr, cv2.TM_CCOEFF_NORMED) + + # 4) Find best match + min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(result) + # template width/height + th, tw, _ = template_bgr.shape + + if max_val >= threshold: + # max_loc is top-left corner of the best match + found_x, found_y = max_loc + return (found_x, found_y, tw, th) + else: + return None + + +def format_duration(seconds): + if seconds is None: + return "???" + seconds = int(seconds) + hours = seconds // 3600 + leftover = seconds % 3600 + mins = leftover // 60 + secs = leftover % 60 + if hours > 0: + return f"{hours}h {mins}m {secs}s" + else: + return f"{mins}m {secs}s" + + +def sanitize_experience_string(raw_text): + text_no_percent = raw_text.replace('%', '') + text_no_spaces = text_no_percent.replace(' ', '') + cleaned = re.sub(r'[^0-9\.]', '', text_no_spaces) + match = re.search(r'\d+(?:\.\d+)?', cleaned) + if not match: + return None + val = float(match.group(0)) + if val < 0: + val = 0 + elif val > 100: + val = 100 + return round(val, 4) + + +def format_experience_value(value): + if value < 0: + value = 0 + elif value > 100: + value = 100 + float_4 = round(value, 4) + raw_str = f"{float_4:.4f}" + int_part, dec_part = raw_str.split('.') + if int_part == "100": + pass + elif len(int_part) == 1 and int_part != "0": + int_part = "0" + int_part + elif int_part == "0": + int_part = "00" + return f"{int_part}.{dec_part}" + +# ----------------------------------------------------------------------------- +# Region Class +# ----------------------------------------------------------------------------- +class Region: + """ + Defines a draggable/resizable screen region for OCR capture. + """ + def __init__(self, x, y, label="Region", color=QColor(0,0,255)): + self.x = x + self.y = y + self.w = DEFAULT_WIDTH + self.h = DEFAULT_HEIGHT + self.label = label + self.color = color + self.visible = True + self.data = "" + + def rect(self): + return QRect(self.x, self.y, self.w, self.h) + + def label_rect(self): + return QRect(self.x, self.y - LABEL_HEIGHT, self.w, LABEL_HEIGHT) + + def resize_handles(self): + return [ + QRect(self.x - HANDLE_SIZE // 2, self.y - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE), + QRect(self.x + self.w - HANDLE_SIZE // 2, self.y - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE), + QRect(self.x - HANDLE_SIZE // 2, self.y + self.h - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE), + QRect(self.x + self.w - HANDLE_SIZE // 2, self.y + self.h - HANDLE_SIZE // 2, HANDLE_SIZE, HANDLE_SIZE), + ] + +# ----------------------------------------------------------------------------- +# OverlayCanvas Class +# ----------------------------------------------------------------------------- +class OverlayCanvas(QWidget): + """ + Renders the overlay & handles region dragging/resizing. 
+ """ + def __init__(self, regions, parent=None): + super().__init__(parent) + self.regions = regions + self.edit_mode = True + self.selected_region = None + self.selected_handle = None + self.drag_offset = QPoint() + + def paintEvent(self, event): + painter = QPainter(self) + for region in self.regions: + if region.visible: + pen = QPen(region.color) + pen.setWidth(3) + painter.setPen(pen) + painter.drawRect(region.x, region.y, region.w, region.h) + + painter.setFont(QFont("Arial", 12, QFont.Bold)) + painter.setPen(region.color) + painter.drawText(region.x, region.y - 5, region.label) + + if self.edit_mode: + for handle in region.resize_handles(): + painter.fillRect(handle, region.color) + + def mousePressEvent(self, event): + if not self.edit_mode: + return + if event.button() == Qt.LeftButton: + for region in reversed(self.regions): + for i, handle in enumerate(region.resize_handles()): + if handle.contains(event.pos()): + self.selected_region = region + self.selected_handle = i + return + if region.label_rect().contains(event.pos()): + self.selected_region = region + self.selected_handle = None + self.drag_offset = event.pos() - QPoint(region.x, region.y) + return + if region.rect().contains(event.pos()): + self.selected_region = region + self.selected_handle = None + self.drag_offset = event.pos() - QPoint(region.x, region.y) + return + + def mouseMoveEvent(self, event): + if not self.edit_mode or self.selected_region is None: + return + + if self.selected_handle is None: + self.selected_region.x = event.x() - self.drag_offset.x() + self.selected_region.y = event.y() - self.drag_offset.y() + else: + sr = self.selected_region + if self.selected_handle == 0: # top-left + sr.w += sr.x - event.x() + sr.h += sr.y - event.y() + sr.x = event.x() + sr.y = event.y() + elif self.selected_handle == 1: # top-right + sr.w = event.x() - sr.x + sr.h += sr.y - event.y() + sr.y = event.y() + elif self.selected_handle == 2: # bottom-left + sr.w += sr.x - event.x() + sr.h = event.y() - sr.y + sr.x = event.x() + elif self.selected_handle == 3: # bottom-right + sr.w = event.x() - sr.x + sr.h = event.y() - sr.y + + sr.w = max(sr.w, 10) + sr.h = max(sr.h, 10) + + self.update() + + def mouseReleaseEvent(self, event): + if not self.edit_mode: + return + if event.button() == Qt.LeftButton: + self.selected_region = None + self.selected_handle = None + +# ----------------------------------------------------------------------------- +# BorealisOverlay Class +# ----------------------------------------------------------------------------- +class BorealisOverlay(QWidget): + """ + Single Region Overlay for Player Stats (HP/MP/FP/EXP) with: + - Automatic location via OpenCV template matching at startup + - OCR scanning + - Low-HP beep + - Rich Live updates in terminal + """ + def __init__(self, live=None): + super().__init__() + screen_geo = QApplication.primaryScreen().geometry() + self.setGeometry(screen_geo) + self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint) + self.setAttribute(Qt.WA_TranslucentBackground, True) + + # Try to find the bars automatically + # If found => use that location, else default + initial_x, initial_y = 250, 50 + region_w, region_h = DEFAULT_WIDTH, DEFAULT_HEIGHT + + match_result = locate_bars_opencv(TEMPLATE_PATH, MATCH_THRESHOLD) + if match_result is not None: + found_x, found_y, w, h = match_result + print(f"Character Status Located at {found_x}, {found_y} with confidence >= {MATCH_THRESHOLD}.") + initial_x, initial_y = found_x, found_y + # Optionally override region 
size with template size + region_w, region_h = w, h + else: + print("Could not auto-locate the character status page. Set your theme to Masquerade and Interface Scale to 140%, and browser zoom level to 110%. Using default region.") + + region = Region(initial_x, initial_y, label="Character Status") + region.w = region_w + region.h = region_h + self.regions = [region] + + self.canvas = OverlayCanvas(self.regions, self) + self.canvas.setGeometry(self.rect()) + + # Tesseract + self.engine = pytesseract + + # Keep history of EXP data + self.points = [] + + self.live = live + + # Timer for periodic OCR scanning + self.timer = QTimer(self) + self.timer.timeout.connect(self.collect_ocr_data) + self.timer.start(POLLING_RATE_MS) + + def set_live(self, live): + self.live = live + + def collect_ocr_data(self): + for region in self.regions: + if region.visible: + screenshot = ImageGrab.grab( + bbox=(region.x, region.y, region.x + region.w, region.y + region.h) + ) + processed = self.preprocess_image(screenshot) + text = pytesseract.image_to_string(processed, config='--psm 4 --oem 1') + region.data = text.strip() + + if self.live is not None: + renderable = self.build_renderable() + self.live.update(renderable) + + def preprocess_image(self, image): + gray = image.convert("L") + scaled = gray.resize((gray.width * 3, gray.height * 3)) + thresh = scaled.point(lambda p: p > 200 and 255) + return thresh.filter(ImageFilter.MedianFilter(3)) + + def parse_all_stats(self, raw_text): + raw_lines = raw_text.splitlines() + lines = [l.strip() for l in raw_lines if l.strip()] + stats_dict = { + "hp": (0,1), + "mp": (0,1), + "fp": (0,1), + "exp": None + } + if len(lines) < 4: + return stats_dict + + hp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[0]) + if hp_match: + stats_dict["hp"] = (int(hp_match.group(1)), int(hp_match.group(2))) + + mp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[1]) + if mp_match: + stats_dict["mp"] = (int(mp_match.group(1)), int(mp_match.group(2))) + + fp_match = re.search(r"(\d+)\s*/\s*(\d+)", lines[2]) + if fp_match: + stats_dict["fp"] = (int(fp_match.group(1)), int(fp_match.group(2))) + + exp_val = sanitize_experience_string(lines[3]) + stats_dict["exp"] = exp_val + return stats_dict + + def update_points(self, new_val): + now = time.time() + if self.points: + _, last_v = self.points[-1] + if abs(new_val - last_v) < 1e-6: + return + if new_val < last_v: + self.points.clear() + self.points.append((now, new_val)) + if len(self.points) > MAX_DATA_POINTS: + self.points.pop(0) + + def compute_time_to_100(self): + n = len(self.points) + if n < 2: + return None + first_t, first_v = self.points[0] + last_t, last_v = self.points[-1] + diff_v = last_v - first_v + if diff_v <= 0: + return None + + steps = n - 1 + total_time = last_t - first_t + if total_time <= 0: + return None + + avg_change = diff_v / steps + remain = 100.0 - last_v + if remain <= 0: + return None + + avg_time = total_time / steps + rate_per_s = avg_change / avg_time if avg_time > 0 else 0 + if rate_per_s <= 0: + return None + + return int(remain / rate_per_s) + + def build_renderable(self): + raw_text = self.regions[0].data + stats = self.parse_all_stats(raw_text) + hp_cur, hp_max = stats["hp"] + mp_cur, mp_max = stats["mp"] + fp_cur, fp_max = stats["fp"] + exp_val = stats["exp"] + + # HP beep logic + if hp_max > 0: + hp_ratio = hp_cur / hp_max + if 0 < hp_ratio <= 0.40: + beep_hp_warning() + + if exp_val is not None: + self.update_points(exp_val) + current_exp = self.points[-1][1] if self.points else 0.0 + + # Title + 
title_text = Text("Project Borealis\n", style="bold white") + subtitle_text = Text("Flyff Information Overlay\n\n", style="dim") + + # HP / MP / FP bars + bar_progress = Progress( + "{task.description}", + BarColumn(bar_width=30), + TextColumn(" {task.completed}/{task.total} ({task.percentage:>5.2f}%)"), + transient=False, + auto_refresh=False + ) + bar_progress.add_task("[bold red]HP[/bold red]", total=hp_max, completed=hp_cur, + style="red", complete_style="red") + bar_progress.add_task("[bold blue]MP[/bold blue]", total=mp_max, completed=mp_cur, + style="blue", complete_style="blue") + bar_progress.add_task("[bold green]FP[/bold green]", total=fp_max, completed=fp_cur, + style="green", complete_style="green") + bar_progress.refresh() + + # Historical EXP table + table = Table(show_header=True, header_style=GREEN_HEADER_STYLE, style=None) + table.add_column("Historical EXP", justify="center", style="green") + table.add_column("Time Since Last Kill", justify="center", style="green") + table.add_column("Average EXP Per Kill", justify="center", style="green") + table.add_column("Average Time Between Kills", justify="center", style="green") + + n = len(self.points) + if n == 0: + table.add_row("N/A", "N/A", "N/A", "N/A") + elif n == 1: + _, v0 = self.points[0] + exp_str = f"[green]{format_experience_value(v0)}%[/green]" + table.add_row(exp_str, "N/A", "N/A", "N/A") + else: + for i in range(1, n): + t_cur, v_cur = self.points[i] + t_prev, v_prev = self.points[i - 1] + delta_v = v_cur - v_prev + delta_str = f"{delta_v:+.4f}%" + exp_main = format_experience_value(v_cur) + exp_str = f"[green]{exp_main}%[/green] [dim]({delta_str})[/dim]" + + delta_t = t_cur - t_prev + t_since_str = f"{delta_t:.1f}s" + + diff_v = v_cur - self.points[0][1] + steps = i + avg_exp_str = f"{diff_v/steps:.4f}%" + + total_time = t_cur - self.points[0][0] + avg_kill_time = total_time / steps + avg_time_str = f"{avg_kill_time:.1f}s" + + table.add_row(exp_str, t_since_str, avg_exp_str, avg_time_str) + + # Predicted Time to Level + secs_left = self.compute_time_to_100() + time_str = format_duration(secs_left) + + time_bar = Progress( + TextColumn("[bold white]Predicted Time to Level:[/bold white] "), + BarColumn(bar_width=30, complete_style="magenta"), + TextColumn(" [green]{task.percentage:>5.2f}%[/green] "), + TextColumn(f"[magenta]{time_str}[/magenta] until 100%", justify="right"), + transient=False, + auto_refresh=False + ) + time_bar.add_task("", total=100, completed=current_exp) + time_bar.refresh() + + return Group( + title_text, + subtitle_text, + bar_progress, + table, + time_bar + ) + +# ----------------------------------------------------------------------------- +# main +# ----------------------------------------------------------------------------- +def main(): + """ + 1) Attempt to locate HP/MP/FP/Exp bars using OpenCV template matching. + 2) Position overlay region accordingly if found, else default. + 3) Start PyQt, periodically OCR the region, update Rich Live in terminal. 
+    """
+    app = QApplication(sys.argv)
+    window = BorealisOverlay()
+    window.setWindowTitle("Project Borealis Overlay (HP/MP/FP/EXP)")
+    window.show()
+
+    console = Console()
+
+    with Live(console=console, refresh_per_second=4) as live:
+        window.set_live(live)
+        exit_code = app.exec_()
+
+    sys.exit(exit_code)
+
+if __name__ == "__main__":
+    main()
diff --git a/Modules/__pycache__/data_collector.cpython-312.pyc b/Modules/__pycache__/data_collector.cpython-312.pyc
index 9d4d97e2cc1247359324e28f0abc58b01a666bc3..f99d78d41f91fdbad54aa5b13dcd4e5eefd81f24 100644
Binary files a/Modules/__pycache__/data_collector.cpython-312.pyc and b/Modules/__pycache__/data_collector.cpython-312.pyc differ
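For readers who want to reuse the terminal-dashboard pattern from main() above outside this overlay, the core idea is a Rich Live context wrapping the Qt event loop, with a QTimer pushing fresh renderables into it. A minimal self-contained sketch of that pattern (the names here are illustrative, not part of this patch):

#!/usr/bin/env python3
# Sketch: Rich Live owns the terminal while the Qt event loop runs,
# and a QTimer periodically pushes a new renderable into it.
import sys
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtCore import QTimer
from rich.console import Console
from rich.live import Live
from rich.text import Text

def main():
    app = QApplication(sys.argv)
    window = QWidget()  # stand-in for BorealisOverlay
    window.show()

    console = Console()
    with Live(console=console, refresh_per_second=4) as live:
        ticks = {"n": 0}

        def refresh():
            ticks["n"] += 1
            live.update(Text(f"overlay tick {ticks['n']}"))

        timer = QTimer()
        timer.timeout.connect(refresh)
        timer.start(500)  # mirrors POLLING_RATE_MS
        exit_code = app.exec_()

    sys.exit(exit_code)

if __name__ == "__main__":
    main()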
diff --git a/Modules/data_collector.py b/Modules/data_collector.py
--- a/Modules/data_collector.py
+++ b/Modules/data_collector.py
@@ ... @@
     thresh = scaled.point(lambda p: 255 if p > 200 else 0)
     return thresh.filter(ImageFilter.MedianFilter(3))
+
+def find_word_positions(region_id, word, offset_x=0, offset_y=0, margin=5):
+    """
+    Finds positions of a specific word within the OCR region.
+    Applies user-defined offset and margin adjustments.
+    Returns a list of bounding box coordinates relative to the OCR box.
+    """
+    collector_mutex.lock()
+    if region_id not in regions:
+        collector_mutex.unlock()
+        return []
+
+    bbox = regions[region_id]['bbox']
+    collector_mutex.unlock()
+
+    # Extract OCR region position and size
+    x, y, w, h = bbox
+    left, top, right, bottom = x, y, x + w, y + h
+
+    if right <= left or bottom <= top:
+        print(f"[ERROR] Invalid OCR region bounds: {bbox}")
+        return []
+
+    try:
+        image = ImageGrab.grab(bbox=(left, top, right, bottom))
+        processed = _preprocess_image(image)
+
+        # Get original and processed image sizes
+        orig_width, orig_height = image.size
+        proc_width, proc_height = processed.size
+
+        # Scale factors mapping processed-image coordinates back to the original capture
+        scale_x = orig_width / proc_width
+        scale_y = orig_height / proc_height
+
+        data = pytesseract.image_to_data(processed, config='--psm 6 --oem 1', output_type=pytesseract.Output.DICT)
+
+        word_positions = []
+        for i in range(len(data['text'])):
+            # Escape the search term so regex metacharacters in it are matched literally
+            if re.search(rf"\b{re.escape(word)}\b", data['text'][i], re.IGNORECASE):
+                # Scale the detected coordinates back to region-relative positions
+                x_scaled = int(data['left'][i] * scale_x)
+                y_scaled = int(data['top'][i] * scale_y)
+                w_scaled = int(data['width'][i] * scale_x)
+                h_scaled = int(data['height'][i] * scale_y)
+
+                # Apply user-configured margin
+                x_margin = max(0, x_scaled - margin)
+                y_margin = max(0, y_scaled - margin)
+                w_margin = w_scaled + (margin * 2)
+                h_margin = h_scaled + (margin * 2)
+
+                # Apply user-configured offset
+                x_final = x_margin + offset_x
+                y_final = y_margin + offset_y
+
+                word_positions.append((x_final, y_final, w_margin, h_margin))
+
+        return word_positions
+    except Exception as e:
+        print(f"[ERROR] Failed to capture OCR region: {e}")
+        return []
+
+
+def draw_identification_boxes(region_id, positions, color=(0, 0, 255)):
+    """
+    Draws non-interactive rectangles at specified positions within the given OCR region.
+    """
+    collector_mutex.lock()
+    if region_id in regions and 'widget' in regions[region_id]:
+        widget = regions[region_id]['widget']
+        widget.set_draw_positions(positions, color)
+    collector_mutex.unlock()
+
+
 class OCRRegionWidget(QWidget):
-    def __init__(self, x, y, w, h, region_id):
+    def __init__(self, x, y, w, h, region_id, color):
         super().__init__()
 
         self.setGeometry(x, y, w, h)
@@ -108,23 +191,41 @@ class OCRRegionWidget(QWidget):
         self.drag_offset = None
         self.selected_handle = None
         self.region_id = region_id
+        self.box_color = QColor(*color)
+        self.draw_positions = []
 
         self.show()
 
     def paintEvent(self, event):
         painter = QPainter(self)
-        pen = QPen(QColor(255, 255, 0)) # COLOR OF THE BOX ITSELF
-        pen.setWidth(5) # WIDTH OF THE BOX BORDER
+        pen = QPen(self.box_color)
+        pen.setWidth(5)
         painter.setPen(pen)
 
         # Draw main rectangle
         painter.drawRect(0, 0, self.width(), self.height())
 
+        # Draw detected word overlays
+        pen.setWidth(2)
+        pen.setColor(QColor(0, 0, 255))
+        painter.setPen(pen)
+
+        for x, y, w, h in self.draw_positions:
+            painter.drawRect(x, y, w, h)
+
         # Draw resize handles
-        painter.setBrush(QColor(255, 255, 0)) # COLOR OF THE RESIZE HANDLES
+        painter.setBrush(self.box_color)
         for handle in self._resize_handles():
             painter.drawRect(handle)
 
+    def set_draw_positions(self, positions, color):
+        """
+        Update the positions where identification boxes should be drawn.
+        """
+        self.draw_positions = positions
+        self.box_color = QColor(*color)
+        self.update()
+
     def _resize_handles(self):
         w, h = self.width(), self.height()
         return [
@@ -174,7 +275,3 @@ class OCRRegionWidget(QWidget):
             if self.region_id in regions:
                 regions[self.region_id]['bbox'] = [new_x, new_y, self.width(), self.height()]
             collector_mutex.unlock()
-
-    def mouseReleaseEvent(self, event):
-        self.selected_handle = None
-        self.drag_offset = None
diff --git a/Nodes/Flyff/__pycache__/identification_overlay.cpython-312.pyc b/Nodes/Flyff/__pycache__/identification_overlay.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..04981443be0db3b971f116e52f1b8aee89bf649b
Binary files /dev/null and b/Nodes/Flyff/__pycache__/identification_overlay.cpython-312.pyc differ
diff --git a/Nodes/General Purpose/__pycache__/identification_overlay.cpython-312.pyc b/Nodes/General Purpose/__pycache__/identification_overlay.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5e05c3d988682386a9c2c567857dcdab72c70d54
Binary files /dev/null and b/Nodes/General Purpose/__pycache__/identification_overlay.cpython-312.pyc differ
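The coordinate math in find_word_positions() above is worth calling out: OCR runs against an enlarged, thresholded copy of the capture, so every box pytesseract reports must be scaled back into the original region's coordinate space before it can be drawn. A minimal sketch of that round trip, assuming only Pillow and pytesseract are installed (the capture.png path and word_boxes helper are illustrative):

# Sketch: upscale for OCR accuracy, then map reported boxes back to original coordinates.
import pytesseract
from PIL import Image

def word_boxes(image: Image.Image, scale: int = 3):
    # Grayscale + enlarge, as the collector's preprocessing does
    processed = image.convert("L").resize((image.width * scale, image.height * scale))
    data = pytesseract.image_to_data(processed, output_type=pytesseract.Output.DICT)
    sx = image.width / processed.width    # processed -> original scale factors
    sy = image.height / processed.height
    boxes = []
    for i, text in enumerate(data["text"]):
        if text.strip():  # collect every detected word; filtering happens elsewhere
            boxes.append((int(data["left"][i] * sx), int(data["top"][i] * sy),
                          int(data["width"][i] * sx), int(data["height"][i] * sy)))
    return boxes

if __name__ == "__main__":
    print(word_boxes(Image.open("capture.png")))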
diff --git a/Nodes/General Purpose/identification_overlay.py b/Nodes/General Purpose/identification_overlay.py
new file mode 100644
index 0000000..1510ab6
--- /dev/null
+++ b/Nodes/General Purpose/identification_overlay.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+"""
+Identification Overlay Node:
+- Creates an OCR region in data_collector with a blue overlay.
+- Detects instances of a specified word and draws adjustable overlays.
+- Users can configure offset and margin dynamically.
+"""
+
+import re
+from OdenGraphQt import BaseNode
+from PyQt5.QtWidgets import QMessageBox
+from PyQt5.QtCore import QTimer
+from Modules import data_manager, data_collector
+
+
+class IdentificationOverlayNode(BaseNode):
+    __identifier__ = "bunny-lab.io.identification_overlay_node"
+    NODE_NAME = "Identification Overlay"
+
+    def __init__(self):
+        super(IdentificationOverlayNode, self).__init__()
+
+        # User-configurable options
+        self.add_text_input("search_term", "Search Term", text="Aibatt")
+        self.add_text_input("offset_value", "Offset Value (X,Y)", text="0,0")  # New input
+        self.add_text_input("margin", "Margin", text="5")  # New input
+
+        self.region_id = "identification_overlay"
+        data_collector.create_ocr_region(self.region_id, x=250, y=50, w=300, h=200, color=(0, 0, 255))
+
+        data_collector.start_collector()
+        self.set_name("Identification Overlay")
+
+        # Timer for updating overlays
+        self.timer = QTimer()
+        self.timer.timeout.connect(self.update_overlay)
+        self.timer.start(500)  # Update every 500ms
+
+    def update_overlay(self):
+        """
+        Updates the overlay with detected word positions.
+ """ + search_term = self.get_property("search_term") + offset_text = self.get_property("offset_value") + margin_text = self.get_property("margin") + + # Parse user-defined offset + try: + offset_x, offset_y = map(int, offset_text.split(",")) + except ValueError: + offset_x, offset_y = 0, 0 # Default to no offset if invalid input + + # Parse user-defined margin + try: + margin = int(margin_text) + except ValueError: + margin = 5 # Default margin if invalid input + + if not search_term: + return + + # Get detected word positions + detected_positions = data_collector.find_word_positions(self.region_id, search_term, offset_x, offset_y, margin) + + # Draw detected word boxes + data_collector.draw_identification_boxes(self.region_id, detected_positions, color=(0, 0, 255))
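Taken together, the new data_collector functions can be exercised without the node graph at all, using the same call sequence IdentificationOverlayNode performs each tick. A minimal manual-test sketch under that assumption (the region id and search term are illustrative; a live desktop session is required since the collector grabs the screen):

# Sketch: drive the patch's data_collector API directly, mirroring the node's tick.
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import QTimer
from Modules import data_collector

app = QApplication(sys.argv)

REGION = "identification_overlay_test"  # illustrative region id
data_collector.create_ocr_region(REGION, x=250, y=50, w=300, h=200, color=(0, 0, 255))
data_collector.start_collector()

def tick():
    # Offset (0, 0) and margin 5 mirror the node's defaults
    positions = data_collector.find_word_positions(REGION, "Aibatt", 0, 0, 5)
    data_collector.draw_identification_boxes(REGION, positions, color=(0, 0, 255))

timer = QTimer()
timer.timeout.connect(tick)
timer.start(500)
sys.exit(app.exec_())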