Mirror of https://github.com/bunny-lab-io/Borealis.git (synced 2025-10-26 17:21:58 -06:00)
Cache repo hash on server and reuse in updater
File diff suppressed because one or more lines are too long
@@ -9,6 +9,7 @@ import shutil
 import string
 import asyncio
 from pathlib import Path
+from typing import Optional
 
 try:
     import psutil # type: ignore
@@ -134,24 +135,132 @@ def _project_root():
     return os.getcwd()
 
 
-_AGENT_HASH_CACHE = {"path": None, "mtime": None, "value": None}
+_AGENT_HASH_CACHE = {
+    "path": None,
+    "mtime": None,
+    "value": None,
+    "source": None,
+    "extra": None,
+}
+
+
+def _iter_hash_roots():
+    seen = set()
+    root = _project_root()
+    for _ in range(6):
+        if not root or root in seen:
+            break
+        yield root
+        seen.add(root)
+        parent = os.path.dirname(root)
+        if not parent or parent == root:
+            break
+        root = parent
+
+
+def _resolve_git_head_hash(root: str) -> Optional[str]:
+    git_dir = os.path.join(root, ".git")
+    head_path = os.path.join(git_dir, "HEAD")
+    if not os.path.isfile(head_path):
+        return None
+    try:
+        with open(head_path, "r", encoding="utf-8") as fh:
+            head = fh.read().strip()
+    except Exception:
+        return None
+    if not head:
+        return None
+    if head.startswith("ref:"):
+        ref = head.split(" ", 1)[1].strip() if " " in head else head.split(":", 1)[1].strip()
+        if not ref:
+            return None
+        ref_path = os.path.join(git_dir, *ref.split("/"))
+        if os.path.isfile(ref_path):
+            try:
+                with open(ref_path, "r", encoding="utf-8") as rf:
+                    commit = rf.read().strip()
+                return commit or None
+            except Exception:
+                return None
+        packed_refs = os.path.join(git_dir, "packed-refs")
+        if os.path.isfile(packed_refs):
+            try:
+                with open(packed_refs, "r", encoding="utf-8") as pf:
+                    for line in pf:
+                        line = line.strip()
+                        if not line or line.startswith("#") or line.startswith("^"):
+                            continue
+                        try:
+                            commit, ref_name = line.split(" ", 1)
+                        except ValueError:
+                            continue
+                        if ref_name.strip() == ref:
+                            commit = commit.strip()
+                            return commit or None
+            except Exception:
+                return None
+        return None
+    # Detached head contains the commit hash directly
+    commit = head.splitlines()[0].strip()
+    return commit or None
+
+
 def _read_agent_hash():
     try:
-        root = _project_root()
-        path = os.path.join(root, 'github_repo_hash.txt')
         cache = _AGENT_HASH_CACHE
-        if not os.path.isfile(path):
-            cache.update({"path": path, "mtime": None, "value": None})
-            return None
-        mtime = os.path.getmtime(path)
-        if cache.get("path") == path and cache.get("mtime") == mtime:
+        for root in _iter_hash_roots():
+            path = os.path.join(root, 'github_repo_hash.txt')
+            if not os.path.isfile(path):
+                continue
+            mtime = os.path.getmtime(path)
+            if (
+                cache.get("source") == "file"
+                and cache.get("path") == path
+                and cache.get("mtime") == mtime
+            ):
+                return cache.get("value")
+            with open(path, 'r', encoding='utf-8') as fh:
+                value = fh.read().strip()
+            cache.update(
+                {
+                    "source": "file",
+                    "path": path,
+                    "mtime": mtime,
+                    "extra": None,
+                    "value": value or None,
+                }
+            )
             return cache.get("value")
-        with open(path, 'r', encoding='utf-8') as fh:
-            value = fh.read().strip()
-        cache.update({"path": path, "mtime": mtime, "value": value or None})
-        return cache.get("value")
+
+        for root in _iter_hash_roots():
+            git_dir = os.path.join(root, '.git')
+            head_path = os.path.join(git_dir, 'HEAD')
+            if not os.path.isfile(head_path):
+                continue
+            head_mtime = os.path.getmtime(head_path)
+            packed_path = os.path.join(git_dir, 'packed-refs')
+            packed_mtime = os.path.getmtime(packed_path) if os.path.isfile(packed_path) else None
+            if (
+                cache.get("source") == "git"
+                and cache.get("path") == head_path
+                and cache.get("mtime") == head_mtime
+                and cache.get("extra") == packed_mtime
+            ):
+                return cache.get("value")
+            commit = _resolve_git_head_hash(root)
+            cache.update(
+                {
+                    "source": "git",
+                    "path": head_path,
+                    "mtime": head_mtime,
+                    "extra": packed_mtime,
+                    "value": commit or None,
+                }
+            )
+            if commit:
+                return commit
+
+        cache.update({"source": None, "path": None, "mtime": None, "extra": None, "value": None})
+        return None
     except Exception:
         try:
             _AGENT_HASH_CACHE.update({"value": None})
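
Taken together, the agent now resolves its own commit hash in a fixed order: a github_repo_hash.txt pinned by the updater wins, then the local .git checkout (loose ref, packed-refs, or a detached HEAD), and finally None, with results memoized in _AGENT_HASH_CACHE keyed by file mtimes. A minimal usage sketch; the module name "agent" below is an assumption, not something the diff confirms:

    # Hypothetical import path; the diff does not name the agent module.
    from agent import _read_agent_hash

    commit = _read_agent_hash()
    # -> contents of github_repo_hash.txt from the project root or up to five
    #    parent directories, else the commit resolved from .git/HEAD, else None.
    print(commit or "no hash resolved")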
@@ -803,6 +912,12 @@ def _build_details_fallback() -> dict:
         'storage': collect_storage(),
         'network': network,
     }
+    try:
+        agent_hash_value = _read_agent_hash()
+        if agent_hash_value:
+            details.setdefault('summary', {})['agent_hash'] = agent_hash_value
+    except Exception:
+        pass
     return details
 
 
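
When the fallback details payload is built, the resolved hash is attached under summary.agent_hash. An illustrative shape (values are placeholders, not from the diff):

    # Illustrative payload shape only; the hash value is a placeholder.
    details = {
        'storage': [...],
        'network': {...},
        'summary': {'agent_hash': 'a1b2c3d4e5f6'},
    }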
@@ -126,17 +126,15 @@ export default function DeviceList({ onSelectDevice }) {
 
   const fetchLatestRepoHash = useCallback(async () => {
     try {
-      const resp = await fetch(
-        "https://api.github.com/repos/bunny-lab-io/Borealis/branches/main",
-        {
-          headers: {
-            Accept: "application/vnd.github+json",
-          },
-        }
-      );
-      if (!resp.ok) throw new Error(`GitHub status ${resp.status}`);
+      const params = new URLSearchParams({ repo: "bunny-lab-io/Borealis", branch: "main" });
+      const resp = await fetch(`/api/agent/repo_hash?${params.toString()}`);
       const json = await resp.json();
-      const sha = (json?.commit?.sha || "").trim();
+      const sha = (json?.sha || "").trim();
+      if (!resp.ok || !sha) {
+        const err = new Error(`Latest hash status ${resp.status}${json?.error ? ` - ${json.error}` : ""}`);
+        err.response = json;
+        throw err;
+      }
       setRepoHash((prev) => sha || prev || null);
       return sha || null;
     } catch (err) {
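
The component now parses the server's proxied payload instead of GitHub's branch object, hence json?.commit?.sha becoming json?.sha. An illustrative success response from /api/agent/repo_hash, matching the payload the endpoint builds later in this commit (the sha is a placeholder):

    {
      "repo": "bunny-lab-io/Borealis",
      "branch": "main",
      "sha": "a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0",
      "cached": true,
      "age_seconds": 12.5,
      "source": "cache"
    }

On failure the endpoint answers HTTP 503 with an "error" field, which the component folds into the thrown message.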
@@ -167,7 +165,7 @@ export default function DeviceList({ onSelectDevice }) {
       const arr = Object.entries(data || {}).map(([id, a]) => {
         const hostname = a.hostname || id || "unknown";
         const details = detailsByHost[hostname] || {};
-        const agentHash = (a.agent_hash || "").trim();
+        const agentHash = (a.agent_hash || details.agentHash || "").trim();
         return {
           id,
           hostname,
@@ -230,43 +228,47 @@ export default function DeviceList({ onSelectDevice }) {
           const externalIp = summary.external_ip || "";
           const lastReboot = summary.last_reboot || "";
           const description = summary.description || "";
+          const agentHashValue = (summary.agent_hash || "").trim();
+          const enriched = {
+            lastUser,
+            created: createdRaw,
+            createdTs,
+            type: deviceType,
+            internalIp,
+            externalIp,
+            lastReboot,
+            description,
+            agentHash: agentHashValue,
+          };
           setDetailsByHost((prev) => ({
             ...prev,
-            [h]: {
-              lastUser,
-              created: createdRaw,
-              createdTs,
-              type: deviceType,
-              internalIp,
-              externalIp,
-              lastReboot,
-              description,
-            },
+            [h]: enriched,
           }));
+          setRows((prev) =>
+            prev.map((r) => {
+              if (r.hostname !== h) return r;
+              const nextHash = agentHashValue || r.agentHash;
+              return {
+                ...r,
+                lastUser: enriched.lastUser || r.lastUser,
+                type: enriched.type || r.type,
+                created: enriched.created || r.created,
+                createdTs: enriched.createdTs || r.createdTs,
+                internalIp: enriched.internalIp || r.internalIp,
+                externalIp: enriched.externalIp || r.externalIp,
+                lastReboot: enriched.lastReboot || r.lastReboot,
+                description: enriched.description || r.description,
+                agentHash: nextHash,
+                agentVersion: computeAgentVersion(nextHash, repoSha),
+              };
+            })
+          );
         } catch {
           // ignore per-host failure
         }
       })
     );
   }
-  // After caching, refresh rows to apply newly available details
-  setRows((prev) =>
-    prev.map((r) => {
-      const det = detailsByHost[r.hostname];
-      if (!det) return r;
-      return {
-        ...r,
-        lastUser: det.lastUser || r.lastUser,
-        type: det.type || r.type,
-        created: det.created || r.created,
-        createdTs: det.createdTs || r.createdTs,
-        internalIp: det.internalIp || r.internalIp,
-        externalIp: det.externalIp || r.externalIp,
-        lastReboot: det.lastReboot || r.lastReboot,
-        description: det.description || r.description,
-      };
-    })
-  );
 }
 } catch (e) {
   console.warn("Failed to load agents:", e);
@@ -21,6 +21,7 @@ from typing import List, Dict, Tuple, Optional, Any, Set
 import sqlite3
 import io
 import uuid
+from threading import Lock
 from datetime import datetime, timezone
 
 try:
@@ -68,6 +69,142 @@ def _write_service_log(service: str, msg: str):
     pass
 
 
+_REPO_HEAD_CACHE: Dict[str, Tuple[str, float]] = {}
+_REPO_HEAD_LOCK = Lock()
+
+_DEFAULT_REPO = os.environ.get('BOREALIS_REPO', 'bunny-lab-io/Borealis')
+_DEFAULT_BRANCH = os.environ.get('BOREALIS_REPO_BRANCH', 'main')
+try:
+    _REPO_HASH_INTERVAL = int(os.environ.get('BOREALIS_REPO_HASH_REFRESH', '60'))
+except ValueError:
+    _REPO_HASH_INTERVAL = 60
+_REPO_HASH_INTERVAL = max(30, min(_REPO_HASH_INTERVAL, 3600))
+_REPO_HASH_WORKER_STARTED = False
+_REPO_HASH_WORKER_LOCK = Lock()
+
+
+def _fetch_repo_head(owner_repo: str, branch: str = 'main', *, ttl_seconds: int = 60, force_refresh: bool = False) -> Dict[str, Any]:
+    """Resolve the latest commit hash for ``owner_repo``/``branch`` via GitHub's REST API.
+
+    The server caches the response so that a fleet of agents can reuse the
+    result without exhausting rate limits. ``ttl_seconds`` bounds how long a
+    cached value is considered fresh. When ``force_refresh`` is True the cache
+    is bypassed and a new request is attempted immediately.
+    """
+
+    key = f"{owner_repo}:{branch}"
+    now = time.time()
+
+    with _REPO_HEAD_LOCK:
+        cached = _REPO_HEAD_CACHE.get(key)
+
+    cached_sha: Optional[str] = None
+    cached_ts: Optional[float] = None
+    cached_age: Optional[float] = None
+    if cached:
+        cached_sha, cached_ts = cached
+        cached_age = max(0.0, now - cached_ts)
+
+    if cached_sha and not force_refresh and cached_age is not None and cached_age < max(30, ttl_seconds):
+        return {
+            'sha': cached_sha,
+            'cached': True,
+            'age_seconds': cached_age,
+            'error': None,
+            'source': 'cache',
+        }
+
+    headers = {
+        'Accept': 'application/vnd.github+json',
+        'User-Agent': 'Borealis-Server'
+    }
+    token = os.environ.get('BOREALIS_GITHUB_TOKEN') or os.environ.get('GITHUB_TOKEN')
+    if token:
+        headers['Authorization'] = f'Bearer {token}'
+
+    error_msg: Optional[str] = None
+    sha: Optional[str] = None
+    try:
+        resp = requests.get(
+            f'https://api.github.com/repos/{owner_repo}/branches/{branch}',
+            headers=headers,
+            timeout=20,
+        )
+        if resp.status_code == 200:
+            data = resp.json()
+            sha = (data.get('commit') or {}).get('sha')
+        else:
+            error_msg = f'GitHub REST API repo head lookup failed: HTTP {resp.status_code} {resp.text[:200]}'
+    except Exception as exc:  # pragma: no cover - defensive logging
+        error_msg = f'GitHub REST API repo head lookup raised: {exc}'
+
+    if sha:
+        sha = sha.strip()
+        with _REPO_HEAD_LOCK:
+            _REPO_HEAD_CACHE[key] = (sha, now)
+        return {
+            'sha': sha,
+            'cached': False,
+            'age_seconds': 0.0,
+            'error': None,
+            'source': 'github',
+        }
+
+    if error_msg:
+        _write_service_log('server', error_msg)
+
+    if cached_sha is not None:
+        return {
+            'sha': cached_sha,
+            'cached': True,
+            'age_seconds': cached_age,
+            'error': error_msg or 'using cached value',
+            'source': 'cache-stale',
+        }
+
+    return {
+        'sha': None,
+        'cached': False,
+        'age_seconds': None,
+        'error': error_msg or 'unable to resolve repository head',
+        'source': 'github',
+    }
+
+
+def _refresh_default_repo_hash(force: bool = False) -> Dict[str, Any]:
+    ttl = max(30, _REPO_HASH_INTERVAL)
+    try:
+        return _fetch_repo_head(_DEFAULT_REPO, _DEFAULT_BRANCH, ttl_seconds=ttl, force_refresh=force)
+    except Exception as exc:  # pragma: no cover - defensive logging
+        _write_service_log('server', f'default repo hash refresh failed: {exc}')
+        raise
+
+
+def _repo_hash_background_worker():
+    interval = max(30, _REPO_HASH_INTERVAL)
+    # Fetch immediately, then sleep between refreshes
+    while True:
+        try:
+            _refresh_default_repo_hash(force=True)
+        except Exception:
+            # _refresh_default_repo_hash already logs details
+            pass
+        eventlet.sleep(interval)
+
+
+def _ensure_repo_hash_worker():
+    global _REPO_HASH_WORKER_STARTED
+    with _REPO_HASH_WORKER_LOCK:
+        if _REPO_HASH_WORKER_STARTED:
+            return
+        _REPO_HASH_WORKER_STARTED = True
+    try:
+        eventlet.spawn_n(_repo_hash_background_worker)
+    except Exception as exc:
+        _REPO_HASH_WORKER_STARTED = False
+        _write_service_log('server', f'failed to start repo hash worker: {exc}')
+
+
 def _ansible_log_server(msg: str):
     _write_service_log('ansible', msg)
 
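
In practice the first lookup for a given owner/branch key hits GitHub and populates _REPO_HEAD_CACHE; calls inside the TTL are served from memory, and a stale cached value is still returned (source "cache-stale") when GitHub is unreachable. A sketch of those semantics using the diff's own helpers, assuming the server module's context is available:

    # Names come from the diff; running this requires the server's imports.
    r1 = _fetch_repo_head('bunny-lab-io/Borealis', 'main', ttl_seconds=60)  # network hit
    r2 = _fetch_repo_head('bunny-lab-io/Borealis', 'main', ttl_seconds=60)  # cache hit
    assert r2['cached'] and r2['source'] == 'cache'
    r3 = _fetch_repo_head('bunny-lab-io/Borealis', 'main', force_refresh=True)  # bypasses TTL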
@@ -126,6 +263,8 @@ socketio = SocketIO(
     }
 )
 
+_ensure_repo_hash_worker()
+
 # ---------------------------------------------
 # Serve ReactJS Production Vite Build from dist/
 # ---------------------------------------------
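
_ensure_repo_hash_worker() runs once at import time, so the default repo's head hash keeps refreshing in the background for the life of the process. The same fire-and-forget pattern in isolation, assuming the server's eventlet runtime:

    import eventlet

    def _tick():
        while True:
            # Stand-in for _refresh_default_repo_hash(force=True).
            print('refresh repo hash')
            eventlet.sleep(60)

    eventlet.spawn_n(_tick)  # green thread, as in _ensure_repo_hash_worker()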
@@ -147,6 +286,44 @@ def serve_dist(path):
 def health():
     return jsonify({"status": "ok"})
 
+
+@app.route("/api/agent/repo_hash", methods=["GET"])
+def api_agent_repo_hash():
+    try:
+        repo = (request.args.get('repo') or _DEFAULT_REPO).strip()
+        branch = (request.args.get('branch') or _DEFAULT_BRANCH).strip()
+        refresh_flag = (request.args.get('refresh') or '').strip().lower()
+        ttl_raw = request.args.get('ttl')
+        if '/' not in repo:
+            return jsonify({"error": "repo must be in the form owner/name"}), 400
+        try:
+            ttl = int(ttl_raw) if ttl_raw else _REPO_HASH_INTERVAL
+        except ValueError:
+            ttl = _REPO_HASH_INTERVAL
+        ttl = max(30, min(ttl, 3600))
+        force_refresh = refresh_flag in {'1', 'true', 'yes', 'force', 'refresh'}
+        if repo == _DEFAULT_REPO and branch == _DEFAULT_BRANCH:
+            result = _refresh_default_repo_hash(force=force_refresh)
+        else:
+            result = _fetch_repo_head(repo, branch, ttl_seconds=ttl, force_refresh=force_refresh)
+        sha = (result.get('sha') or '').strip()
+        payload = {
+            'repo': repo,
+            'branch': branch,
+            'sha': sha if sha else None,
+            'cached': bool(result.get('cached')),
+            'age_seconds': result.get('age_seconds'),
+            'source': result.get('source'),
+        }
+        if result.get('error'):
+            payload['error'] = result['error']
+        if sha:
+            return jsonify(payload)
+        return jsonify(payload), 503
+    except Exception as exc:
+        _write_service_log('server', f'/api/agent/repo_hash error: {exc}')
+        return jsonify({"error": "internal error"}), 500
+
+
 # ---------------------------------------------
 # Server Time Endpoint
 # ---------------------------------------------
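
For completeness, a hedged example of how the updater or any script could consume the new endpoint. The host and port are assumptions; the path, query parameters, and response fields come from the diff:

    import requests

    # Base URL is an assumption; point it at wherever the Borealis server listens.
    resp = requests.get(
        'http://localhost:5000/api/agent/repo_hash',
        params={'repo': 'bunny-lab-io/Borealis', 'branch': 'main'},
        timeout=10,
    )
    data = resp.json()
    if resp.ok and data.get('sha'):
        print('latest commit:', data['sha'])
    else:
        # 400 for a malformed repo; 503 when no hash could be resolved.
        print('lookup failed:', data.get('error'))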