From 54ae31356307c0849da0b8fdb866b83ad3f34eb3 Mon Sep 17 00:00:00 2001 From: DasPoschi Date: Mon, 13 Apr 2026 17:59:29 +0000 Subject: [PATCH] fix(security+perf): SSRF protection, timing-safe auth, proxy cache, submit error handling --- jd-webgui/app.py | 45 ++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/jd-webgui/app.py b/jd-webgui/app.py index 5de0991..c857842 100644 --- a/jd-webgui/app.py +++ b/jd-webgui/app.py @@ -3,11 +3,14 @@ from __future__ import annotations import base64 import hashlib +import hmac import html as html_mod +import ipaddress import json import os import re import shlex +import socket import subprocess import threading import time @@ -106,7 +109,7 @@ def _check_basic_auth(req: Request) -> bool: if ":" not in raw: return False user, pw = raw.split(":", 1) - return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS + return hmac.compare_digest(user, BASIC_AUTH_USER) and hmac.compare_digest(pw, BASIC_AUTH_PASS) def _auth_challenge() -> HTMLResponse: return HTMLResponse( @@ -237,8 +240,26 @@ def is_demo_link(name: str) -> bool: lower = name.lower().replace("-", "_").replace(".", " ") return any(pat in lower for pat in DEMO_PATTERNS) +def _is_ssrf_target(url: str) -> bool: + """Return True if the URL resolves to a private/loopback address (SSRF protection).""" + try: + host = urllib.parse.urlparse(url).hostname or "" + try: + addr = ipaddress.ip_address(host) + except ValueError: + try: + host = socket.gethostbyname(host) + addr = ipaddress.ip_address(host) + except Exception: + return False + return addr.is_private or addr.is_loopback or addr.is_link_local or addr.is_reserved + except Exception: + return False + def check_url_reachable(url: str) -> Optional[str]: """Try a HEAD request to verify the URL is reachable. 
Returns error string or None."""
+    if _is_ssrf_target(url):
+        return "URL zeigt auf eine interne/private Adresse (nicht erlaubt)"
     try:
         req = urllib.request.Request(url, method="HEAD")
         req.add_header("User-Agent", "Mozilla/5.0")
@@ -435,13 +456,24 @@ def format_proxy_lines(raw: str, scheme: str) -> str:
     return "\n".join(dedup)
 
 
+_PROXY_FETCH_LIMIT = 2 * 1024 * 1024  # 2 MB cap to prevent memory exhaustion
+_proxy_cache: Dict[str, Tuple[float, str]] = {}
+_PROXY_CACHE_TTL = 300.0  # 5 minutes
+
 def fetch_proxy_list(url: str) -> str:
+    now = time.time()
+    cached_ts, cached_text = _proxy_cache.get(url, (0.0, ""))
+    if cached_text and now - cached_ts < _PROXY_CACHE_TTL:
+        return cached_text
     req = urllib.request.Request(url)
     log_connection(f"HTTP GET {url} (no-proxy)")
     with NO_PROXY_OPENER.open(req, timeout=20) as resp:
-        text = resp.read().decode("utf-8", "replace")
-        if "\n" not in text and re.search(r"\s", text):
-            return re.sub(r"\s+", "\n", text.strip())
+        text = resp.read(_PROXY_FETCH_LIMIT).decode("utf-8", "replace")
+        if "\n" not in text and re.search(r"\s", text):
+            text = re.sub(r"\s+", "\n", text.strip())
+        _proxy_cache[url] = (now, text)
     return text
 
 def build_jdproxies_payload(text: str) -> Dict[str, Any]:
@@ -1181,7 +1213,10 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
         log_connection(f"URL-Check fehlgeschlagen: {url} -> {url_err}")
         return HTMLResponse(render_page(f"Link nicht erreichbar: {url_err}"), status_code=400)
 
-    dev = get_device()
+    try:
+        dev = get_device()
+    except Exception as e:
+        return HTMLResponse(render_page(f"JDownloader nicht erreichbar: {e}"), status_code=503)
     resp = dev.linkgrabber.add_links([{
         "links": url,
         "autostart": True,