From 30329ef72a382e46caa98e92ecc11a5dbfbe1e8f Mon Sep 17 00:00:00 2001 From: DasPoschi Date: Thu, 1 Jan 2026 20:18:30 +0100 Subject: [PATCH] Fetch proxy lists from upstream sources --- jd-webgui/app.py | 316 ++++++++++++++++++++++++++++++++++++- jd-webgui/static/style.css | 8 + 2 files changed, 316 insertions(+), 8 deletions(-) diff --git a/jd-webgui/app.py b/jd-webgui/app.py index 9bd5d8b..4fc2064 100644 --- a/jd-webgui/app.py +++ b/jd-webgui/app.py @@ -117,6 +117,8 @@ class Job: library: str # movies|series|auto status: str # queued|collecting|downloading|upload|finished|failed message: str + progress: float = 0.0 + cancel_requested: bool = False jobs: Dict[str, Job] = {} lock = threading.Lock() @@ -205,12 +207,24 @@ def md5_file(path: str) -> str: return h.hexdigest() def write_md5_sidecar(file_path: str, md5_hex: str) -> str: - os.makedirs(MD5_DIR, exist_ok=True) base = os.path.basename(file_path) - md5_path = os.path.join(MD5_DIR, base + ".md5") - with open(md5_path, "w", encoding="utf-8") as f: - f.write(f"{md5_hex} {base}\n") - return md5_path + candidates = [MD5_DIR, "/tmp/md5"] + last_err: Optional[Exception] = None + + for target in candidates: + try: + os.makedirs(target, exist_ok=True) + md5_path = os.path.join(target, base + ".md5") + with open(md5_path, "w", encoding="utf-8") as f: + f.write(f"{md5_hex} {base}\n") + return md5_path + except PermissionError as exc: + last_err = exc + continue + + if last_err: + raise last_err + raise RuntimeError("Failed to write MD5 sidecar file.") def ffprobe_ok(path: str) -> bool: try: @@ -283,7 +297,10 @@ def tmdb_search_movie(query: str) -> Optional[Dict[str, Any]]: return None q = urllib.parse.quote(query.strip()) url = f"https://api.themoviedb.org/3/search/movie?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}" - data = _http_get_json(url) + try: + data = _http_get_json(url) + except Exception: + return None results = data.get("results") or [] return results[0] if results else None 
@@ -292,7 +309,10 @@ def tmdb_search_tv(query: str) -> Optional[Dict[str, Any]]: return None q = urllib.parse.quote(query.strip()) url = f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}" - data = _http_get_json(url) + try: + data = _http_get_json(url) + except Exception: + return None results = data.get("results") or [] return results[0] if results else None @@ -301,6 +321,50 @@ def sanitize_name(name: str) -> str: out = "".join("_" if c in bad else c for c in name).strip() return re.sub(r"\s+", " ", out) +def format_proxy_lines(raw: str, scheme: str) -> str: + """ + Takes raw lines (ip:port or scheme://ip:port) and outputs normalized lines: + scheme://ip:port (one per line). Ignores empty lines and comments. + """ + scheme = scheme.strip().lower() + if scheme not in {"socks5", "socks4", "http"}: + raise ValueError("Unsupported proxy scheme") + + out = [] + for line in (raw or "").splitlines(): + s = line.strip() + if not s or s.startswith("#"): + continue + + if "://" in s: + s = s.split("://", 1)[1].strip() + + if ":" not in s: + continue + + host, port = s.rsplit(":", 1) + host = host.strip() + port = port.strip() + + if not host or not port.isdigit(): + continue + + out.append(f"{scheme}://{host}:{port}") + + seen = set() + dedup = [] + for x in out: + if x not in seen: + seen.add(x) + dedup.append(x) + + return "\n".join(dedup) + +def fetch_proxy_list(url: str) -> str: + req = urllib.request.Request(url) + with urllib.request.urlopen(req, timeout=20) as resp: + return resp.read().decode("utf-8", "replace") + def pick_library_target(library_choice: str, filename: str, package_name: str) -> str: if library_choice not in {"movies", "series", "auto"}: library_choice = "auto" @@ -389,6 +453,10 @@ def query_links_and_packages(dev, jobid: str) -> Tuple[List[Dict[str, Any]], Dic "name": True, "finished": True, "running": True, + "bytesLoaded": True, + "bytesTotal": True, + "bytes": True, + 
"totalBytes": True, "status": True, "packageUUID": True, "uuid": True, @@ -461,6 +529,86 @@ def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)." +def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]: + link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None] + pkg_ids = list(pkg_map.keys()) + + candidates = [ + ("downloads", "removeLinks"), + ("downloads", "remove_links"), + ("downloads", "deleteLinks"), + ("downloads", "delete_links"), + ("downloadcontroller", "removeLinks"), + ("downloadcontroller", "remove_links"), + ] + + payloads = [ + {"linkUUIDs": link_ids, "packageUUIDs": pkg_ids, "deleteFiles": True}, + {"linkIds": link_ids, "packageIds": pkg_ids, "deleteFiles": True}, + {"linkUUIDs": link_ids, "deleteFiles": True}, + {"packageUUIDs": pkg_ids, "deleteFiles": True}, + {"linkUUIDs": link_ids, "packageUUIDs": pkg_ids, "removeFiles": True}, + {"linkIds": link_ids, "packageIds": pkg_ids, "removeFiles": True}, + ] + + for ns, fn in candidates: + obj = getattr(dev, ns, None) + if obj is None: + continue + meth = getattr(obj, fn, None) + if meth is None: + continue + for payload in payloads: + try: + meth([payload]) + return None + except Exception: + continue + + return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)." 
+ +def cancel_job(dev, jobid: str) -> Optional[str]: + links, pkg_map = query_links_and_packages(dev, jobid) + local_paths = local_paths_from_links(links, pkg_map) + for path in local_paths: + try: + if os.path.isfile(path): + os.remove(path) + except Exception: + pass + try: + sidecar = os.path.join(MD5_DIR, os.path.basename(path) + ".md5") + if os.path.isfile(sidecar): + os.remove(sidecar) + except Exception: + pass + return try_cancel_from_jd(dev, links, pkg_map) + +def calculate_progress(links: List[Dict[str, Any]]) -> float: + total = 0 + loaded = 0 + for link in links: + bytes_total = link.get("bytesTotal") + if bytes_total is None: + bytes_total = link.get("totalBytes") + if bytes_total is None: + bytes_total = link.get("bytes") + bytes_loaded = link.get("bytesLoaded") + if bytes_total is None or bytes_loaded is None: + continue + try: + bytes_total = int(bytes_total) + bytes_loaded = int(bytes_loaded) + except (TypeError, ValueError): + continue + if bytes_total <= 0: + continue + total += bytes_total + loaded += min(bytes_loaded, bytes_total) + if total <= 0: + return 0.0 + return max(0.0, min(100.0, (loaded / total) * 100.0)) + # ============================================================ # Worker # ============================================================ @@ -474,6 +622,13 @@ def worker(jobid: str): job = jobs.get(jobid) if not job: return + if job.cancel_requested: + cancel_msg = cancel_job(dev, jobid) + with lock: + job.status = "canceled" + job.message = cancel_msg or "Download abgebrochen und Dateien entfernt." 
+ job.progress = 0.0 + return links, pkg_map = query_links_and_packages(dev, jobid) @@ -481,15 +636,18 @@ def worker(jobid: str): with lock: job.status = "collecting" job.message = "Warte auf Link-Crawler…" + job.progress = 0.0 time.sleep(POLL_SECONDS) continue all_finished = all(bool(l.get("finished")) for l in links) if not all_finished: + progress = calculate_progress(links) with lock: job.status = "downloading" done = sum(1 for l in links if l.get("finished")) job.message = f"Download läuft… ({done}/{len(links)} fertig)" + job.progress = progress time.sleep(POLL_SECONDS) continue @@ -500,6 +658,7 @@ def worker(jobid: str): with lock: job.status = "failed" job.message = "Keine Video-Datei gefunden (Whitelist)." + job.progress = 0.0 return valid_videos = [p for p in video_files if ffprobe_ok(p)] @@ -507,11 +666,13 @@ def worker(jobid: str): with lock: job.status = "failed" job.message = "ffprobe: keine gültige Video-Datei." + job.progress = 0.0 return with lock: job.status = "upload" job.message = f"Download fertig. MD5/Upload/Verify für {len(valid_videos)} Datei(en)…" + job.progress = 100.0 ssh = ssh_connect() try: @@ -551,6 +712,7 @@ def worker(jobid: str): with lock: job.status = "finished" job.message = "Upload + MD5 OK. " + (jd_cleanup_msg or "JDownloader: Paket/Links entfernt.") + job.progress = 100.0 return except Exception as e: @@ -559,6 +721,7 @@ def worker(jobid: str): if job: job.status = "failed" job.message = str(e) + job.progress = 0.0 # ============================================================ # Web @@ -573,13 +736,27 @@ def render_page(error: str = "") -> str: job_list = list(jobs.values())[::-1] for j in job_list: + progress_pct = f"{j.progress:.1f}%" + progress_html = ( + f"
<div class='progress-row'>" f"<progress value='{j.progress:.1f}' max='100'></progress>" f"<span class='progress-text'>{progress_pct}</span>" f"</div>
" + ) + cancel_html = "" + if j.status not in {"finished", "failed", "canceled"}: + cancel_html = ( + f"
<form class='inline-form' method='post' action='/cancel/{j.id}'>" f"<button class='danger' type='submit'>Abbrechen</button>" f"</form>
" + ) rows += ( f"" f"{j.id}" f"{j.url}" f"{j.package_name}" f"{j.library}" - f"{j.status}
{j.message}" + f"{j.status}
{j.message}{progress_html}{cancel_html}" f"" ) @@ -591,9 +768,16 @@ def render_page(error: str = "") -> str: JD → Jellyfin +

<h1>JD → Jellyfin</h1>

+ {render_nav("downloads")} {err_html}
@@ -634,6 +818,62 @@ def render_page(error: str = "") -> str: """ +def render_nav(active: str) -> str: + def link(label: str, href: str, key: str) -> str: + style = "font-weight:700;" if active == key else "" + return f"{label}" + return ( + "
<div class='nav'>" + link("Downloads", "/", "downloads") + link("Proxies", "/proxies", "proxies") + "</div>
" + ) + +def render_proxies_page(error: str = "", socks5_in: str = "", socks4_in: str = "", http_in: str = "", out_text: str = "") -> str: + err_html = f"

<div class='error'>{error}</div>

" if error else "" + return f""" + + + + + JD → Jellyfin (Proxies) + + +

<h1>JD → Jellyfin</h1>

+ {render_nav("proxies")} + {err_html} + + +
+
+ +
+ +
+
+ +
+ +
+
+ +
+ + +
+ +

<h2>JDownloader Import-Liste</h2>

+

<div class="hint">Format: socks5://IP:PORT, socks4://IP:PORT, http://IP:PORT. Keine Prüfung/Validierung.</div>

+ +
+ +
+ + + + + """ + @app.get("/", response_class=HTMLResponse) def index(): try: @@ -672,9 +912,69 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo library=library, status="queued", message="Download gestartet", + progress=0.0, ) t = threading.Thread(target=worker, args=(jobid,), daemon=True) t.start() return RedirectResponse(url="/", status_code=303) + +@app.post("/cancel/{jobid}") +def cancel(jobid: str): + with lock: + job = jobs.get(jobid) + if not job: + return RedirectResponse(url="/", status_code=303) + if job.status in {"finished", "failed", "canceled"}: + return RedirectResponse(url="/", status_code=303) + job.cancel_requested = True + job.message = "Abbruch angefordert…" + return RedirectResponse(url="/", status_code=303) + +@app.get("/proxies", response_class=HTMLResponse) +def proxies_get(): + try: + socks5_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks5.txt") + socks4_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt") + http_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt") + + s5 = format_proxy_lines(socks5_in, "socks5") + s4 = format_proxy_lines(socks4_in, "socks4") + hp = format_proxy_lines(http_in, "http") + combined = "\n".join([x for x in [s5, s4, hp] if x.strip()]) + return HTMLResponse(render_proxies_page( + socks5_in=socks5_in, + socks4_in=socks4_in, + http_in=http_in, + out_text=combined + )) + except Exception as e: + return HTMLResponse(render_proxies_page(error=str(e)), status_code=502) + +@app.post("/proxies", response_class=HTMLResponse) +def proxies_post( + socks5_in: str = Form(""), + socks4_in: str = Form(""), + http_in: str = Form(""), +): + try: + s5 = format_proxy_lines(socks5_in, "socks5") + s4 = format_proxy_lines(socks4_in, "socks4") + hp = format_proxy_lines(http_in, "http") + + combined = "\n".join([x for x in [s5, s4, hp] if x.strip()]) + return 
HTMLResponse(render_proxies_page( + socks5_in=socks5_in, + socks4_in=socks4_in, + http_in=http_in, + out_text=combined + )) + except Exception as e: + return HTMLResponse(render_proxies_page( + error=str(e), + socks5_in=socks5_in, + socks4_in=socks4_in, + http_in=http_in, + out_text="" + ), status_code=400) diff --git a/jd-webgui/static/style.css b/jd-webgui/static/style.css index 9536602..6232238 100644 --- a/jd-webgui/static/style.css +++ b/jd-webgui/static/style.css @@ -4,9 +4,17 @@ form { background:#fff; border:1px solid #e5e5e5; border-radius:10px; padding:14 .row { margin-bottom: 10px; } input, select { padding:10px; border:1px solid #ccc; border-radius:8px; font-size:14px; width: 100%; max-width: 860px; } button { padding:10px 14px; border:0; border-radius:8px; font-weight:600; cursor:pointer; } +button.danger { background:#b00020; color:#fff; } +progress { width: 100%; height: 12px; } +progress::-webkit-progress-bar { background:#f0f0f0; border-radius:8px; } +progress::-webkit-progress-value { background:#1b7f3a; border-radius:8px; } +progress::-moz-progress-bar { background:#1b7f3a; border-radius:8px; } table { margin-top:16px; width:100%; border-collapse: collapse; background:#fff; border:1px solid #e5e5e5; border-radius:10px; overflow:hidden; } th, td { border-top:1px solid #eee; padding:10px; vertical-align: top; font-size:14px; } th { background:#fbfbfb; text-align:left; } code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; background:#f2f2f2; padding:2px 4px; border-radius:4px; } .hint { color:#555; font-size: 12px; margin-top: 10px; } .error { color:#b00020; font-weight: 700; } +.progress-row { display:flex; align-items:center; gap:8px; margin-top:6px; } +.progress-text { font-size:12px; color:#333; min-width:48px; } +.inline-form { margin-top:6px; }