Compare commits

...

18 Commits

Author SHA1 Message Date
7a7f9979cd fix: correct escape sequences in fetch_proxy_list (syntax error) 2026-04-13 18:16:23 +00:00
6b17e8bc06 docker-compose.yml aktualisiert 2026-04-13 18:06:01 +00:00
54ae313563 fix(security+perf): SSRF protection, timing-safe auth, proxy cache, submit error handling 2026-04-13 17:59:29 +00:00
b34d4062a4 fix(docker): run as non-root user (appuser:1000), use requirements.txt 2026-04-13 17:59:08 +00:00
79230d62a2 chore: extract pip dependencies to requirements.txt 2026-04-13 17:59:08 +00:00
71b6645157 fix(docker): use SSH_KEY_PATH env var and add env_file for jd-webgui 2026-04-13 17:58:49 +00:00
0fe0e436aa Update docker-compose.yml 2026-04-12 16:43:36 +02:00
6d103d42c5 Merge pull request #19 from DasPoschi/claude/audit-security-performance-pWwx2
Add security hardening and XSS protection
2026-04-06 09:49:22 +02:00
Claude
a879543a1c Security audit: fix XSS, missing function, improve SSH & URL handling
- Fix XSS: HTML-escape all user input (URLs, package names, errors, proxy data)
- Fix NameError: add missing is_demo_link() function (called but undefined)
- Fix: remove unused http_in fetch in proxies_get()
- Security: mask API keys in log output (TMDB key no longer visible in logs)
- Security: use known_hosts for SSH host key verification when available
- Security: remove .env from git tracking, add .env.example template
- Usability: add URL reachability check before submitting to JDownloader
- Usability: add "Erledigte Jobs entfernen" button to clear finished/failed jobs
- Usability: color-code job status (red for failed, green for finished)
- Docs: add security section to README (known_hosts, HTTPS, .env)

https://claude.ai/code/session_01S774Pqazr2U8vkSyhUBgDs
2026-04-06 07:46:53 +00:00
44e4354d1f Merge pull request #18 from DasPoschi/codex/fix-jdownloader-api-package-removal-error-54zoo0
Detect demo link downloads and fail early
2026-01-21 21:25:03 +01:00
f87f0f5cdc Merge branch 'main' into codex/fix-jdownloader-api-package-removal-error-54zoo0 2026-01-21 21:23:26 +01:00
68353b33aa Detect demo link downloads and fail early 2026-01-21 21:22:59 +01:00
c3b1fcadfa Merge pull request #17 from DasPoschi/codex/fix-jdownloader-api-package-removal-error
Add raw MyJDownloader API fallback for removing/canceling links
2026-01-21 21:09:25 +01:00
25ad8c05d0 Add raw API cleanup fallback for JDownloader 2026-01-21 21:08:48 +01:00
b65cb53463 Merge pull request #16 from DasPoschi/codex/fetch-proxies-from-proxyscrape-api-4xe4oq
Remove proxy blacklist and HTTP proxy handling; use ProxyScrape SOCKS lists
2026-01-04 14:46:16 +01:00
6c13fbbb2f Merge branch 'main' into codex/fetch-proxies-from-proxyscrape-api-4xe4oq 2026-01-04 14:46:06 +01:00
33282ddbcb Remove proxy blacklist filters 2026-01-04 14:45:44 +01:00
7795e22744 Merge pull request #15 from DasPoschi/codex/fetch-proxies-from-proxyscrape-api-4vaqb3
Remove HTTP proxies from proxy UI
2026-01-04 14:27:14 +01:00
6 changed files with 1368 additions and 1222 deletions

View File

@@ -53,3 +53,9 @@ BASIC_AUTH_PASS=CHANGE_ME
# ===== Polling ===== # ===== Polling =====
POLL_SECONDS=5 POLL_SECONDS=5
# ===== SSH host key verification (optional) =====
# Path to known_hosts file inside container. If present, strict host key
# checking is used. If absent, all host keys are accepted (less secure).
# Generate with: ssh-keyscan -p 22 192.168.1.1 > known_hosts
# SSH_KNOWN_HOSTS=/ssh/known_hosts

View File

@@ -11,7 +11,7 @@ Web GUI to:
## Files ## Files
- `docker-compose.yml` stack - `docker-compose.yml` stack
- `.env.example` copy to `.env` and fill values - `.env.example` copy to `.env` and fill in your values (**never commit `.env`!**)
- `jd-webgui/app.py` FastAPI web app - `jd-webgui/app.py` FastAPI web app
- `jd-webgui/Dockerfile` includes ffprobe - `jd-webgui/Dockerfile` includes ffprobe
@@ -40,6 +40,16 @@ docker compose up -d --build
- If `MYJD_DEVICE` is empty, the WebGUI will automatically pick the first available device. - If `MYJD_DEVICE` is empty, the WebGUI will automatically pick the first available device.
- Ensure the SSH user can write to `/jellyfin/Filme` (and series dir if used). - Ensure the SSH user can write to `/jellyfin/Filme` (and series dir if used).
## Security
- **Never commit `.env`** — it contains passwords and API keys. Only `.env.example` is tracked.
- **SSH host key verification**: For secure SFTP transfers, provide a `known_hosts` file:
```bash
ssh-keyscan -p 22 192.168.1.1 > known_hosts
```
Mount it in `docker-compose.yml` and set `SSH_KNOWN_HOSTS=/ssh/known_hosts`.
Without it, any host key is accepted (MITM risk on untrusted networks).
- **Basic Auth** protects the WebGUI but transmits credentials in cleartext over HTTP. Use a reverse proxy with HTTPS (e.g. Traefik, Caddy) in production.
## Troubleshooting ## Troubleshooting
- Device not found: list devices - Device not found: list devices
```bash ```bash

View File

@@ -21,11 +21,9 @@ services:
- jdownloader - jdownloader
ports: ports:
- "8080:8080" - "8080:8080"
env_file:
- .env
environment: environment:
TZ: Europe/Berlin TZ: Europe/Berlin
volumes: volumes:
- ./data/jd-output:/output:rw - ./data/jd-output:/output:rw
- ./data/md5:/md5:rw - ./data/md5:/md5:rw
- /root/.ssh/id_ed25519:/ssh/id_ed25519:ro - ${SSH_KEY_PATH:-/root/.ssh/id_ed25519}:/ssh/id_ed25519:ro

View File

@@ -2,19 +2,17 @@ FROM python:3.12-slim
WORKDIR /app WORKDIR /app
RUN apt-get update \ RUN apt-get update && apt-get install -y --no-install-recommends ffmpeg && rm -rf /var/lib/apt/lists/*
&& apt-get install -y --no-install-recommends ffmpeg \
&& rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir \ COPY requirements.txt .
fastapi \ RUN pip install --no-cache-dir -r requirements.txt
uvicorn \
myjdapi \
paramiko \
python-multipart
COPY app.py /app/app.py RUN useradd -m -u 1000 appuser && chown appuser:appuser /app
COPY static /app/static
USER appuser
COPY --chown=appuser:appuser app.py .
COPY --chown=appuser:appuser static ./static
EXPOSE 8080 EXPOSE 8080
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"] CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]

View File

@@ -3,10 +3,14 @@ from __future__ import annotations
import base64 import base64
import hashlib import hashlib
import hmac
import html as html_mod
import ipaddress
import json import json
import os import os
import re import re
import shlex import shlex
import socket
import subprocess import subprocess
import threading import threading
import time import time
@@ -63,6 +67,16 @@ URL_RE = re.compile(r"^https?://", re.I)
NO_PROXY_OPENER = urllib.request.build_opener(urllib.request.ProxyHandler({})) NO_PROXY_OPENER = urllib.request.build_opener(urllib.request.ProxyHandler({}))
def esc(s: str) -> str:
"""HTML-escape a string to prevent XSS."""
return html_mod.escape(str(s), quote=True)
def mask_secret(value: str, visible: int = 4) -> str:
"""Mask a secret string, showing only the last `visible` characters."""
if len(value) <= visible:
return "***"
return "***" + value[-visible:]
VIDEO_EXTS = { VIDEO_EXTS = {
".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm", ".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm",
".ts", ".m2ts", ".mts", ".mpg", ".mpeg", ".vob", ".ogv", ".ts", ".m2ts", ".mts", ".mpg", ".mpeg", ".vob", ".ogv",
@@ -95,7 +109,7 @@ def _check_basic_auth(req: Request) -> bool:
if ":" not in raw: if ":" not in raw:
return False return False
user, pw = raw.split(":", 1) user, pw = raw.split(":", 1)
return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS return hmac.compare_digest(user, BASIC_AUTH_USER) and hmac.compare_digest(pw, BASIC_AUTH_PASS)
def _auth_challenge() -> HTMLResponse: def _auth_challenge() -> HTMLResponse:
return HTMLResponse( return HTMLResponse(
@@ -219,6 +233,47 @@ def is_video_file(path: str) -> bool:
return False return False
return ext in VIDEO_EXTS return ext in VIDEO_EXTS
DEMO_PATTERNS = {"big_buck_bunny", "bigbuckbunny", "big buck bunny", "bbb_sunflower"}
def is_demo_link(name: str) -> bool:
"""Detect JDownloader demo/fallback videos (e.g. Big Buck Bunny)."""
lower = name.lower().replace("-", "_").replace(".", " ")
return any(pat in lower for pat in DEMO_PATTERNS)
def _is_ssrf_target(url: str) -> bool:
"""Return True if the URL resolves to a private/loopback address (SSRF protection)."""
try:
host = urllib.parse.urlparse(url).hostname or ""
try:
addr = ipaddress.ip_address(host)
except ValueError:
try:
host = socket.gethostbyname(host)
addr = ipaddress.ip_address(host)
except Exception:
return False
return addr.is_private or addr.is_loopback or addr.is_link_local or addr.is_reserved
except Exception:
return False
def check_url_reachable(url: str) -> Optional[str]:
"""Try a HEAD request to verify the URL is reachable. Returns error string or None."""
if _is_ssrf_target(url):
return "URL zeigt auf eine interne/private Adresse (nicht erlaubt)"
try:
req = urllib.request.Request(url, method="HEAD")
req.add_header("User-Agent", "Mozilla/5.0")
with urllib.request.urlopen(req, timeout=10) as resp:
if resp.status >= 400:
return f"URL antwortet mit HTTP {resp.status}"
except urllib.error.HTTPError as e:
return f"URL nicht erreichbar: HTTP {e.code}"
except urllib.error.URLError as e:
return f"URL nicht erreichbar: {e.reason}"
except Exception as e:
return f"URL-Check fehlgeschlagen: {e}"
return None
def md5_file(path: str) -> str: def md5_file(path: str) -> str:
h = hashlib.md5() h = hashlib.md5()
with open(path, "rb") as f: with open(path, "rb") as f:
@@ -262,10 +317,17 @@ def ffprobe_ok(path: str) -> bool:
# ============================================================ # ============================================================
# SSH/SFTP # SSH/SFTP
# ============================================================ # ============================================================
SSH_KNOWN_HOSTS = os.environ.get("SSH_KNOWN_HOSTS", "/ssh/known_hosts")
def ssh_connect() -> paramiko.SSHClient: def ssh_connect() -> paramiko.SSHClient:
ssh = paramiko.SSHClient() ssh = paramiko.SSHClient()
if os.path.isfile(SSH_KNOWN_HOSTS):
ssh.load_host_keys(SSH_KNOWN_HOSTS)
ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT} (known_hosts verified)")
else:
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT}") log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT} (WARNING: no known_hosts, accepting any host key)")
ssh.connect( ssh.connect(
hostname=JELLYFIN_HOST, hostname=JELLYFIN_HOST,
port=JELLYFIN_PORT, port=JELLYFIN_PORT,
@@ -310,9 +372,19 @@ def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
# ============================================================ # ============================================================
# TMDB & naming # TMDB & naming
# ============================================================ # ============================================================
def _sanitize_url_for_log(url: str) -> str:
"""Remove sensitive query params (api_key) from URLs before logging."""
parsed = urllib.parse.urlparse(url)
params = urllib.parse.parse_qs(parsed.query, keep_blank_values=True)
for key in ("api_key", "apikey", "token"):
if key in params:
params[key] = ["***"]
safe_query = urllib.parse.urlencode(params, doseq=True)
return urllib.parse.urlunparse(parsed._replace(query=safe_query))
def _http_get_json(url: str, headers: Optional[Dict[str, str]] = None) -> Any: def _http_get_json(url: str, headers: Optional[Dict[str, str]] = None) -> Any:
req = urllib.request.Request(url, headers=headers or {}) req = urllib.request.Request(url, headers=headers or {})
log_connection(f"HTTP GET {url} (no-proxy)") log_connection(f"HTTP GET {_sanitize_url_for_log(url)} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as r: with NO_PROXY_OPENER.open(req, timeout=20) as r:
return json.loads(r.read().decode("utf-8", "replace")) return json.loads(r.read().decode("utf-8", "replace"))
@@ -384,41 +456,27 @@ def format_proxy_lines(raw: str, scheme: str) -> str:
return "\n".join(dedup) return "\n".join(dedup)
_PROXY_FETCH_LIMIT = 2 * 1024 * 1024 # 2 MB cap to prevent memory exhaustion
_proxy_cache: Dict[str, Tuple[float, str]] = {}
_PROXY_CACHE_TTL = 300.0 # 5 minutes
def fetch_proxy_list(url: str) -> str: def fetch_proxy_list(url: str) -> str:
now = time.time()
cached_ts, cached_text = _proxy_cache.get(url, (0.0, ""))
if cached_text and now - cached_ts < _PROXY_CACHE_TTL:
return cached_text
req = urllib.request.Request(url) req = urllib.request.Request(url)
log_connection(f"HTTP GET {url} (no-proxy)") log_connection(f"HTTP GET {url} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as resp: with NO_PROXY_OPENER.open(req, timeout=20) as resp:
text = resp.read().decode("utf-8", "replace") text = resp.read(_PROXY_FETCH_LIMIT).decode("utf-8", "replace")
if "\n" not in text and re.search(r"\s", text): if "\n" not in text and re.search(r"\s", text):
return re.sub(r"\s+", "\n", text.strip()) text = re.sub(r"\s+", "\n", text.strip())
_proxy_cache[url] = (now, text)
return text return text
def build_jdproxies_payload(text: str) -> Dict[str, Any]: def build_jdproxies_payload(text: str) -> Dict[str, Any]:
if not text.strip(): if not text.strip():
raise ValueError("Keine Proxy-Einträge zum Speichern.") raise ValueError("Keine Proxy-Einträge zum Speichern.")
blacklist_filter = {
"entries": [
"# Dies ist ein Kommentar",
"// Dies ist auch ein Kommentar",
"# Für jdownloader.org auskommentieren",
"# jdownloader.org",
"# unten für alle Accounts mit der ID 'test *' @ jdownloader.org auskommentieren",
"#test@jdownloader.org",
"# Kommentar unten für ein Konto mit der ID 'test' @ jdownloader.org",
"#test$@jdownloader.org",
"# Sie können Muster für Konto-ID und Host verwenden, z. B. accountPattern @ hostPattern",
"",
"my.jdownloader.org",
"",
"api.jdownloader.org",
"",
"*.jdownloader.org",
"",
"*.your-server.de",
"88.99.115.46",
],
"type": "BLACKLIST",
}
entries: List[Dict[str, Any]] = [] entries: List[Dict[str, Any]] = []
type_map = { type_map = {
"socks5": "SOCKS5", "socks5": "SOCKS5",
@@ -453,7 +511,7 @@ def build_jdproxies_payload(text: str) -> Dict[str, Any]:
if not proxy_type: if not proxy_type:
continue continue
entries.append({ entries.append({
"filter": blacklist_filter, "filter": None,
"proxy": { "proxy": {
"address": parsed.hostname, "address": parsed.hostname,
"password": None, "password": None,
@@ -615,6 +673,31 @@ def local_paths_from_links(links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[
out.append(p) out.append(p)
return out return out
def call_raw_jd_api(dev, endpoints: List[str], payloads: List[Dict[str, Any]]) -> bool:
method_candidates = ["action", "call", "api", "request"]
for method_name in method_candidates:
method = getattr(dev, method_name, None)
if method is None:
continue
for endpoint in endpoints:
for payload in payloads:
try:
method(endpoint, payload)
return True
except TypeError:
try:
method(endpoint, params=payload)
return True
except TypeError:
try:
method(endpoint, data=payload)
return True
except Exception:
continue
except Exception:
continue
return False
def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]: def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None] link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None]
pkg_ids = list(pkg_map.keys()) pkg_ids = list(pkg_map.keys())
@@ -649,6 +732,14 @@ def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
except Exception: except Exception:
continue continue
endpoint_candidates = [
"downloads/removeLinks",
"downloadsV2/removeLinks",
"downloadcontroller/removeLinks",
]
if call_raw_jd_api(dev, endpoint_candidates, payloads):
return None
return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)." return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)."
def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]: def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
@@ -687,6 +778,14 @@ def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
except Exception: except Exception:
continue continue
endpoint_candidates = [
"downloads/removeLinks",
"downloadsV2/removeLinks",
"downloadcontroller/removeLinks",
]
if call_raw_jd_api(dev, endpoint_candidates, payloads):
return None
return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)." return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)."
def cancel_job(dev, jobid: str) -> Optional[str]: def cancel_job(dev, jobid: str) -> Optional[str]:
@@ -762,6 +861,16 @@ def worker(jobid: str):
time.sleep(POLL_SECONDS) time.sleep(POLL_SECONDS)
continue continue
all_demo = all(is_demo_link(l.get("name", "")) for l in links)
if all_demo and not is_demo_link(job.url):
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "failed"
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gewünschten Links."
job.message = f"{base_msg} {cancel_msg}" if cancel_msg else base_msg
job.progress = 0.0
return
all_finished = all(bool(l.get("finished")) for l in links) all_finished = all(bool(l.get("finished")) for l in links)
if not all_finished: if not all_finished:
progress = calculate_progress(links) progress = calculate_progress(links)
@@ -880,17 +989,18 @@ def render_job_rows() -> str:
cancel_html = "" cancel_html = ""
if j.status not in {"finished", "failed", "canceled"}: if j.status not in {"finished", "failed", "canceled"}:
cancel_html = ( cancel_html = (
f"<form method='post' action='/cancel/{j.id}' class='inline-form'>" f"<form method='post' action='/cancel/{esc(j.id)}' class='inline-form'>"
f"<button type='submit' class='danger'>Abbrechen</button>" f"<button type='submit' class='danger'>Abbrechen</button>"
f"</form>" f"</form>"
) )
status_class = "error" if j.status == "failed" else ("success" if j.status == "finished" else "")
rows += ( rows += (
f"<tr>" f"<tr>"
f"<td><code>{j.id}</code></td>" f"<td><code>{esc(j.id)}</code></td>"
f"<td style='max-width:560px; word-break:break-all;'>{j.url}</td>" f"<td style='max-width:560px; word-break:break-all;'>{esc(j.url)}</td>"
f"<td>{j.package_name}</td>" f"<td>{esc(j.package_name)}</td>"
f"<td>{j.library}</td>" f"<td>{esc(j.library)}</td>"
f"<td><b>{j.status}</b><br/><small>{j.message}</small>{progress_html}{cancel_html}</td>" f"<td><b class='{status_class}'>{esc(j.status)}</b><br/><small>{esc(j.message)}</small>{progress_html}{cancel_html}</td>"
f"</tr>" f"</tr>"
) )
@@ -901,7 +1011,7 @@ def render_job_rows() -> str:
def render_page(error: str = "") -> str: def render_page(error: str = "") -> str:
rows = render_job_rows() rows = render_job_rows()
err_html = f"<p class='error'>{error}</p>" if error else "" err_html = f"<p class='error'>{esc(error)}</p>" if error else ""
auth_note = "aktiv" if _auth_enabled() else "aus" auth_note = "aktiv" if _auth_enabled() else "aus"
return f""" return f"""
<html> <html>
@@ -963,6 +1073,10 @@ def render_page(error: str = "") -> str:
{rows} {rows}
</tbody> </tbody>
</table> </table>
<form method="post" action="/clear-finished" style="margin-top:10px;">
<button type="submit" style="background:#666; color:#fff;">Erledigte Jobs entfernen</button>
</form>
</body> </body>
</html> </html>
""" """
@@ -1022,8 +1136,8 @@ def render_proxies_page(
out_text: str = "", out_text: str = "",
export_path: str = "", export_path: str = "",
) -> str: ) -> str:
err_html = f"<p class='error'>{error}</p>" if error else "" err_html = f"<p class='error'>{esc(error)}</p>" if error else ""
msg_html = f"<p class='success'>{message}</p>" if message else "" msg_html = f"<p class='success'>{esc(message)}</p>" if message else ""
return f""" return f"""
<html> <html>
<head> <head>
@@ -1040,12 +1154,12 @@ def render_proxies_page(
<form method="post" action="/proxies"> <form method="post" action="/proxies">
<div class="row"> <div class="row">
<label>SOCKS5 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/> <label>SOCKS5 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks5_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks5_in}</textarea> <textarea name="socks5_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(socks5_in)}</textarea>
</div> </div>
<div class="row"> <div class="row">
<label>SOCKS4 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/> <label>SOCKS4 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks4_in}</textarea> <textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(socks4_in)}</textarea>
</div> </div>
<button type="submit">In JDownloader-Format umwandeln</button> <button type="submit">In JDownloader-Format umwandeln</button>
@@ -1055,7 +1169,7 @@ def render_proxies_page(
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Prüfung/Validierung.</p> <p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Prüfung/Validierung.</p>
<div class="row"> <div class="row">
<textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{out_text}</textarea> <textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(out_text)}</textarea>
</div> </div>
<button type="button" onclick="navigator.clipboard.writeText(document.getElementById('out').value)">Kopieren</button> <button type="button" onclick="navigator.clipboard.writeText(document.getElementById('out').value)">Kopieren</button>
@@ -1064,12 +1178,12 @@ def render_proxies_page(
<p class="hint">Speichert die Liste als <code>.jdproxies</code> im Container, z. B. zum Import in JDownloader → Verbindungsmanager → Importieren.</p> <p class="hint">Speichert die Liste als <code>.jdproxies</code> im Container, z. B. zum Import in JDownloader → Verbindungsmanager → Importieren.</p>
<form method="post" action="/proxies/save"> <form method="post" action="/proxies/save">
<textarea name="socks5_in" style="display:none;">{socks5_in}</textarea> <textarea name="socks5_in" style="display:none;">{esc(socks5_in)}</textarea>
<textarea name="socks4_in" style="display:none;">{socks4_in}</textarea> <textarea name="socks4_in" style="display:none;">{esc(socks4_in)}</textarea>
<button type="submit">Liste als JDProxies speichern</button> <button type="submit">Liste als JDProxies speichern</button>
</form> </form>
<p class="hint">Aktueller Pfad: <code>{export_path or PROXY_EXPORT_PATH}</code></p> <p class="hint">Aktueller Pfad: <code>{esc(export_path or PROXY_EXPORT_PATH)}</code></p>
</body> </body>
</html> </html>
""" """
@@ -1092,7 +1206,15 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
if not URL_RE.match(url): if not URL_RE.match(url):
return HTMLResponse(render_page("Nur http(s) URLs erlaubt."), status_code=400) return HTMLResponse(render_page("Nur http(s) URLs erlaubt."), status_code=400)
url_err = check_url_reachable(url)
if url_err:
log_connection(f"URL-Check fehlgeschlagen: {url} -> {url_err}")
return HTMLResponse(render_page(f"Link nicht erreichbar: {url_err}"), status_code=400)
try:
dev = get_device() dev = get_device()
except Exception as e:
return HTMLResponse(render_page(f"JDownloader nicht erreichbar: {e}"), status_code=503)
resp = dev.linkgrabber.add_links([{ resp = dev.linkgrabber.add_links([{
"links": url, "links": url,
"autostart": True, "autostart": True,
@@ -1132,6 +1254,14 @@ def cancel(jobid: str):
job.message = "Abbruch angefordert…" job.message = "Abbruch angefordert…"
return RedirectResponse(url="/", status_code=303) return RedirectResponse(url="/", status_code=303)
@app.post("/clear-finished")
def clear_finished():
with lock:
to_remove = [jid for jid, j in jobs.items() if j.status in {"finished", "failed", "canceled"}]
for jid in to_remove:
del jobs[jid]
return RedirectResponse(url="/", status_code=303)
@app.get("/proxies", response_class=HTMLResponse) @app.get("/proxies", response_class=HTMLResponse)
def proxies_get(): def proxies_get():
try: try:
@@ -1141,7 +1271,6 @@ def proxies_get():
socks4_in = fetch_proxy_list( socks4_in = fetch_proxy_list(
"https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000" "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
) )
http_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt")
s5 = format_proxy_lines(socks5_in, "socks5") s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4") s4 = format_proxy_lines(socks4_in, "socks4")

View File

@@ -0,0 +1,5 @@
fastapi
uvicorn
myjdapi
paramiko
python-multipart