Compare commits

...

13 Commits

Author SHA1 Message Date
7a7f9979cd fix: correct escape sequences in fetch_proxy_list (syntax error)
2026-04-13 18:16:23 +00:00
6b17e8bc06 docker-compose.yml aktualisiert 2026-04-13 18:06:01 +00:00
54ae313563 fix(security+perf): SSRF protection, timing-safe auth, proxy cache, submit error handling 2026-04-13 17:59:29 +00:00
b34d4062a4 fix(docker): run as non-root user (appuser:1000), use requirements.txt 2026-04-13 17:59:08 +00:00
79230d62a2 chore: extract pip dependencies to requirements.txt 2026-04-13 17:59:08 +00:00
71b6645157 fix(docker): use SSH_KEY_PATH env var and add env_file for jd-webgui 2026-04-13 17:58:49 +00:00
0fe0e436aa Update docker-compose.yml 2026-04-12 16:43:36 +02:00
6d103d42c5 Merge pull request #19 from DasPoschi/claude/audit-security-performance-pWwx2
Add security hardening and XSS protection
2026-04-06 09:49:22 +02:00
Claude
a879543a1c Security audit: fix XSS, missing function, improve SSH & URL handling
- Fix XSS: HTML-escape all user input (URLs, package names, errors, proxy data)
- Fix NameError: add missing is_demo_link() function (called but undefined)
- Fix: remove unused http_in fetch in proxies_get()
- Security: mask API keys in log output (TMDB key no longer visible in logs)
- Security: use known_hosts for SSH host key verification when available
- Security: remove .env from git tracking, add .env.example template
- Usability: add URL reachability check before submitting to JDownloader
- Usability: add "Erledigte Jobs entfernen" button to clear finished/failed jobs
- Usability: color-code job status (red for failed, green for finished)
- Docs: add security section to README (known_hosts, HTTPS, .env)

https://claude.ai/code/session_01S774Pqazr2U8vkSyhUBgDs
2026-04-06 07:46:53 +00:00
44e4354d1f Merge pull request #18 from DasPoschi/codex/fix-jdownloader-api-package-removal-error-54zoo0
Detect demo link downloads and fail early
2026-01-21 21:25:03 +01:00
f87f0f5cdc Merge branch 'main' into codex/fix-jdownloader-api-package-removal-error-54zoo0 2026-01-21 21:23:26 +01:00
68353b33aa Detect demo link downloads and fail early 2026-01-21 21:22:59 +01:00
c3b1fcadfa Merge pull request #17 from DasPoschi/codex/fix-jdownloader-api-package-removal-error
Add raw MyJDownloader API fallback for removing/canceling links
2026-01-21 21:09:25 +01:00
6 changed files with 1368 additions and 1240 deletions

View File

@@ -53,3 +53,9 @@ BASIC_AUTH_PASS=CHANGE_ME
# ===== Polling =====
POLL_SECONDS=5
# ===== SSH host key verification (optional) =====
# Path to known_hosts file inside container. If present, strict host key
# checking is used. If absent, all host keys are accepted (less secure).
# Generate with: ssh-keyscan -p 22 192.168.1.1 > known_hosts
# SSH_KNOWN_HOSTS=/ssh/known_hosts

View File

@@ -11,7 +11,7 @@ Web GUI to:
## Files
- `docker-compose.yml` stack
- `.env.example` copy to `.env` and fill values
- `.env.example` copy to `.env` and fill in your values (**never commit `.env`!**)
- `jd-webgui/app.py` FastAPI web app
- `jd-webgui/Dockerfile` includes ffprobe
@@ -40,6 +40,16 @@ docker compose up -d --build
- If `MYJD_DEVICE` is empty, the WebGUI will automatically pick the first available device.
- Ensure the SSH user can write to `/jellyfin/Filme` (and series dir if used).
## Security
- **Never commit `.env`** it contains passwords and API keys. Only `.env.example` is tracked.
- **SSH host key verification**: For secure SFTP transfers, provide a `known_hosts` file:
```bash
ssh-keyscan -p 22 192.168.1.1 > known_hosts
```
Mount it in `docker-compose.yml` and set `SSH_KNOWN_HOSTS=/ssh/known_hosts`.
Without it, any host key is accepted (MITM risk on untrusted networks).
- **Basic Auth** protects the WebGUI but transmits credentials in cleartext over HTTP. Use a reverse proxy with HTTPS (e.g. Traefik, Caddy) in production.
## Troubleshooting
- Device not found: list devices
```bash

View File

@@ -21,11 +21,9 @@ services:
- jdownloader
ports:
- "8080:8080"
env_file:
- .env
environment:
TZ: Europe/Berlin
volumes:
- ./data/jd-output:/output:rw
- ./data/md5:/md5:rw
- /root/.ssh/id_ed25519:/ssh/id_ed25519:ro
- ${SSH_KEY_PATH:-/root/.ssh/id_ed25519}:/ssh/id_ed25519:ro

View File

@@ -2,19 +2,17 @@ FROM python:3.12-slim
WORKDIR /app
RUN apt-get update \
&& apt-get install -y --no-install-recommends ffmpeg \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ffmpeg && rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir \
fastapi \
uvicorn \
myjdapi \
paramiko \
python-multipart
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY app.py /app/app.py
COPY static /app/static
RUN useradd -m -u 1000 appuser && chown appuser:appuser /app
USER appuser
COPY --chown=appuser:appuser app.py .
COPY --chown=appuser:appuser static ./static
EXPOSE 8080
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]

View File

@@ -3,10 +3,14 @@ from __future__ import annotations
import base64
import hashlib
import hmac
import html as html_mod
import ipaddress
import json
import os
import re
import shlex
import socket
import subprocess
import threading
import time
@@ -63,6 +67,16 @@ URL_RE = re.compile(r"^https?://", re.I)
NO_PROXY_OPENER = urllib.request.build_opener(urllib.request.ProxyHandler({}))
def esc(s: str) -> str:
    """HTML-escape *s* (quotes included) so it is safe to embed in markup."""
    text = str(s)
    return html_mod.escape(text, quote=True)
def mask_secret(value: str, visible: int = 4) -> str:
    """Redact a secret, keeping at most the trailing `visible` characters.

    Values no longer than `visible` are fully masked so nothing leaks.
    """
    suffix = value[-visible:] if len(value) > visible else ""
    return f"***{suffix}"
VIDEO_EXTS = {
".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm",
".ts", ".m2ts", ".mts", ".mpg", ".mpeg", ".vob", ".ogv",
@@ -95,7 +109,7 @@ def _check_basic_auth(req: Request) -> bool:
if ":" not in raw:
return False
user, pw = raw.split(":", 1)
return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS
return hmac.compare_digest(user, BASIC_AUTH_USER) and hmac.compare_digest(pw, BASIC_AUTH_PASS)
def _auth_challenge() -> HTMLResponse:
return HTMLResponse(
@@ -219,6 +233,47 @@ def is_video_file(path: str) -> bool:
return False
return ext in VIDEO_EXTS
# Substrings that identify JDownloader's demo/fallback sample videos.
DEMO_PATTERNS = {"big_buck_bunny", "bigbuckbunny", "big buck bunny", "bbb_sunflower"}


def is_demo_link(name: str) -> bool:
    """Detect JDownloader demo/fallback videos (e.g. Big Buck Bunny).

    Normalizes dashes to underscores and dots to spaces so the common
    filename spellings all match the patterns above.
    """
    normalized = name.lower().replace("-", "_").replace(".", " ")
    for pattern in DEMO_PATTERNS:
        if pattern in normalized:
            return True
    return False
def _is_ssrf_target(url: str) -> bool:
"""Return True if the URL resolves to a private/loopback address (SSRF protection)."""
try:
host = urllib.parse.urlparse(url).hostname or ""
try:
addr = ipaddress.ip_address(host)
except ValueError:
try:
host = socket.gethostbyname(host)
addr = ipaddress.ip_address(host)
except Exception:
return False
return addr.is_private or addr.is_loopback or addr.is_link_local or addr.is_reserved
except Exception:
return False
def check_url_reachable(url: str) -> Optional[str]:
    """Probe *url* with a HEAD request and report why it is unusable.

    Returns a German, user-facing error message on failure, or None when
    the URL looks reachable. An SSRF pre-check rejects URLs resolving to
    private/loopback addresses before any network traffic is sent.
    """
    if _is_ssrf_target(url):
        return "URL zeigt auf eine interne/private Adresse (nicht erlaubt)"
    try:
        # HEAD keeps the probe cheap; some servers reject HEAD even though
        # GET would succeed -- NOTE(review): consider a GET fallback on 405.
        req = urllib.request.Request(url, method="HEAD")
        req.add_header("User-Agent", "Mozilla/5.0")
        with urllib.request.urlopen(req, timeout=10) as resp:
            # urlopen normally raises HTTPError for status >= 400; this is a
            # belt-and-braces guard in case the opener does not.
            if resp.status >= 400:
                return f"URL antwortet mit HTTP {resp.status}"
    except urllib.error.HTTPError as e:
        return f"URL nicht erreichbar: HTTP {e.code}"
    except urllib.error.URLError as e:
        return f"URL nicht erreichbar: {e.reason}"
    except Exception as e:
        # Catch-all so an unexpected probe failure never crashes the caller.
        return f"URL-Check fehlgeschlagen: {e}"
    return None
def md5_file(path: str) -> str:
h = hashlib.md5()
with open(path, "rb") as f:
@@ -262,10 +317,17 @@ def ffprobe_ok(path: str) -> bool:
# ============================================================
# SSH/SFTP
# ============================================================
SSH_KNOWN_HOSTS = os.environ.get("SSH_KNOWN_HOSTS", "/ssh/known_hosts")
def ssh_connect() -> paramiko.SSHClient:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT}")
if os.path.isfile(SSH_KNOWN_HOSTS):
ssh.load_host_keys(SSH_KNOWN_HOSTS)
ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT} (known_hosts verified)")
else:
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT} (WARNING: no known_hosts, accepting any host key)")
ssh.connect(
hostname=JELLYFIN_HOST,
port=JELLYFIN_PORT,
@@ -310,9 +372,19 @@ def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
# ============================================================
# TMDB & naming
# ============================================================
def _sanitize_url_for_log(url: str) -> str:
"""Remove sensitive query params (api_key) from URLs before logging."""
parsed = urllib.parse.urlparse(url)
params = urllib.parse.parse_qs(parsed.query, keep_blank_values=True)
for key in ("api_key", "apikey", "token"):
if key in params:
params[key] = ["***"]
safe_query = urllib.parse.urlencode(params, doseq=True)
return urllib.parse.urlunparse(parsed._replace(query=safe_query))
def _http_get_json(url: str, headers: Optional[Dict[str, str]] = None) -> Any:
req = urllib.request.Request(url, headers=headers or {})
log_connection(f"HTTP GET {url} (no-proxy)")
log_connection(f"HTTP GET {_sanitize_url_for_log(url)} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as r:
return json.loads(r.read().decode("utf-8", "replace"))
@@ -384,13 +456,22 @@ def format_proxy_lines(raw: str, scheme: str) -> str:
return "\n".join(dedup)
_PROXY_FETCH_LIMIT = 2 * 1024 * 1024 # 2 MB cap to prevent memory exhaustion
_proxy_cache: Dict[str, Tuple[float, str]] = {}
_PROXY_CACHE_TTL = 300.0 # 5 minutes
def fetch_proxy_list(url: str) -> str:
now = time.time()
cached_ts, cached_text = _proxy_cache.get(url, (0.0, ""))
if cached_text and now - cached_ts < _PROXY_CACHE_TTL:
return cached_text
req = urllib.request.Request(url)
log_connection(f"HTTP GET {url} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as resp:
text = resp.read().decode("utf-8", "replace")
text = resp.read(_PROXY_FETCH_LIMIT).decode("utf-8", "replace")
if "\n" not in text and re.search(r"\s", text):
return re.sub(r"\s+", "\n", text.strip())
text = re.sub(r"\s+", "\n", text.strip())
_proxy_cache[url] = (now, text)
return text
def build_jdproxies_payload(text: str) -> Dict[str, Any]:
@@ -780,6 +861,16 @@ def worker(jobid: str):
time.sleep(POLL_SECONDS)
continue
all_demo = all(is_demo_link(l.get("name", "")) for l in links)
if all_demo and not is_demo_link(job.url):
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "failed"
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gewünschten Links."
job.message = f"{base_msg} {cancel_msg}" if cancel_msg else base_msg
job.progress = 0.0
return
all_finished = all(bool(l.get("finished")) for l in links)
if not all_finished:
progress = calculate_progress(links)
@@ -898,17 +989,18 @@ def render_job_rows() -> str:
cancel_html = ""
if j.status not in {"finished", "failed", "canceled"}:
cancel_html = (
f"<form method='post' action='/cancel/{j.id}' class='inline-form'>"
f"<form method='post' action='/cancel/{esc(j.id)}' class='inline-form'>"
f"<button type='submit' class='danger'>Abbrechen</button>"
f"</form>"
)
status_class = "error" if j.status == "failed" else ("success" if j.status == "finished" else "")
rows += (
f"<tr>"
f"<td><code>{j.id}</code></td>"
f"<td style='max-width:560px; word-break:break-all;'>{j.url}</td>"
f"<td>{j.package_name}</td>"
f"<td>{j.library}</td>"
f"<td><b>{j.status}</b><br/><small>{j.message}</small>{progress_html}{cancel_html}</td>"
f"<td><code>{esc(j.id)}</code></td>"
f"<td style='max-width:560px; word-break:break-all;'>{esc(j.url)}</td>"
f"<td>{esc(j.package_name)}</td>"
f"<td>{esc(j.library)}</td>"
f"<td><b class='{status_class}'>{esc(j.status)}</b><br/><small>{esc(j.message)}</small>{progress_html}{cancel_html}</td>"
f"</tr>"
)
@@ -919,7 +1011,7 @@ def render_job_rows() -> str:
def render_page(error: str = "") -> str:
rows = render_job_rows()
err_html = f"<p class='error'>{error}</p>" if error else ""
err_html = f"<p class='error'>{esc(error)}</p>" if error else ""
auth_note = "aktiv" if _auth_enabled() else "aus"
return f"""
<html>
@@ -981,6 +1073,10 @@ def render_page(error: str = "") -> str:
{rows}
</tbody>
</table>
<form method="post" action="/clear-finished" style="margin-top:10px;">
<button type="submit" style="background:#666; color:#fff;">Erledigte Jobs entfernen</button>
</form>
</body>
</html>
"""
@@ -1040,8 +1136,8 @@ def render_proxies_page(
out_text: str = "",
export_path: str = "",
) -> str:
err_html = f"<p class='error'>{error}</p>" if error else ""
msg_html = f"<p class='success'>{message}</p>" if message else ""
err_html = f"<p class='error'>{esc(error)}</p>" if error else ""
msg_html = f"<p class='success'>{esc(message)}</p>" if message else ""
return f"""
<html>
<head>
@@ -1058,12 +1154,12 @@ def render_proxies_page(
<form method="post" action="/proxies">
<div class="row">
<label>SOCKS5 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks5_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks5_in}</textarea>
<textarea name="socks5_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(socks5_in)}</textarea>
</div>
<div class="row">
<label>SOCKS4 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks4_in}</textarea>
<textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(socks4_in)}</textarea>
</div>
<button type="submit">In JDownloader-Format umwandeln</button>
@@ -1073,7 +1169,7 @@ def render_proxies_page(
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Prüfung/Validierung.</p>
<div class="row">
<textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{out_text}</textarea>
<textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(out_text)}</textarea>
</div>
<button type="button" onclick="navigator.clipboard.writeText(document.getElementById('out').value)">Kopieren</button>
@@ -1082,12 +1178,12 @@ def render_proxies_page(
<p class="hint">Speichert die Liste als <code>.jdproxies</code> im Container, z. B. zum Import in JDownloader → Verbindungsmanager → Importieren.</p>
<form method="post" action="/proxies/save">
<textarea name="socks5_in" style="display:none;">{socks5_in}</textarea>
<textarea name="socks4_in" style="display:none;">{socks4_in}</textarea>
<textarea name="socks5_in" style="display:none;">{esc(socks5_in)}</textarea>
<textarea name="socks4_in" style="display:none;">{esc(socks4_in)}</textarea>
<button type="submit">Liste als JDProxies speichern</button>
</form>
<p class="hint">Aktueller Pfad: <code>{export_path or PROXY_EXPORT_PATH}</code></p>
<p class="hint">Aktueller Pfad: <code>{esc(export_path or PROXY_EXPORT_PATH)}</code></p>
</body>
</html>
"""
@@ -1110,7 +1206,15 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
if not URL_RE.match(url):
return HTMLResponse(render_page("Nur http(s) URLs erlaubt."), status_code=400)
dev = get_device()
url_err = check_url_reachable(url)
if url_err:
log_connection(f"URL-Check fehlgeschlagen: {url} -> {url_err}")
return HTMLResponse(render_page(f"Link nicht erreichbar: {url_err}"), status_code=400)
try:
dev = get_device()
except Exception as e:
return HTMLResponse(render_page(f"JDownloader nicht erreichbar: {e}"), status_code=503)
resp = dev.linkgrabber.add_links([{
"links": url,
"autostart": True,
@@ -1150,6 +1254,14 @@ def cancel(jobid: str):
job.message = "Abbruch angefordert…"
return RedirectResponse(url="/", status_code=303)
@app.post("/clear-finished")
def clear_finished():
    """Drop every job that has reached a terminal state, then go back home."""
    terminal = {"finished", "failed", "canceled"}
    with lock:
        # Collect first, then delete -- never mutate a dict while iterating it.
        for jid in [jid for jid, j in jobs.items() if j.status in terminal]:
            jobs.pop(jid, None)
    return RedirectResponse(url="/", status_code=303)
@app.get("/proxies", response_class=HTMLResponse)
def proxies_get():
try:
@@ -1159,7 +1271,6 @@ def proxies_get():
socks4_in = fetch_proxy_list(
"https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
)
http_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt")
s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4")

View File

@@ -0,0 +1,5 @@
fastapi
uvicorn
myjdapi
paramiko
python-multipart