Compare commits


6 Commits

4 changed files with 1351 additions and 1328 deletions

docker-compose.yml

@@ -26,4 +26,4 @@ services:
     volumes:
       - ./data/jd-output:/output:rw
       - ./data/md5:/md5:rw
-      - /root/.ssh/id_ed25519:/ssh/id_ed25519:ro
+      - ${SSH_KEY_PATH:-/root/.ssh/id_ed25519}:/ssh/id_ed25519:ro
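
Note: the `${SSH_KEY_PATH:-/root/.ssh/id_ed25519}` form tells Compose to use `SSH_KEY_PATH` when it is set and non-empty (e.g. from `.env`) and to fall back to the old hardcoded path otherwise, so the key location is no longer baked into the file. A minimal sketch of the same lookup rule in Python (the variable name comes from the diff; everything else is illustrative):

    import os

    # Mirrors Compose's ${SSH_KEY_PATH:-/root/.ssh/id_ed25519}: take the
    # env var when set and non-empty, otherwise the default path.
    ssh_key_path = os.environ.get("SSH_KEY_PATH") or "/root/.ssh/id_ed25519"
    print(ssh_key_path)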

Dockerfile

@@ -2,19 +2,17 @@ FROM python:3.12-slim
 WORKDIR /app
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends ffmpeg \
-    && rm -rf /var/lib/apt/lists/*
-RUN pip install --no-cache-dir \
-    fastapi \
-    uvicorn \
-    myjdapi \
-    paramiko \
-    python-multipart
-COPY app.py /app/app.py
-COPY static /app/static
+RUN apt-get update && apt-get install -y --no-install-recommends ffmpeg && rm -rf /var/lib/apt/lists/*
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+RUN useradd -m -u 1000 appuser && chown appuser:appuser /app
+USER appuser
+COPY --chown=appuser:appuser app.py .
+COPY --chown=appuser:appuser static ./static
 EXPOSE 8080
 CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]
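
The rebuilt Dockerfile copies requirements.txt ahead of the application code, so the dependency layer stays cached until the requirements themselves change, and it drops root by creating `appuser` (uid 1000) before the final COPY/CMD steps. A quick hedged check, run from inside the container, that the USER switch took effect (the service name is an assumption):

    # Run via e.g. `docker compose exec <service> python -`;
    # uid 1000 matches the `useradd -m -u 1000 appuser` in the diff.
    import os
    assert os.getuid() == 1000, f"unexpected uid {os.getuid()}"
    print("running as appuser")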

app.py

@@ -3,11 +3,14 @@ from __future__ import annotations
 import base64
 import hashlib
+import hmac
 import html as html_mod
+import ipaddress
 import json
 import os
 import re
 import shlex
+import socket
 import subprocess
 import threading
 import time
@@ -106,7 +109,7 @@ def _check_basic_auth(req: Request) -> bool:
     if ":" not in raw:
         return False
     user, pw = raw.split(":", 1)
-    return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS
+    return hmac.compare_digest(user, BASIC_AUTH_USER) and hmac.compare_digest(pw, BASIC_AUTH_PASS)
 
 def _auth_challenge() -> HTMLResponse:
     return HTMLResponse(
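
Note on the auth change: plain `==` returns as soon as one character differs, so response-time measurements can reveal how much of a guessed credential is correct. `hmac.compare_digest` takes the same time regardless of where the mismatch sits (it may still reveal a length difference). A minimal sketch with made-up credentials:

    import hmac

    # == short-circuits at the first differing character; compare_digest
    # does not, which closes the byte-by-byte timing side channel.
    def check(candidate: str, secret: str) -> bool:
        return hmac.compare_digest(candidate, secret)

    assert check("hunter2", "hunter2")
    assert not check("hunter1", "hunter2")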
@@ -237,8 +240,26 @@ def is_demo_link(name: str) -> bool:
     lower = name.lower().replace("-", "_").replace(".", " ")
     return any(pat in lower for pat in DEMO_PATTERNS)
 
+def _is_ssrf_target(url: str) -> bool:
+    """Return True if the URL resolves to a private/loopback address (SSRF protection)."""
+    try:
+        host = urllib.parse.urlparse(url).hostname or ""
+        try:
+            addr = ipaddress.ip_address(host)
+        except ValueError:
+            try:
+                host = socket.gethostbyname(host)
+                addr = ipaddress.ip_address(host)
+            except Exception:
+                return False
+        return addr.is_private or addr.is_loopback or addr.is_link_local or addr.is_reserved
+    except Exception:
+        return False
+
 def check_url_reachable(url: str) -> Optional[str]:
     """Try a HEAD request to verify the URL is reachable. Returns error string or None."""
+    if _is_ssrf_target(url):
+        return "URL zeigt auf eine interne/private Adresse (nicht erlaubt)"
     try:
         req = urllib.request.Request(url, method="HEAD")
         req.add_header("User-Agent", "Mozilla/5.0")
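
Two caveats on `_is_ssrf_target`, visible in the code itself: `socket.gethostbyname` resolves a single IPv4 address, so IPv6-only hosts pass through unchecked, and the later HEAD request resolves the name a second time, so a DNS record that changes between the two lookups (rebinding) can slip past. A hypothetical smoke test of the predicate, assuming app.py is importable:

    # Hypothetical check; addresses chosen for illustration only.
    from app import _is_ssrf_target

    assert _is_ssrf_target("http://127.0.0.1:8080/")     # loopback literal
    assert _is_ssrf_target("http://192.168.1.10/x")      # RFC 1918 range
    assert _is_ssrf_target("http://localhost/")          # resolves to loopback
    assert not _is_ssrf_target("http://93.184.216.34/")  # public address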
@@ -435,13 +456,22 @@ def format_proxy_lines(raw: str, scheme: str) -> str:
     return "\n".join(dedup)
 
+_PROXY_FETCH_LIMIT = 2 * 1024 * 1024  # 2 MB cap to prevent memory exhaustion
+_proxy_cache: Dict[str, Tuple[float, str]] = {}
+_PROXY_CACHE_TTL = 300.0  # 5 minutes
+
 def fetch_proxy_list(url: str) -> str:
+    now = time.time()
+    cached_ts, cached_text = _proxy_cache.get(url, (0.0, ""))
+    if cached_text and now - cached_ts < _PROXY_CACHE_TTL:
+        return cached_text
     req = urllib.request.Request(url)
     log_connection(f"HTTP GET {url} (no-proxy)")
     with NO_PROXY_OPENER.open(req, timeout=20) as resp:
-        text = resp.read().decode("utf-8", "replace")
+        text = resp.read(_PROXY_FETCH_LIMIT).decode("utf-8", "replace")
     if "\n" not in text and re.search(r"\s", text):
-        return re.sub(r"\s+", "\n", text.strip())
-    return text
+        text = re.sub(r"\s+", "\n", text.strip())
+    _proxy_cache[url] = (now, text)
+    return text
 
 def build_jdproxies_payload(text: str) -> Dict[str, Any]:
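
Here `resp.read(_PROXY_FETCH_LIMIT)` returns at most 2 MB no matter how large the response body is, and the module-level dict gives each URL a five-minute cache window (note it never evicts, so the key set grows with distinct URLs). The caching pattern in isolation, as a sketch:

    import time
    from typing import Callable, Dict, Tuple

    _cache: Dict[str, Tuple[float, str]] = {}
    TTL = 300.0  # seconds

    def cached(url: str, fetch: Callable[[str], str]) -> str:
        """Return the cached text while fresh, else fetch and store it."""
        now = time.time()
        ts, text = _cache.get(url, (0.0, ""))
        if text and now - ts < TTL:
            return text
        text = fetch(url)  # caller-supplied fetcher
        _cache[url] = (now, text)
        return text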
@@ -1168,59 +1198,49 @@ def index():
 @app.post("/submit")
 def submit(url: str = Form(...), package_name: str = Form(""), library: str = Form("auto")):
+    ensure_env()
+    url = url.strip()
+    package_name = (package_name or "").strip() or "WebGUI"
+    library = (library or "auto").strip().lower()
+    if not URL_RE.match(url):
+        return HTMLResponse(render_page("Nur http(s) URLs erlaubt."), status_code=400)
+    url_err = check_url_reachable(url)
+    if url_err:
+        log_connection(f"URL-Check fehlgeschlagen: {url} -> {url_err}")
+        return HTMLResponse(render_page(f"Link nicht erreichbar: {url_err}"), status_code=400)
     try:
-        ensure_env()
-        url = url.strip()
-        package_name = (package_name or "").strip() or "WebGUI"
-        library = (library or "auto").strip().lower()
-        if not URL_RE.match(url):
-            return HTMLResponse(render_page("Nur http(s) URLs erlaubt."), status_code=400)
-        url_err = check_url_reachable(url)
-        if url_err:
-            log_connection(f"URL-Check fehlgeschlagen: {url} -> {url_err}")
-            return HTMLResponse(render_page(f"Link nicht erreichbar: {url_err}"), status_code=400)
         dev = get_device()
-        resp = dev.linkgrabber.add_links([{
-            "links": url,
-            "autostart": True,
-            "assignJobID": True,
-            "packageName": package_name,
-        }])
-        jobid = ""
-        if isinstance(resp, dict):
-            jobid = str(resp.get("id", "")).strip()
-        elif isinstance(resp, (str, int)):
-            jobid = str(resp).strip()
-        elif isinstance(resp, list) and resp and isinstance(resp[0], dict):
-            jobid = str(resp[0].get("id", "")).strip()
-        if not jobid:
-            msg = f"Unerwartete Antwort von add_links (kein Job-ID): {resp!r}"
-            log_connection(msg)
-            return HTMLResponse(render_page(msg), status_code=502)
-        with lock:
-            jobs[jobid] = Job(
-                id=jobid,
-                url=url,
-                package_name=package_name,
-                library=library,
-                status="queued",
-                message="Download gestartet",
-                progress=0.0,
-            )
-        t = threading.Thread(target=worker, args=(jobid,), daemon=True)
-        t.start()
-        return RedirectResponse(url="/", status_code=303)
     except Exception as e:
-        log_connection(f"Submit-Fehler: {e}")
-        return HTMLResponse(render_page(f"Interner Fehler beim Absenden: {e}"), status_code=500)
+        return HTMLResponse(render_page(f"JDownloader nicht erreichbar: {e}"), status_code=503)
+    resp = dev.linkgrabber.add_links([{
+        "links": url,
+        "autostart": True,
+        "assignJobID": True,
+        "packageName": package_name,
+    }])
+    jobid = str(resp.get("id", ""))
+    if not jobid:
+        return HTMLResponse(render_page(f"Unerwartete Antwort von add_links: {resp}"), status_code=500)
+    with lock:
+        jobs[jobid] = Job(
+            id=jobid,
+            url=url,
+            package_name=package_name,
+            library=library,
+            status="queued",
+            message="Download gestartet",
+            progress=0.0,
+        )
+    t = threading.Thread(target=worker, args=(jobid,), daemon=True)
+    t.start()
+    return RedirectResponse(url="/", status_code=303)
 
 @app.post("/cancel/{jobid}")
 def cancel(jobid: str):
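
Worth flagging: the rewritten handler narrows the `try` to `get_device()` (503 when JDownloader is unreachable), but `jobid = str(resp.get("id", ""))` now assumes `add_links` returns a dict. The removed branches also accepted str/int and list replies, so a non-dict reply would now raise outside any handler and surface as an unhandled 500. A hypothetical client call against a local instance (host, port, and credentials are assumptions, not part of the diff):

    import base64
    import urllib.parse
    import urllib.request

    BASE = "http://localhost:8080"                      # assumed deployment
    CREDS = base64.b64encode(b"admin:secret").decode()  # BASIC_AUTH_USER:PASS

    data = urllib.parse.urlencode({
        "url": "https://example.com/file.zip",
        "package_name": "WebGUI",
        "library": "auto",
    }).encode()

    req = urllib.request.Request(f"{BASE}/submit", data=data)
    req.add_header("Authorization", "Basic " + CREDS)

    # Success is a 303 redirect back to "/"; urllib follows it, so a
    # final 200 means the job was queued.
    with urllib.request.urlopen(req) as resp:
        print(resp.status, resp.geturl())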

requirements.txt (new file)

@@ -0,0 +1,5 @@
+fastapi
+uvicorn
+myjdapi
+paramiko
+python-multipart