Compare commits

..

1 Commits

Author SHA1 Message Date
9376fae60b Fix md5 directory permissions fallback 2025-12-31 18:53:56 +01:00
7 changed files with 722 additions and 1379 deletions

View File

@@ -53,9 +53,3 @@ BASIC_AUTH_PASS=CHANGE_ME
# ===== Polling =====
POLL_SECONDS=5
# ===== SSH host key verification (optional) =====
# Path to known_hosts file inside container. If present, strict host key
# checking is used. If absent, all host keys are accepted (less secure).
# Generate with: ssh-keyscan -p 22 192.168.1.1 > known_hosts
# SSH_KNOWN_HOSTS=/ssh/known_hosts

View File

@@ -11,7 +11,7 @@ Web GUI to:
## Files
- `docker-compose.yml` stack
- `.env.example` copy to `.env` and fill in your values (**never commit `.env`!**)
- `.env.example` copy to `.env` and fill values
- `jd-webgui/app.py` FastAPI web app
- `jd-webgui/Dockerfile` includes ffprobe
@@ -40,16 +40,6 @@ docker compose up -d --build
- If `MYJD_DEVICE` is empty, the WebGUI will automatically pick the first available device.
- Ensure the SSH user can write to `/jellyfin/Filme` (and series dir if used).
## Security
- **Never commit `.env`** — it contains passwords and API keys. Only `.env.example` is tracked.
- **SSH host key verification**: For secure SFTP transfers, provide a `known_hosts` file:
```bash
ssh-keyscan -p 22 192.168.1.1 > known_hosts
```
Mount it in `docker-compose.yml` and set `SSH_KNOWN_HOSTS=/ssh/known_hosts`.
Without it, any host key is accepted (MITM risk on untrusted networks).
- **Basic Auth** protects the WebGUI but transmits credentials in cleartext over HTTP. Use a reverse proxy with HTTPS (e.g. Traefik, Caddy) in production.
## Troubleshooting
- Device not found: list devices
```bash

View File

@@ -21,9 +21,11 @@ services:
- jdownloader
ports:
- "8080:8080"
env_file:
- .env
environment:
TZ: Europe/Berlin
volumes:
- ./data/jd-output:/output:rw
- ./data/jd-output:/output:ro
- ./data/md5:/md5:rw
- ${SSH_KEY_PATH:-/root/.ssh/id_ed25519}:/ssh/id_ed25519:ro
- /root/.ssh/id_ed25519:/ssh/id_ed25519:ro

View File

@@ -2,17 +2,19 @@ FROM python:3.12-slim
WORKDIR /app
RUN apt-get update && apt-get install -y --no-install-recommends ffmpeg && rm -rf /var/lib/apt/lists/*
RUN apt-get update \
&& apt-get install -y --no-install-recommends ffmpeg \
&& rm -rf /var/lib/apt/lists/*
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
RUN pip install --no-cache-dir \
fastapi \
uvicorn \
myjdapi \
paramiko \
python-multipart
RUN useradd -m -u 1000 appuser && chown appuser:appuser /app
USER appuser
COPY --chown=appuser:appuser app.py .
COPY --chown=appuser:appuser static ./static
COPY app.py /app/app.py
COPY static /app/static
EXPOSE 8080
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]

View File

@@ -3,14 +3,10 @@ from __future__ import annotations
import base64
import hashlib
import hmac
import html as html_mod
import ipaddress
import json
import os
import re
import shlex
import socket
import subprocess
import threading
import time
@@ -22,7 +18,7 @@ from typing import Any, Dict, List, Optional, Tuple
from myjdapi import Myjdapi
import paramiko
from fastapi import FastAPI, Form, Request
from fastapi.responses import HTMLResponse, PlainTextResponse, RedirectResponse
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
# ============================================================
@@ -60,23 +56,9 @@ POLL_SECONDS = float(os.environ.get("POLL_SECONDS", "5"))
# JDownloader writes here inside container
JD_OUTPUT_PATH = "/output"
PROXY_EXPORT_PATH = os.environ.get("PROXY_EXPORT_PATH", "/output/jd-proxies.jdproxies")
LOG_BUFFER_LIMIT = int(os.environ.get("LOG_BUFFER_LIMIT", "500"))
URL_RE = re.compile(r"^https?://", re.I)
NO_PROXY_OPENER = urllib.request.build_opener(urllib.request.ProxyHandler({}))
def esc(s: str) -> str:
    """Escape a value for safe interpolation into HTML markup.

    The input is coerced to ``str`` first; quote characters are escaped
    too, so the result is also safe inside attribute values.
    """
    text = str(s)
    return html_mod.escape(text, quote=True)
def mask_secret(value: str, visible: int = 4) -> str:
    """Redact a secret, keeping only its last ``visible`` characters.

    Values too short to safely keep a suffix are fully redacted.
    """
    if len(value) > visible:
        return "***" + value[-visible:]
    return "***"
VIDEO_EXTS = {
".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm",
".ts", ".m2ts", ".mts", ".mpg", ".mpeg", ".vob", ".ogv",
@@ -109,7 +91,7 @@ def _check_basic_auth(req: Request) -> bool:
if ":" not in raw:
return False
user, pw = raw.split(":", 1)
return hmac.compare_digest(user, BASIC_AUTH_USER) and hmac.compare_digest(pw, BASIC_AUTH_PASS)
return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS
def _auth_challenge() -> HTMLResponse:
return HTMLResponse(
@@ -135,26 +117,9 @@ class Job:
library: str # movies|series|auto
status: str # queued|collecting|downloading|upload|finished|failed
message: str
progress: float = 0.0
cancel_requested: bool = False
jobs: Dict[str, Job] = {}
lock = threading.Lock()
log_lock = threading.Lock()
connection_logs: List[str] = []
def log_connection(message: str) -> None:
    """Append a timestamped entry to the in-memory connection log.

    The buffer is trimmed under ``log_lock`` so it never grows beyond
    ``LOG_BUFFER_LIMIT`` entries.
    """
    stamped = f"[{time.strftime('%Y-%m-%d %H:%M:%S')}] {message}"
    with log_lock:
        connection_logs.append(stamped)
        overflow = len(connection_logs) - LOG_BUFFER_LIMIT
        if overflow > 0:
            del connection_logs[:overflow]
def get_connection_logs() -> str:
    """Return the buffered connection log as one newline-joined string."""
    with log_lock:
        return "\n".join(connection_logs)
# ============================================================
# Core helpers
@@ -181,7 +146,6 @@ def ensure_env():
def get_device():
jd = Myjdapi()
log_connection(f"MyJDownloader connect as {MYJD_EMAIL or 'unknown'}")
jd.connect(MYJD_EMAIL, MYJD_PASSWORD)
wanted = (MYJD_DEVICE or "").strip()
@@ -233,47 +197,6 @@ def is_video_file(path: str) -> bool:
return False
return ext in VIDEO_EXTS
DEMO_PATTERNS = {"big_buck_bunny", "bigbuckbunny", "big buck bunny", "bbb_sunflower"}
def is_demo_link(name: str) -> bool:
    """Detect JDownloader demo/fallback videos (e.g. Big Buck Bunny).

    Dashes and dots are normalized before matching so that e.g.
    ``Big-Buck-Bunny.mp4`` still hits a pattern.
    """
    normalized = name.lower().replace("-", "_").replace(".", " ")
    for pattern in DEMO_PATTERNS:
        if pattern in normalized:
            return True
    return False
def _is_ssrf_target(url: str) -> bool:
"""Return True if the URL resolves to a private/loopback address (SSRF protection)."""
try:
host = urllib.parse.urlparse(url).hostname or ""
try:
addr = ipaddress.ip_address(host)
except ValueError:
try:
host = socket.gethostbyname(host)
addr = ipaddress.ip_address(host)
except Exception:
return False
return addr.is_private or addr.is_loopback or addr.is_link_local or addr.is_reserved
except Exception:
return False
def check_url_reachable(url: str) -> Optional[str]:
    """Try a HEAD request to verify the URL is reachable. Returns error string or None."""
    # Reject URLs resolving to internal/private addresses before touching the network.
    if _is_ssrf_target(url):
        return "URL zeigt auf eine interne/private Adresse (nicht erlaubt)"
    try:
        # HEAD avoids downloading the body; a browser-like UA sidesteps hosts
        # that block the default Python user agent.
        # NOTE(review): some servers answer HEAD with 405 even though GET
        # works — such URLs would be reported unreachable here; confirm.
        req = urllib.request.Request(url, method="HEAD")
        req.add_header("User-Agent", "Mozilla/5.0")
        with urllib.request.urlopen(req, timeout=10) as resp:
            if resp.status >= 400:
                return f"URL antwortet mit HTTP {resp.status}"
    except urllib.error.HTTPError as e:
        return f"URL nicht erreichbar: HTTP {e.code}"
    except urllib.error.URLError as e:
        return f"URL nicht erreichbar: {e.reason}"
    except Exception as e:
        return f"URL-Check fehlgeschlagen: {e}"
    return None
def md5_file(path: str) -> str:
h = hashlib.md5()
with open(path, "rb") as f:
@@ -281,25 +204,39 @@ def md5_file(path: str) -> str:
h.update(chunk)
return h.hexdigest()
def write_md5_sidecar(file_path: str, md5_hex: str) -> str:
base = os.path.basename(file_path)
candidates = [MD5_DIR, "/tmp/md5"]
last_err: Optional[Exception] = None
_md5_dir_cache: Optional[str] = None
for target in candidates:
def pick_md5_dir() -> str:
global _md5_dir_cache
if _md5_dir_cache:
return _md5_dir_cache
candidates = [
MD5_DIR,
os.path.join(JD_OUTPUT_PATH, ".md5"),
"/tmp/jd-md5",
]
for candidate in candidates:
try:
os.makedirs(target, exist_ok=True)
md5_path = os.path.join(target, base + ".md5")
with open(md5_path, "w", encoding="utf-8") as f:
f.write(f"{md5_hex} {base}\n")
return md5_path
except PermissionError as exc:
last_err = exc
os.makedirs(candidate, exist_ok=True)
except Exception:
continue
if os.access(candidate, os.W_OK):
_md5_dir_cache = candidate
return candidate
if last_err:
raise last_err
raise RuntimeError("Failed to write MD5 sidecar file.")
raise RuntimeError(
"Kein beschreibbares MD5-Verzeichnis gefunden (MD5_DIR, /output/.md5, /tmp/jd-md5)."
)
def write_md5_sidecar(file_path: str, md5_hex: str) -> str:
    """Write a ``<name>.md5`` sidecar file for *file_path* and return its path.

    The target directory comes from ``pick_md5_dir()``; the sidecar line
    pairs the hex digest with the bare filename.
    """
    base = os.path.basename(file_path)
    sidecar_path = os.path.join(pick_md5_dir(), base + ".md5")
    with open(sidecar_path, "w", encoding="utf-8") as handle:
        handle.write(f"{md5_hex} {base}\n")
    return sidecar_path
def ffprobe_ok(path: str) -> bool:
try:
@@ -317,17 +254,9 @@ def ffprobe_ok(path: str) -> bool:
# ============================================================
# SSH/SFTP
# ============================================================
SSH_KNOWN_HOSTS = os.environ.get("SSH_KNOWN_HOSTS", "/ssh/known_hosts")
def ssh_connect() -> paramiko.SSHClient:
ssh = paramiko.SSHClient()
if os.path.isfile(SSH_KNOWN_HOSTS):
ssh.load_host_keys(SSH_KNOWN_HOSTS)
ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT} (known_hosts verified)")
else:
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT} (WARNING: no known_hosts, accepting any host key)")
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
hostname=JELLYFIN_HOST,
port=JELLYFIN_PORT,
@@ -350,7 +279,6 @@ def sftp_mkdirs(sftp: paramiko.SFTPClient, remote_dir: str):
def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str):
sftp = ssh.open_sftp()
try:
log_connection(f"SFTP upload {local_path} -> {remote_path}")
sftp_mkdirs(sftp, os.path.dirname(remote_path))
sftp.put(local_path, remote_path)
finally:
@@ -359,7 +287,6 @@ def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str):
def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
quoted = shlex.quote(remote_path)
cmd = f"md5sum {quoted}"
log_connection(f"SSH exec {cmd}")
stdin, stdout, stderr = ssh.exec_command(cmd, timeout=120)
out = stdout.read().decode("utf-8", "replace").strip()
err = stderr.read().decode("utf-8", "replace").strip()
@@ -372,20 +299,9 @@ def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
# ============================================================
# TMDB & naming
# ============================================================
def _sanitize_url_for_log(url: str) -> str:
"""Remove sensitive query params (api_key) from URLs before logging."""
parsed = urllib.parse.urlparse(url)
params = urllib.parse.parse_qs(parsed.query, keep_blank_values=True)
for key in ("api_key", "apikey", "token"):
if key in params:
params[key] = ["***"]
safe_query = urllib.parse.urlencode(params, doseq=True)
return urllib.parse.urlunparse(parsed._replace(query=safe_query))
def _http_get_json(url: str, headers: Optional[Dict[str, str]] = None) -> Any:
req = urllib.request.Request(url, headers=headers or {})
log_connection(f"HTTP GET {_sanitize_url_for_log(url)} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as r:
with urllib.request.urlopen(req, timeout=20) as r:
return json.loads(r.read().decode("utf-8", "replace"))
def tmdb_search_movie(query: str) -> Optional[Dict[str, Any]]:
@@ -393,10 +309,7 @@ def tmdb_search_movie(query: str) -> Optional[Dict[str, Any]]:
return None
q = urllib.parse.quote(query.strip())
url = f"https://api.themoviedb.org/3/search/movie?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}"
try:
data = _http_get_json(url)
except Exception:
return None
data = _http_get_json(url)
results = data.get("results") or []
return results[0] if results else None
@@ -405,10 +318,7 @@ def tmdb_search_tv(query: str) -> Optional[Dict[str, Any]]:
return None
q = urllib.parse.quote(query.strip())
url = f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}"
try:
data = _http_get_json(url)
except Exception:
return None
data = _http_get_json(url)
results = data.get("results") or []
return results[0] if results else None
@@ -417,133 +327,6 @@ def sanitize_name(name: str) -> str:
out = "".join("_" if c in bad else c for c in name).strip()
return re.sub(r"\s+", " ", out)
def format_proxy_lines(raw: str, scheme: str) -> str:
    """
    Takes raw lines (ip:port or scheme://ip:port) and outputs normalized lines:
    scheme://ip:port (one per line). Ignores empty lines and comments.
    """
    scheme = scheme.strip().lower()
    if scheme not in {"socks5", "socks4", "http"}:
        raise ValueError("Unsupported proxy scheme")
    normalized: List[str] = []
    seen = set()
    for raw_line in (raw or "").splitlines():
        entry = raw_line.strip()
        if not entry or entry.startswith("#"):
            continue
        # Drop any existing scheme prefix; the requested one is re-attached.
        if "://" in entry:
            entry = entry.split("://", 1)[1].strip()
        if ":" not in entry:
            continue
        host, _, port = entry.rpartition(":")
        host = host.strip()
        port = port.strip()
        if not host or not port.isdigit():
            continue
        candidate = f"{scheme}://{host}:{port}"
        # De-duplicate while preserving first-seen order.
        if candidate not in seen:
            seen.add(candidate)
            normalized.append(candidate)
    return "\n".join(normalized)
_PROXY_FETCH_LIMIT = 2 * 1024 * 1024  # 2 MB cap to prevent memory exhaustion
# url -> (fetch timestamp, body); entries older than the TTL are refetched.
_proxy_cache: Dict[str, Tuple[float, str]] = {}
_PROXY_CACHE_TTL = 300.0  # 5 minutes
def fetch_proxy_list(url: str) -> str:
    """Fetch a plain-text proxy list, bypassing any configured HTTP proxy.

    Responses are cached per URL for ``_PROXY_CACHE_TTL`` seconds and at
    most ``_PROXY_FETCH_LIMIT`` bytes are read. A single-line, whitespace-
    separated response is rewritten to one entry per line.
    """
    now = time.time()
    cached_ts, cached_text = _proxy_cache.get(url, (0.0, ""))
    if cached_text and now - cached_ts < _PROXY_CACHE_TTL:
        return cached_text
    req = urllib.request.Request(url)
    log_connection(f"HTTP GET {url} (no-proxy)")
    with NO_PROXY_OPENER.open(req, timeout=20) as resp:
        text = resp.read(_PROXY_FETCH_LIMIT).decode("utf-8", "replace")
    # Some providers return everything on one line; normalize to line-per-entry.
    if "\n" not in text and re.search(r"\s", text):
        text = re.sub(r"\s+", "\n", text.strip())
    _proxy_cache[url] = (now, text)
    return text
def build_jdproxies_payload(text: str) -> Dict[str, Any]:
    """Convert normalized proxy lines into a JDownloader ``.jdproxies`` dict.

    *text* holds one ``scheme://host:port`` entry per line (as produced by
    ``format_proxy_lines``). The result always starts with the direct
    ("NONE") connection entry JDownloader expects, followed by one entry
    per valid proxy line; unparsable lines are skipped.

    Raises ValueError if *text* is blank or no line parses into a proxy.
    (The original length check for that case was dead code, because the
    NONE entry made the list non-empty even when every line was garbage.)
    """
    if not text.strip():
        raise ValueError("Keine Proxy-Einträge zum Speichern.")
    type_map = {
        "socks5": "SOCKS5",
        "socks4": "SOCKS4",
        "http": "HTTP",
    }

    def _entry(address: Optional[str], port: int, proxy_type: str,
               reconnect: bool) -> Dict[str, Any]:
        # Shared skeleton for both the NONE entry and real proxy entries.
        return {
            "filter": None,
            "proxy": {
                "address": address,
                "password": None,
                "port": port,
                "type": proxy_type,
                "username": None,
                "connectMethodPrefered": False,
                "preferNativeImplementation": False,
                "resolveHostName": False,
            },
            "enabled": True,
            "pac": False,
            "rangeRequestsSupported": True,
            "reconnectSupported": reconnect,
        }

    # The direct-connection entry is always present (reconnectSupported=True).
    entries: List[Dict[str, Any]] = [_entry(None, 80, "NONE", True)]
    for line in text.splitlines():
        s = line.strip()
        if not s:
            continue
        parsed = urllib.parse.urlparse(s)
        if not parsed.scheme or not parsed.hostname or parsed.port is None:
            continue
        proxy_type = type_map.get(parsed.scheme.lower())
        if not proxy_type:
            continue
        entries.append(_entry(parsed.hostname, int(parsed.port), proxy_type, False))
    # Only the NONE entry present => nothing parsed as a proxy.
    if len(entries) == 1:
        raise ValueError("Keine gültigen Proxy-Einträge gefunden.")
    return {"customProxyList": entries}
def save_proxy_export(text: str) -> str:
    """Serialize the normalized proxy list as a ``.jdproxies`` JSON file.

    Returns the path the export was written to (``PROXY_EXPORT_PATH``).
    """
    payload = build_jdproxies_payload(text)
    export_path = PROXY_EXPORT_PATH
    parent = os.path.dirname(export_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    # Drop any stale export before writing the fresh one.
    if os.path.exists(export_path):
        os.remove(export_path)
    with open(export_path, "w", encoding="utf-8") as handle:
        json.dump(payload, handle, indent=2)
        handle.write("\n")
    return export_path
def pick_library_target(library_choice: str, filename: str, package_name: str) -> str:
if library_choice not in {"movies", "series", "auto"}:
library_choice = "auto"
@@ -615,8 +398,7 @@ def jellyfin_refresh_library():
try:
url = JELLYFIN_API_BASE + path
req = urllib.request.Request(url, headers=headers, method="POST")
log_connection(f"HTTP POST {url} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as r:
with urllib.request.urlopen(req, timeout=20) as r:
_ = r.read()
return
except Exception:
@@ -633,10 +415,6 @@ def query_links_and_packages(dev, jobid: str) -> Tuple[List[Dict[str, Any]], Dic
"name": True,
"finished": True,
"running": True,
"bytesLoaded": True,
"bytesTotal": True,
"bytes": True,
"totalBytes": True,
"status": True,
"packageUUID": True,
"uuid": True,
@@ -673,31 +451,6 @@ def local_paths_from_links(links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[
out.append(p)
return out
def call_raw_jd_api(dev, endpoints: List[str], payloads: List[Dict[str, Any]]) -> bool:
    """Best-effort invocation of a raw myjdapi call method on *dev*.

    Different myjdapi versions expose the raw API under different method
    names and signatures, so every combination of method name, endpoint and
    payload is attempted — positionally first, then via ``params=`` and
    ``data=`` keyword arguments. Returns True on the first call that does
    not raise, False when every attempt failed.

    Fix: in the original, a non-TypeError raised by the ``params=`` attempt
    occurred inside the ``except TypeError`` handler and therefore escaped
    the outer ``except Exception`` — probing could crash the caller. All
    attempt styles are now uniformly swallowed.
    """
    method_candidates = ["action", "call", "api", "request"]
    for method_name in method_candidates:
        method = getattr(dev, method_name, None)
        if method is None:
            continue
        for endpoint in endpoints:
            for payload in payloads:
                for call_style in (
                    lambda: method(endpoint, payload),
                    lambda: method(endpoint, params=payload),
                    lambda: method(endpoint, data=payload),
                ):
                    try:
                        call_style()
                        return True
                    except TypeError:
                        # Signature mismatch — try the next calling convention.
                        continue
                    except Exception:
                        # Endpoint rejected the call — try the next payload.
                        break
    return False
def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None]
pkg_ids = list(pkg_map.keys())
@@ -732,104 +485,8 @@ def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
except Exception:
continue
endpoint_candidates = [
"downloads/removeLinks",
"downloadsV2/removeLinks",
"downloadcontroller/removeLinks",
]
if call_raw_jd_api(dev, endpoint_candidates, payloads):
return None
return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)."
def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
    """Remove the given links/packages (and their files) from JDownloader.

    Because myjdapi wrappers differ across versions, several namespace/
    method-name pairs and payload shapes are probed in order; the first
    call that does not raise wins. Falls back to the raw API endpoints via
    ``call_raw_jd_api``. Returns an error string if every attempt failed,
    None on success.
    """
    link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None]
    pkg_ids = list(pkg_map.keys())
    # (namespace, method) combinations seen in different myjdapi releases.
    candidates = [
        ("downloads", "removeLinks"),
        ("downloads", "remove_links"),
        ("downloads", "deleteLinks"),
        ("downloads", "delete_links"),
        ("downloadcontroller", "removeLinks"),
        ("downloadcontroller", "remove_links"),
    ]
    # Payload key names also vary; deleteFiles/removeFiles ask JD to delete
    # the downloaded files from disk as well.
    payloads = [
        {"linkUUIDs": link_ids, "packageUUIDs": pkg_ids, "deleteFiles": True},
        {"linkIds": link_ids, "packageIds": pkg_ids, "deleteFiles": True},
        {"linkUUIDs": link_ids, "deleteFiles": True},
        {"packageUUIDs": pkg_ids, "deleteFiles": True},
        {"linkUUIDs": link_ids, "packageUUIDs": pkg_ids, "removeFiles": True},
        {"linkIds": link_ids, "packageIds": pkg_ids, "removeFiles": True},
    ]
    for ns, fn in candidates:
        obj = getattr(dev, ns, None)
        if obj is None:
            continue
        meth = getattr(obj, fn, None)
        if meth is None:
            continue
        for payload in payloads:
            try:
                meth([payload])
                return None
            except Exception:
                continue
    # Wrapper methods unavailable — try the raw API endpoints directly.
    endpoint_candidates = [
        "downloads/removeLinks",
        "downloadsV2/removeLinks",
        "downloadcontroller/removeLinks",
    ]
    if call_raw_jd_api(dev, endpoint_candidates, payloads):
        return None
    return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)."
def cancel_job(dev, jobid: str) -> Optional[str]:
    """Abort a job: delete its local files and MD5 sidecars, then remove
    the package/links from JDownloader.

    Returns the error string from the JDownloader removal attempt, or None.
    """
    links, pkg_map = query_links_and_packages(dev, jobid)
    for local_path in local_paths_from_links(links, pkg_map):
        # Best effort: a failed unlink must not block the cancellation.
        try:
            if os.path.isfile(local_path):
                os.remove(local_path)
        except Exception:
            pass
        try:
            sidecar = os.path.join(MD5_DIR, os.path.basename(local_path) + ".md5")
            if os.path.isfile(sidecar):
                os.remove(sidecar)
        except Exception:
            pass
    return try_cancel_from_jd(dev, links, pkg_map)
def calculate_progress(links: List[Dict[str, Any]]) -> float:
    """Aggregate download progress across *links* as a percentage [0, 100].

    Byte totals are read from whichever of ``bytesTotal``/``totalBytes``/
    ``bytes`` a link provides; links without usable byte counts are
    ignored. Returns 0.0 when no link reports a positive total.
    """
    total_bytes = 0
    loaded_bytes = 0
    for link in links:
        size = next(
            (link[key] for key in ("bytesTotal", "totalBytes", "bytes")
             if link.get(key) is not None),
            None,
        )
        done = link.get("bytesLoaded")
        if size is None or done is None:
            continue
        try:
            size = int(size)
            done = int(done)
        except (TypeError, ValueError):
            continue
        if size <= 0:
            continue
        total_bytes += size
        # A link can momentarily report more loaded than total; clamp it.
        loaded_bytes += min(done, size)
    if total_bytes <= 0:
        return 0.0
    return max(0.0, min(100.0, (loaded_bytes / total_bytes) * 100.0))
# ============================================================
# Worker
# ============================================================
@@ -843,13 +500,6 @@ def worker(jobid: str):
job = jobs.get(jobid)
if not job:
return
if job.cancel_requested:
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "canceled"
job.message = cancel_msg or "Download abgebrochen und Dateien entfernt."
job.progress = 0.0
return
links, pkg_map = query_links_and_packages(dev, jobid)
@@ -857,28 +507,15 @@ def worker(jobid: str):
with lock:
job.status = "collecting"
job.message = "Warte auf Link-Crawler…"
job.progress = 0.0
time.sleep(POLL_SECONDS)
continue
all_demo = all(is_demo_link(l.get("name", "")) for l in links)
if all_demo and not is_demo_link(job.url):
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "failed"
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gewünschten Links."
job.message = f"{base_msg} {cancel_msg}" if cancel_msg else base_msg
job.progress = 0.0
return
all_finished = all(bool(l.get("finished")) for l in links)
if not all_finished:
progress = calculate_progress(links)
with lock:
job.status = "downloading"
done = sum(1 for l in links if l.get("finished"))
job.message = f"Download läuft… ({done}/{len(links)} fertig)"
job.progress = progress
time.sleep(POLL_SECONDS)
continue
@@ -889,7 +526,6 @@ def worker(jobid: str):
with lock:
job.status = "failed"
job.message = "Keine Video-Datei gefunden (Whitelist)."
job.progress = 0.0
return
valid_videos = [p for p in video_files if ffprobe_ok(p)]
@@ -897,13 +533,11 @@ def worker(jobid: str):
with lock:
job.status = "failed"
job.message = "ffprobe: keine gültige Video-Datei."
job.progress = 0.0
return
with lock:
job.status = "upload"
job.message = f"Download fertig. MD5/Upload/Verify für {len(valid_videos)} Datei(en)…"
job.progress = 100.0
ssh = ssh_connect()
try:
@@ -943,7 +577,6 @@ def worker(jobid: str):
with lock:
job.status = "finished"
job.message = "Upload + MD5 OK. " + (jd_cleanup_msg or "JDownloader: Paket/Links entfernt.")
job.progress = 100.0
return
except Exception as e:
@@ -952,7 +585,6 @@ def worker(jobid: str):
if job:
job.status = "failed"
job.message = str(e)
job.progress = 0.0
# ============================================================
# Web
@@ -961,57 +593,23 @@ def worker(jobid: str):
def favicon():
return HTMLResponse(status_code=204)
@app.get("/jobs", response_class=HTMLResponse)
def jobs_get():
    # HTML fragment endpoint polled by the index page's script to refresh
    # the job table body.
    return HTMLResponse(render_job_rows())
@app.get("/logs", response_class=HTMLResponse)
def logs_get():
    # Full log-viewer page; it self-refreshes via /logs/data.
    return HTMLResponse(render_logs_page())
@app.get("/logs/data", response_class=PlainTextResponse)
def logs_data():
    # Raw text body consumed by the log page's polling script.
    return PlainTextResponse(get_connection_logs())
def render_job_rows() -> str:
def render_page(error: str = "") -> str:
rows = ""
with lock:
job_list = list(jobs.values())[::-1]
for j in job_list:
progress_pct = f"{j.progress:.1f}%"
progress_html = (
f"<div class='progress-row'>"
f"<progress value='{j.progress:.1f}' max='100'></progress>"
f"<span class='progress-text'>{progress_pct}</span>"
f"</div>"
)
cancel_html = ""
if j.status not in {"finished", "failed", "canceled"}:
cancel_html = (
f"<form method='post' action='/cancel/{esc(j.id)}' class='inline-form'>"
f"<button type='submit' class='danger'>Abbrechen</button>"
f"</form>"
)
status_class = "error" if j.status == "failed" else ("success" if j.status == "finished" else "")
rows += (
f"<tr>"
f"<td><code>{esc(j.id)}</code></td>"
f"<td style='max-width:560px; word-break:break-all;'>{esc(j.url)}</td>"
f"<td>{esc(j.package_name)}</td>"
f"<td>{esc(j.library)}</td>"
f"<td><b class='{status_class}'>{esc(j.status)}</b><br/><small>{esc(j.message)}</small>{progress_html}{cancel_html}</td>"
f"<td><code>{j.id}</code></td>"
f"<td style='max-width:560px; word-break:break-all;'>{j.url}</td>"
f"<td>{j.package_name}</td>"
f"<td>{j.library}</td>"
f"<td><b>{j.status}</b><br/><small>{j.message}</small></td>"
f"</tr>"
)
if not rows:
rows = "<tr><td colspan='5'><em>No jobs yet.</em></td></tr>"
return rows
def render_page(error: str = "") -> str:
rows = render_job_rows()
err_html = f"<p class='error'>{esc(error)}</p>" if error else ""
err_html = f"<p class='error'>{error}</p>" if error else ""
auth_note = "aktiv" if _auth_enabled() else "aus"
return f"""
<html>
@@ -1019,24 +617,9 @@ def render_page(error: str = "") -> str:
<link rel="stylesheet" href="/static/style.css">
<meta charset="utf-8">
<title>JD → Jellyfin</title>
<script>
async function refreshJobs() {{
if (document.hidden) return;
try {{
const resp = await fetch('/jobs');
if (!resp.ok) return;
const html = await resp.text();
const tbody = document.getElementById('jobs-body');
if (tbody) tbody.innerHTML = html;
}} catch (e) {{
}}
}}
setInterval(refreshJobs, 5000);
</script>
</head>
<body>
<h1>JD → Jellyfin</h1>
{render_nav("downloads")}
{err_html}
<form method="post" action="/submit">
@@ -1069,121 +652,10 @@ def render_page(error: str = "") -> str:
<thead>
<tr><th>JobID</th><th>URL</th><th>Paket</th><th>Ziel</th><th>Status</th></tr>
</thead>
<tbody id="jobs-body">
{rows}
<tbody>
{rows if rows else "<tr><td colspan='5'><em>No jobs yet.</em></td></tr>"}
</tbody>
</table>
<form method="post" action="/clear-finished" style="margin-top:10px;">
<button type="submit" style="background:#666; color:#fff;">Erledigte Jobs entfernen</button>
</form>
</body>
</html>
"""
def render_nav(active: str) -> str:
    """Render the top navigation bar; the *active* entry is shown bold."""
    items = (
        ("Downloads", "/", "downloads"),
        ("Proxies", "/proxies", "proxies"),
        ("Logs", "/logs", "logs"),
    )
    parts = ["<div style='margin: 8px 0 14px 0;'>"]
    for label, href, key in items:
        weight = "font-weight:700;" if active == key else ""
        parts.append(f"<a href='{href}' style='margin-right:14px; {weight}'>{label}</a>")
    parts.append("</div>")
    return "".join(parts)
def render_logs_page() -> str:
    """Render the connection-log viewer page.

    The embedded script polls ``/logs/data`` every 2 s (skipping hidden
    tabs) and keeps the textarea scrolled to the newest entry.
    """
    return f"""
<html>
<head>
<link rel="stylesheet" href="/static/style.css">
<meta charset="utf-8">
<title>JD → Jellyfin (Logs)</title>
<script>
async function refreshLogs() {{
if (document.hidden) return;
try {{
const resp = await fetch('/logs/data');
if (!resp.ok) return;
const text = await resp.text();
const area = document.getElementById('log-body');
if (area) {{
area.value = text;
area.scrollTop = area.scrollHeight;
}}
}} catch (e) {{
}}
}}
setInterval(refreshLogs, 2000);
window.addEventListener('load', refreshLogs);
</script>
</head>
<body>
<h1>JD → Jellyfin</h1>
{render_nav("logs")}
<p class="hint">Verbindungs-Debugger (Echtzeit). Letzte {LOG_BUFFER_LIMIT} Einträge.</p>
<textarea id="log-body" class="log-area" rows="20" readonly></textarea>
</body>
</html>
"""
def render_proxies_page(
    error: str = "",
    message: str = "",
    socks5_in: str = "",
    socks4_in: str = "",
    out_text: str = "",
    export_path: str = "",
) -> str:
    """Render the proxy-tools page.

    *socks5_in*/*socks4_in* prefill the input textareas, *out_text* fills
    the read-only normalized list, *error*/*message* render as banner
    paragraphs, and *export_path* (falling back to ``PROXY_EXPORT_PATH``)
    is shown in the footer hint. All interpolated values are HTML-escaped
    via ``esc``.
    """
    err_html = f"<p class='error'>{esc(error)}</p>" if error else ""
    msg_html = f"<p class='success'>{esc(message)}</p>" if message else ""
    return f"""
<html>
<head>
<link rel="stylesheet" href="/static/style.css">
<meta charset="utf-8">
<title>JD → Jellyfin (Proxies)</title>
</head>
<body>
<h1>JD → Jellyfin</h1>
{render_nav("proxies")}
{err_html}
{msg_html}
<form method="post" action="/proxies">
<div class="row">
<label>SOCKS5 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks5_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(socks5_in)}</textarea>
</div>
<div class="row">
<label>SOCKS4 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(socks4_in)}</textarea>
</div>
<button type="submit">In JDownloader-Format umwandeln</button>
</form>
<h2 style="margin-top:18px;">JDownloader Import-Liste</h2>
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Prüfung/Validierung.</p>
<div class="row">
<textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(out_text)}</textarea>
</div>
<button type="button" onclick="navigator.clipboard.writeText(document.getElementById('out').value)">Kopieren</button>
<h2 style="margin-top:18px;">Datei für Connection Manager</h2>
<p class="hint">Speichert die Liste als <code>.jdproxies</code> im Container, z. B. zum Import in JDownloader → Verbindungsmanager → Importieren.</p>
<form method="post" action="/proxies/save">
<textarea name="socks5_in" style="display:none;">{esc(socks5_in)}</textarea>
<textarea name="socks4_in" style="display:none;">{esc(socks4_in)}</textarea>
<button type="submit">Liste als JDProxies speichern</button>
</form>
<p class="hint">Aktueller Pfad: <code>{esc(export_path or PROXY_EXPORT_PATH)}</code></p>
</body>
</html>
"""
@@ -1206,15 +678,7 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
if not URL_RE.match(url):
return HTMLResponse(render_page("Nur http(s) URLs erlaubt."), status_code=400)
url_err = check_url_reachable(url)
if url_err:
log_connection(f"URL-Check fehlgeschlagen: {url} -> {url_err}")
return HTMLResponse(render_page(f"Link nicht erreichbar: {url_err}"), status_code=400)
try:
dev = get_device()
except Exception as e:
return HTMLResponse(render_page(f"JDownloader nicht erreichbar: {e}"), status_code=503)
dev = get_device()
resp = dev.linkgrabber.add_links([{
"links": url,
"autostart": True,
@@ -1234,103 +698,9 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
library=library,
status="queued",
message="Download gestartet",
progress=0.0,
)
t = threading.Thread(target=worker, args=(jobid,), daemon=True)
t.start()
return RedirectResponse(url="/", status_code=303)
@app.post("/cancel/{jobid}")
def cancel(jobid: str):
with lock:
job = jobs.get(jobid)
if not job:
return RedirectResponse(url="/", status_code=303)
if job.status in {"finished", "failed", "canceled"}:
return RedirectResponse(url="/", status_code=303)
job.cancel_requested = True
job.message = "Abbruch angefordert…"
return RedirectResponse(url="/", status_code=303)
@app.post("/clear-finished")
def clear_finished():
with lock:
to_remove = [jid for jid, j in jobs.items() if j.status in {"finished", "failed", "canceled"}]
for jid in to_remove:
del jobs[jid]
return RedirectResponse(url="/", status_code=303)
@app.get("/proxies", response_class=HTMLResponse)
def proxies_get():
    # Prefill both textareas with freshly fetched free-proxy lists and show
    # the normalized JDownloader import list alongside.
    try:
        socks5_in = fetch_proxy_list(
            "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks5&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
        )
        socks4_in = fetch_proxy_list(
            "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
        )
        s5 = format_proxy_lines(socks5_in, "socks5")
        s4 = format_proxy_lines(socks4_in, "socks4")
        combined = "\n".join([x for x in [s5, s4] if x.strip()])
        return HTMLResponse(render_proxies_page(
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            out_text=combined,
            export_path=PROXY_EXPORT_PATH,
        ))
    except Exception as e:
        # Upstream fetch failed — render the page with the error as 502.
        return HTMLResponse(render_proxies_page(error=str(e)), status_code=502)
@app.post("/proxies", response_class=HTMLResponse)
def proxies_post(
    socks5_in: str = Form(""),
    socks4_in: str = Form(""),
):
    # Normalize user-pasted proxy lines and re-render the form with the
    # combined scheme://host:port list.
    try:
        s5 = format_proxy_lines(socks5_in, "socks5")
        s4 = format_proxy_lines(socks4_in, "socks4")
        combined = "\n".join([x for x in [s5, s4] if x.strip()])
        return HTMLResponse(render_proxies_page(
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            out_text=combined,
            export_path=PROXY_EXPORT_PATH,
        ))
    except Exception as e:
        # Bad input (e.g. unsupported scheme) — echo the form back with the error.
        return HTMLResponse(render_proxies_page(
            error=str(e),
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            out_text="",
            export_path=PROXY_EXPORT_PATH,
        ), status_code=400)
@app.post("/proxies/save", response_class=HTMLResponse)
def proxies_save(
    socks5_in: str = Form(""),
    socks4_in: str = Form(""),
):
    # Normalize the submitted proxy lines and persist them as a .jdproxies
    # file for import via JDownloader's connection manager.
    try:
        s5 = format_proxy_lines(socks5_in, "socks5")
        s4 = format_proxy_lines(socks4_in, "socks4")
        combined = "\n".join([x for x in [s5, s4] if x.strip()])
        export_path = save_proxy_export(combined)
        return HTMLResponse(render_proxies_page(
            message=f"Proxy-Liste gespeichert: {export_path}",
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            out_text=combined,
            export_path=export_path,
        ))
    except Exception as e:
        # Validation or filesystem failure — echo the form back with the error.
        return HTMLResponse(render_proxies_page(
            error=str(e),
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            out_text="",
            export_path=PROXY_EXPORT_PATH,
        ), status_code=400)

View File

@@ -1,5 +0,0 @@
fastapi
uvicorn
myjdapi
paramiko
python-multipart

View File

@@ -4,19 +4,9 @@ form { background:#fff; border:1px solid #e5e5e5; border-radius:10px; padding:14
.row { margin-bottom: 10px; }
input, select { padding:10px; border:1px solid #ccc; border-radius:8px; font-size:14px; width: 100%; max-width: 860px; }
button { padding:10px 14px; border:0; border-radius:8px; font-weight:600; cursor:pointer; }
button.danger { background:#b00020; color:#fff; }
progress { width: 100%; height: 12px; }
progress::-webkit-progress-bar { background:#f0f0f0; border-radius:8px; }
progress::-webkit-progress-value { background:#1b7f3a; border-radius:8px; }
progress::-moz-progress-bar { background:#1b7f3a; border-radius:8px; }
table { margin-top:16px; width:100%; border-collapse: collapse; background:#fff; border:1px solid #e5e5e5; border-radius:10px; overflow:hidden; }
th, td { border-top:1px solid #eee; padding:10px; vertical-align: top; font-size:14px; }
th { background:#fbfbfb; text-align:left; }
code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; background:#f2f2f2; padding:2px 4px; border-radius:4px; }
.hint { color:#555; font-size: 12px; margin-top: 10px; }
.error { color:#b00020; font-weight: 700; }
.success { color:#1b7f3a; font-weight: 700; }
.progress-row { display:flex; align-items:center; gap:8px; margin-top:6px; }
.progress-text { font-size:12px; color:#333; min-width:48px; }
.inline-form { margin-top:6px; }
.log-area { width:100%; max-width: 920px; padding:10px; border:1px solid #ccc; border-radius:8px; background:#fff; }