Fetch proxy lists from upstream sources
This commit is contained in:
316
jd-webgui/app.py
316
jd-webgui/app.py
@@ -117,6 +117,8 @@ class Job:
|
||||
library: str # movies|series|auto
|
||||
status: str # queued|collecting|downloading|upload|finished|failed
|
||||
message: str
|
||||
progress: float = 0.0
|
||||
cancel_requested: bool = False
|
||||
|
||||
jobs: Dict[str, Job] = {}
|
||||
lock = threading.Lock()
|
||||
@@ -205,12 +207,24 @@ def md5_file(path: str) -> str:
|
||||
return h.hexdigest()
|
||||
|
||||
def write_md5_sidecar(file_path: str, md5_hex: str) -> str:
    """Write an ``.md5`` sidecar file for *file_path* and return its path.

    The sidecar contains a single line in ``md5sum``-style format
    (``<hex> <basename>``).  It is written into ``MD5_DIR`` when possible;
    if that directory is not writable, ``/tmp/md5`` is used as a fallback.

    The original code returned unconditionally after the first write attempt,
    which made the whole fallback loop below it unreachable dead code; the
    fallback is now actually exercised.

    Raises:
        PermissionError: the last permission failure, if no candidate
            directory was writable.
        RuntimeError: if no write succeeded and no PermissionError was seen.
    """
    base = os.path.basename(file_path)
    candidates = [MD5_DIR, "/tmp/md5"]
    last_err: Optional[Exception] = None

    for target in candidates:
        try:
            os.makedirs(target, exist_ok=True)
            md5_path = os.path.join(target, base + ".md5")
            with open(md5_path, "w", encoding="utf-8") as f:
                f.write(f"{md5_hex} {base}\n")
            return md5_path
        except PermissionError as exc:
            # Remember the failure and try the next candidate directory.
            last_err = exc
            continue

    if last_err:
        raise last_err
    raise RuntimeError("Failed to write MD5 sidecar file.")
|
||||
|
||||
def ffprobe_ok(path: str) -> bool:
|
||||
try:
|
||||
@@ -283,7 +297,10 @@ def tmdb_search_movie(query: str) -> Optional[Dict[str, Any]]:
|
||||
return None
|
||||
q = urllib.parse.quote(query.strip())
|
||||
url = f"https://api.themoviedb.org/3/search/movie?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}"
|
||||
data = _http_get_json(url)
|
||||
try:
|
||||
data = _http_get_json(url)
|
||||
except Exception:
|
||||
return None
|
||||
results = data.get("results") or []
|
||||
return results[0] if results else None
|
||||
|
||||
@@ -292,7 +309,10 @@ def tmdb_search_tv(query: str) -> Optional[Dict[str, Any]]:
|
||||
return None
|
||||
q = urllib.parse.quote(query.strip())
|
||||
url = f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}"
|
||||
data = _http_get_json(url)
|
||||
try:
|
||||
data = _http_get_json(url)
|
||||
except Exception:
|
||||
return None
|
||||
results = data.get("results") or []
|
||||
return results[0] if results else None
|
||||
|
||||
@@ -301,6 +321,50 @@ def sanitize_name(name: str) -> str:
|
||||
out = "".join("_" if c in bad else c for c in name).strip()
|
||||
return re.sub(r"\s+", " ", out)
|
||||
|
||||
def format_proxy_lines(raw: str, scheme: str) -> str:
    """
    Takes raw lines (ip:port or scheme://ip:port) and outputs normalized lines:
    scheme://ip:port (one per line). Ignores empty lines and comments.

    Duplicate entries are removed while preserving first-seen order.
    Lines with a missing host, a non-numeric port, or an out-of-range port
    are skipped (the previous version accepted e.g. ``:0`` or ``:99999``,
    which are not valid TCP ports and produce unusable proxy entries).

    Raises:
        ValueError: if *scheme* is not one of socks5/socks4/http.
    """
    scheme = scheme.strip().lower()
    if scheme not in {"socks5", "socks4", "http"}:
        raise ValueError("Unsupported proxy scheme")

    seen = set()
    out = []
    for line in (raw or "").splitlines():
        s = line.strip()
        if not s or s.startswith("#"):
            continue  # blank line or comment

        if "://" in s:
            # Drop any existing scheme prefix; the requested scheme wins.
            s = s.split("://", 1)[1].strip()

        if ":" not in s:
            continue  # no port separator at all

        host, port = s.rsplit(":", 1)
        host = host.strip()
        port = port.strip()

        # Valid TCP ports are 1..65535.
        if not host or not port.isdigit() or not 0 < int(port) <= 65535:
            continue

        normalized = f"{scheme}://{host}:{port}"
        if normalized not in seen:
            seen.add(normalized)
            out.append(normalized)

    return "\n".join(out)
|
||||
|
||||
def fetch_proxy_list(url: str) -> str:
    """Download a raw proxy list from *url* and return it as decoded text.

    Non-UTF-8 bytes are replaced rather than raising, since upstream lists
    are occasionally dirty.
    """
    request = urllib.request.Request(url)
    with urllib.request.urlopen(request, timeout=20) as response:
        body = response.read()
    return body.decode("utf-8", "replace")
|
||||
|
||||
def pick_library_target(library_choice: str, filename: str, package_name: str) -> str:
|
||||
if library_choice not in {"movies", "series", "auto"}:
|
||||
library_choice = "auto"
|
||||
@@ -389,6 +453,10 @@ def query_links_and_packages(dev, jobid: str) -> Tuple[List[Dict[str, Any]], Dic
|
||||
"name": True,
|
||||
"finished": True,
|
||||
"running": True,
|
||||
"bytesLoaded": True,
|
||||
"bytesTotal": True,
|
||||
"bytes": True,
|
||||
"totalBytes": True,
|
||||
"status": True,
|
||||
"packageUUID": True,
|
||||
"uuid": True,
|
||||
@@ -461,6 +529,86 @@ def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
|
||||
|
||||
return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)."
|
||||
|
||||
def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
    """Best-effort cancellation via the JDownloader API wrapper.

    Probes several known namespace/method spellings and several payload key
    conventions until one call succeeds.  Returns None on success, or a
    human-readable error message if no wrapper method could be invoked.
    """
    uuid_list = [entry.get("uuid") for entry in links if entry.get("uuid") is not None]
    package_ids = list(pkg_map.keys())

    method_candidates = (
        ("downloads", "removeLinks"),
        ("downloads", "remove_links"),
        ("downloads", "deleteLinks"),
        ("downloads", "delete_links"),
        ("downloadcontroller", "removeLinks"),
        ("downloadcontroller", "remove_links"),
    )

    payload_variants = (
        {"linkUUIDs": uuid_list, "packageUUIDs": package_ids, "deleteFiles": True},
        {"linkIds": uuid_list, "packageIds": package_ids, "deleteFiles": True},
        {"linkUUIDs": uuid_list, "deleteFiles": True},
        {"packageUUIDs": package_ids, "deleteFiles": True},
        {"linkUUIDs": uuid_list, "packageUUIDs": package_ids, "removeFiles": True},
        {"linkIds": uuid_list, "packageIds": package_ids, "removeFiles": True},
    )

    for namespace, method_name in method_candidates:
        # getattr on a missing namespace yields None; the second getattr
        # then also yields None, so the candidate is simply skipped.
        target = getattr(getattr(dev, namespace, None), method_name, None)
        if target is None:
            continue
        for variant in payload_variants:
            try:
                target([variant])
            except Exception:
                continue  # wrong payload shape for this wrapper — try the next
            return None

    return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)."
|
||||
|
||||
def cancel_job(dev, jobid: str) -> Optional[str]:
    """Cancel a job: delete already-downloaded local files (and their MD5
    sidecars), then ask JDownloader to drop the links/packages.

    Returns the error message from try_cancel_from_jd on failure, or None
    when the JDownloader-side cleanup succeeded.
    """
    links, pkg_map = query_links_and_packages(dev, jobid)
    local_paths = local_paths_from_links(links, pkg_map)
    for path in local_paths:
        # Best effort: a missing or locked file must not abort the cancel.
        try:
            if os.path.isfile(path):
                os.remove(path)
        except Exception:
            pass
        # Also drop the MD5 sidecar written into MD5_DIR for this file.
        try:
            sidecar = os.path.join(MD5_DIR, os.path.basename(path) + ".md5")
            if os.path.isfile(sidecar):
                os.remove(sidecar)
        except Exception:
            pass
    return try_cancel_from_jd(dev, links, pkg_map)
|
||||
|
||||
def calculate_progress(links: List[Dict[str, Any]]) -> float:
    """Aggregate download progress across all links as a 0–100 percentage.

    The total size of a link is read from the first of "bytesTotal",
    "totalBytes", "bytes" that is present; links without a usable size or
    loaded counter are skipped.  Returns 0.0 when nothing is measurable.
    """
    total_bytes = 0
    loaded_bytes = 0
    for entry in links:
        # First non-None size field wins.
        size = None
        for field in ("bytesTotal", "totalBytes", "bytes"):
            size = entry.get(field)
            if size is not None:
                break
        done = entry.get("bytesLoaded")
        if size is None or done is None:
            continue
        try:
            size = int(size)
            done = int(done)
        except (TypeError, ValueError):
            continue  # non-numeric garbage from the API — ignore this link
        if size <= 0:
            continue
        total_bytes += size
        # Some wrappers over-report loaded bytes; clamp to the link size.
        loaded_bytes += min(done, size)
    if total_bytes <= 0:
        return 0.0
    return min(100.0, max(0.0, (loaded_bytes / total_bytes) * 100.0))
|
||||
|
||||
# ============================================================
|
||||
# Worker
|
||||
# ============================================================
|
||||
@@ -474,6 +622,13 @@ def worker(jobid: str):
|
||||
job = jobs.get(jobid)
|
||||
if not job:
|
||||
return
|
||||
if job.cancel_requested:
|
||||
cancel_msg = cancel_job(dev, jobid)
|
||||
with lock:
|
||||
job.status = "canceled"
|
||||
job.message = cancel_msg or "Download abgebrochen und Dateien entfernt."
|
||||
job.progress = 0.0
|
||||
return
|
||||
|
||||
links, pkg_map = query_links_and_packages(dev, jobid)
|
||||
|
||||
@@ -481,15 +636,18 @@ def worker(jobid: str):
|
||||
with lock:
|
||||
job.status = "collecting"
|
||||
job.message = "Warte auf Link-Crawler…"
|
||||
job.progress = 0.0
|
||||
time.sleep(POLL_SECONDS)
|
||||
continue
|
||||
|
||||
all_finished = all(bool(l.get("finished")) for l in links)
|
||||
if not all_finished:
|
||||
progress = calculate_progress(links)
|
||||
with lock:
|
||||
job.status = "downloading"
|
||||
done = sum(1 for l in links if l.get("finished"))
|
||||
job.message = f"Download läuft… ({done}/{len(links)} fertig)"
|
||||
job.progress = progress
|
||||
time.sleep(POLL_SECONDS)
|
||||
continue
|
||||
|
||||
@@ -500,6 +658,7 @@ def worker(jobid: str):
|
||||
with lock:
|
||||
job.status = "failed"
|
||||
job.message = "Keine Video-Datei gefunden (Whitelist)."
|
||||
job.progress = 0.0
|
||||
return
|
||||
|
||||
valid_videos = [p for p in video_files if ffprobe_ok(p)]
|
||||
@@ -507,11 +666,13 @@ def worker(jobid: str):
|
||||
with lock:
|
||||
job.status = "failed"
|
||||
job.message = "ffprobe: keine gültige Video-Datei."
|
||||
job.progress = 0.0
|
||||
return
|
||||
|
||||
with lock:
|
||||
job.status = "upload"
|
||||
job.message = f"Download fertig. MD5/Upload/Verify für {len(valid_videos)} Datei(en)…"
|
||||
job.progress = 100.0
|
||||
|
||||
ssh = ssh_connect()
|
||||
try:
|
||||
@@ -551,6 +712,7 @@ def worker(jobid: str):
|
||||
with lock:
|
||||
job.status = "finished"
|
||||
job.message = "Upload + MD5 OK. " + (jd_cleanup_msg or "JDownloader: Paket/Links entfernt.")
|
||||
job.progress = 100.0
|
||||
return
|
||||
|
||||
except Exception as e:
|
||||
@@ -559,6 +721,7 @@ def worker(jobid: str):
|
||||
if job:
|
||||
job.status = "failed"
|
||||
job.message = str(e)
|
||||
job.progress = 0.0
|
||||
|
||||
# ============================================================
|
||||
# Web
|
||||
@@ -573,13 +736,27 @@ def render_page(error: str = "") -> str:
|
||||
job_list = list(jobs.values())[::-1]
|
||||
|
||||
for j in job_list:
|
||||
progress_pct = f"{j.progress:.1f}%"
|
||||
progress_html = (
|
||||
f"<div class='progress-row'>"
|
||||
f"<progress value='{j.progress:.1f}' max='100'></progress>"
|
||||
f"<span class='progress-text'>{progress_pct}</span>"
|
||||
f"</div>"
|
||||
)
|
||||
cancel_html = ""
|
||||
if j.status not in {"finished", "failed", "canceled"}:
|
||||
cancel_html = (
|
||||
f"<form method='post' action='/cancel/{j.id}' class='inline-form'>"
|
||||
f"<button type='submit' class='danger'>Abbrechen</button>"
|
||||
f"</form>"
|
||||
)
|
||||
rows += (
|
||||
f"<tr>"
|
||||
f"<td><code>{j.id}</code></td>"
|
||||
f"<td style='max-width:560px; word-break:break-all;'>{j.url}</td>"
|
||||
f"<td>{j.package_name}</td>"
|
||||
f"<td>{j.library}</td>"
|
||||
f"<td><b>{j.status}</b><br/><small>{j.message}</small></td>"
|
||||
f"<td><b>{j.status}</b><br/><small>{j.message}</small>{progress_html}{cancel_html}</td>"
|
||||
f"</tr>"
|
||||
)
|
||||
|
||||
@@ -591,9 +768,16 @@ def render_page(error: str = "") -> str:
|
||||
<link rel="stylesheet" href="/static/style.css">
|
||||
<meta charset="utf-8">
|
||||
<title>JD → Jellyfin</title>
|
||||
<script>
|
||||
setInterval(() => {{
|
||||
if (document.hidden) return;
|
||||
window.location.reload();
|
||||
}}, 5000);
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>JD → Jellyfin</h1>
|
||||
{render_nav("downloads")}
|
||||
{err_html}
|
||||
|
||||
<form method="post" action="/submit">
|
||||
@@ -634,6 +818,62 @@ def render_page(error: str = "") -> str:
|
||||
</html>
|
||||
"""
|
||||
|
||||
def render_nav(active: str) -> str:
    """Render the top navigation bar; the tab named by *active* is bold."""
    entries = (
        ("Downloads", "/", "downloads"),
        ("Proxies", "/proxies", "proxies"),
    )
    anchors = []
    for label, href, key in entries:
        weight = "font-weight:700;" if active == key else ""
        anchors.append(f"<a href='{href}' style='margin-right:14px; {weight}'>{label}</a>")
    return "<div style='margin: 8px 0 14px 0;'>" + "".join(anchors) + "</div>"
|
||||
|
||||
def render_proxies_page(error: str = "", socks5_in: str = "", socks4_in: str = "", http_in: str = "", out_text: str = "") -> str:
    """Render the /proxies page as a complete HTML document.

    Three textareas echo the raw proxy lists (one per scheme); a read-only
    textarea shows the normalized JDownloader import list (*out_text*).
    NOTE(review): interpolated values are not HTML-escaped — presumably
    operator-supplied/trusted input; confirm before exposing this page
    to untrusted users.
    """
    # Error banner is only emitted when a message was passed in.
    err_html = f"<p class='error'>{error}</p>" if error else ""
    return f"""
<html>
<head>
<link rel="stylesheet" href="/static/style.css">
<meta charset="utf-8">
<title>JD → Jellyfin (Proxies)</title>
</head>
<body>
<h1>JD → Jellyfin</h1>
{render_nav("proxies")}
{err_html}

<form method="post" action="/proxies">
<div class="row">
<label>SOCKS5 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks5_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks5_in}</textarea>
</div>

<div class="row">
<label>SOCKS4 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks4_in}</textarea>
</div>

<div class="row">
<label>HTTP (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="http_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{http_in}</textarea>
</div>

<button type="submit">In JDownloader-Format umwandeln</button>
</form>

<h2 style="margin-top:18px;">JDownloader Import-Liste</h2>
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>, <code>http://IP:PORT</code>. Keine Prüfung/Validierung.</p>

<div class="row">
<textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{out_text}</textarea>
</div>

<button type="button" onclick="navigator.clipboard.writeText(document.getElementById('out').value)">Kopieren</button>
</body>
</html>
"""
|
||||
|
||||
@app.get("/", response_class=HTMLResponse)
|
||||
def index():
|
||||
try:
|
||||
@@ -672,9 +912,69 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
|
||||
library=library,
|
||||
status="queued",
|
||||
message="Download gestartet",
|
||||
progress=0.0,
|
||||
)
|
||||
|
||||
t = threading.Thread(target=worker, args=(jobid,), daemon=True)
|
||||
t.start()
|
||||
|
||||
return RedirectResponse(url="/", status_code=303)
|
||||
|
||||
@app.post("/cancel/{jobid}")
def cancel(jobid: str):
    """Mark a job for cancellation; the worker thread performs the cleanup.

    Unknown job ids and jobs that already reached a terminal state are
    ignored.  Always redirects back to the index page (303).
    """
    with lock:
        job = jobs.get(jobid)
        # Only jobs that are still active can be canceled.
        if job is not None and job.status not in {"finished", "failed", "canceled"}:
            job.cancel_requested = True
            job.message = "Abbruch angefordert…"
    return RedirectResponse(url="/", status_code=303)
|
||||
|
||||
@app.get("/proxies", response_class=HTMLResponse)
def proxies_get():
    """GET /proxies: fetch the upstream lists, normalize them, render the page.

    On any failure (network, parsing) the page is rendered with the error
    message and a 502 status.
    """
    sources = (
        ("socks5", "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks5.txt"),
        ("socks4", "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt"),
        ("http", "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt"),
    )
    try:
        # Fetch in declaration order, then normalize each list for its scheme.
        raw = {scheme: fetch_proxy_list(url) for scheme, url in sources}
        formatted = [format_proxy_lines(raw[scheme], scheme) for scheme, _ in sources]
        combined = "\n".join(chunk for chunk in formatted if chunk.strip())
        return HTMLResponse(render_proxies_page(
            socks5_in=raw["socks5"],
            socks4_in=raw["socks4"],
            http_in=raw["http"],
            out_text=combined,
        ))
    except Exception as e:
        return HTMLResponse(render_proxies_page(error=str(e)), status_code=502)
|
||||
|
||||
@app.post("/proxies", response_class=HTMLResponse)
def proxies_post(
    socks5_in: str = Form(""),
    socks4_in: str = Form(""),
    http_in: str = Form(""),
):
    """POST /proxies: normalize pasted proxy lists into JDownloader format.

    On a formatting error the inputs are echoed back (so nothing the user
    pasted is lost) together with the error message and a 400 status.
    """
    try:
        chunks = [
            format_proxy_lines(socks5_in, "socks5"),
            format_proxy_lines(socks4_in, "socks4"),
            format_proxy_lines(http_in, "http"),
        ]
        combined = "\n".join(chunk for chunk in chunks if chunk.strip())
        return HTMLResponse(render_proxies_page(
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            http_in=http_in,
            out_text=combined,
        ))
    except Exception as e:
        return HTMLResponse(render_proxies_page(
            error=str(e),
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            http_in=http_in,
            out_text="",
        ), status_code=400)
|
||||
|
||||
Reference in New Issue
Block a user