Compare commits


19 Commits

SHA1 Message Date
6c13fbbb2f Merge branch 'main' into codex/fetch-proxies-from-proxyscrape-api-4xe4oq 2026-01-04 14:46:06 +01:00
33282ddbcb Remove proxy blacklist filters 2026-01-04 14:45:44 +01:00
7795e22744 Merge pull request #15 from DasPoschi/codex/fetch-proxies-from-proxyscrape-api-4vaqb3
Remove HTTP proxies from proxy UI
2026-01-04 14:27:14 +01:00
e83f1323cd Merge branch 'main' into codex/fetch-proxies-from-proxyscrape-api-4vaqb3 2026-01-04 14:27:07 +01:00
194b16e09c Remove HTTP proxies from UI 2026-01-04 14:26:36 +01:00
423e8e28ec Merge pull request #14 from DasPoschi/codex/fetch-proxies-from-proxyscrape-api
Use ProxyScrape API for SOCKS lists and normalize single-line responses
2026-01-04 14:20:54 +01:00
daeee039fa Update proxy sources for socks lists 2026-01-04 14:20:38 +01:00
97a5afbee9 Update app.py 2026-01-03 23:09:10 +01:00
a2de578087 Merge pull request #13 from DasPoschi/codex/configure-proxies-for-downloads-only-r44dl5
Add *.your-server.de to proxy blacklist
2026-01-03 23:04:38 +01:00
c3aac479fe Merge branch 'main' into codex/configure-proxies-for-downloads-only-r44dl5 2026-01-03 22:56:21 +01:00
1350b50199 Add your-server.de to proxy blacklist 2026-01-03 22:55:54 +01:00
6b06134edf Merge pull request #12 from DasPoschi/codex/configure-proxies-for-downloads-only
Bypass proxies for internal HTTP calls
2026-01-03 22:43:36 +01:00
be4785b04a Bypass proxies for non-download requests 2026-01-03 22:42:49 +01:00
db39f2b55e Merge pull request #11 from DasPoschi/codex/add-proxy-list-import-function-3g187r
Refresh jobs table via `/jobs` endpoint instead of full page reload
2026-01-01 22:23:09 +01:00
a0e7ed91c7 Update jobs progress without full reload 2026-01-01 22:22:41 +01:00
7443a0e0ca Merge pull request #10 from DasPoschi/codex/add-proxy-list-import-function-g3956j
Add JDProxies blacklist filters and save/export support
2026-01-01 22:12:53 +01:00
3cf7581797 Merge branch 'main' into codex/add-proxy-list-import-function-g3956j 2026-01-01 22:12:44 +01:00
e9ccb51f13 Add JDProxies blacklist filters 2026-01-01 22:12:10 +01:00
a549ba66ba Merge pull request #9 from DasPoschi/codex/add-proxy-list-import-function-q1akx1
Write JDProxies JSON export and add save endpoint/UI
2026-01-01 21:27:20 +01:00
2 changed files with 132 additions and 34 deletions

app.py

@@ -18,7 +18,7 @@ from typing import Any, Dict, List, Optional, Tuple
 from myjdapi import Myjdapi
 import paramiko
 from fastapi import FastAPI, Form, Request
-from fastapi.responses import HTMLResponse, RedirectResponse
+from fastapi.responses import HTMLResponse, PlainTextResponse, RedirectResponse
 from fastapi.staticfiles import StaticFiles
 # ============================================================
@@ -57,9 +57,12 @@ POLL_SECONDS = float(os.environ.get("POLL_SECONDS", "5"))
 # JDownloader writes here inside container
 JD_OUTPUT_PATH = "/output"
+PROXY_EXPORT_PATH = os.environ.get("PROXY_EXPORT_PATH", "/output/jd-proxies.jdproxies")
+LOG_BUFFER_LIMIT = int(os.environ.get("LOG_BUFFER_LIMIT", "500"))
 URL_RE = re.compile(r"^https?://", re.I)
+NO_PROXY_OPENER = urllib.request.build_opener(urllib.request.ProxyHandler({}))
 VIDEO_EXTS = {
     ".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm",
     ".ts", ".m2ts", ".mts", ".mpg", ".mpeg", ".vob", ".ogv",
@@ -123,6 +126,21 @@ class Job:
 jobs: Dict[str, Job] = {}
 lock = threading.Lock()
+log_lock = threading.Lock()
+connection_logs: List[str] = []
+
+def log_connection(message: str) -> None:
+    timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
+    line = f"[{timestamp}] {message}"
+    with log_lock:
+        connection_logs.append(line)
+        if len(connection_logs) > LOG_BUFFER_LIMIT:
+            excess = len(connection_logs) - LOG_BUFFER_LIMIT
+            del connection_logs[:excess]
+
+def get_connection_logs() -> str:
+    with log_lock:
+        return "\n".join(connection_logs)
 # ============================================================
 # Core helpers
@@ -149,6 +167,7 @@ def ensure_env():
 def get_device():
     jd = Myjdapi()
+    log_connection(f"MyJDownloader connect as {MYJD_EMAIL or 'unknown'}")
     jd.connect(MYJD_EMAIL, MYJD_PASSWORD)
     wanted = (MYJD_DEVICE or "").strip()
@@ -246,6 +265,7 @@ def ffprobe_ok(path: str) -> bool:
 def ssh_connect() -> paramiko.SSHClient:
     ssh = paramiko.SSHClient()
     ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+    log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT}")
     ssh.connect(
         hostname=JELLYFIN_HOST,
         port=JELLYFIN_PORT,
@@ -268,6 +288,7 @@ def sftp_mkdirs(sftp: paramiko.SFTPClient, remote_dir: str):
 def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str):
     sftp = ssh.open_sftp()
     try:
+        log_connection(f"SFTP upload {local_path} -> {remote_path}")
         sftp_mkdirs(sftp, os.path.dirname(remote_path))
         sftp.put(local_path, remote_path)
     finally:
@@ -276,6 +297,7 @@ def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str):
 def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
     quoted = shlex.quote(remote_path)
     cmd = f"md5sum {quoted}"
+    log_connection(f"SSH exec {cmd}")
     stdin, stdout, stderr = ssh.exec_command(cmd, timeout=120)
     out = stdout.read().decode("utf-8", "replace").strip()
     err = stderr.read().decode("utf-8", "replace").strip()
@@ -290,7 +312,8 @@ def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
 # ============================================================
 def _http_get_json(url: str, headers: Optional[Dict[str, str]] = None) -> Any:
     req = urllib.request.Request(url, headers=headers or {})
-    with urllib.request.urlopen(req, timeout=20) as r:
+    log_connection(f"HTTP GET {url} (no-proxy)")
+    with NO_PROXY_OPENER.open(req, timeout=20) as r:
         return json.loads(r.read().decode("utf-8", "replace"))
 def tmdb_search_movie(query: str) -> Optional[Dict[str, Any]]:
@@ -363,8 +386,12 @@ def format_proxy_lines(raw: str, scheme: str) -> str:
 def fetch_proxy_list(url: str) -> str:
     req = urllib.request.Request(url)
-    with urllib.request.urlopen(req, timeout=20) as resp:
-        return resp.read().decode("utf-8", "replace")
+    log_connection(f"HTTP GET {url} (no-proxy)")
+    with NO_PROXY_OPENER.open(req, timeout=20) as resp:
+        text = resp.read().decode("utf-8", "replace")
+    if "\n" not in text and re.search(r"\s", text):
+        return re.sub(r"\s+", "\n", text.strip())
+    return text
 def build_jdproxies_payload(text: str) -> Dict[str, Any]:
     if not text.strip():
@@ -375,6 +402,23 @@ def build_jdproxies_payload(text: str) -> Dict[str, Any]:
         "socks4": "SOCKS4",
         "http": "HTTP",
     }
+    entries.append({
+        "filter": None,
+        "proxy": {
+            "address": None,
+            "password": None,
+            "port": 80,
+            "type": "NONE",
+            "username": None,
+            "connectMethodPrefered": False,
+            "preferNativeImplementation": False,
+            "resolveHostName": False,
+        },
+        "enabled": True,
+        "pac": False,
+        "rangeRequestsSupported": True,
+        "reconnectSupported": True,
+    })
     for line in text.splitlines():
         s = line.strip()
         if not s:
@@ -412,6 +456,8 @@ def save_proxy_export(text: str) -> str:
     export_dir = os.path.dirname(export_path)
     if export_dir:
         os.makedirs(export_dir, exist_ok=True)
+    if os.path.exists(export_path):
+        os.remove(export_path)
     with open(export_path, "w", encoding="utf-8") as handle:
         handle.write(json.dumps(payload, indent=2))
         handle.write("\n")
@@ -488,7 +534,8 @@ def jellyfin_refresh_library():
     try:
         url = JELLYFIN_API_BASE + path
         req = urllib.request.Request(url, headers=headers, method="POST")
-        with urllib.request.urlopen(req, timeout=20) as r:
+        log_connection(f"HTTP POST {url} (no-proxy)")
+        with NO_PROXY_OPENER.open(req, timeout=20) as r:
             _ = r.read()
         return
     except Exception:
@@ -782,7 +829,19 @@ def worker(jobid: str):
 def favicon():
     return HTMLResponse(status_code=204)
-def render_page(error: str = "") -> str:
+@app.get("/jobs", response_class=HTMLResponse)
+def jobs_get():
+    return HTMLResponse(render_job_rows())
+
+@app.get("/logs", response_class=HTMLResponse)
+def logs_get():
+    return HTMLResponse(render_logs_page())
+
+@app.get("/logs/data", response_class=PlainTextResponse)
+def logs_data():
+    return PlainTextResponse(get_connection_logs())
+
+def render_job_rows() -> str:
     rows = ""
     with lock:
         job_list = list(jobs.values())[::-1]
@@ -812,6 +871,13 @@ def render_page(error: str = "") -> str:
             f"</tr>"
         )
+    if not rows:
+        rows = "<tr><td colspan='5'><em>No jobs yet.</em></td></tr>"
+    return rows
+
+def render_page(error: str = "") -> str:
+    rows = render_job_rows()
     err_html = f"<p class='error'>{error}</p>" if error else ""
     auth_note = "aktiv" if _auth_enabled() else "aus"
     return f"""
@@ -821,10 +887,18 @@ def render_page(error: str = "") -> str:
 <meta charset="utf-8">
 <title>JD → Jellyfin</title>
 <script>
-setInterval(() => {{
+async function refreshJobs() {{
     if (document.hidden) return;
-    window.location.reload();
-}}, 5000);
+    try {{
+        const resp = await fetch('/jobs');
+        if (!resp.ok) return;
+        const html = await resp.text();
+        const tbody = document.getElementById('jobs-body');
+        if (tbody) tbody.innerHTML = html;
+    }} catch (e) {{
+    }}
+}}
+setInterval(refreshJobs, 5000);
 </script>
 </head>
 <body>
@@ -862,8 +936,8 @@ def render_page(error: str = "") -> str:
 <thead>
 <tr><th>JobID</th><th>URL</th><th>Paket</th><th>Ziel</th><th>Status</th></tr>
 </thead>
-<tbody>
-    {rows if rows else "<tr><td colspan='5'><em>No jobs yet.</em></td></tr>"}
+<tbody id="jobs-body">
+    {rows}
 </tbody>
 </table>
 </body>
@@ -878,15 +952,50 @@ def render_nav(active: str) -> str:
     "<div style='margin: 8px 0 14px 0;'>"
     + link("Downloads", "/", "downloads")
     + link("Proxies", "/proxies", "proxies")
+    + link("Logs", "/logs", "logs")
     + "</div>"
 )

+def render_logs_page() -> str:
+    return f"""
+<html>
+<head>
+<link rel="stylesheet" href="/static/style.css">
+<meta charset="utf-8">
+<title>JD → Jellyfin (Logs)</title>
+<script>
+async function refreshLogs() {{
+    if (document.hidden) return;
+    try {{
+        const resp = await fetch('/logs/data');
+        if (!resp.ok) return;
+        const text = await resp.text();
+        const area = document.getElementById('log-body');
+        if (area) {{
+            area.value = text;
+            area.scrollTop = area.scrollHeight;
+        }}
+    }} catch (e) {{
+    }}
+}}
+setInterval(refreshLogs, 2000);
+window.addEventListener('load', refreshLogs);
+</script>
+</head>
+<body>
+<h1>JD → Jellyfin</h1>
+{render_nav("logs")}
+<p class="hint">Verbindungs-Debugger (Echtzeit). Letzte {LOG_BUFFER_LIMIT} Einträge.</p>
+<textarea id="log-body" class="log-area" rows="20" readonly></textarea>
+</body>
+</html>
+"""
+
 def render_proxies_page(
     error: str = "",
     message: str = "",
     socks5_in: str = "",
     socks4_in: str = "",
     http_in: str = "",
     out_text: str = "",
     export_path: str = "",
 ) -> str:
@@ -916,16 +1025,11 @@ def render_proxies_page(
 <textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks4_in}</textarea>
 </div>
-<div class="row">
-<label>HTTP (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
-<textarea name="http_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{http_in}</textarea>
-</div>
 <button type="submit">In JDownloader-Format umwandeln</button>
 </form>
 <h2 style="margin-top:18px;">JDownloader Import-Liste</h2>
-<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>, <code>http://IP:PORT</code>. Keine Prüfung/Validierung.</p>
+<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Prüfung/Validierung.</p>
 <div class="row">
 <textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{out_text}</textarea>
@@ -939,7 +1043,6 @@ def render_proxies_page(
 <form method="post" action="/proxies/save">
 <textarea name="socks5_in" style="display:none;">{socks5_in}</textarea>
 <textarea name="socks4_in" style="display:none;">{socks4_in}</textarea>
-<textarea name="http_in" style="display:none;">{http_in}</textarea>
 <button type="submit">Liste als JDProxies speichern</button>
 </form>
@@ -1009,18 +1112,20 @@ def cancel(jobid: str):
 @app.get("/proxies", response_class=HTMLResponse)
 def proxies_get():
     try:
-        socks5_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks5.txt")
-        socks4_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt")
-        http_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt")
+        socks5_in = fetch_proxy_list(
+            "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks5&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
+        )
+        socks4_in = fetch_proxy_list(
+            "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
+        )
         s5 = format_proxy_lines(socks5_in, "socks5")
         s4 = format_proxy_lines(socks4_in, "socks4")
-        hp = format_proxy_lines(http_in, "http")
-        combined = "\n".join([x for x in [s5, s4, hp] if x.strip()])
+        combined = "\n".join([x for x in [s5, s4] if x.strip()])
         return HTMLResponse(render_proxies_page(
             socks5_in=socks5_in,
             socks4_in=socks4_in,
-            http_in=http_in,
             out_text=combined,
             export_path=PROXY_EXPORT_PATH,
         ))
@@ -1031,18 +1136,15 @@ def proxies_get():
 def proxies_post(
     socks5_in: str = Form(""),
     socks4_in: str = Form(""),
-    http_in: str = Form(""),
 ):
     try:
         s5 = format_proxy_lines(socks5_in, "socks5")
         s4 = format_proxy_lines(socks4_in, "socks4")
-        hp = format_proxy_lines(http_in, "http")
-        combined = "\n".join([x for x in [s5, s4, hp] if x.strip()])
+        combined = "\n".join([x for x in [s5, s4] if x.strip()])
         return HTMLResponse(render_proxies_page(
             socks5_in=socks5_in,
             socks4_in=socks4_in,
-            http_in=http_in,
             out_text=combined,
             export_path=PROXY_EXPORT_PATH,
         ))
@@ -1051,7 +1153,6 @@ def proxies_post(
             error=str(e),
             socks5_in=socks5_in,
             socks4_in=socks4_in,
-            http_in=http_in,
             out_text="",
             export_path=PROXY_EXPORT_PATH,
         ), status_code=400)
@@ -1060,19 +1161,16 @@ def proxies_post(
 def proxies_save(
     socks5_in: str = Form(""),
     socks4_in: str = Form(""),
-    http_in: str = Form(""),
 ):
     try:
         s5 = format_proxy_lines(socks5_in, "socks5")
         s4 = format_proxy_lines(socks4_in, "socks4")
-        hp = format_proxy_lines(http_in, "http")
-        combined = "\n".join([x for x in [s5, s4, hp] if x.strip()])
+        combined = "\n".join([x for x in [s5, s4] if x.strip()])
         export_path = save_proxy_export(combined)
         return HTMLResponse(render_proxies_page(
             message=f"Proxy-Liste gespeichert: {export_path}",
             socks5_in=socks5_in,
             socks4_in=socks4_in,
-            http_in=http_in,
             out_text=combined,
             export_path=export_path,
         ))
@@ -1081,7 +1179,6 @@ def proxies_save(
             error=str(e),
             socks5_in=socks5_in,
             socks4_in=socks4_in,
-            http_in=http_in,
             out_text="",
             export_path=PROXY_EXPORT_PATH,
         ), status_code=400)
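For orientation, a minimal sketch of the exported jd-proxies.jdproxies structure implied by the build_jdproxies_payload and save_proxy_export hunks above. Only the leading "NONE" entry appears verbatim in the diff; the top-level "customProxyList" key and the SOCKS5 entry (address, port) are illustrative assumptions.

    import json

    def example_payload() -> dict:
        # Template entry copied from the diff: a "NONE" proxy keeps direct
        # connections available alongside the imported SOCKS proxies.
        none_entry = {
            "filter": None,
            "proxy": {
                "address": None,
                "password": None,
                "port": 80,
                "type": "NONE",
                "username": None,
                "connectMethodPrefered": False,
                "preferNativeImplementation": False,
                "resolveHostName": False,
            },
            "enabled": True,
            "pac": False,
            "rangeRequestsSupported": True,
            "reconnectSupported": True,
        }
        # Assumed shape of a parsed socks5://IP:PORT line; the address below
        # is a documentation placeholder (TEST-NET-3), not a real proxy.
        socks5_entry = json.loads(json.dumps(none_entry))  # cheap deep copy
        socks5_entry["proxy"].update({
            "address": "203.0.113.10",
            "port": 1080,
            "type": "SOCKS5",
        })
        # Assumed top-level wrapper key for the .jdproxies import format.
        return {"customProxyList": [none_entry, socks5_entry]}

    if __name__ == "__main__":
        print(json.dumps(example_payload(), indent=2))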

static/style.css

@@ -19,3 +19,4 @@ code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; fo
 .progress-row { display:flex; align-items:center; gap:8px; margin-top:6px; }
 .progress-text { font-size:12px; color:#333; min-width:48px; }
 .inline-form { margin-top:6px; }
+.log-area { width:100%; max-width: 920px; padding:10px; border:1px solid #ccc; border-radius:8px; background:#fff; }
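A closing note on the change that recurs across the app.py hunks: internal HTTP calls (MyJDownloader, TMDB, Jellyfin, proxy-list fetches) are routed through NO_PROXY_OPENER so they never use the download proxies. A self-contained sketch of that pattern, assuming only the Python standard library; the URL is a placeholder:

    import urllib.request

    # An opener built from an empty ProxyHandler ignores http_proxy/https_proxy
    # environment variables, so requests go out directly.
    NO_PROXY_OPENER = urllib.request.build_opener(urllib.request.ProxyHandler({}))

    def http_get_direct(url: str, timeout: float = 20.0) -> str:
        # Fetch url without any proxy, regardless of environment settings.
        req = urllib.request.Request(url)
        with NO_PROXY_OPENER.open(req, timeout=timeout) as resp:
            return resp.read().decode("utf-8", "replace")

    if __name__ == "__main__":
        print(http_get_direct("https://example.com")[:200])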