Compare commits

..

14 Commits

Author SHA1 Message Date
93310e3d99 Merge branch 'main' into codex/add-proxy-list-import-function-p0zu8g 2026-01-01 21:06:52 +01:00
00c72a78d2 Export proxies as jdproxies file 2026-01-01 21:04:41 +01:00
2891466635 Merge pull request #7 from DasPoschi/codex/add-proxy-list-import-function
Add proxy export file support for JDownloader
2026-01-01 20:57:12 +01:00
de41769e5f Add proxy export to file for JDownloader 2026-01-01 20:56:54 +01:00
f9ba535c56 Merge pull request #6 from DasPoschi/codex/analysieren-und-beheben-von-fehlern-4xbqut
Fetch and prefill proxy lists from TheSpeedX on Proxies page
2026-01-01 20:35:46 +01:00
32f159c03a Merge branch 'main' into codex/analysieren-und-beheben-von-fehlern-4xbqut 2026-01-01 20:35:33 +01:00
30329ef72a Fetch proxy lists from upstream sources 2026-01-01 20:18:30 +01:00
1f5fa2e31d Merge pull request #5 from DasPoschi/codex/analysieren-und-beheben-von-fehlern-8ffk1k
Auto-refresh progress display
2026-01-01 20:08:58 +01:00
b42fda4852 Auto-refresh progress display 2026-01-01 20:08:34 +01:00
e8dfe28b0b Merge pull request #4 from DasPoschi/codex/analysieren-und-beheben-von-fehlern-g3are0
Add download cancel and progress display
2026-01-01 20:02:04 +01:00
0d4f030f2c Add download cancel and progress display 2026-01-01 20:01:46 +01:00
812cb08df7 Change jd-output volume from read-only to read-write 2026-01-01 19:19:31 +01:00
063c22ab86 Merge pull request #3 from DasPoschi/codex/analysieren-und-beheben-von-fehlern
Handle TMDB auth failures gracefully
2026-01-01 18:49:14 +01:00
93fc395ebc Handle TMDB auth failures gracefully 2026-01-01 18:49:00 +01:00
3 changed files with 385 additions and 35 deletions

View File

@@ -26,6 +26,6 @@ services:
environment: environment:
TZ: Europe/Berlin TZ: Europe/Berlin
volumes: volumes:
- ./data/jd-output:/output:ro - ./data/jd-output:/output:rw
- ./data/md5:/md5:rw - ./data/md5:/md5:rw
- /root/.ssh/id_ed25519:/ssh/id_ed25519:ro - /root/.ssh/id_ed25519:/ssh/id_ed25519:ro

View File

@@ -56,6 +56,7 @@ POLL_SECONDS = float(os.environ.get("POLL_SECONDS", "5"))
# JDownloader writes here inside container # JDownloader writes here inside container
JD_OUTPUT_PATH = "/output" JD_OUTPUT_PATH = "/output"
PROXY_EXPORT_PATH = os.environ.get("PROXY_EXPORT_PATH", "/output/jd-proxies.jdproxies")
URL_RE = re.compile(r"^https?://", re.I) URL_RE = re.compile(r"^https?://", re.I)
@@ -117,6 +118,8 @@ class Job:
library: str # movies|series|auto library: str # movies|series|auto
status: str # queued|collecting|downloading|upload|finished|failed status: str # queued|collecting|downloading|upload|finished|failed
message: str message: str
progress: float = 0.0
cancel_requested: bool = False
jobs: Dict[str, Job] = {} jobs: Dict[str, Job] = {}
lock = threading.Lock() lock = threading.Lock()
@@ -204,39 +207,25 @@ def md5_file(path: str) -> str:
h.update(chunk) h.update(chunk)
return h.hexdigest() return h.hexdigest()
_md5_dir_cache: Optional[str] = None
def pick_md5_dir() -> str:
global _md5_dir_cache
if _md5_dir_cache:
return _md5_dir_cache
candidates = [
MD5_DIR,
os.path.join(JD_OUTPUT_PATH, ".md5"),
"/tmp/jd-md5",
]
for candidate in candidates:
try:
os.makedirs(candidate, exist_ok=True)
except Exception:
continue
if os.access(candidate, os.W_OK):
_md5_dir_cache = candidate
return candidate
raise RuntimeError(
"Kein beschreibbares MD5-Verzeichnis gefunden (MD5_DIR, /output/.md5, /tmp/jd-md5)."
)
def write_md5_sidecar(file_path: str, md5_hex: str) -> str:
    """Write a `<basename>.md5` sidecar file next to the configured MD5 dir.

    Tries MD5_DIR first and falls back to /tmp/md5 when the primary target
    is not writable. Returns the path of the sidecar that was written.
    Raises the last PermissionError (or RuntimeError) when every target fails.
    """
    base = os.path.basename(file_path)
    last_err: Optional[Exception] = None
    # Preferred location first, world-writable fallback second.
    for target in (MD5_DIR, "/tmp/md5"):
        try:
            os.makedirs(target, exist_ok=True)
            md5_path = os.path.join(target, base + ".md5")
            with open(md5_path, "w", encoding="utf-8") as f:
                f.write(f"{md5_hex} {base}\n")
            return md5_path
        except PermissionError as exc:
            last_err = exc
            continue
    if last_err:
        raise last_err
    raise RuntimeError("Failed to write MD5 sidecar file.")
def ffprobe_ok(path: str) -> bool: def ffprobe_ok(path: str) -> bool:
try: try:
@@ -309,7 +298,10 @@ def tmdb_search_movie(query: str) -> Optional[Dict[str, Any]]:
return None return None
q = urllib.parse.quote(query.strip()) q = urllib.parse.quote(query.strip())
url = f"https://api.themoviedb.org/3/search/movie?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}" url = f"https://api.themoviedb.org/3/search/movie?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}"
data = _http_get_json(url) try:
data = _http_get_json(url)
except Exception:
return None
results = data.get("results") or [] results = data.get("results") or []
return results[0] if results else None return results[0] if results else None
@@ -318,7 +310,10 @@ def tmdb_search_tv(query: str) -> Optional[Dict[str, Any]]:
return None return None
q = urllib.parse.quote(query.strip()) q = urllib.parse.quote(query.strip())
url = f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}" url = f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API_KEY}&language={urllib.parse.quote(TMDB_LANGUAGE)}&query={q}"
data = _http_get_json(url) try:
data = _http_get_json(url)
except Exception:
return None
results = data.get("results") or [] results = data.get("results") or []
return results[0] if results else None return results[0] if results else None
@@ -327,6 +322,61 @@ def sanitize_name(name: str) -> str:
out = "".join("_" if c in bad else c for c in name).strip() out = "".join("_" if c in bad else c for c in name).strip()
return re.sub(r"\s+", " ", out) return re.sub(r"\s+", " ", out)
def format_proxy_lines(raw: str, scheme: str) -> str:
    """
    Takes raw lines (ip:port or scheme://ip:port) and outputs normalized lines:
    scheme://ip:port (one per line). Ignores empty lines and comments.

    Duplicates are removed while preserving first-seen order. Lines without a
    numeric port or without a host are silently dropped.
    """
    scheme = scheme.strip().lower()
    if scheme not in {"socks5", "socks4", "http"}:
        raise ValueError("Unsupported proxy scheme")
    normalized = []
    for entry in (raw or "").splitlines():
        entry = entry.strip()
        if not entry or entry.startswith("#"):
            continue
        # Drop any existing scheme prefix; the requested scheme wins.
        if "://" in entry:
            entry = entry.split("://", 1)[1].strip()
        if ":" not in entry:
            continue
        host, _, port = entry.rpartition(":")
        host = host.strip()
        port = port.strip()
        if host and port.isdigit():
            normalized.append(f"{scheme}://{host}:{port}")
    # dict.fromkeys keeps insertion order -> stable de-duplication.
    return "\n".join(dict.fromkeys(normalized))
def fetch_proxy_list(url: str) -> str:
    """Download a plaintext proxy list (20 s timeout), decoded as UTF-8.

    Undecodable bytes are replaced rather than raising.
    """
    request = urllib.request.Request(url)
    with urllib.request.urlopen(request, timeout=20) as response:
        payload = response.read()
    return payload.decode("utf-8", "replace")
def save_proxy_export(text: str) -> str:
    """Persist the normalized proxy list to PROXY_EXPORT_PATH.

    Creates the parent directory when needed and always terminates the file
    with a single trailing newline. Returns the path written.
    Raises ValueError when *text* is empty/whitespace-only.
    """
    body = text.strip()
    if not body:
        raise ValueError("Keine Proxy-Einträge zum Speichern.")
    export_path = PROXY_EXPORT_PATH
    parent = os.path.dirname(export_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(export_path, "w", encoding="utf-8") as handle:
        handle.write(body + "\n")
    return export_path
def pick_library_target(library_choice: str, filename: str, package_name: str) -> str: def pick_library_target(library_choice: str, filename: str, package_name: str) -> str:
if library_choice not in {"movies", "series", "auto"}: if library_choice not in {"movies", "series", "auto"}:
library_choice = "auto" library_choice = "auto"
@@ -415,6 +465,10 @@ def query_links_and_packages(dev, jobid: str) -> Tuple[List[Dict[str, Any]], Dic
"name": True, "name": True,
"finished": True, "finished": True,
"running": True, "running": True,
"bytesLoaded": True,
"bytesTotal": True,
"bytes": True,
"totalBytes": True,
"status": True, "status": True,
"packageUUID": True, "packageUUID": True,
"uuid": True, "uuid": True,
@@ -487,6 +541,86 @@ def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)." return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)."
def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
    """Best-effort removal of links/packages via whatever API wrapper exists.

    Probes several namespace/method names and payload shapes because the
    JDownloader wrapper API differs between versions. Returns None on the
    first successful call, otherwise a German error message.
    """
    link_ids = [link.get("uuid") for link in links if link.get("uuid") is not None]
    pkg_ids = list(pkg_map)
    candidates = (
        ("downloads", "removeLinks"),
        ("downloads", "remove_links"),
        ("downloads", "deleteLinks"),
        ("downloads", "delete_links"),
        ("downloadcontroller", "removeLinks"),
        ("downloadcontroller", "remove_links"),
    )
    payloads = (
        {"linkUUIDs": link_ids, "packageUUIDs": pkg_ids, "deleteFiles": True},
        {"linkIds": link_ids, "packageIds": pkg_ids, "deleteFiles": True},
        {"linkUUIDs": link_ids, "deleteFiles": True},
        {"packageUUIDs": pkg_ids, "deleteFiles": True},
        {"linkUUIDs": link_ids, "packageUUIDs": pkg_ids, "removeFiles": True},
        {"linkIds": link_ids, "packageIds": pkg_ids, "removeFiles": True},
    )
    for namespace, method_name in candidates:
        handler = getattr(getattr(dev, namespace, None), method_name, None)
        if handler is None:
            continue
        for payload in payloads:
            try:
                handler([payload])
            except Exception:
                # Wrong payload shape for this wrapper version; try the next.
                continue
            return None
    return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)."
def cancel_job(dev, jobid: str) -> Optional[str]:
    """Cancel a job: delete partial downloads and their MD5 sidecars locally,
    then remove the links/packages from JDownloader.

    File deletion is best-effort; errors are ignored (the file may already be
    gone or owned by another process). Returns the message from
    try_cancel_from_jd (None on success).
    """
    links, pkg_map = query_links_and_packages(dev, jobid)
    for path in local_paths_from_links(links, pkg_map):
        try:
            if os.path.isfile(path):
                os.remove(path)
        except Exception:
            pass  # best effort
        try:
            # NOTE(review): assumes the sidecar lives directly in MD5_DIR —
            # matches write_md5_sidecar's primary target, not its fallback.
            sidecar = os.path.join(MD5_DIR, os.path.basename(path) + ".md5")
            if os.path.isfile(sidecar):
                os.remove(sidecar)
        except Exception:
            pass  # best effort
    return try_cancel_from_jd(dev, links, pkg_map)
def calculate_progress(links: List[Dict[str, Any]]) -> float:
    """Aggregate download progress over all links as a percentage in [0, 100].

    Total size is taken from the first present key among "bytesTotal",
    "totalBytes", "bytes"; loaded bytes come from "bytesLoaded". Links with
    missing, non-numeric, or non-positive sizes are skipped. Returns 0.0
    when nothing countable is present.
    """
    total_bytes = 0
    loaded_bytes = 0
    for link in links:
        size = next(
            (link[key] for key in ("bytesTotal", "totalBytes", "bytes") if link.get(key) is not None),
            None,
        )
        done = link.get("bytesLoaded")
        if size is None or done is None:
            continue
        try:
            size = int(size)
            done = int(done)
        except (TypeError, ValueError):
            continue
        if size <= 0:
            continue
        total_bytes += size
        # Some wrappers over-report loaded bytes; clamp per link.
        loaded_bytes += min(done, size)
    if total_bytes <= 0:
        return 0.0
    return max(0.0, min(100.0, (loaded_bytes / total_bytes) * 100.0))
# ============================================================ # ============================================================
# Worker # Worker
# ============================================================ # ============================================================
@@ -500,6 +634,13 @@ def worker(jobid: str):
job = jobs.get(jobid) job = jobs.get(jobid)
if not job: if not job:
return return
if job.cancel_requested:
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "canceled"
job.message = cancel_msg or "Download abgebrochen und Dateien entfernt."
job.progress = 0.0
return
links, pkg_map = query_links_and_packages(dev, jobid) links, pkg_map = query_links_and_packages(dev, jobid)
@@ -507,15 +648,18 @@ def worker(jobid: str):
with lock: with lock:
job.status = "collecting" job.status = "collecting"
job.message = "Warte auf Link-Crawler…" job.message = "Warte auf Link-Crawler…"
job.progress = 0.0
time.sleep(POLL_SECONDS) time.sleep(POLL_SECONDS)
continue continue
all_finished = all(bool(l.get("finished")) for l in links) all_finished = all(bool(l.get("finished")) for l in links)
if not all_finished: if not all_finished:
progress = calculate_progress(links)
with lock: with lock:
job.status = "downloading" job.status = "downloading"
done = sum(1 for l in links if l.get("finished")) done = sum(1 for l in links if l.get("finished"))
job.message = f"Download läuft… ({done}/{len(links)} fertig)" job.message = f"Download läuft… ({done}/{len(links)} fertig)"
job.progress = progress
time.sleep(POLL_SECONDS) time.sleep(POLL_SECONDS)
continue continue
@@ -526,6 +670,7 @@ def worker(jobid: str):
with lock: with lock:
job.status = "failed" job.status = "failed"
job.message = "Keine Video-Datei gefunden (Whitelist)." job.message = "Keine Video-Datei gefunden (Whitelist)."
job.progress = 0.0
return return
valid_videos = [p for p in video_files if ffprobe_ok(p)] valid_videos = [p for p in video_files if ffprobe_ok(p)]
@@ -533,11 +678,13 @@ def worker(jobid: str):
with lock: with lock:
job.status = "failed" job.status = "failed"
job.message = "ffprobe: keine gültige Video-Datei." job.message = "ffprobe: keine gültige Video-Datei."
job.progress = 0.0
return return
with lock: with lock:
job.status = "upload" job.status = "upload"
job.message = f"Download fertig. MD5/Upload/Verify für {len(valid_videos)} Datei(en)…" job.message = f"Download fertig. MD5/Upload/Verify für {len(valid_videos)} Datei(en)…"
job.progress = 100.0
ssh = ssh_connect() ssh = ssh_connect()
try: try:
@@ -577,6 +724,7 @@ def worker(jobid: str):
with lock: with lock:
job.status = "finished" job.status = "finished"
job.message = "Upload + MD5 OK. " + (jd_cleanup_msg or "JDownloader: Paket/Links entfernt.") job.message = "Upload + MD5 OK. " + (jd_cleanup_msg or "JDownloader: Paket/Links entfernt.")
job.progress = 100.0
return return
except Exception as e: except Exception as e:
@@ -585,6 +733,7 @@ def worker(jobid: str):
if job: if job:
job.status = "failed" job.status = "failed"
job.message = str(e) job.message = str(e)
job.progress = 0.0
# ============================================================ # ============================================================
# Web # Web
@@ -599,13 +748,27 @@ def render_page(error: str = "") -> str:
job_list = list(jobs.values())[::-1] job_list = list(jobs.values())[::-1]
for j in job_list: for j in job_list:
progress_pct = f"{j.progress:.1f}%"
progress_html = (
f"<div class='progress-row'>"
f"<progress value='{j.progress:.1f}' max='100'></progress>"
f"<span class='progress-text'>{progress_pct}</span>"
f"</div>"
)
cancel_html = ""
if j.status not in {"finished", "failed", "canceled"}:
cancel_html = (
f"<form method='post' action='/cancel/{j.id}' class='inline-form'>"
f"<button type='submit' class='danger'>Abbrechen</button>"
f"</form>"
)
rows += ( rows += (
f"<tr>" f"<tr>"
f"<td><code>{j.id}</code></td>" f"<td><code>{j.id}</code></td>"
f"<td style='max-width:560px; word-break:break-all;'>{j.url}</td>" f"<td style='max-width:560px; word-break:break-all;'>{j.url}</td>"
f"<td>{j.package_name}</td>" f"<td>{j.package_name}</td>"
f"<td>{j.library}</td>" f"<td>{j.library}</td>"
f"<td><b>{j.status}</b><br/><small>{j.message}</small></td>" f"<td><b>{j.status}</b><br/><small>{j.message}</small>{progress_html}{cancel_html}</td>"
f"</tr>" f"</tr>"
) )
@@ -617,9 +780,16 @@ def render_page(error: str = "") -> str:
<link rel="stylesheet" href="/static/style.css"> <link rel="stylesheet" href="/static/style.css">
<meta charset="utf-8"> <meta charset="utf-8">
<title>JD → Jellyfin</title> <title>JD → Jellyfin</title>
<script>
setInterval(() => {{
if (document.hidden) return;
window.location.reload();
}}, 5000);
</script>
</head> </head>
<body> <body>
<h1>JD → Jellyfin</h1> <h1>JD → Jellyfin</h1>
{render_nav("downloads")}
{err_html} {err_html}
<form method="post" action="/submit"> <form method="post" action="/submit">
@@ -660,6 +830,84 @@ def render_page(error: str = "") -> str:
</html> </html>
""" """
def render_nav(active: str) -> str:
    """Render the top navigation bar; the entry whose key equals *active* is bolded."""
    entries = (
        ("Downloads", "/", "downloads"),
        ("Proxies", "/proxies", "proxies"),
    )
    parts = ["<div style='margin: 8px 0 14px 0;'>"]
    for label, href, key in entries:
        weight = "font-weight:700;" if key == active else ""
        parts.append(f"<a href='{href}' style='margin-right:14px; {weight}'>{label}</a>")
    parts.append("</div>")
    return "".join(parts)
def render_proxies_page(
    error: str = "",
    message: str = "",
    socks5_in: str = "",
    socks4_in: str = "",
    http_in: str = "",
    out_text: str = "",
    export_path: str = "",
) -> str:
    """Render the /proxies page.

    Shows three input textareas (SOCKS5/SOCKS4/HTTP), the normalized output
    list, and a save-to-.jdproxies form that mirrors the inputs via hidden
    textareas. *error*/*message* become banner paragraphs when non-empty;
    *export_path* falls back to PROXY_EXPORT_PATH for display.
    """
    banner_err = f"<p class='error'>{error}</p>" if error else ""
    banner_ok = f"<p class='success'>{message}</p>" if message else ""
    return f"""
<html>
<head>
<link rel="stylesheet" href="/static/style.css">
<meta charset="utf-8">
<title>JD → Jellyfin (Proxies)</title>
</head>
<body>
<h1>JD → Jellyfin</h1>
{render_nav("proxies")}
{banner_err}
{banner_ok}
<form method="post" action="/proxies">
  <div class="row">
    <label>SOCKS5 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
    <textarea name="socks5_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks5_in}</textarea>
  </div>
  <div class="row">
    <label>SOCKS4 (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
    <textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks4_in}</textarea>
  </div>
  <div class="row">
    <label>HTTP (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
    <textarea name="http_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{http_in}</textarea>
  </div>
  <button type="submit">In JDownloader-Format umwandeln</button>
</form>
<h2 style="margin-top:18px;">JDownloader Import-Liste</h2>
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>, <code>http://IP:PORT</code>. Keine Prüfung/Validierung.</p>
<div class="row">
  <textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{out_text}</textarea>
</div>
<button type="button" onclick="navigator.clipboard.writeText(document.getElementById('out').value)">Kopieren</button>
<h2 style="margin-top:18px;">Datei für Connection Manager</h2>
<p class="hint">Speichert die Liste als <code>.jdproxies</code> im Container, z. B. zum Import in JDownloader → Verbindungsmanager → Importieren.</p>
<form method="post" action="/proxies/save">
  <textarea name="socks5_in" style="display:none;">{socks5_in}</textarea>
  <textarea name="socks4_in" style="display:none;">{socks4_in}</textarea>
  <textarea name="http_in" style="display:none;">{http_in}</textarea>
  <button type="submit">Liste als JDProxies speichern</button>
</form>
<p class="hint">Aktueller Pfad: <code>{export_path or PROXY_EXPORT_PATH}</code></p>
</body>
</html>
"""
@app.get("/", response_class=HTMLResponse) @app.get("/", response_class=HTMLResponse)
def index(): def index():
try: try:
@@ -698,9 +946,102 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
library=library, library=library,
status="queued", status="queued",
message="Download gestartet", message="Download gestartet",
progress=0.0,
) )
t = threading.Thread(target=worker, args=(jobid,), daemon=True) t = threading.Thread(target=worker, args=(jobid,), daemon=True)
t.start() t.start()
return RedirectResponse(url="/", status_code=303) return RedirectResponse(url="/", status_code=303)
@app.post("/cancel/{jobid}")
def cancel(jobid: str):
    """Flag a job for cancellation; the worker thread performs the cleanup.

    Unknown job IDs and already-terminal jobs are ignored. Always redirects
    back to the index page (303).
    """
    terminal = {"finished", "failed", "canceled"}
    with lock:
        job = jobs.get(jobid)
        if job is not None and job.status not in terminal:
            job.cancel_requested = True
            job.message = "Abbruch angefordert…"
    return RedirectResponse(url="/", status_code=303)
@app.get("/proxies", response_class=HTMLResponse)
def proxies_get():
    """Prefill the proxies page with the upstream TheSpeedX lists.

    Fetches socks5/socks4/http lists, normalizes them into JDownloader
    import format, and renders the page. Any failure (network, parse)
    yields an error page with HTTP 502.
    """
    sources = (
        ("socks5", "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks5.txt"),
        ("socks4", "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt"),
        ("http", "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt"),
    )
    try:
        raw = {scheme: fetch_proxy_list(url) for scheme, url in sources}
        sections = [format_proxy_lines(raw[scheme], scheme) for scheme, _ in sources]
        combined = "\n".join(section for section in sections if section.strip())
        return HTMLResponse(render_proxies_page(
            socks5_in=raw["socks5"],
            socks4_in=raw["socks4"],
            http_in=raw["http"],
            out_text=combined,
            export_path=PROXY_EXPORT_PATH,
        ))
    except Exception as e:
        return HTMLResponse(render_proxies_page(error=str(e)), status_code=502)
@app.post("/proxies", response_class=HTMLResponse)
def proxies_post(
    socks5_in: str = Form(""),
    socks4_in: str = Form(""),
    http_in: str = Form(""),
):
    """Normalize the submitted proxy lists into JDownloader import format.

    On success, re-renders the page with inputs echoed back and the combined
    normalized list. On failure, renders an error page with HTTP 400.
    """
    try:
        sections = [
            format_proxy_lines(text, scheme)
            for text, scheme in ((socks5_in, "socks5"), (socks4_in, "socks4"), (http_in, "http"))
        ]
        combined = "\n".join(section for section in sections if section.strip())
        return HTMLResponse(render_proxies_page(
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            http_in=http_in,
            out_text=combined,
            export_path=PROXY_EXPORT_PATH,
        ))
    except Exception as e:
        return HTMLResponse(render_proxies_page(
            error=str(e),
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            http_in=http_in,
            out_text="",
            export_path=PROXY_EXPORT_PATH,
        ), status_code=400)
@app.post("/proxies/save", response_class=HTMLResponse)
def proxies_save(
    socks5_in: str = Form(""),
    socks4_in: str = Form(""),
    http_in: str = Form(""),
):
    """Normalize the submitted lists and persist them as a .jdproxies file.

    Mirrors proxies_post but additionally writes the combined list via
    save_proxy_export and reports the written path. Failures (including an
    empty list) render an error page with HTTP 400.
    """
    try:
        sections = [
            format_proxy_lines(text, scheme)
            for text, scheme in ((socks5_in, "socks5"), (socks4_in, "socks4"), (http_in, "http"))
        ]
        combined = "\n".join(section for section in sections if section.strip())
        export_path = save_proxy_export(combined)
        return HTMLResponse(render_proxies_page(
            message=f"Proxy-Liste gespeichert: {export_path}",
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            http_in=http_in,
            out_text=combined,
            export_path=export_path,
        ))
    except Exception as e:
        return HTMLResponse(render_proxies_page(
            error=str(e),
            socks5_in=socks5_in,
            socks4_in=socks4_in,
            http_in=http_in,
            out_text="",
            export_path=PROXY_EXPORT_PATH,
        ), status_code=400)

View File

@@ -4,9 +4,18 @@ form { background:#fff; border:1px solid #e5e5e5; border-radius:10px; padding:14
.row { margin-bottom: 10px; } .row { margin-bottom: 10px; }
input, select { padding:10px; border:1px solid #ccc; border-radius:8px; font-size:14px; width: 100%; max-width: 860px; } input, select { padding:10px; border:1px solid #ccc; border-radius:8px; font-size:14px; width: 100%; max-width: 860px; }
button { padding:10px 14px; border:0; border-radius:8px; font-weight:600; cursor:pointer; } button { padding:10px 14px; border:0; border-radius:8px; font-weight:600; cursor:pointer; }
button.danger { background:#b00020; color:#fff; }
progress { width: 100%; height: 12px; }
progress::-webkit-progress-bar { background:#f0f0f0; border-radius:8px; }
progress::-webkit-progress-value { background:#1b7f3a; border-radius:8px; }
progress::-moz-progress-bar { background:#1b7f3a; border-radius:8px; }
table { margin-top:16px; width:100%; border-collapse: collapse; background:#fff; border:1px solid #e5e5e5; border-radius:10px; overflow:hidden; } table { margin-top:16px; width:100%; border-collapse: collapse; background:#fff; border:1px solid #e5e5e5; border-radius:10px; overflow:hidden; }
th, td { border-top:1px solid #eee; padding:10px; vertical-align: top; font-size:14px; } th, td { border-top:1px solid #eee; padding:10px; vertical-align: top; font-size:14px; }
th { background:#fbfbfb; text-align:left; } th { background:#fbfbfb; text-align:left; }
code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; background:#f2f2f2; padding:2px 4px; border-radius:4px; } code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; background:#f2f2f2; padding:2px 4px; border-radius:4px; }
.hint { color:#555; font-size: 12px; margin-top: 10px; } .hint { color:#555; font-size: 12px; margin-top: 10px; }
.error { color:#b00020; font-weight: 700; } .error { color:#b00020; font-weight: 700; }
.success { color:#1b7f3a; font-weight: 700; }
.progress-row { display:flex; align-items:center; gap:8px; margin-top:6px; }
.progress-text { font-size:12px; color:#333; min-width:48px; }
.inline-form { margin-top:6px; }