refactor: standardize string formatting and improve code readability
Some checks failed
Python syntax & lint / syntax-lint (push) Failing after 7s

This commit is contained in:
Space-Banane
2026-04-06 23:03:21 +02:00
parent e95f915d86
commit b817f34607
2 changed files with 106 additions and 70 deletions

View File

@@ -16,29 +16,37 @@ URLS = [
# Path to an optional JSON file of URLs; overridable via the URLS_JSON env var,
# defaulting to urls.json next to this script.
URLS_JSON = os.environ.get("URLS_JSON", str(Path(__file__).parent / "urls.json"))

# ANSI escape codes for colored terminal output.
# NOTE(review): the scraped diff showed each of these assigned twice (old and
# new diff sides merged); a single definition of each is kept here.
RESET = "\033[0m"
BOLD = "\033[1m"
RED = "\033[91m"
GREEN = "\033[92m"
YELLOW = "\033[93m"
CYAN = "\033[96m"
DIM = "\033[2m"
def log(msg, color=RESET):
    """Print *msg* wrapped in the given ANSI *color*, resetting afterwards.

    Flushes immediately so progress output appears in real time.
    """
    colored = "".join([color, f"{msg}", RESET])
    print(colored, flush=True)
def get_post_id(url):
    """Return the post ID: the segment after the final '-' in *url*.

    Trailing slashes are stripped first so "...-123/" yields "123".
    """
    trimmed = url.rstrip("/")
    return trimmed.rsplit("-", 1)[-1]
def fetch_info(url):
cmd = [
YTDLP,
"--dump-json", "--no-playlist",
"--cookies", COOKIES,
"--extractor-args", "generic:impersonate",
url
"--dump-json",
"--no-playlist",
"--cookies",
COOKIES,
"--extractor-args",
"generic:impersonate",
url,
]
result = subprocess.run(cmd, capture_output=True, text=True, timeout=30, cwd=Path(__file__).parent)
result = subprocess.run(
cmd, capture_output=True, text=True, timeout=30, cwd=Path(__file__).parent
)
if result.returncode != 0:
return None, result.stderr.strip()
try:
@@ -46,31 +54,41 @@ def fetch_info(url):
except json.JSONDecodeError:
return None, "Failed to parse JSON"
def download(url, out_dir):
    """Download the media at *url* into *out_dir* via yt-dlp.

    Requests best video+audio merged to mp4, passing the cookie file and the
    generic-extractor impersonation flag, and names the file after the title.

    Returns:
        (ok, output): ok is True when yt-dlp exited 0; output is the combined,
        stripped stdout+stderr for diagnostics.
    """
    # NOTE(review): the scraped diff contained merged old/new duplicates of
    # several argv entries and of the subprocess.run call; this is the
    # coherent post-refactor form with each argument listed once.
    cmd = [
        YTDLP,
        "-f",
        "bestvideo+bestaudio/best",
        "--prefer-free-formats",
        "--cookies",
        COOKIES,
        "--extractor-args",
        "generic:impersonate",
        "--merge-output-format",
        "mp4",
        "-o",
        str(out_dir / "%(title)s.%(ext)s"),
        url,
    ]
    # List-form argv (shell=False) avoids shell-injection via the URL.
    result = subprocess.run(
        cmd, capture_output=True, text=True, cwd=Path(__file__).parent
    )
    return result.returncode == 0, (result.stdout + result.stderr).strip()
def sanitize(name, max_len=60):
    """Make *name* filesystem-safe.

    Alphanumerics and the characters " _-." are kept; everything else becomes
    "_". The result is whitespace-stripped, truncated to *max_len*, and has
    stray leading/trailing "_", ".", and spaces removed.
    """
    kept = []
    for ch in name:
        if ch.isalnum() or ch in " _-.":
            kept.append(ch)
        else:
            kept.append("_")
    safe = "".join(kept).strip()
    return safe[:max_len].strip("_. ")
def main():
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
total = len(URLS)
# Load from JSON if present and non-empty
try:
if Path(URLS_JSON).exists():
with open(URLS_JSON, 'r', encoding='utf-8') as fh:
with open(URLS_JSON, "r", encoding="utf-8") as fh:
data = json.load(fh)
if isinstance(data, list) and data:
URLS.clear()
@@ -95,7 +113,9 @@ def main():
if info is None:
# Likely a text post or unavailable
log(f" {YELLOW}⚠ skipped — no media ({err[:80] if err else 'no video found'}){RESET}")
log(
f" {YELLOW}⚠ skipped — no media ({err[:80] if err else 'no video found'}){RESET}"
)
skipped.append((url, err))
print()
continue
@@ -115,7 +135,9 @@ def main():
# Find what was downloaded
files = list(out_dir.iterdir())
sizes = [f"{f.stat().st_size / 1e6:.1f} MB" for f in files if f.is_file()]
log(f" {GREEN}✓ done — {', '.join(sizes) if sizes else 'file saved'}{RESET}")
log(
f" {GREEN}✓ done — {', '.join(sizes) if sizes else 'file saved'}{RESET}"
)
ok.append(url)
else:
# Check if it's just no video (text post that slipped through info check)
@@ -123,14 +145,18 @@ def main():
log(f" {YELLOW}⚠ skipped — text post{RESET}")
skipped.append((url, "no video content"))
# Remove empty dir
try: out_dir.rmdir()
except OSError: pass
try:
out_dir.rmdir()
except OSError:
pass
else:
log(f" {RED}✗ failed{RESET}")
# Print last few lines of output for context
for line in output.splitlines()[-3:]:
log(f" {DIM}{line}{RESET}")
failed.append((url, output.splitlines()[-1] if output else "unknown error"))
failed.append(
(url, output.splitlines()[-1] if output else "unknown error")
)
print()
@@ -150,5 +176,6 @@ def main():
log(f"{''*55}\n", CYAN)
# Script entry point. NOTE(review): the scraped diff showed main() twice
# (old line without trailing newline plus its replacement); a single call
# is the intended code.
if __name__ == "__main__":
    main()