#!/usr/bin/env python3
from tqdm import tqdm
from getopt import getopt, GetoptError
import tempfile
import subprocess
import requests
import hashlib
import os
import re
import sys

# The radio's hostname is deliberately kept out of the source; read it
# from the user's config file. Fail with a clear message instead of a
# traceback when the config is missing.
try:
    with open(os.path.expanduser("~/.config/radiodl"), encoding="utf-8") as f:
        secret_domain = f.read().strip()
except FileNotFoundError:
    print("Missing config file ~/.config/radiodl (should contain the radio domain)")
    sys.exit(1)

def show_help():
    """Print the usage summary to stdout and terminate with exit status 0."""
    usage = """radiodl [options] username

Options:
    -h: Display this message
    -s: Assume existing files are up to date (skip hash check)
    -l [path]: Specify log file (default: radiodl.log)
    -m [num]: Specify max retries (default: 5)
    -o [path]: Specify output directory (default: cwd)
    -p: Show overall progress (hides track names)
    -f: transcode to format, implies -s (e.g. '.opus')
    -z: organize music into subdirs"""
    print(usage)
    sys.exit(0)

# Parse command-line options; any usage error prints the help text and exits.
try:
    optlist, args = getopt(sys.argv[1:], 'hspo:l:m:f:z')
except GetoptError as err:
    print(err)
    show_help()

# Exactly one positional argument (the username) is required.
if len(args) != 1:
    show_help()

user = args[0]
output_dir = os.getcwd()
log_path = "radiodl.log"
skip_check = False
max_retries = 5
overall = False
out_format = None
organize = False

for opt, val in optlist:
    if opt == "-h":
        show_help()
    elif opt == "-s":
        skip_check = True
    elif opt == "-o":
        output_dir = val
    elif opt == "-l":
        # BUG FIX: this previously assigned the unused name `log_file`,
        # so -l was silently ignored.
        log_path = val
    elif opt == "-m":
        # Validate explicitly: `assert` is stripped under python -O.
        max_retries = int(val)
        if max_retries <= 0:
            print("-m requires a positive integer")
            sys.exit(1)
    elif opt == "-f":
        # Transcoding implies -s: a transcoded file can never match the
        # original's MD5, so the hash check would always fail.
        out_format = val
        skip_check = True
    elif opt == "-p":
        overall = True
    elif opt == "-z":
        organize = True

# Fetch the user's favorites list from the radio API. A timeout keeps the
# script from hanging forever on a dead host; network failures exit with a
# message instead of a traceback.
try:
    r = requests.get(f"https://{secret_domain}/api/favorites/{user}",
                     timeout=30)
except requests.RequestException as e:
    print(f"Failed to reach radio API: {e}")
    sys.exit(1)
if r.status_code != 200:
    print(f"Radio API returned {r.status_code}")
    sys.exit(1)
favs = r.json()

# BUG FIX: honour the -l option; this previously hard-coded "radiodl.log".
log = open(log_path, "w")

def pretty_name(fav):
    """Return a human-readable "Artist - Title (Album)" label for a favorite."""
    return f"{fav['artist']} - {fav['title']} ({fav['album']})"

def chunk(f, size=0x10000):
    """Yield successive reads of up to *size* bytes from file object *f*
    until a falsy (empty) read signals end-of-file."""
    while True:
        piece = f.read(size)
        if not piece:
            break
        yield piece

class Report:
    """Namespace collecting per-run statistics for the final summary."""
    http_errors = {}     # status code -> list of favs that got that code
    corrupt_errors = []  # favs that exhausted their retries
    downloads = []       # favs downloaded successfully this run
    skipped = []         # favs already up to date on disk

# The class object itself is used as a singleton namespace, exactly like
# the original three-argument type() call produced.
report = Report

def pprint(*args, **kwargs):
    """Forward to print() unless -p (overall progress) suppresses per-track output."""
    if overall:
        return
    print(*args, **kwargs)

# Characters unsafe in filenames on common filesystems (same set as before;
# redundant escapes inside the class dropped).
sanitize = re.compile(r"""[\\/<>:"'?*|]""")

# Main sync loop: for every favorite, decide whether the local copy is up
# to date, (re)download it if not, and optionally transcode it.
for fav in tqdm(favs, desc="Overall") if overall else favs:
    if not overall:
        # Track name is written without a newline; a status suffix
        # (" (skip)", " (redownload - corrupt)") or a bare newline follows.
        sys.stdout.write(pretty_name(fav))
    ext = os.path.splitext(fav["path"])[1]
    if organize:
        # -z: Artist/Album/Title.ext layout, with filesystem-unsafe
        # characters replaced by spaces.
        artist = sanitize.sub(" ", fav["artist"]).strip()
        album = sanitize.sub(" ", fav["album"]).strip()
        title = sanitize.sub(" ", fav["title"]).strip()
        path = os.path.join(output_dir, artist, album)
        os.makedirs(path, exist_ok=True)
        path = os.path.join(path, title + ext)
    else:
        # Flat layout: "<hash6> Artist - Title (Album).ext".
        _hash = fav["hash"][:6]
        filename = _hash + " " + pretty_name(fav) + ext
        filename = filename.replace("/", "\\")
        path = os.path.join(output_dir, filename)
    transcode = False
    if out_format is not None:
        # Accept both "-f opus" and "-f .opus" (the help text shows
        # '.opus', but the old code assumed no leading dot and produced
        # "file..opus").
        fmt = out_format if out_format.startswith(".") else "." + out_format
        _path, ext = os.path.splitext(path)
        if ext != fmt:
            path = _path + fmt
            transcode = True
    # _dl is the attempt counter: 0 = nothing to do / done,
    # N = about to make attempt number N.
    _dl = 0
    if os.path.exists(path):
        if not skip_check:
            # Verify the local file against the server-supplied MD5.
            md5 = hashlib.md5()
            with open(path, "rb") as f:
                for c in chunk(f):
                    md5.update(c)
            fhash = md5.hexdigest()
            if fhash != fav["hash"]:
                pprint(" (redownload - corrupt)")
                _dl = 1
    else:
        _dl = 1
        pprint()
    if not _dl:
        pprint(" (skip)")
        report.skipped.append(fav)
        log.write(f"{fav['hash']} SKIP: md5 matches\n")
    # Retry loop. BUG FIX: the bound was hard-coded to 5, so -m larger
    # than 5 still stopped after 4 attempts AND skipped the failure
    # report below (_dl never reached max_retries). Now exactly
    # max_retries attempts are made.
    while _dl > 0 and _dl <= max_retries:
        r = requests.get(f"https://{secret_domain}/dl/{fav['hash']}", stream=True)
        if r.status_code != 200:
            # HTTP errors are not retried; group failures by status code.
            report.http_errors.setdefault(r.status_code, []).append(fav)
            pprint(f"Radio returned {r.status_code}")
            log.write(f"{fav['hash']} FAIL: Radio returned {r.status_code}\n")
            _dl = 0
        else:
            md5 = hashlib.md5()
            if transcode:
                # Download to a temp file; ffmpeg writes the final path.
                f, tmppath = tempfile.mkstemp()
                f = os.fdopen(f, "wb")
            else:
                f = open(path, "wb")
            with f:
                # total=None (unknown size) when the server omits
                # Content-Length; int(None) used to crash here.
                clen = r.headers.get("Content-Length")
                with tqdm(
                        total=int(clen) if clen else None,
                        unit="bytes",
                        unit_scale=True,
                        desc=fav["hash"][:7]) as progress:
                    for c in r.iter_content(chunk_size=4096):
                        f.write(c)
                        md5.update(c)
                        progress.update(len(c))
            if md5.hexdigest() != fav["hash"]:
                pprint("Corrupt download, retrying")
                if transcode:
                    # Don't leak one temp file per failed attempt.
                    os.remove(tmppath)
                _dl += 1
            else:
                if transcode:
                    pprint("Transcoding...")
                    # NOTE(review): ffmpeg's exit status is not checked;
                    # a failed transcode is still logged as SUCCESS.
                    subprocess.run([
                        "ffmpeg",
                        "-loglevel", "error",
                        "-y",
                        "-i", tmppath,
                        "-b:a", "256k",
                        path])
                    os.remove(tmppath)
                report.downloads.append(fav)
                log.write(f"{fav['hash']} SUCCESS\n")
                _dl = 0
    if _dl > max_retries:
        pprint("Maximum retries exceeded, giving up")
        report.corrupt_errors.append(fav)
        log.write(f"{fav['hash']} FAIL: Max retries exceeded\n")
        try:
            # Best-effort cleanup of the partial file; it may not exist
            # at all in transcode mode. Narrowed from a bare except,
            # which also hid log.write failures.
            os.remove(path)
        except OSError:
            pass
    log.flush()

# Final report: totals, then per-status-code HTTP failures, then files
# that never downloaded cleanly.
print("\nSync complete.")
print(f"{len(report.downloads)} files downloaded successfully.")
print(f"{len(report.skipped)} files up to date.")
for status, failed in report.http_errors.items():
    print(f"\n{status} errors:")
    for fav in failed:
        _hash = fav["hash"][:6]
        print(f"\t{_hash} {pretty_name(fav)}")
# BUG FIX: was any(report.corrupt_errors), which tests element
# truthiness rather than list emptiness — a falsy entry could suppress a
# non-empty section.
if report.corrupt_errors:
    print("\nCorrupt files:")
    for fav in report.corrupt_errors:
        _hash = fav["hash"][:6]
        print(f"\t{_hash} {pretty_name(fav)}")

log.close()
