import sys
from webbrowser import get
sys.path.append("../app")
import runner
import logging
logger = logging.getLogger()
import json

from rich.console import Console
from rich.table import Table
console = Console()

logger.info("Overwriting production values for single time media-fetch")
# Point the runner's peewee models at local dev databases instead of the
# production ones. The first DB is the chat DB (unused by this script),
# the second holds the media-download records that show() reads.
runner.configuration.models.set_db(
    runner.configuration.SqliteDatabase("../.dev/media_message_dummy.db"),  # chat_db (not needed here)
    runner.configuration.SqliteDatabase("../.dev/media_downloads.db")
)
# Downloaded media lands in the local .dev directory, not production storage.
runner.configuration.parsed["DOWNLOADS"]["local_storage_path"] = "../.dev/"


def fetch():
    """Feed every URL from media_urls.json into the runner pipeline.

    Builds a Coordinator with one worker per pipeline stage, starts it,
    then submits each URL wrapped as a dummy Message on a throwaway
    Thread, as if it had arrived from chat.
    """
    coordinator = runner.Coordinator()

    # One worker per pipeline stage; Coordinator wires them together.
    workers = {
        "worker_download": runner.DownloadWorker(),
        "worker_fetch": runner.FetchWorker(),
        "worker_upload": runner.UploadWorker(),
        "worker_compress": runner.CompressWorker(),
    }
    coordinator.add_workers(**workers)
    coordinator.start()

    # json.load parses the file object directly (no intermediate string).
    with open("media_urls.json", "r") as f:
        url_list = json.load(f)

    # Lazy %-style args: the message is only formatted if the record is emitted.
    logger.info("Found %d media urls", len(url_list))
    for url in url_list:
        # "<url|preview>" mimics the link format of a real incoming message.
        msg_text = f"<{url}|dummy preview text>"
        dummy_thread = runner.models.Thread()
        msg = runner.models.Message(text=msg_text, thread=dummy_thread)
        coordinator.incoming_request(msg)


def show():
    """Render every ArticleDownload row as a rich table on the console."""
    t = Table(
        title="ArticleDownloads",
        row_styles=["white", "bright_black"],
    )

    # Columns shown per download; "authors" is a related collection,
    # flattened to a string below.
    entries = ["title", "article_url", "archive_url", "authors"]
    for e in entries:
        t.add_column(e, justify="right")

    for row in runner.models.ArticleDownload.select():
        cells = [getattr(row, e) for e in entries]
        # Flatten the authors relation into a printable list of names.
        cells[-1] = str([a.author for a in cells[-1]])
        t.add_row(*cells)

    console.print(t)




# Guard the entry point so importing this module doesn't trigger a run.
if __name__ == "__main__":
    # fetch()  # one-off: populate the pipeline from media_urls.json
    show()