import sys
sys.path.append("../app")
import runner
import logging
logger = logging.getLogger()
import json


# Point the app at local dev databases instead of production before any
# worker touches them.  NOTE(review): logging is never configured in this
# script (no basicConfig), so this info line is likely a no-op — confirm.
logger.info("Overwriting production values for single time media-fetch")
# Rebind both model databases to throwaway SQLite files under ../.dev/.
# Assumes set_db(chat_db, download_db) takes them in this order — TODO confirm
# against runner.configuration.models.
runner.configuration.models.set_db(
    runner.configuration.SqliteDatabase("../.dev/media_message_dummy.db"),  # chat_db (not needed here)
    runner.configuration.SqliteDatabase("../.dev/media_downloads.db")
)
# Redirect downloaded media onto the local dev directory as well.
runner.configuration.parsed["DOWNLOADS"]["local_storage_path"] = "../.dev/"


def fetch():
    """Feed every URL from media_urls.json into the worker pipeline.

    Builds a Coordinator with one worker of each kind, starts it, then
    wraps each URL in a dummy Message/Thread pair and submits it as an
    incoming request.  Runs purely for its side effects (downloads).
    """
    coordinator = runner.Coordinator()

    # One worker per pipeline stage; passed as keyword args so the
    # coordinator can address them by role.
    workers = {
        "worker_download": runner.DownloadWorker(),
        "worker_fetch": runner.FetchWorker(),
        "worker_upload": runner.UploadWorker(),
        "worker_compress": runner.CompressWorker(),
    }
    coordinator.add_workers(**workers)
    coordinator.start()

    # json.load reads straight from the file object (no intermediate str).
    with open("media_urls.json", "r") as f:
        url_list = json.load(f)

    logger.info(f"Found {len(url_list)} media urls")
    for u in url_list:
        # Mimic the "<url|preview>" message format the pipeline expects.
        msg_text = f"<{u}|dummy preview text>"
        dummy_thread = runner.models.Thread()
        msg = runner.models.Message(text=msg_text, thread=dummy_thread)
        coordinator.incoming_request(msg)


def show():
    """Print the listed attribute(s) of every ArticleDownload row.

    Bug fix: the original loop iterated over `entries` but ignored the
    loop variable and always printed `t.title`, so adding more entries
    would just repeat the titles.  `getattr` now honors each entry name.
    """
    sel = runner.models.ArticleDownload.select()
    entries = ["title"]  # e.g. extend with "article_url", "archive_url"

    for attr in entries:
        print([getattr(row, attr) for row in sel])
        # print([t for t in r])


show()