From 21fd49f09631f6fa5abda800794ecba29ad3f83a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Geoffrey=20=E2=80=9CFrogeye=E2=80=9D=20Preud=27homme?=
Date: Tue, 28 Dec 2021 21:39:10 +0100
Subject: [PATCH] rssVideos: Clean up

---
 config/scripts/rssVideos | 34 ++++++++++++++++++----------------
 1 file changed, 18 insertions(+), 16 deletions(-)

diff --git a/config/scripts/rssVideos b/config/scripts/rssVideos
index 4796d95..6211f20 100755
--- a/config/scripts/rssVideos
+++ b/config/scripts/rssVideos
@@ -8,7 +8,6 @@ The common use case would be a feed from an RSS aggregator
 with the unread items (non-video links are ignored).
 """
 
-import enum
 import functools
 import logging
 import os
@@ -17,12 +16,8 @@ import random
 import requests
 import re
 import subprocess
-import sys
 import time
 import typing
-import urllib.parse
-import urllib.request
-import urllib.error
 
 import coloredlogs
 import configargparse
@@ -322,7 +317,7 @@ class RVDatabase:
         self.auth_headers = cache.auth_headers
 
     def salvage_cache(self, cache: "RVDatabase") -> None:
-        log.debug(f"Salvaging cache")
+        log.debug("Salvaging cache")
         cache_els = dict()
         for cache_el in cache.elements:
             cache_els[cache_el.guid] = cache_el
@@ -331,7 +326,7 @@ class RVDatabase:
                 el.salvage_cache(cache_els[el.guid])
 
     def clean_cache(self, cache: "RVDatabase") -> None:
-        log.debug(f"Cleaning cache")
+        log.debug("Cleaning cache")
         self_els = dict()
         for self_el in self.elements:
             self_els[self_el.guid] = self_el
@@ -341,7 +336,7 @@ class RVDatabase:
                 el.clean()
 
     def import_cache(self, cache: "RVDatabase") -> None:
-        log.debug(f"Importing cache")
+        log.debug("Importing cache")
         self.build_list([element.item for element in cache.elements])
 
     @functools.cached_property
@@ -614,13 +609,7 @@ def get_args() -> configargparse.Namespace:
     return args
 
 
-def main() -> None:
-    args = get_args()
-    configure_logging(args)
-
-    os.makedirs(args.videos, exist_ok=True)
-    os.chdir(args.videos)
-
+def get_database(args: configargparse.Namespace) -> RVDatabase:
     database = RVDatabase(args)
     cache = RVDatabase.load()
     feed_fetched = False
@@ -635,6 +624,7 @@ def main() -> None:
             raise RuntimeError("Couldn't fetch feed, refusing to download")
         # This is a quirky failsafe in case of no internet connection,
         # so the script doesn't go noting that no element is a video.
+        log.warning(f"Couldn't fetch feed: {err}")
     if not feed_fetched:
         if cache:
             log.warning("Using cached feed.")
@@ -646,7 +636,19 @@ def main() -> None:
     database.clean_cache(cache)
     database.save()
 
-    log.debug(f"Running action")
+    return database
+
+
+def main() -> None:
+    args = get_args()
+    configure_logging(args)
+
+    os.makedirs(args.videos, exist_ok=True)
+    os.chdir(args.videos)
+
+    database = get_database(args)
+
+    log.debug("Running action")
     if args.action == "clean":
         database.clean()
     else: