diff --git a/config/scripts/rssVideos b/config/scripts/rssVideos
index 0f740da..93a1723 100755
--- a/config/scripts/rssVideos
+++ b/config/scripts/rssVideos
@@ -31,7 +31,7 @@ import yt_dlp
 log = logging.getLogger(__name__)
 
 # TODO Lockfile, or a way to parallel watch and download
-
+# TODO Save ytdl infos and view info separately
 
 def configure_logging(args: configargparse.Namespace) -> None:
     # Configure logging
@@ -491,6 +491,12 @@ def get_args() -> configargparse.Namespace:
         help="Fetch video info again",
         action="store_true",
     )
+    parser.add(
+        "--no-refresh",
+        dest="refresh",
+        help="Don't fetch feed",
+        action="store_false",
+    )
     parser.add(
         "--videos",
         help="Directory to store videos",
@@ -574,14 +580,22 @@ def main() -> None:
     database = RVDatabase(args)
     cache = RVDatabase.load()
 
-    try:
-        database.read_feed()
-    except urllib.error.URLError as err:
-        if args.action == "download" or not cache:
-            raise err
-        else:
-            log.warning("Cannot fetch RSS feed, using cached feed.", err)
+    feed_fetched = False
+    if args.refresh:
+        try:
+            database.read_feed()
+            feed_fetched = True
+        except urllib.error.URLError as err:
+            if args.action == "download":
+                raise RuntimeError("Couldn't fetch feed, refusing to download")
+    # This is a quirky failsafe in case of no internet connection,
+    # so the script doesn't go noting that no element is a video.
+    if not feed_fetched:
+        if cache:
+            log.warning("Using cached feed.")
             database.import_cache(cache)
+        else:
+            raise FileNotFoundError("Feed not fetched and no cached feed.")
     if cache:
         database.salvage_cache(cache)
         database.clean_cache(cache)
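
The new feed-handling branch in main() can be read in isolation as the minimal sketch below. It is a reconstruction from the last hunk, not the actual module: RVDatabase, read_feed(), import_cache() and the args fields (refresh, action) are assumed to behave as the diff suggests, and the wrapper name resolve_feed is hypothetical, used only for illustration.

import logging
import urllib.error

log = logging.getLogger(__name__)


def resolve_feed(database, cache, args) -> None:
    # Fetch the feed only when args.refresh allows it; otherwise, or on
    # network failure, fall back to the cached feed.
    feed_fetched = False
    if args.refresh:
        try:
            database.read_feed()
            feed_fetched = True
        except urllib.error.URLError:
            if args.action == "download":
                # Downloading without a fresh feed is refused outright.
                raise RuntimeError("Couldn't fetch feed, refusing to download")
            # Other actions fall through to the cache fallback below.
    if not feed_fetched:
        if cache:
            # Offline failsafe: reuse the saved feed so cached entries are not
            # reclassified as non-videos just because nothing was fetched.
            log.warning("Using cached feed.")
            database.import_cache(cache)
        else:
            raise FileNotFoundError("Feed not fetched and no cached feed.")

In practice, passing --no-refresh (action="store_false", dest="refresh") skips the network branch entirely, so the script goes straight to the cached feed; without a cache it fails early instead of treating every entry as a non-video.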