diff --git a/config/scripts/rssVideos b/config/scripts/rssVideos
index b58efd3..261707a 100755
--- a/config/scripts/rssVideos
+++ b/config/scripts/rssVideos
@@ -20,6 +20,7 @@ import sys
 import typing
 import urllib.parse
 import urllib.request
+import urllib.error
 from xml.dom import minidom
 
 import coloredlogs
@@ -46,6 +47,7 @@ class RVCommand(enum.Enum):
     download = "download"
     list = "list"
     watch = "watch"
+    binge = "binge"
 
 
 class RVElement:
@@ -100,7 +102,7 @@ class RVElement:
     def guid(self) -> int:
         return int(self.get_tag_data("guid"))
 
-    def read_cache(self, cache: "RVElement") -> None:
+    def salvage_cache(self, cache: "RVElement") -> None:
         if "ytdl_infos" in cache.__dict__:
             self.__dict__["ytdl_infos"] = cache.__dict__["ytdl_infos"]
             log.debug(f"From cache: {self}")
@@ -127,7 +129,7 @@ class RVElement:
             raise e
         except youtube_dl.utils.DownloadError as e:
             # TODO Still raise in case of temporary network issue
-            log.warn(e)
+            log.warning(e)
             infos = None
         # Apparently that thing is transformed from a LazyList
         # somewhere in the normal yt_dlp process
@@ -208,19 +210,19 @@ class RVElement:
         None: int.__le__,
     }
 
-    def matches_search(self, args: configargparse.Namespace) -> bool:
-        if not self.is_video:
-            return False
+    def matches_filter(self, args: configargparse.Namespace) -> bool:
         if self.watched:
             return False
         if args.title and not re.search(args.title, self.title):
             return False
-        if args.creator and not re.search(args.creator, self.creator):
-            return False
         if args.guid and not re.search(args.guid, str(self.guid)):
             return False
         if args.link and not re.search(args.link, self.link):
             return False
+        if args.creator and self.creator and not re.search(args.creator, self.creator):
+            return False
+        if not self.is_video:
+            return False
 
         if args.duration:
             dur = args.duration
@@ -253,8 +255,11 @@
         if not self.downloaded:
             self.download()
 
-        proc = subprocess.run(['mpv', self.filepath])
-        proc.check_returncode()
+        cmd = ["mpv", self.filepath]
+        log.debug(f"Running {cmd}")
+        if not self.parent.args.dryrun:
+            proc = subprocess.run(cmd)
+            proc.check_returncode()
 
         self.watched = True
         self.parent.save()
@@ -282,27 +287,33 @@
             with open(cls.SAVE_FILE, "rb") as save_file:
                 return pickle.load(save_file)
         except (TypeError, AttributeError, EOFError):
-            log.warn("Corrupt / outdated cache, it will be rebuilt.")
+            log.warning("Corrupt / outdated cache, it will be rebuilt.")
         except FileNotFoundError:
             pass
         return None
 
-    def read_cache(self, cache: "RVDatabase") -> None:
+    def salvage_cache(self, cache: "RVDatabase") -> None:
+        log.debug(f"Salvaging cache")
         cache_els = dict()
         for cache_el in cache.elements:
             cache_els[cache_el.guid] = cache_el
         for el in self.elements:
             if el.guid in cache_els:
-                el.read_cache(cache_els[el.guid])
+                el.salvage_cache(cache_els[el.guid])
+
+    def import_cache(self, cache: "RVDatabase") -> None:
+        log.debug(f"Importing cache")
+        self.feed_xml = cache.feed_xml
+        self.read_feed()
 
     @functools.cached_property
     def feed_xml(self) -> minidom.Document:
+        log.info("Fetching RSS feed")
         with urllib.request.urlopen(self.args.feed) as request:
             return minidom.parse(request)
 
     def read_feed(self) -> None:
-        log.info("Fetching RSS feed")
-        self.elements = list()
+        self.elements = []
         for item in self.feed_xml.getElementsByTagName("item"):
             element = RVElement(self, item)
             self.elements.insert(0, element)
@@ -348,6 +359,18 @@
     def ytdl_dry(self) -> youtube_dl.YoutubeDL:
         return youtube_dl.YoutubeDL(self.ytdl_dry_opts)
 
+    def filter(self, args: configargparse.Namespace) -> typing.Iterable[RVElement]:
+        elements: typing.Iterable[RVElement]
+        if args.order == "old":
+            elements = self.elements
+        elif args.order == "new":
+            elements = reversed(self.elements)
+        elif args.order == "random":
+            elements_random = self.elements.copy()
+            random.shuffle(elements_random)
+            elements = elements_random
+        return filter(lambda el: el.matches_filter(args), elements)
+
 
 def get_args() -> configargparse.Namespace:
     defaultConfigPath = os.path.join(
@@ -422,17 +445,25 @@
 
     sc_watch = subparsers.add_parser("watch")
     sc_watch.set_defaults(subcommand=RVCommand.watch)
-    sc_watch.add("order", choices=("old", "new", "random"), nargs="?", default="old", help="Watch X first")
-    # TODO Command to watch multiple
 
+    sc_binge = subparsers.add_parser("binge")
+    sc_binge.set_defaults(subcommand=RVCommand.binge)
 
     # Common arguments for filtering
-    for sc in (sc_list, sc_watch):
+    for sc in (sc_list, sc_watch, sc_binge):
+        sc.add(
+            "order",
+            choices=("old", "new", "random"),
+            nargs="?",
+            default="old",
+            help="Sorting mechanism",
+        )
         sc.add("--guid", help="Regex to filter guid")
         sc.add("--creator", help="Regex to filter by creator")
         sc.add("--title", help="Regex to filter by title")
         sc.add("--link", help="Regex to filter by link")
         sc.add("--duration", help="Comparative to filter by duration")
+        # TODO Allow to ask
 
     args = parser.parse_args()
     args.videos = os.path.realpath(os.path.expanduser(args.videos))
@@ -447,41 +478,35 @@ def main() -> None:
     os.makedirs(args.videos, exist_ok=True)
     os.chdir(args.videos)
 
-    # TODO Abstract a bit
+    database = RVDatabase(args)
+    cache = RVDatabase.load()
+    try:
+        database.read_feed()
+    except urllib.error.URLError as err:
+        if args.subcommand == RVCommand.download or not cache:
+            raise err
+        else:
+            log.warning(f"Cannot fetch RSS feed, using cached feed: {err}")
+            database.import_cache(cache)
+    if cache:
+        database.salvage_cache(cache)
+
+    log.debug(f"Running subcommand")
 
     if args.subcommand == RVCommand.download:
-        database = RVDatabase(args)
-        database.read_feed()
-        cache = RVDatabase.load()
-        if cache:
-            database.read_cache(cache)
         database.clean()
         database.act_all()
-        database.save()
 
-    elif args.subcommand == RVCommand.list:
-        cache = RVDatabase.load()
-        if not cache:
-            raise FileNotFoundError("This command doesn't work without a cache yet.")
-        for element in cache.elements:
-            if not element.matches_search(args):
-                continue
-            print(element)
+    elif args.subcommand in (RVCommand.list, RVCommand.watch, RVCommand.binge):
+        for element in database.filter(args):
+            if args.subcommand == RVCommand.list:
+                print(element)
+            elif args.subcommand in (RVCommand.watch, RVCommand.binge):
+                element.watch()
+                if args.subcommand == RVCommand.watch:
+                    break
 
-    elif args.subcommand == RVCommand.watch:
-        cache = RVDatabase.load()
-        if not cache:
-            raise FileNotFoundError("This command doesn't work without a cache yet.")
-        elements = cache.elements.copy()
-        if args.order == "new":
-            elements = reversed(elements)
-        elif args.order == "random":
-            random.shuffle(elements)
-        for element in elements:
-            if not element.matches_search(args):
-                continue
-            element.watch()
-            break
+    database.save()
 
 
 if __name__ == "__main__":