import argparse
import logging
import os
from time import sleep
from typing import List

import praw
from nextcloud import NextCloud
from praw.models.util import stream_generator
from gallery_dl.job import DownloadJob

from importer.uploader import upload_file, create_folders

# Map of accepted --log-level values to logging constants
levels = {
    'critical': logging.CRITICAL,
    'error': logging.ERROR,
    'warn': logging.WARNING,
    'warning': logging.WARNING,
    'info': logging.INFO,
    'debug': logging.DEBUG,
}

parser = argparse.ArgumentParser(description="Monitor saved Reddit posts and mirror them to Nextcloud")
parser.add_argument('-c', '--client-id', help="Reddit client id",
                    default=os.environ.get('CLIENT_ID', ''))
parser.add_argument('-s', '--client-secret', help="Reddit client secret",
                    default=os.environ.get('CLIENT_SECRET', ''))
parser.add_argument('-u', '--reddit-username', help="Reddit username",
                    default=os.environ.get('REDDIT_USERNAME', ''))
parser.add_argument('-p', '--reddit-password', help="Reddit user password",
                    default=os.environ.get('REDDIT_PASSWORD', ''))
parser.add_argument('-P', '--nextcloud-password', help="Nextcloud password",
                    default=os.environ.get('NEXTCLOUD_PASSWORD', ''))
parser.add_argument('-U', '--nextcloud-username', help="Nextcloud username",
                    default=os.environ.get('NEXTCLOUD_USERNAME', ''))
parser.add_argument('-o', '--nextcloud-host', help="Nextcloud host",
                    default=os.environ.get('NEXTCLOUD_HOST', 'localhost'))
parser.add_argument('-d', '--nextcloud-path', help="Nextcloud root folder",
                    default=os.environ.get('NEXTCLOUD_PATH', 'im'))
parser.add_argument('-f', '--from-beginning', dest='from_beginning',
                    help="Attempt to download all saved posts from the beginning.",
                    action='store_true',
                    default=os.environ.get("FROM_BEGINNING") is not None)
parser.add_argument('-l', '--log-level',
                    default=os.environ.get('LOG_LEVEL', 'info'),
                    choices=levels.keys(),
                    help="Set the log level.")


def get_list_of_files(dir_name):
    """Recursively collect the paths of all files under dir_name."""
    all_files = []
    for entry in os.listdir(dir_name):
        full_path = os.path.join(dir_name, entry)
        # Recurse into subdirectories; collect plain files as-is
        if os.path.isdir(full_path):
            all_files += get_list_of_files(full_path)
        else:
            all_files.append(full_path)
    return all_files


def download(url) -> List[str]:
    """Download the post's media with gallery-dl and return the local file paths."""
    job = DownloadJob(url)
    job.run()
    basedir = job.pathfmt.basedirectory
    return get_list_of_files(basedir)


if __name__ == "__main__":
    args = parser.parse_args()
    level = levels.get(args.log_level.lower(), logging.INFO)
    logging.basicConfig(level=level)

    reddit = praw.Reddit(client_id=args.client_id,
                         client_secret=args.client_secret,
                         password=args.reddit_password,
                         user_agent="hcrawler",
                         username=args.reddit_username)
    nxc = NextCloud(
        args.nextcloud_host,
        user=args.nextcloud_username,
        password=args.nextcloud_password,
        session_kwargs={'verify': False}
    )
    redditor = reddit.redditor(args.reddit_username)

    def upload(post):
        """Download a saved post's media and upload each file to Nextcloud, one subfolder per subreddit."""
        try:
            url = post.url
            create_folders(f"{args.nextcloud_path}/{post.subreddit}/", nxc)
            logging.info(f"{post.id} from {post.subreddit} downloaded")
            for path in download(url):
                filename = os.path.basename(path)
                upload_file(path, f"{args.nextcloud_path}/{post.subreddit}/{filename}", nxc)
                # Remove the local copy once it has been uploaded
                os.unlink(path)
                logging.info(f"{path} uploaded")
        except Exception as e:
            logging.error(e)

    if args.from_beginning:
        # Backfill: walk the full saved-post history before switching to the live stream
        logging.info("Downloading from the beginning")
        for post in redditor.saved(limit=None):
            upload(post)
            sleep(60)

    # Then watch for newly saved posts indefinitely
    generator = stream_generator(redditor.saved, attribute_name="name")
    for post in generator:
        upload(post)