def getPosts(programMode):
    """Call PRAW according to the given arguments and pass the result to
    extractDetails. Return whatever extractDetails returns.
    """

    reddit = Reddit(GLOBAL.config["credentials"]["reddit"]).begin()

    if programMode["sort"] == "best":
        raise NoPrawSupport("PRAW does not support that")

    if "subreddit" in programMode:
        if "search" in programMode:
            if programMode["subreddit"] == "frontpage":
                programMode["subreddit"] = "all"

    if "user" in programMode:
        if programMode["user"] == "me":
            programMode["user"] = str(reddit.user.me())

    if "search" not in programMode:
        if programMode["sort"] == "top" or \
           programMode["sort"] == "controversial":
            keyword_params = {
                "time_filter": programMode["time"],
                "limit": programMode["limit"]
            }
        # OTHER SORT TYPES DON'T TAKE TIME_FILTER
        else:
            keyword_params = {"limit": programMode["limit"]}
    else:
        keyword_params = {
            "time_filter": programMode["time"],
            "limit": programMode["limit"]
        }

    if "search" in programMode:
        if programMode["sort"] in ["hot", "rising", "controversial"]:
            raise InvalidSortingType("Invalid sorting type was given")

        if "subreddit" in programMode:
            print(
                "search for \"{search}\" in\n"
                "subreddit: {subreddit}\nsort: {sort}\n"
                "time: {time}\nlimit: {limit}\n".format(
                    search=programMode["search"],
                    limit=programMode["limit"],
                    sort=programMode["sort"],
                    subreddit=programMode["subreddit"],
                    time=programMode["time"]
                ).upper(), noPrint=True
            )

            return extractDetails(
                reddit.subreddit(programMode["subreddit"]).search(
                    programMode["search"],
                    limit=programMode["limit"],
                    sort=programMode["sort"],
                    time_filter=programMode["time"]))

        elif "multireddit" in programMode:
            raise NoPrawSupport("PRAW does not support that")

        elif "user" in programMode:
            raise NoPrawSupport("PRAW does not support that")

        elif "saved" in programMode:
            raise NoPrawSupport("Reddit does not support that")

    if programMode["sort"] == "relevance":
        raise InvalidSortingType("Invalid sorting type was given")

    if "saved" in programMode:
        print("saved posts\nuser:{username}\nlimit={limit}\n".format(
            username=reddit.user.me(),
            limit=programMode["limit"]).upper(), noPrint=True)

        return extractDetails(
            reddit.user.me().saved(limit=programMode["limit"]))

    if "subreddit" in programMode:

        if programMode["subreddit"] == "frontpage":
            print(
                "subreddit: {subreddit}\nsort: {sort}\n"
                "time: {time}\nlimit: {limit}\n".format(
                    limit=programMode["limit"],
                    sort=programMode["sort"],
                    subreddit=programMode["subreddit"],
                    time=programMode["time"]
                ).upper(), noPrint=True
            )

            return extractDetails(
                getattr(reddit.front, programMode["sort"])(**keyword_params))

        else:
            print(
                "subreddit: {subreddit}\nsort: {sort}\n"
                "time: {time}\nlimit: {limit}\n".format(
                    limit=programMode["limit"],
                    sort=programMode["sort"],
                    subreddit=programMode["subreddit"],
                    time=programMode["time"]
                ).upper(), noPrint=True
            )

            return extractDetails(
                getattr(reddit.subreddit(programMode["subreddit"]),
                        programMode["sort"])(**keyword_params))

    elif "multireddit" in programMode:
        print(
            "user: {user}\n"
            "multireddit: {multireddit}\nsort: {sort}\n"
            "time: {time}\nlimit: {limit}\n".format(
                user=programMode["user"],
                limit=programMode["limit"],
                sort=programMode["sort"],
                multireddit=programMode["multireddit"],
                time=programMode["time"]
            ).upper(), noPrint=True
        )

        try:
            return extractDetails(
                getattr(
                    reddit.multireddit(programMode["user"],
                                       programMode["multireddit"]),
                    programMode["sort"])(**keyword_params))
        except NotFound:
            raise MultiredditNotFound("Multireddit not found")

    elif "submitted" in programMode:
        print(
            "submitted posts of {user}\nsort: {sort}\n"
            "time: {time}\nlimit: {limit}\n".format(
                limit=programMode["limit"],
                sort=programMode["sort"],
                user=programMode["user"],
                time=programMode["time"]
            ).upper(), noPrint=True
        )

        return extractDetails(
            getattr(
                reddit.redditor(programMode["user"]).submissions,
                programMode["sort"])(**keyword_params))

    elif "upvoted" in programMode:
        print("upvoted posts of {user}\nlimit: {limit}\n".format(
            user=programMode["user"],
            limit=programMode["limit"]).upper(), noPrint=True)

        try:
            return extractDetails(
                reddit.redditor(
                    programMode["user"]).upvoted(limit=programMode["limit"]))
        except Forbidden:
            raise InsufficientPermission(
                "You do not have permission to do that")

    elif "post" in programMode:
        print("post: {post}\n".format(post=programMode["post"]).upper(),
              noPrint=True)
        return extractDetails(reddit.submission(url=programMode["post"]),
                              SINGLE_POST=True)
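

# --- Illustrative usage sketch (not part of the original script) ---
# A minimal example of the dictionary shape getPosts() expects, inferred from
# the keys read above ("subreddit", "sort", "time", "limit", and optionally
# "search", "user", "saved", "multireddit", "submitted", "upvoted", "post").
# The helper name _exampleGetPosts and the literal values are hypothetical;
# in the real program this dictionary is built from the parsed CLI arguments.
def _exampleGetPosts():
    programMode = {
        "subreddit": "learnpython",  # "frontpage" is rewritten to "all" in search mode
        "sort": "top",               # only "top"/"controversial" honour the time filter
        "time": "week",
        "limit": 25,
    }
    # Returns whatever extractDetails() produces for the matching submissions.
    return getPosts(programMode)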
def download(submissions):
    """Analyze the list of submissions, call the right function to download
    each one, catch errors, and update the log files.
    """

    downloadedCount = 0
    duplicates = 0

    FAILED_FILE = createLogFile("FAILED")

    if GLOBAL.arguments.unsave:
        reddit = Reddit(GLOBAL.config['credentials']['reddit']).begin()

    subsLength = len(submissions)

    for i in range(len(submissions)):
        print(f"\n({i+1}/{subsLength})", end=" — ")
        print(submissions[i]['POSTID'],
              f"r/{submissions[i]['SUBREDDIT']}",
              f"u/{submissions[i]['REDDITOR']}",
              submissions[i]['FLAIR'] if submissions[i]['FLAIR'] else "",
              sep=" — ",
              end="")
        print(f" – {submissions[i]['TYPE'].upper()}", end="", noPrint=True)

        directory = GLOBAL.directory / \
            GLOBAL.config["folderpath"].format(**submissions[i])

        details = {
            **submissions[i],
            **{
                "TITLE": nameCorrector(
                    submissions[i]['TITLE'],
                    reference=str(directory)
                    + GLOBAL.config['filename'].format(**submissions[i])
                    + ".ext"
                )
            }
        }
        filename = GLOBAL.config['filename'].format(**details)

        if isPostExists(details, directory):
            print()
            print(directory)
            print(filename)
            print("It already exists")
            duplicates += 1
            continue

        if any(domain in submissions[i]['CONTENTURL']
               for domain in GLOBAL.arguments.skip):
            print()
            print(submissions[i]['CONTENTURL'])
            print("Domain found in skip domains, skipping post...")
            continue

        try:
            downloadPost(details, directory)
            GLOBAL.downloadedPosts.add(details['POSTID'])

            try:
                if GLOBAL.arguments.unsave:
                    reddit.submission(id=details['POSTID']).unsave()
            except InsufficientScope:
                reddit = Reddit().begin()
                reddit.submission(id=details['POSTID']).unsave()

            downloadedCount += 1

        except FileAlreadyExistsError:
            print("It already exists")
            GLOBAL.downloadedPosts.add(details['POSTID'])
            duplicates += 1

        except ImgurLoginError:
            print(
                "Imgur login failed.\nQuitting the program "
                "as unexpected errors might occur."
            )
            sys.exit()

        except ImgurLimitError as exception:
            FAILED_FILE.add({int(i+1): [
                "{class_name}: {info}".format(
                    class_name=exception.__class__.__name__,
                    info=str(exception)
                ),
                details
            ]})

        except NotADownloadableLinkError as exception:
            print(
                "{class_name}: {info}".format(
                    class_name=exception.__class__.__name__,
                    info=str(exception)
                )
            )
            FAILED_FILE.add({int(i+1): [
                "{class_name}: {info}".format(
                    class_name=exception.__class__.__name__,
                    info=str(exception)
                ),
                submissions[i]
            ]})

        except TypeInSkip:
            print()
            print(submissions[i]['CONTENTURL'])
            print("Skipping post...")

        except DomainInSkip:
            print()
            print(submissions[i]['CONTENTURL'])
            print("Skipping post...")

        except NoSuitablePost:
            print("No match found, skipping...")

        except FailedToDownload:
            print("Failed to download the post, skipping...")

        except AlbumNotDownloadedCompletely as exc:
            print("Album was not downloaded completely.")
            FAILED_FILE.add({int(i+1): [
                "{class_name}: {info}".format(
                    class_name=exc.__class__.__name__, info=str(exc)
                ),
                submissions[i]
            ]})

        except Exception as exc:
            print(
                "{class_name}: {info}\n"
                "See CONSOLE_LOG.txt for more information".format(
                    class_name=exc.__class__.__name__, info=str(exc)
                )
            )
            logging.error(sys.exc_info()[0].__name__,
                          exc_info=full_exc_info(sys.exc_info()))
            print(GLOBAL.log_stream.getvalue(), noPrint=True)

            FAILED_FILE.add({int(i+1): [
                "{class_name}: {info}".format(
                    class_name=exc.__class__.__name__, info=str(exc)
                ),
                submissions[i]
            ]})

    if duplicates:
        print(f"\nThere {'were' if duplicates > 1 else 'was'} "
              f"{duplicates} duplicate{'s' if duplicates > 1 else ''}")

    if downloadedCount == 0:
        print("Nothing was downloaded :(")
    else:
        print(f"Total of {downloadedCount} "
              f"link{'s' if downloadedCount > 1 else ''} downloaded!")
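

# --- Illustrative usage sketch (not part of the original script) ---
# Shows the fields download() reads from each submission dict, inferred from
# the code above (POSTID, SUBREDDIT, REDDITOR, FLAIR, TYPE, TITLE, CONTENTURL).
# The helper name _exampleDownload and the sample values are hypothetical;
# extractDetails()/getPosts() normally build these dicts, and the configured
# folderpath/filename templates may reference additional keys not shown here.
def _exampleDownload():
    submissions = [{
        "POSTID": "abc123",                 # used for logging and unsave()
        "SUBREDDIT": "pics",                # shown in the progress line
        "REDDITOR": "example_user",
        "FLAIR": "",                        # empty string when the post has no flair
        "TYPE": "image",                    # downloader type reported in the log
        "TITLE": "An example post",         # cleaned by nameCorrector() before use
        "CONTENTURL": "https://i.imgur.com/abc123.jpg",  # checked against --skip domains
    }]
    download(submissions)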