def runLikedSavedDownloader(pipeConnection):
    """Download all new liked/saved submissions from the configured accounts."""
    if pipeConnection:
        logger.setPipe(pipeConnection)

    initialize()

    if (not settings.settings['Use_cached_submissions']
            and not settings.hasTumblrSettings()
            and not settings.hasRedditSettings()
            and not settings.hasPixivSettings()
            and not settings.hasPinterestSettings()):
        logger.log('Please provide Tumblr, Pixiv, Pinterest, or Reddit account details '
                   'via the Settings page provided by the Content Collector server')
        return

    if not settings.settings['Gfycat_Client_id']:
        logger.log('No Gfycat Client ID and/or Gfycat Client Secret was provided. '
                   'This is required to download Gfycat media reliably.')

    logger.log('Output: ' + settings.settings['Output_dir'])
    utilities.makeDirIfNonexistant(settings.settings['Output_dir'])
    utilities.makeDirIfNonexistant(settings.settings['Metadata_output_dir'])

    submissions = getSubmissionsToSave()

    logger.log('Saving images. This will take several minutes...')
    unsupportedSubmissions = imageSaver.saveAllImages(
        settings.settings['Output_dir'], submissions,
        imgur_auth=imgurDownloader.getImgurAuth(),
        only_download_albums=settings.settings['Only_download_albums'],
        skip_n_percent_submissions=settings.settings['Skip_n_percent_submissions'],
        soft_retrieve_imgs=settings.settings['Should_soft_retrieve'],
        only_important_messages=settings.settings['Only_important_messages'])

    # Write out a .json file listing all of the submissions the script failed to download
    if unsupportedSubmissions:
        submission.saveSubmissionsAsJson(
            unsupportedSubmissions,
            settings.settings['Metadata_output_dir'] + u'/' + 'UnsupportedSubmissions_'
            + time.strftime("%Y%m%d-%H%M%S") + '.json')

    if settings.settings['Should_soft_retrieve']:
        logger.log('\nYou have run the script in Soft Retrieve mode - if you actually\n'
                   'want to download images now, you should change SHOULD_SOFT_RETRIEVE\n'
                   'to False in settings.txt')

    if pipeConnection:
        logger.log(scriptFinishedSentinel)
        pipeConnection.close()
def saveRequestedUrls(pipeConnection, urls):
    """Download a user-requested list of URLs into the 'Requested Downloads' directory."""
    if pipeConnection:
        logger.setPipe(pipeConnection)

    initialize()

    logger.log('Attempting to save {} requested urls. This may take several minutes...'
               .format(len(urls)))

    # Create a Submission for each URL so the standard saving pipeline can handle them
    submissions = []
    for url in urls:
        convertedSubmission = submission.Submission()
        convertedSubmission.source = "UserRequested"
        convertedSubmission.title = "UserRequested"
        convertedSubmission.author = "(Requested by user)"
        convertedSubmission.subreddit = "Requested_Downloads"
        convertedSubmission.subredditTitle = "Requested Downloads"
        convertedSubmission.body = "(Requested by user)"
        convertedSubmission.bodyUrl = url
        convertedSubmission.postUrl = url
        submissions.append(convertedSubmission)

    if len(submissions) != len(urls):
        logger.log('Could not parse {} URLs!'.format(len(urls) - len(submissions)))

    unsupportedSubmissions = imageSaver.saveAllImages(
        settings.settings['Output_dir'], submissions,
        imgur_auth=imgurDownloader.getImgurAuth(),
        only_download_albums=settings.settings['Only_download_albums'],
        skip_n_percent_submissions=settings.settings['Skip_n_percent_submissions'],
        soft_retrieve_imgs=settings.settings['Should_soft_retrieve'],
        only_important_messages=settings.settings['Only_important_messages'])

    logger.log('Download finished. Output to \'Requested Downloads\' directory')

    if pipeConnection:
        logger.log(scriptFinishedSentinel)
        pipeConnection.close()
def saveRequestedSubmissions(pipeConnection, submissionIds):
    """Re-download specific submissions, looked up by ID in the database."""
    if pipeConnection:
        logger.setPipe(pipeConnection)

    initialize()

    logger.log('Attempting to save {} requested submissions. '
               'This will take several minutes...'.format(len(submissionIds)))

    dbSubmissions = LikedSavedDatabase.db.getSubmissionsByIds(submissionIds)

    # Convert from database submissions to Submission objects
    submissions = []
    for dbSubmission in dbSubmissions:
        convertedSubmission = submission.Submission()
        convertedSubmission.initFromDict(dbSubmission)
        submissions.append(convertedSubmission)

    if len(submissions) != len(submissionIds):
        logger.log('Could not find {} submissions in database!'.format(
            len(submissionIds) - len(submissions)))

    unsupportedSubmissions = imageSaver.saveAllImages(
        settings.settings['Output_dir'], submissions,
        imgur_auth=imgurDownloader.getImgurAuth(),
        only_download_albums=settings.settings['Only_download_albums'],
        skip_n_percent_submissions=settings.settings['Skip_n_percent_submissions'],
        soft_retrieve_imgs=settings.settings['Should_soft_retrieve'],
        only_important_messages=settings.settings['Only_important_messages'])

    logger.log('Download finished. Please refresh the page to see updated entries')

    if pipeConnection:
        logger.log(scriptFinishedSentinel)
        pipeConnection.close()
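# Illustrative sketch only (not part of the original file): each entry point above is
# designed to run in a child process, streaming log output back over a multiprocessing
# Pipe and emitting scriptFinishedSentinel when finished. Assuming logger.setPipe sends
# each log line through the connection, a caller might look like:
#
#     from multiprocessing import Pipe, Process
#
#     parentConnection, childConnection = Pipe()
#     downloadProcess = Process(target=runLikedSavedDownloader,
#                               args=(childConnection,))
#     downloadProcess.start()
#     while True:
#         message = parentConnection.recv()
#         if scriptFinishedSentinel in str(message):
#             break
#     downloadProcess.join()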
    return submissions


if __name__ == '__main__':
    print("Running image saver tests")

    outputDirOverride = 'LOCAL_testOutput'
    utilities.makeDirIfNonexistant(outputDirOverride)

    settings.getSettings()
    LikedSavedDatabase.initializeFromSettings(settings.settings)
    # Temporary override
    settings.settings['Output_dir'] = outputDirOverride

    testSubmissions = loadSubmissionsFromJson('LOCAL_imageSaver_test_submissions.json')
    if testSubmissions:
        unsupportedSubmissions = saveAllImages(
            outputDirOverride, testSubmissions,
            imgur_auth=imgurDownloader.getImgurAuth(),
            only_download_albums=settings.settings['Only_download_albums'],
            skip_n_percent_submissions=settings.settings['Skip_n_percent_submissions'],
            soft_retrieve_imgs=settings.settings['Should_soft_retrieve'],
            only_important_messages=settings.settings['Only_important_messages'])
    else:
        print("No submissions found")
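# Illustrative only: the schema expected by loadSubmissionsFromJson is defined by
# submission.saveSubmissionsAsJson, which is not shown here. Assuming it mirrors the
# Submission fields populated in saveRequestedUrls above, a minimal
# LOCAL_imageSaver_test_submissions.json fixture might look like:
#
#     [{"source": "UserRequested",
#       "title": "UserRequested",
#       "author": "(Requested by user)",
#       "subreddit": "Requested_Downloads",
#       "subredditTitle": "Requested Downloads",
#       "body": "(Requested by user)",
#       "bodyUrl": "https://example.com/image.png",
#       "postUrl": "https://example.com/image.png"}]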