def scheduled_uploader(uploader_name, uploader_settings): log.debug("Scheduled disk check triggered for uploader: %s", uploader_name) try: rclone_settings = conf.configs['remotes'][uploader_name] # check suspended uploaders if check_suspended_uploaders(uploader_name): return # clear any banned service accounts check_suspended_sa(uploader_name) # check used disk space used_space = path.get_size(rclone_settings['upload_folder'], uploader_settings['size_excludes']) # if disk space is above the limit, clean hidden files then upload if used_space >= uploader_settings['max_size_gb']: log.info( "Uploader: %s. Local folder size is currently %d GB over the maximum limit of %d GB", uploader_name, used_space - uploader_settings['max_size_gb'], uploader_settings['max_size_gb']) # does this uploader have schedule settings if 'schedule' in uploader_settings and uploader_settings[ 'schedule']['enabled']: # there is a schedule set for this uploader, check if we are within the allowed times current_time = time.strftime('%H:%M') if not misc.is_time_between( (uploader_settings['schedule']['allowed_from'], uploader_settings['schedule']['allowed_until'])): log.info( "Uploader: %s. The current time %s is not within the allowed upload time periods %s -> %s", uploader_name, current_time, uploader_settings['schedule']['allowed_from'], uploader_settings['schedule']['allowed_until']) return # clean hidden files do_hidden() # upload do_upload(uploader_name) else: log.info( "Uploader: %s. Local folder size is currently %d GB. " "Still have %d GB remaining before its eligible to begin uploading...", uploader_name, used_space, uploader_settings['max_size_gb'] - used_space) except Exception: log.exception( "Unexpected exception occurred while processing uploader %s: ", uploader_name)
def scheduled_uploader(uploader_name, uploader_settings): log.debug("Scheduled disk check triggered for uploader: %s", uploader_name) try: rclone_settings = conf.configs['remotes'][uploader_name] # check suspended uploaders if check_suspended_uploaders(uploader_name): return # check used disk space used_space = path.get_size(rclone_settings['upload_folder'], uploader_settings['size_excludes']) # if disk space is above the limit, clean hidden files then upload if used_space >= uploader_settings['max_size_gb']: log.info( "Uploader: %s. Local folder size is currently %d GB over the maximum limit of %d GB", uploader_name, used_space - uploader_settings['max_size_gb'], uploader_settings['max_size_gb']) # clean hidden files do_hidden() # upload do_upload(uploader_name) else: log.info( "Uploader: %s. Local folder size is currently %d GB. " "Still have %d GB remaining before its eligible to begin uploading...", uploader_name, used_space, uploader_settings['max_size_gb'] - used_space) except Exception: log.exception( "Unexpected exception occurred while processing uploader %s: ", uploader_name)
def do_upload(remote=None):
    global plex_monitor_thread

    lock_file = lock.upload()
    if lock_file.is_locked():
        log.info("Waiting for running upload to finish before proceeding...")

    with lock_file:
        log.info("Starting upload")
        try:
            # loop each supplied uploader config
            for uploader_remote, uploader_config in conf.configs['uploader'].items():
                # if remote is not None, skip this remote if it is not == remote
                if remote and uploader_remote != remote:
                    continue

                # retrieve rclone config for this remote
                rclone_config = conf.configs['remotes'][uploader_remote]

                # send notification that upload is starting
                notify.send(message="Upload of %d GB has begun for remote: %s" %
                                    (path.get_size(rclone_config['upload_folder'],
                                                   uploader_config['size_excludes']), uploader_remote))

                # perform the upload
                uploader = Uploader(uploader_remote, uploader_config, rclone_config,
                                    conf.configs['core']['dry_run'],
                                    conf.configs['core']['rclone_config_path'],
                                    conf.configs['plex']['enabled'])

                # start the plex stream monitor before the upload begins if enabled
                if conf.configs['plex']['enabled'] and plex_monitor_thread is None:
                    plex_monitor_thread = thread.start(do_plex_monitor, 'plex-monitor')

                resp, resp_trigger = uploader.upload()
                if resp:
                    # non 0 result indicates a trigger was met, the result is how many hours to sleep this remote for
                    log.info("Upload aborted due to trigger: %r being met, %s will continue automatic uploading "
                             "normally in %d hours", resp_trigger, uploader_remote, resp)

                    # add remote to uploader_delay
                    uploader_delay[uploader_remote] = time.time() + ((60 * 60) * resp)

                    # send aborted upload notification
                    notify.send(message="Upload was aborted for remote: %s due to trigger %r. "
                                        "Uploads suspended for %d hours" %
                                        (uploader_remote, resp_trigger, resp))
                else:
                    # send successful upload notification
                    notify.send(message="Upload was completed successfully for remote: %s" % uploader_remote)

                # remove leftover empty directories from disk
                if not conf.configs['core']['dry_run']:
                    uploader.remove_empty_dirs()

        except Exception:
            log.exception("Exception occurred while uploading: ")

        log.info("Finished upload")
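
# NOTE: do_upload() above suspends a remote by storing an absolute epoch timestamp in
# uploader_delay, and scheduled_uploader() bails out early when check_suspended_uploaders()
# reports the remote as suspended. check_suspended_uploaders() itself is not shown in this
# section; the sketch below is a hypothetical version of that check, included only to illustrate
# how the uploader_delay entries might be consumed and cleared once the suspension window passes.
def _example_check_suspended_uploaders(uploader_name):
    """Hypothetical check against the uploader_delay dict used by scheduled_uploader()."""
    resume_at = uploader_delay.get(uploader_name)
    if resume_at is None:
        # uploader is not suspended
        return False
    if time.time() >= resume_at:
        # the suspension has expired, clear it and allow uploads again
        uploader_delay.pop(uploader_name, None)
        return False
    log.info("Uploader %s is still suspended for another %d minutes",
             uploader_name, (resume_at - time.time()) / 60)
    return True
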
def do_upload(remote=None):
    global plex_monitor_thread, uploader_delay
    global sa_delay

    nzbget = None
    nzbget_paused = False

    lock_file = lock.upload()
    if lock_file.is_locked():
        log.info("Waiting for running upload to finish before proceeding...")

    with lock_file:
        log.info("Starting upload")
        try:
            # loop each supplied uploader config
            for uploader_remote, uploader_config in conf.configs['uploader'].items():
                # if remote is not None, skip this remote if it is not == remote
                if remote and uploader_remote != remote:
                    continue

                # retrieve rclone config for this remote
                rclone_config = conf.configs['remotes'][uploader_remote]

                # send notification that upload is starting
                notify.send(message="Upload of %d GB has begun for remote: %s" %
                                    (path.get_size(rclone_config['upload_folder'],
                                                   uploader_config['size_excludes']), uploader_remote))

                # start the plex stream monitor before the upload begins, if enabled
                if conf.configs['plex']['enabled'] and plex_monitor_thread is None:
                    plex_monitor_thread = thread.start(do_plex_monitor, 'plex-monitor')

                # pause the nzbget queue before starting the upload, if enabled
                if conf.configs['nzbget']['enabled']:
                    nzbget = Nzbget(conf.configs['nzbget']['url'])
                    if nzbget.pause_queue():
                        nzbget_paused = True
                        log.info("Paused the Nzbget download queue, upload commencing!")
                    else:
                        log.error("Failed to pause the Nzbget download queue, upload commencing anyway...")

                uploader = Uploader(uploader_remote, uploader_config, rclone_config,
                                    conf.configs['core']['dry_run'],
                                    conf.configs['core']['rclone_binary_path'],
                                    conf.configs['core']['rclone_config_path'],
                                    conf.configs['plex']['enabled'])

                if sa_delay[uploader_remote] is not None:
                    available_accounts = [account for account, last_ban_time in sa_delay[uploader_remote].items()
                                          if last_ban_time is None]
                    if len(available_accounts):
                        available_accounts.sort()
                        log.info("The following accounts are available: %s", str(available_accounts))

                    # if there are no service accounts available, do not even bother attempting the upload
                    if len(available_accounts) == 0:
                        log.info("Upload aborted due to the fact that no service accounts "
                                 "are currently unbanned and available to use for remote %s", uploader_remote)

                        # add remote to uploader_delay
                        time_till_unban = misc.get_lowest_remaining_time(sa_delay[uploader_remote])
                        log.info("Lowest remaining time till unban is %d", time_till_unban)
                        uploader_delay[uploader_remote] = time_till_unban
                    else:
                        for i in range(0, len(available_accounts)):
                            uploader.set_service_account(available_accounts[i])
                            resp, resp_trigger = uploader.upload()
                            if resp:
                                current_data = sa_delay[uploader_remote]
                                current_data[available_accounts[i]] = time.time() + ((60 * 60) * resp)
                                sa_delay[uploader_remote] = current_data
                                log.debug("Setting account %s as unbanned at %f", available_accounts[i],
                                          sa_delay[uploader_remote][available_accounts[i]])

                                if i != (len(available_accounts) - 1):
                                    log.info("Upload aborted due to trigger: %r being met, "
                                             "%s is cycling to service_account file: %r",
                                             resp_trigger, uploader_remote, available_accounts[i + 1])

                                    # set unban time for current service account
                                    log.debug("Setting service account %s as banned for remote: %s",
                                              available_accounts[i], uploader_remote)
                                    continue
                                else:
                                    # non 0 result indicates a trigger was met, the result is how many hours
                                    # to sleep this remote for.
                                    # before banning the remote, check that a service account did not become
                                    # unbanned during the upload
                                    check_suspended_sa(sa_delay[uploader_remote])

                                    unban_time = misc.get_lowest_remaining_time(sa_delay[uploader_remote])
                                    if unban_time is not None:
                                        log.info("Upload aborted due to trigger: %r being met, %s will continue "
                                                 "automatic uploading normally in %d hours",
                                                 resp_trigger, uploader_remote, resp)

                                        # add remote to uploader_delay
                                        log.debug("Adding unban time for %s as %f", uploader_remote,
                                                  misc.get_lowest_remaining_time(sa_delay[uploader_remote]))
                                        uploader_delay[uploader_remote] = misc.get_lowest_remaining_time(
                                            sa_delay[uploader_remote])

                                    # send aborted upload notification
                                    notify.send(message="Upload was aborted for remote: %s due to trigger %r. "
                                                        "Uploads suspended for %d hours" %
                                                        (uploader_remote, resp_trigger, resp))
                            else:
                                # send successful upload notification
                                notify.send(message="Upload was completed successfully for remote: %s" %
                                                    uploader_remote)

                                # remove ban for service account
                                sa_delay[uploader_remote][available_accounts[i]] = None
                                break
                else:
                    resp, resp_trigger = uploader.upload()
                    if resp:
                        if uploader_remote not in uploader_delay:
                            # this uploader was not already in the delay dict, so lets put it there
                            log.info("Upload aborted due to trigger: %r being met, %s will continue automatic "
                                     "uploading normally in %d hours", resp_trigger, uploader_remote, resp)

                            # add remote to uploader_delay
                            uploader_delay[uploader_remote] = time.time() + ((60 * 60) * resp)

                            # send aborted upload notification
                            notify.send(message="Upload was aborted for remote: %s due to trigger %r. "
                                                "Uploads suspended for %d hours" %
                                                (uploader_remote, resp_trigger, resp))
                        else:
                            # this uploader is already in the delay dict, lets not delay it any further
                            log.info("Upload aborted due to trigger: %r being met for %s uploader",
                                     resp_trigger, uploader_remote)

                            # send aborted upload notification
                            notify.send(message="Upload was aborted for remote: %s due to trigger %r." %
                                                (uploader_remote, resp_trigger))
                    else:
                        log.info("Upload completed successfully for uploader: %s", uploader_remote)

                        # send successful upload notification
                        notify.send(message="Upload was completed successfully for remote: %s" % uploader_remote)

                        # remove uploader from uploader_delay (as it's no longer banned)
                        if uploader_remote in uploader_delay and uploader_delay.pop(uploader_remote, None) is not None:
                            # this uploader was in the delay dict, but upload was successful, lets remove it
                            log.info("%s is no longer suspended due to a previous aborted upload!", uploader_remote)

                # remove leftover empty directories from disk
                if not conf.configs['core']['dry_run']:
                    uploader.remove_empty_dirs()

                # resume the nzbget queue, if enabled
                if conf.configs['nzbget']['enabled'] and nzbget is not None and nzbget_paused:
                    if nzbget.resume_queue():
                        nzbget_paused = False
                        log.info("Resumed the Nzbget download queue!")
                    else:
                        log.error("Failed to resume the Nzbget download queue??")

        except Exception:
            log.exception("Exception occurred while uploading: ")

        log.info("Finished upload")
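
# NOTE: in the service-account path above, sa_delay maps each remote to a dict of
# {service_account_file: unban_epoch_or_None}, and misc.get_lowest_remaining_time() decides how
# long a remote should stay suspended when every account is banned. That helper is not defined in
# this section; the sketch below is a hypothetical version returning the earliest unban timestamp
# among the banned accounts, or None when no account is currently banned.
def _example_get_lowest_remaining_time(account_bans):
    """Hypothetical misc.get_lowest_remaining_time-style helper over an sa_delay entry."""
    ban_times = [unban_at for unban_at in account_bans.values() if unban_at is not None]
    if not ban_times:
        # nothing is banned, so there is no unban time to wait for
        return None
    return min(ban_times)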