def do_upload(remote=None):
    """Run the upload task for every configured uploader remote (or a single one).

    Acquires the shared upload lock, then for each uploader in the config:
    notifies that the upload is starting, runs the rclone upload via
    ``Uploader``, suspends the remote for N hours if an upload trigger fired,
    and cleans up leftover empty directories.

    NOTE(review): a second, extended ``do_upload`` definition appears later in
    this file and shadows this one at import time — confirm whether this
    earlier revision should be removed.

    :param remote: if given, only the uploader whose name matches is processed;
                   ``None`` processes all configured uploaders.
    """
    global plex_monitor_thread

    # Serialise uploads behind a shared lock; if one is already running we
    # block in the ``with`` below until it is released.
    lock_file = lock.upload()
    if lock_file.is_locked():
        log.info("Waiting for running upload to finish before proceeding...")

    with lock_file:
        log.info("Starting upload")
        try:
            # loop each supplied uploader config
            for uploader_remote, uploader_config in conf.configs['uploader'].items():
                # if remote is not None, skip this remote if it is not == remote
                if remote and uploader_remote != remote:
                    continue

                # retrieve rclone config for this remote
                rclone_config = conf.configs['remotes'][uploader_remote]

                # send notification that upload is starting
                notify.send(
                    message="Upload of %d GB has begun for remote: %s" %
                    (path.get_size(rclone_config['upload_folder'],
                                   uploader_config['size_excludes']), uploader_remote))

                # perform the upload
                uploader = Uploader(uploader_remote, uploader_config, rclone_config,
                                    conf.configs['core']['dry_run'],
                                    conf.configs['core']['rclone_config_path'],
                                    conf.configs['plex']['enabled'])

                # start the plex stream monitor before the upload begins if enabled
                # (a single monitor thread is shared across all remotes)
                if conf.configs['plex']['enabled'] and plex_monitor_thread is None:
                    plex_monitor_thread = thread.start(do_plex_monitor, 'plex-monitor')

                resp, resp_trigger = uploader.upload()

                if resp:
                    # non 0 result indicates a trigger was met, the result is how many
                    # hours to sleep this remote for
                    log.info(
                        "Upload aborted due to trigger: %r being met, %s will continue automatic uploading normally in "
                        "%d hours", resp_trigger, uploader_remote, resp)

                    # add remote to uploader_delay (epoch seconds until which it is suspended)
                    uploader_delay[uploader_remote] = time.time() + ((60 * 60) * resp)

                    # send aborted upload notification
                    notify.send(
                        message="Upload was aborted for remote: %s due to trigger %r. "
                                "Uploads suspended for %d hours" %
                                (uploader_remote, resp_trigger, resp))
                else:
                    # send successful upload notification
                    notify.send(
                        message="Upload was completed successfully for remote: %s" % uploader_remote)

                # remove leftover empty directories from disk
                if not conf.configs['core']['dry_run']:
                    uploader.remove_empty_dirs()

        except Exception:
            log.exception("Exception occurred while uploading: ")

    log.info("Finished upload")
def do_plex_monitor():
    """Monitor playing Plex streams during an upload and throttle rclone to match.

    Intended to run in its own thread for the lifetime of the upload lock.
    Validates the Plex server and the rclone rc endpoint, then polls Plex every
    ``poll_interval`` seconds: once the number of playing streams reaches
    ``max_streams_before_throttle`` the transfer is throttled to a speed chosen
    from ``throttle_speeds``; the throttle is adjusted as the stream count
    changes and removed when streams drop below the limit.  Clears the module
    global ``plex_monitor_thread`` on every exit path so a later upload can
    start a fresh monitor.
    """
    global plex_monitor_thread

    # create the plex object and make sure we can actually talk to the server
    plex = Plex(conf.configs['plex']['url'], conf.configs['plex']['token'])
    if not plex.validate():
        log.error("Aborting Plex stream monitor due to failure to validate supplied server url/token...")
        plex_monitor_thread = None
        return

    # sleep 15 seconds to allow rclone to start
    log.info("Plex server url + token were validated, sleeping 15 seconds before checking Rclone rc url...")
    time.sleep(15)

    # create the rclone throttle object
    rclone = RcloneThrottler(conf.configs['plex']['rclone']['url'])
    if not rclone.validate():
        log.error("Aborting Plex stream monitor due to failure to validate supplied rclone rc url...")
        plex_monitor_thread = None
        return
    else:
        log.info("Rclone rc url was validated, Plex streams monitoring will begin now!")

    throttled = False       # are we currently throttling the upload?
    throttle_speed = None   # speed currently applied (None while un-throttled)

    lock_file = lock.upload()
    # keep monitoring for as long as the upload lock is held by the uploader
    while lock_file.is_locked():
        streams = plex.get_streams()
        if streams is None:
            log.error("Failed to check Plex stream(s), trying again in %d seconds...",
                      conf.configs['plex']['poll_interval'])
        else:
            # we had a response; count only streams actively playing
            stream_count = 0
            for stream in streams:
                if stream.state == 'playing':
                    stream_count += 1

            # are we already throttled?
            if not throttled and stream_count >= conf.configs['plex']['max_streams_before_throttle']:
                log.info("There was %d playing stream(s) on Plex while we were currently un-throttled, streams:",
                         stream_count)
                for stream in streams:
                    log.info(stream)
                log.info("Upload throttling will now commence...")
                # send throttle request, picking the speed configured for this stream count
                throttle_speed = misc.get_nearest_less_element(
                    conf.configs['plex']['rclone']['throttle_speeds'], stream_count)
                throttled = rclone.throttle(throttle_speed)
                # send notification
                if throttled:
                    notify.send(
                        message="Throttled current upload to %s because there was %d playing stream(s) on Plex" %
                                (throttle_speed, stream_count))
            elif throttled:
                if stream_count < conf.configs['plex']['max_streams_before_throttle']:
                    log.info(
                        "There was less than %d playing stream(s) on Plex while we were currently throttled, "
                        "removing throttle!",
                        conf.configs['plex']['max_streams_before_throttle'])
                    # send un-throttle request; stay "throttled" if the request failed
                    throttled = not rclone.no_throttle()
                    throttle_speed = None
                    # send notification
                    if not throttled:
                        notify.send(
                            message="Un-throttled current upload because there was less than %d playing stream(s) on "
                                    "Plex" % conf.configs['plex']['max_streams_before_throttle'])
                elif misc.get_nearest_less_element(conf.configs['plex']['rclone']['throttle_speeds'],
                                                   stream_count) != throttle_speed:
                    # throttle speed changed, probably due to more/less streams, re-throttle
                    throttle_speed = misc.get_nearest_less_element(
                        conf.configs['plex']['rclone']['throttle_speeds'], stream_count)
                    log.info(
                        "Adjusting throttle speed for current upload to %s because there "
                        "was now %d playing stream(s) on Plex", throttle_speed, stream_count)
                    throttled = rclone.throttle(throttle_speed)
                    if throttled:
                        notify.send(
                            message='Throttle for current upload was adjusted to %s due to %d playing stream(s)'
                                    ' on Plex' % (throttle_speed, stream_count))
                else:
                    # same speed bucket as before; nothing to change
                    log.info(
                        "There was %d playing stream(s) on Plex while we were already throttled to %s, throttling "
                        "will continue..", stream_count, throttle_speed)

        # the lock_file exists, so we can assume an upload is in progress at this point
        time.sleep(conf.configs['plex']['poll_interval'])

    log.info("Finished monitoring Plex stream(s)!")
    plex_monitor_thread = None
def do_upload(remote=None):
    """Run the upload task for every configured uploader remote (or a single one).

    Extended revision of ``do_upload``: additionally pauses/resumes the Nzbget
    download queue around the upload (when enabled) and, for remotes with
    service accounts configured in ``sa_delay``, cycles through the available
    accounts — banning an account for N hours when it trips an upload trigger
    and suspending the whole remote only when no usable account remains.

    :param remote: if given, only the uploader whose name matches is processed;
                   ``None`` processes all configured uploaders.
    """
    global plex_monitor_thread, uploader_delay
    global sa_delay

    nzbget = None          # Nzbget client; created only when nzbget is enabled
    nzbget_paused = False  # True once we paused the queue (and so owe a resume)

    # Serialise uploads behind a shared lock; if one is already running we
    # block in the ``with`` below until it is released.
    lock_file = lock.upload()
    if lock_file.is_locked():
        log.info("Waiting for running upload to finish before proceeding...")

    with lock_file:
        log.info("Starting upload")
        try:
            # loop each supplied uploader config
            for uploader_remote, uploader_config in conf.configs['uploader'].items():
                # if remote is not None, skip this remote if it is not == remote
                if remote and uploader_remote != remote:
                    continue

                # retrieve rclone config for this remote
                rclone_config = conf.configs['remotes'][uploader_remote]

                # send notification that upload is starting
                notify.send(
                    message="Upload of %d GB has begun for remote: %s" %
                    (path.get_size(rclone_config['upload_folder'],
                                   uploader_config['size_excludes']), uploader_remote))

                # start the plex stream monitor before the upload begins, if enabled
                if conf.configs['plex']['enabled'] and plex_monitor_thread is None:
                    plex_monitor_thread = thread.start(do_plex_monitor, 'plex-monitor')

                # pause the nzbget queue before starting the upload, if enabled
                if conf.configs['nzbget']['enabled']:
                    nzbget = Nzbget(conf.configs['nzbget']['url'])
                    if nzbget.pause_queue():
                        nzbget_paused = True
                        log.info("Paused the Nzbget download queue, upload commencing!")
                    else:
                        # best-effort: carry on uploading even if the pause failed
                        log.error("Failed to pause the Nzbget download queue, upload commencing anyway...")

                uploader = Uploader(uploader_remote, uploader_config, rclone_config,
                                    conf.configs['core']['dry_run'],
                                    conf.configs['core']['rclone_binary_path'],
                                    conf.configs['core']['rclone_config_path'],
                                    conf.configs['plex']['enabled'])

                if sa_delay[uploader_remote] is not None:
                    # service accounts are configured for this remote; an entry maps
                    # account file -> None when usable, or its unban epoch when banned
                    available_accounts = [
                        account for account, last_ban_time in sa_delay[uploader_remote].items()
                        if last_ban_time is None
                    ]
                    if len(available_accounts):
                        # sorted so accounts are always tried in a stable order
                        available_accounts.sort()
                        log.info("The following accounts are available: %s", str(available_accounts))

                    # If there are no service accounts available, do not even bother attempting the upload
                    if len(available_accounts) == 0:
                        log.info(
                            "Upload aborted due to the fact that no service accounts "
                            "are currently unbanned and available to use for remote %s", uploader_remote)
                        # add remote to uploader_delay until the earliest account unban
                        time_till_unban = misc.get_lowest_remaining_time(sa_delay[uploader_remote])
                        log.info("Lowest Remaining time till unban is %d", time_till_unban)
                        uploader_delay[uploader_remote] = time_till_unban
                    else:
                        # try each available account in turn until one completes the upload
                        for i in range(0, len(available_accounts)):
                            uploader.set_service_account(available_accounts[i])
                            resp, resp_trigger = uploader.upload()
                            if resp:
                                # trigger met: record this account's unban time (resp hours from now)
                                current_data = sa_delay[uploader_remote]
                                current_data[available_accounts[i]] = time.time() + ((60 * 60) * resp)
                                sa_delay[uploader_remote] = current_data
                                log.debug("Setting account %s as unbanned at %f",
                                          available_accounts[i],
                                          sa_delay[uploader_remote][available_accounts[i]])
                                if i != (len(available_accounts) - 1):
                                    # more accounts remain; cycle to the next one
                                    log.info(
                                        "Upload aborted due to trigger: %r being met, "
                                        "%s is cycling to service_account file: %r",
                                        resp_trigger, uploader_remote, available_accounts[i + 1])
                                    # Set unban time for current service account
                                    log.debug("Setting service account %s as banned for remote: %s",
                                              available_accounts[i], uploader_remote)
                                    continue
                                else:
                                    # non 0 result indicates a trigger was met, the result is how many
                                    # hours to sleep this remote for.
                                    # Before banning the remote, check that a service account did not
                                    # become unbanned during the upload
                                    check_suspended_sa(sa_delay[uploader_remote])
                                    unbanTime = misc.get_lowest_remaining_time(sa_delay[uploader_remote])
                                    if unbanTime is not None:
                                        log.info(
                                            "Upload aborted due to trigger: %r being met, %s will continue automatic "
                                            "uploading normally in %d hours", resp_trigger, uploader_remote, resp)
                                        # add remote to uploader_delay
                                        log.debug("Adding unban time for %s as %f", uploader_remote,
                                                  misc.get_lowest_remaining_time(sa_delay[uploader_remote]))
                                        uploader_delay[uploader_remote] = misc.get_lowest_remaining_time(
                                            sa_delay[uploader_remote])
                                    # send aborted upload notification
                                    notify.send(
                                        message="Upload was aborted for remote: %s due to trigger %r. "
                                                "Uploads suspended for %d hours" %
                                                (uploader_remote, resp_trigger, resp))
                            else:
                                # send successful upload notification
                                notify.send(
                                    message="Upload was completed successfully for remote: %s" % uploader_remote)
                                # Remove ban for service account
                                sa_delay[uploader_remote][available_accounts[i]] = None
                                break
                else:
                    # no service accounts configured for this remote; single upload attempt
                    resp, resp_trigger = uploader.upload()
                    if resp:
                        if uploader_remote not in uploader_delay:
                            # this uploader was not already in the delay dict, so lets put it there
                            log.info(
                                "Upload aborted due to trigger: %r being met, %s will continue automatic uploading "
                                "normally in %d hours", resp_trigger, uploader_remote, resp)
                            # add remote to uploader_delay
                            uploader_delay[uploader_remote] = time.time() + ((60 * 60) * resp)
                            # send aborted upload notification
                            notify.send(
                                message="Upload was aborted for remote: %s due to trigger %r. Uploads suspended for %d"
                                        " hours" % (uploader_remote, resp_trigger, resp))
                        else:
                            # this uploader is already in the delay dict, lets not delay it any further
                            log.info("Upload aborted due to trigger: %r being met for %s uploader",
                                     resp_trigger, uploader_remote)
                            # send aborted upload notification
                            notify.send(
                                message="Upload was aborted for remote: %s due to trigger %r."
                                        % (uploader_remote, resp_trigger))
                    else:
                        log.info("Upload completed successfully for uploader: %s", uploader_remote)
                        # send successful upload notification
                        notify.send(
                            message="Upload was completed successfully for remote: %s" % uploader_remote)
                        # remove uploader from uploader_delays (as its no longer banned)
                        if uploader_remote in uploader_delay and uploader_delay.pop(uploader_remote,
                                                                                    None) is not None:
                            # this uploader was in the delay dict, but upload was successful, lets remove it
                            log.info("%s is no longer suspended due to a previous aborted upload!", uploader_remote)

                # remove leftover empty directories from disk
                if not conf.configs['core']['dry_run']:
                    uploader.remove_empty_dirs()

                # resume the nzbget queue, if enabled and we actually paused it
                if conf.configs['nzbget']['enabled'] and nzbget is not None and nzbget_paused:
                    if nzbget.resume_queue():
                        nzbget_paused = False
                        log.info("Resumed the Nzbget download queue!")
                    else:
                        log.error("Failed to resume the Nzbget download queue??")

        except Exception:
            log.exception("Exception occurred while uploading: ")

    log.info("Finished upload")