def encode_worker():
    """Worker thread loop: continuously pull encode jobs and process them.

    Runs forever. Each iteration downloads the source, encodes it, uploads
    the result, and sends notifications. Errors are logged rather than
    raised so one bad job cannot kill the worker thread, and cleanup plus
    ``task_done()`` always run so the queue never stalls.
    """
    while True:
        # Fetch the request from the Queue (blocks until one is available)
        new_request = episode_job_queue.get()

        o = None
        try:
            # Process the encoding job here
            o = OSHandler(c, new_request, p)
            o.download()
            ofile_size = o.encode()
            ofile_name = o.upload()

            # Create the NetworkHandler to send out notifications
            n = NetworkHandler(c, new_request, p, ofile_name, ofile_size)
            n.notify()
        except Exception:
            # Best-effort: log (with traceback) and move on to the next job
            logger.exception("encode_worker: job failed")
        finally:
            # o is still None if OSHandler construction itself failed
            if o is not None:
                try:
                    o.cleanup()
                except Exception:
                    logger.exception("encode_worker: cleanup failed")

        # Mark the job as done
        episode_job_queue.task_done()
        logger.warning(ep.JOB_COMPLETE)
def distribute_worker():
    """Worker thread loop: continuously pull distribution jobs and handle them.

    Runs forever. Jobs whose show passes the configured filters are
    distributed and a notification is sent. Errors are logged rather than
    raised so one bad job cannot kill the worker thread, and cleanup plus
    ``task_done()`` always run so the queue never stalls.
    """
    while True:
        new_request = episode_job_queue.get()

        o = None
        try:
            o = OSHandler(c, new_request, p)
            if o.check_filters(new_request.show):
                o.distribute()
                n = NetworkHandler(c, new_request, p)
                n.notify()
        except Exception:
            # Best-effort: log (with traceback) and move on to the next job
            logger.exception("distribute_worker: job failed")
        finally:
            # Always attempt to clean up; o is None if construction failed
            if o is not None:
                try:
                    o.cleanup()
                except Exception:
                    logger.exception("distribute_worker: cleanup failed")

        episode_job_queue.task_done()
        logger.warning(dp.JOB_COMPLETE)
def main():
    """Script entry point: build handlers from the CLI argument and upload.

    Expects the inotify event string as ``sys.argv[1]``. Failures are
    logged and swallowed (best-effort) so cleanup can always be attempted.
    """
    inote = sys.argv[1]

    o = None
    try:
        c = _get_config_handler()
        p = PrintHandler(c)
        a = ArgumentHandler(c, p, inote)
        f = FileHandler(c, a, p, inote)
        n = NetworkHandler(c, f, p)
        o = OSHandler(c, a, f, p)

        o.create_temp_replica_fs()
        o.upload()
        n.notify()
    except Exception:
        # Best-effort run: log the traceback instead of silently dropping it
        logger.exception("main: processing failed")
    finally:
        # A bit hacky, just to clean up if possible; o is None if
        # handler construction failed before OSHandler was built
        if o is not None:
            try:
                o.cleanup()
            except Exception:
                logger.exception("main: cleanup failed")

    print()  # For interactive sessions
def mochi():
    """HTTP endpoint: authorize the incoming request, then dispatch it.

    Returns a ``(body, status)`` tuple: 401 when authorization fails,
    200 when the request is accepted, 400 on any processing error.
    """
    try:
        a.refresh()
        status = a.authorize(request.headers)
        if not status:
            return "Unauthorized request", 401

        r = RequestHandler(request, p)
        # Only mochi-configured sub types are dispatched and notified.
        # NOTE(review): reconstructed from flattened source — confirm the
        # NetworkHandler/notify calls belong inside this branch.
        if r.sub_type in c.mochi_sub_types:
            m.send(r)
            n = NetworkHandler(c, r, p)
            n.notify()
        return "Request accepted", 200
    except Exception:
        # Log the full traceback instead of a bare print(e)
        logger.exception("mochi: error handling request")
        return "Error with request", 400
def encode_worker():
    """Worker thread loop: continuously pull notification jobs and send them.

    NOTE(review): despite its name, this worker services notify_job_queue
    and sends module notifications — the name (and the old docstring, which
    said "distribution job") looks copy/pasted from the encode worker.
    Confirm against the thread-spawning code before renaming.

    Runs forever. Errors are logged rather than raised so the queue keeps
    draining, and ``task_done()`` is always reached.
    """
    while True:
        new_request = notify_job_queue.get()
        try:
            # We need to ignore any errors to keep the queue empty,
            # but log them so failures are not invisible
            m = ModuleHandler(c, new_request, p)
            if m.check_filters(new_request.show):
                m.notify_all()
                n = NetworkHandler(c, new_request, p)
                n.notify()
        except Exception:
            logger.exception("notify worker: job failed")

        notify_job_queue.task_done()
        logger.warning(np.JOB_COMPLETE)
        print()
def generate_network(self):
    """Build the NetworkHandler for this analyzer's input file and keep it."""
    handler = NetworkHandler(
        self.in_file_directory,
        self.in_file_name,
        self.weight_id,
        self.aggregate_number,
    )
    self.network_handler = handler
class Analyzer:
    """Drives a network analysis run over a single input file.

    Parses the string-valued configuration flags ('yes'/'no') into
    booleans, builds a NetworkHandler, runs every analysis pass, and
    accumulates the results in ``self.RESULTS``.
    """

    # -------------------------------------------------------------
    #
    #   __init__ (in_file_directory, in_file_name,
    #             out_file_directory,
    #             is_weighted, is_directed, full_analysis)
    #
    # -------------------------------------------------------------
    def __init__(self, in_file_directory, in_file_name, out_file_directory,
                 is_weighted, is_directed, full_analysis, weight_id,
                 aggregate_number):
        """Store paths and parse the 'yes'/'no' flags into booleans.

        weight_id is only meaningful when is_weighted == 'yes'; it is
        forced to 0 otherwise. aggregate_number is coerced to int.
        """
        self.in_file_directory = in_file_directory
        self.in_file_name = in_file_name
        self.out_file_directory = out_file_directory
        self.aggregate_number = int(aggregate_number)
        self.RESULTS = []
        self.analyzed_feat = 'period'

        # Direct boolean expressions replace the original if/else ladders
        self.is_weighted = (is_weighted == 'yes')
        self.weight_id = weight_id if self.is_weighted else 0
        self.is_directed = (is_directed == 'yes')
        self.full_analysis = (full_analysis == 'yes')

    # -------------------------------------------------------------
    # -------------------------------------------------------------
    def generate_network(self):
        """Build the NetworkHandler for the configured input file."""
        self.network_handler = NetworkHandler(
            self.in_file_directory, self.in_file_name,
            self.weight_id, self.aggregate_number)

    # -------------------------------------------------------------
    # -------------------------------------------------------------
    def launch_analysis(self):
        """Run every analysis pass, collecting rows and feature labels.

        Each pass returns ``(analysis_rows, feature_label_suffix)``; rows
        are appended to RESULTS and suffixes to analyzed_feat. The loop
        replaces seven copy-pasted repetitions of the same three lines.
        """
        self.distributions = []
        passes = (
            self.network_handler.set_general_values,
            self.network_handler.set_degree_analysis,
            self.network_handler.set_volume_distribution,
            self.network_handler.set_clustering_distribution,
            self.network_handler.centrality_measures,
            self.network_handler.transversal_measures,
            self.network_handler.scc_analysis,
        )
        for run_pass in passes:
            analysis, txt = run_pass()
            self.RESULTS.extend(analysis)
            self.analyzed_feat += txt
        self.network_handler.save_extra()

    # -------------------------------------------------------------
    # -------------------------------------------------------------
    def import_results(self):
        """Return the accumulated analysis rows."""
        return self.RESULTS
def generate_network(self):
    """Instantiate the NetworkHandler from this analyzer's stored settings."""
    handler_args = (
        self.in_file_directory,
        self.in_file_name,
        self.weight_id,
        self.aggregate_number,
    )
    self.network_handler = NetworkHandler(*handler_args)
class Analyzer:
    """Drives a network analysis run over a single input file.

    Parses the string-valued configuration flags ("yes"/"no") into
    booleans, builds a NetworkHandler, runs every analysis pass, and
    accumulates the results in ``self.RESULTS``.
    """

    # -------------------------------------------------------------
    #
    #   __init__ (in_file_directory, in_file_name,
    #             out_file_directory,
    #             is_weighted, is_directed, full_analysis)
    #
    # -------------------------------------------------------------
    def __init__(
        self,
        in_file_directory,
        in_file_name,
        out_file_directory,
        is_weighted,
        is_directed,
        full_analysis,
        weight_id,
        aggregate_number,
    ):
        """Store paths and parse the "yes"/"no" flags into booleans.

        weight_id is only meaningful when is_weighted == "yes"; it is
        forced to 0 otherwise. aggregate_number is coerced to int.
        """
        self.in_file_directory = in_file_directory
        self.in_file_name = in_file_name
        self.out_file_directory = out_file_directory
        self.aggregate_number = int(aggregate_number)
        self.RESULTS = []
        self.analyzed_feat = "period"

        # Direct boolean expressions replace the original if/else ladders
        self.is_weighted = is_weighted == "yes"
        self.weight_id = weight_id if self.is_weighted else 0
        self.is_directed = is_directed == "yes"
        self.full_analysis = full_analysis == "yes"

    # -------------------------------------------------------------
    # -------------------------------------------------------------
    def generate_network(self):
        """Build the NetworkHandler for the configured input file."""
        self.network_handler = NetworkHandler(
            self.in_file_directory, self.in_file_name, self.weight_id, self.aggregate_number
        )

    # -------------------------------------------------------------
    # -------------------------------------------------------------
    def launch_analysis(self):
        """Run every analysis pass, collecting rows and feature labels.

        Each pass returns ``(analysis_rows, feature_label_suffix)``; rows
        are appended to RESULTS and suffixes to analyzed_feat. The loop
        replaces seven copy-pasted repetitions of the same three lines.
        """
        self.distributions = []
        passes = (
            self.network_handler.set_general_values,
            self.network_handler.set_degree_analysis,
            self.network_handler.set_volume_distribution,
            self.network_handler.set_clustering_distribution,
            self.network_handler.centrality_measures,
            self.network_handler.transversal_measures,
            self.network_handler.scc_analysis,
        )
        for run_pass in passes:
            analysis, txt = run_pass()
            self.RESULTS.extend(analysis)
            self.analyzed_feat += txt
        self.network_handler.save_extra()

    # -------------------------------------------------------------
    # -------------------------------------------------------------
    def import_results(self):
        """Return the accumulated analysis rows."""
        return self.RESULTS