def __init__(self, outbox_path, plugins_path, delegate):
    """Initialise the dispatcher thread.

    :param outbox_path: Directory where processed output is written.
    :param plugins_path: Directory containing the Processor plugins.
    :param delegate: Object notified (via set_processing_false) when the
        dispatched processing run has finished.
    """
    logger.info("(JobDispatcher __init__) ")
    self.outbox_path = outbox_path
    self.plugins_path = plugins_path
    self.delegate = delegate
    self.productService = ProductsService()
    # Pipeline settings (e.g. the degree of parallelism) are read from a
    # fixed, hard-coded configuration path.
    self.config = configparser.ConfigParser()
    self.config.read(
        "/usr/ichnosat/src/core/processing_pipe/config/config.cfg")
    threading.Thread.__init__(self)
def __init__(self, outbox_path, plugins_path, lock, i):
    """Initialise a worker Job thread.

    :param outbox_path: Directory where processed output is written.
    :param plugins_path: Directory the PluginManager loads plugins from.
    :param lock: Lock shared by all workers to serialise product claiming.
    :param i: Numeric worker index, used only for log readability.
    """
    self.i = i
    logger.info(f"(Job __init__) [{self.i}]")
    self.lock = lock
    self.outbox_path = outbox_path
    # Load every available Processor plugin up front; run() applies them
    # to each claimed product.
    plugin_manager = PluginManager(plugins_path)
    self.plugins = plugin_manager.get_plugins()
    self.productService = ProductsService()
    threading.Thread.__init__(self)
def publish_new_job(self):
    """Start a JobDispatcher unless a processing run is already active.

    Acts as a filter for async processing requests: if a previous run is
    still in progress the request is dropped, otherwise the on-going flag
    is raised and a dispatcher thread is launched.
    """
    logger.info("(Producer publish_new_job) ")
    logger.info(f"(Producer publish_new_job) self.processing: {self.processing}")
    # Guard clause: drop the request while a run is still active.
    if self.processing:
        logger.info(
            "(Producer publish_new_job) self.processing is true so return "
        )
        return
    logger.info(
        "(Producer publish_new_job) no processing on-going so set self.processing as True"
    )
    self.processing = True
    logger.info("(Producer publish_new_job) LAUNCH JobDispatcher ")
    dispatcher = JobDispatcher(self.outbox_path, self.plugins_path, self)
    dispatcher.start()
def run(self):
    """Thread run method implementation.

    Spawns the configured number of *Job* worker threads to process
    products via *Processor* plugins, waits for all of them to finish,
    then tells the delegate that processing is over.
    """
    logger.info("(JobDispatcher run) ")
    workers = []
    logger.info("(JobDispatcher run) get list of downloaded products")
    shared_lock = threading.Lock()
    # Degree of parallelism comes from the pipeline configuration.
    worker_count = int(
        self.config['PROCESSING_PIPE']['parallel_processing'])
    for index in range(worker_count):
        logger.info(f"(JobDispatcher run) SPREAD ({index}) thread")
        worker = Job(self.outbox_path, self.plugins_path, shared_lock, index)
        worker.daemon = True
        worker.start()
        workers.append(worker)
    logger.info("(JobDispatcher run) SPREAD wait threads end")
    for worker in workers:
        worker.join()
    logger.info(
        "(JobDispatcher run) SPREAD processing ended, set_processing on-going false on DELEGATE"
    )
    self.delegate.set_processing_false()
def _process(self, product):
    """Private method to run *Processor* plugins on one product.

    Runs every loaded plugin over the product's inbox directory, marks
    the product as processed, then removes the source directory.

    :param product: The product to process
    :type product: Product
    """
    logger.info(f"(Job _process) [{self.i}] process the product with name: "
                + product.name)
    # Derive the on-disk directory name from the product name.
    # NOTE(review): the [:-1] drops the final character — presumably a
    # trailing separator turned into "-" by the replace; confirm against
    # how the inbox directories are created.
    original_name = product.name.replace("/", "-")
    source = "/usr/ichnosat/data_local/inbox/" + original_name[:-1] + "/"
    logger.info(f"(Job run)[{self.i}] process product with path: " + source)
    for plugin in self.plugins:
        plugin.run(source, self.outbox_path)
    self.productService.update_product_status(product.name,
                                              ProductStatus.processed)
    logger.info(f"(Job run) [{self.i}]remove product with path > " + source)
    shutil.rmtree(source)
def __init__(self, outbox_path, plugins_path):
    """Initialise the Producer.

    :param outbox_path: Directory where processed output is written.
    :param plugins_path: Directory containing the Processor plugins.
    """
    logger.info("(Producer __init__) ")
    # Flag that publish_new_job uses to reject overlapping runs.
    self.processing = False
    self.outbox_path = outbox_path
    self.plugins_path = plugins_path
def run(self):
    """Thread run method implementation.

    Repeatedly claims a downloaded product from the database (under the
    shared lock) and processes it.  When no product is available the
    thread backs off, sleeping for growing Fibonacci-based intervals;
    once the back-off iterator is exhausted the thread terminates.
    """
    WAIT_MULTIPLICATOR = 0.5
    SECONDS_PER_MINUTE = 60
    FIBONACCI_ITERATIONS = 1000
    logger.info("(Job run)["+str(self.i)+"] ")
    iterator = self.fibonacci(FIBONACCI_ITERATIONS)
    # BUG FIX: total_wait_time used to be reset at the top of every loop
    # iteration, so the "in total" log line always reported only the
    # single most recent sleep.  It is now initialised once and reset
    # only when a product is found, together with the back-off iterator.
    total_wait_time = 0
    while True:
        logger.info("(Job run) ["+str(self.i)+"]@ Acquire the lock")
        self.lock.acquire()
        logger.info("(Job run)["+str(self.i)+"] @ Get a downloaded product from db")
        product = self.productService.get_a_downloaded_product()
        logger.info("(Job run) ["+str(self.i)+"]@ Extracted downloaded product " + str(product))
        if product:
            logger.info("(Job run) ["+str(self.i)+"]@ found a product to process")
            # Mark the product as processing while still holding the lock
            # so no other worker can claim the same one.
            self.productService.update_product_status(product.name,
                                                      ProductStatus.processing)
            self.lock.release()
            self._process(product)
            # Work was found: restart the back-off schedule from scratch.
            del iterator
            iterator = self.fibonacci(FIBONACCI_ITERATIONS)
            total_wait_time = 0
        else:
            logger.info("(Job run) ["+str(self.i)+"]@ not found a product, relase the lock")
            self.lock.release()
            try:
                logger.info("(Job run) ["+str(self.i)+"]@ Iteration")
                n = next(iterator)
                wait_seconds = n * WAIT_MULTIPLICATOR
                total_wait_time += wait_seconds
                time.sleep(wait_seconds)
                logger.info(" (Job run) ["+str(self.i)+"] ---- > waited " +
                            str(total_wait_time) + " seconds, " +
                            str(total_wait_time / SECONDS_PER_MINUTE) +
                            " minutes in total")
                continue
            except StopIteration:
                # Back-off schedule exhausted: give up and end the thread.
                logger.info("(Job run) ["+str(self.i)+"]@ Iteration attempts finished, return")
                del iterator
                break