# ---- bl-model (chunk) ----------------------------------------------------
# NOTE(review): this chunk was collapsed onto one line; the layout below is
# reconstructed.  The first try/except is most likely the tail of
# create_model(rconn, version_id) (see the call site in start()) — its `def`
# line lies before this chunk, so the statements are kept at their original
# top-of-chunk position.  Confirm against the full file.
global model_api
try:
    # Best-effort registration; errors are logged so the poll loop survives.
    res = model_api.add_model(TEXT_CLASSIFICATION_MODEL_TYPE, version_id)
except Exception as e:
    log.error(str(e))


def start(rconn):
    """Poll the latest crawl version and launch modeling when it is ready.

    Runs forever; re-checks every 10 minutes.
    """
    global model_api
    model_api = Models()
    while True:
        version_id = get_latest_crawl_version(rconn)
        if version_id is not None:
            log.info("check_condition_to_start")
            ok = check_condition_to_start(version_id)
            log.info("check_condition_to_start: " + str(ok))
            if ok is True:
                create_model(rconn, version_id)
                dispatch(rconn, version_id)
        # NOTE(review): sleep placed at loop level (assumed from the
        # collapsed source) so the service also backs off when no version
        # is available — confirm original indentation.
        time.sleep(60 * 10)


if __name__ == '__main__':
    log.info('Start bl-model:2')
    try:
        Process(target=start, args=(rconn, )).start()
    except Exception as e:
        log.error(str(e))
# ---- bl-object-classifier (chunk) ----------------------------------------
# NOTE(review): chunk begins mid-function — the three statements below are
# the tail of a storage-upload helper (its own log line says
# 'save_to_storage'); `storage`, `file`, `key`, `is_public` and `obj` are
# bound before this chunk.  Kept verbatim at top-of-chunk position.
path = storage.upload_file_to_bucket(AWS_OBJ_IMAGE_BUCKET, file, key, is_public=is_public)
obj['image_url'] = path
log.debug('save_to_storage done')


def start(rconn):
    """Consume product-classify jobs from Redis and analyze them forever."""
    global version_id
    version_id = get_latest_crawl_version()
    log.info('Start dispatch_job')
    # Periodic liveness probe; check_health presumably reads heart_bit —
    # TODO(review): confirm against check_health's definition.
    Timer(HEALTH_CHECK_TIME, check_health, ()).start()
    count = 0
    while True:
        key, value = rconn.blpop([REDIS_PRODUCT_CLASSIFY_QUEUE])
        if value is not None:
            analyze_product(value)
            # Mark progress for the health check after each processed job.
            global heart_bit
            heart_bit = True
            # count = count + 1
            # if count > MAX_PROCESS_NUM:
            #     delete_pod()


if __name__ == '__main__':
    try:
        log.info('Start bl-object-classifier:3')
        start(rconn)
    except Exception as e:
        log.error('main; ' + str(e))
        delete_pod()
# ---- bl-image-process (chunk) ---------------------------------------------
# NOTE(review): chunk begins mid-function.  The leading statements are the
# tail of check_condition_to_start(version_id) (named at the call site in
# start()); its `def`, `try:` and the size lookups lie before this chunk, so
# a minimal scaffold is reconstructed here — confirm against the full file.
def check_condition_to_start(version_id):
    """Return True when image processing for *version_id* should start."""
    try:
        ...  # earlier statements (product-size lookups) lost at the chunk boundary
        # Every product already accounted for -> nothing left to process.
        if (available_product_size + unavailable_product_size) == total_product_size:
            return False
    except Exception as e:
        log.error(str(e))
    # Default: allow the pipeline to start (also when the lookup failed).
    return True


def start(rconn):
    """Poll the latest crawl version and launch image processing when ready.

    Runs forever; re-checks every 10 minutes.
    """
    while True:
        version_id = get_latest_crawl_version(rconn)
        if version_id is not None:
            log.info("check_condition_to_start")
            ok = check_condition_to_start(version_id)
            log.info("check_condition_to_start: " + str(ok))
            if ok is True:
                prepare_products(rconn, version_id)
                dispatch(rconn, version_id)
        # NOTE(review): sleep placed at loop level (assumed from the
        # collapsed source) — confirm original indentation.
        time.sleep(60 * 10)


if __name__ == '__main__':
    log.info('Start bl-image-process:3')
    try:
        Process(target=start, args=(rconn, )).start()
    except Exception as e:
        log.error(str(e))
# ---- bl-text-classification-modeler (chunk) --------------------------------
def start():
    """Run the text-classification modeling pipeline end to end.

    NOTE(review): the `def` and `try:` are reconstructed — the chunk begins
    mid-function (the dangling `except` requires them) and `__main__` below
    calls `start()`.  `version_id` and `model_api` are bound before this
    chunk; confirm against the full file.
    """
    try:
        ...  # earlier statements lost at the chunk boundary
        # Mark the model record 'doing' while the pipeline runs.
        model = {'status': 'doing'}
        model_api.update_model(PRODUCT_MODELS_TYPE, version_id, model)
        log.info('Doing : bl-text-classification-modeler')
        make_dataset()
        make_model()
        save_model_to_storage()
        save_eval_to_storage()
        predict_test()
        # done
        model['status'] = 'done'
        model_api.update_model(PRODUCT_MODELS_TYPE, version_id, model)
        log.info('Done : bl-text-classification-modeler')
        # if (rconn.blpop([REDIS_PRODUCT_TEXT_MODEL_PROCESS_QUEUE])):
        #     log.info('SUCCESS : bl-text-classification-modeler')
    except Exception as e:
        log.error(str(e))


if __name__ == '__main__':
    try:
        log.info('Start bl-text-classification-modeler')
        start()
    except Exception as e:
        log.error('main; ' + str(e))
        # delete_pod()
# ---- bl-image-processor (chunk) -------------------------------------------
def dispatch_job(rconn):
    """Consume image-process jobs from Redis and process them forever.

    Blocks on the queue, times each job, and raises the heart_bit flag so
    the periodic health check can detect progress.
    """
    log.info('Start dispatch_job')
    # Periodic liveness probe; check_health presumably reads heart_bit —
    # TODO(review): confirm against check_health's definition.
    Timer(HEALTH_CHECK_TIME, check_health, ()).start()
    count = 0
    while True:
        key, value = rconn.blpop([REDIS_PRODUCT_IMAGE_PROCESS_QUEUE])
        start_time = time.time()
        process_image(value)
        count = count + 1
        elapsed_time = time.time() - start_time
        log.info('image-processing time: ' + str(elapsed_time))
        # if count > MAX_PROCESS_NUM:
        #     delete_pod()
        global heart_bit
        heart_bit = True


if __name__ == '__main__':
    try:
        log.info('Start bl-image-processor:4')
        dispatch_job(rconn)
    except Exception as e:
        log.error(str(e))
        delete_pod()
# ---- bl-classify (chunk) ---------------------------------------------------
# NOTE(review): chunk begins mid-function.  The leading statements are the
# tail of check_condition_to_start(version_id) (named at the call site in
# start()); its `def`, `try:` and the total-size lookup lie before this
# chunk, so a minimal scaffold is reconstructed — confirm against the file.
def check_condition_to_start(version_id):
    """Return True when classification for *version_id* should start."""
    try:
        ...  # total_product_size computed before the chunk boundary
        classified_size = product_api.get_size_products(version_id, is_classified=True)
        not_classified_size = product_api.get_size_products(version_id, is_classified=False)
        # Every product already classified (or queued) -> nothing to do.
        if (classified_size + not_classified_size) == total_product_size:
            return False
    except Exception as e:
        log.error(str(e))
    # Default: allow the pipeline to start (also when the lookup failed).
    return True


def start(rconn):
    """Poll the latest crawl version and launch classification when ready.

    Runs forever; re-checks every 10 minutes.
    """
    while True:
        version_id = get_latest_crawl_version(rconn)
        if version_id is not None:
            log.info("check_condition_to_start")
            ok = check_condition_to_start(version_id)
            log.info("check_condition_to_start: " + str(ok))
            if ok is True:
                # NOTE: 'classfiy' typo is the real helper name elsewhere in
                # the file — do not "fix" it here without renaming the helper.
                prepare_products_to_classfiy(rconn, version_id)
                dispatch(rconn, version_id)
        time.sleep(60 * 10)


if __name__ == '__main__':
    try:
        log.info("start bl-classify:1")
        Process(target=start, args=(rconn,)).start()
    except Exception as e:
        log.error(str(e))
# ---- bl-crawler (chunk) ----------------------------------------------------
# NOTE(review): chunk begins mid-function, inside the `try` of a
# product-update helper whose `def` and `try:` lie before this chunk; a
# minimal scaffold is reconstructed — confirm against the full file.
try:
    ...  # ProductApi update call lost at the chunk boundary
    # log.debug(response)
except Exception as e:
    log.error(
        "Exception when calling ProductApi->update_product_by_hostcode_and_productno: %s\n" % e)
    # delete_pod()


def notify_to_classify(host_code):
    """Signal the classifier stage that *host_code* finished crawling."""
    rconn.lpush(REDIS_HOST_CLASSIFY_QUEUE, host_code)


def dispatch_job(rconn, version_id):
    """Block on the host-crawl queue.

    NOTE(review): the popped value is currently dropped — the handler (and
    the heart_bit bookkeeping) is commented out, so this loop only drains
    the queue.
    """
    log.info('Start dispatch_job')
    # crawl('HC0001', "5a3bda9e4dfd7d90b88e5cde")
    # Timer(HEALTH_CHECK_TIME, check_health, ()).start()
    while True:
        key, value = rconn.blpop([REDIS_HOST_CRAWL_QUEUE])
        # global heart_bit
        # heart_bit = True


if __name__ == '__main__':
    log.info('Start bl-crawler:new5')
    version_id = get_latest_crawl_version()
    try:
        crawl(HOST_CODE, version_id)
    except Exception as e:
        log.error(str(e))
        delete_pod()
# ---- bl-ticker (module) ----------------------------------------------------
# Redis connection settings come from the environment; a missing variable
# raises KeyError at import time, which is the intended fail-fast behavior.
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)

options = {
    'REDIS_SERVER': REDIS_SERVER,
    'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-ticker')


class Ticker(Pod):
    """Pod that periodically refreshes a single '@' tick marker in Redis."""

    def __init__(self):
        super().__init__(REDIS_SERVER, REDIS_PASSWORD, rconn, log)

    def run(self):
        # Every REDIS_TICKER_VALUE seconds: remove all existing '@' tokens
        # (count=0 removes every occurrence) and push a fresh one, so
        # consumers always see at most one tick marker on the list.
        while True:
            time.sleep(REDIS_TICKER_VALUE)
            rconn.lrem(REDIS_TICKER_KEY, count=0, value='@')
            rconn.lpush(REDIS_TICKER_KEY, '@')


if __name__ == '__main__':
    log.info('Start bl-ticker:1')
    try:
        ticker = Ticker()
        ticker.run()
    except Exception as e:
        log.error(str(e))
# ---- bl-user-profile (module chunk) ----------------------------------------
# DB settings come from the environment (fail fast on a missing variable).
# NOTE(review): REDIS_SERVER / REDIS_PASSWORD are read before this chunk.
DB_USER_LOG_PORT = os.environ['DB_USER_LOG_PORT']
DB_USER_LOG_NAME = os.environ['DB_USER_LOG_NAME']
DB_USER_LOG_PASSWORD = os.environ['DB_USER_LOG_PASSWORD']

# decode_responses=False: queue payloads are binary pickles, not text.
rconn = redis.StrictRedis(REDIS_SERVER, decode_responses=False, port=6379, password=REDIS_PASSWORD)

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-user-profile')


def profile_image_file_search(rconn):
    """Block on the profile-image queue and unpickle each job payload.

    NOTE(review): the object handling below is commented out, so currently
    payloads are deserialized and then discarded.
    """
    while True:
        key, value = rconn.blpop([REDIS_USER_PROFILE_IMAGE_FILE_SEARCH_QUEUE])
        if value is not None:
            # SECURITY: pickle.loads executes arbitrary code from the
            # payload — safe only while every queue producer is trusted.
            image = pickle.loads(value)
            # objects = image.get('objects')
            # if objects is not None:
            #     for o in objects:


if __name__ == '__main__':
    try:
        log.info("start bl-user-profile:1")
        Process(target=profile_image_file_search, args=(rconn, )).start()
        # Process(target=profile_image_index_search, args=(rconn,)).start()
        # Process(target=profile_object_id_search, args=(rconn,)).start()
    except Exception as e:
        log.error(str(e))
# ---- bl-crawl (chunk) ------------------------------------------------------
# NOTE(review): chunk begins mid-function — tail of the crawl-job creation
# loop (offset/limit pagination over hosts).  The enclosing def/try/while
# and the page fetch lie before this chunk; the scaffold below is
# reconstructed — confirm against the full file.
try:
    ...  # offset/limit init and the first hosts page fetch lost at the boundary
    while True:
        ...  # hosts = <page of hosts at offset>  (reconstructed placeholder)
        for host in hosts:
            crawl = {}
            crawl['host_code'] = host['host_code']
            crawl['version_id'] = version_id
            crawl_api.add_crawl(crawl)
        # A short page means the last page was reached.
        if limit > len(hosts):
            break
        else:
            offset = offset + limit
except Exception as e:
    log.error(str(e))


def dispatch():
    """Block on the crawl-job queue and launch a full crawl per request."""
    while True:
        key, value = rconn.blpop([REDIS_JOB_CRAWL_QUEUE])
        version_name = value.decode('utf-8')
        version_id = create_new_version(version_name)
        if version_id is not None:
            create_crawl_jobs(version_id)
            restart_indexer(version_id)
            start_crawl(version_id)


if __name__ == '__main__':
    log.info('Start bl-crawl:3')
    try:
        dispatch()
    except Exception as e:
        log.error(str(e))
        # exit()
"Exception when calling update_product_by_hostcode_and_productno: %s\n" % e) # delete_pod() def keep_the_job(): rconn.lpush(REDIS_HOST_CRAWL_QUEUE, HOST_CODE) log.info('keep_the_job:' + HOST_CODE) def notify_to_classify(host_code): log.info('notify_to_classify') rconn.lpush(REDIS_HOST_CLASSIFY_QUEUE, host_code) if __name__ == '__main__': log.info('Start bl-crawler:1') try: save_status_on_crawl_job(HOST_CODE, STATUS_DOING) if HOST_GROUP == 'HG8000': crawl_amazon(HOST_CODE, HOST_GROUP) else: crawl(HOST_CODE, HOST_GROUP) except Exception as e: log.error('global exception') log.error(e) log.error(str(e)) traceback.print_exc(limit=None) delete_pod()