def download_model(self):
    # Pull the trained model file and its label file from S3 into the local model directory.
    storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)
    storage.download_file_from_bucket(
        AWS_BUCKET, os.path.join(LOCAL_MODEL_DIR, MODEL_FILE),
        os.path.join(AWS_MODEL_DIR, MODEL_FILE))
    storage.download_file_from_bucket(
        AWS_BUCKET, os.path.join(LOCAL_MODEL_DIR, LABEL_FILE),
        os.path.join(AWS_MODEL_DIR, LABEL_FILE))
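The s3.S3 class is an external wrapper and is not shown in this example. As a rough sketch, the same download could be done directly with boto3, assuming download_file_from_bucket takes (bucket, local_path, key) as the constant names suggest:

import os
import boto3

# Hypothetical boto3 equivalent of storage.download_file_from_bucket(...).
client = boto3.client('s3',
                      aws_access_key_id=AWS_ACCESS_KEY,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
client.download_file(AWS_BUCKET,
                     os.path.join(AWS_MODEL_DIR, MODEL_FILE),      # S3 key
                     os.path.join(LOCAL_MODEL_DIR, MODEL_FILE))    # local path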
Example #2
def save_to_storage(image_info):
    print('save_to_storage')
    storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)
    # Store the cropped image under <class_code>/<name>.<format>.
    key = os.path.join(image_info.class_code,
                       image_info.name + '.' + image_info.format)
    # Only the main image of a product is made publicly readable.
    is_public = False
    if image_info.main == 1:
        is_public = True
    storage.upload_file_to_bucket(AWS_BUCKET,
                                  TMP_CROP_IMG_FILE,
                                  key,
                                  is_public=is_public)
    print('save_to_storage done')
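upload_file_to_bucket is also part of the same s3 wrapper. A minimal boto3 sketch of the upload, assuming is_public maps to a public-read object ACL, might look like:

import boto3

client = boto3.client('s3',
                      aws_access_key_id=AWS_ACCESS_KEY,
                      aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
# Assumption: is_public=True corresponds to a public-read ACL on the object.
extra_args = {'ACL': 'public-read'} if is_public else None
client.upload_file(TMP_CROP_IMG_FILE, AWS_BUCKET, key, ExtraArgs=extra_args)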
Example #3
REDIS_PRODUCT_CLASSIFY_QUEUE = 'bl_product_classify_queue'
REDIS_OBJECT_INDEX_QUEUE = 'bl:object:index:queue'
REDIS_PRODUCT_HASH = 'bl:product:hash'
REDIS_PRODUCT_IMAGE_PROCESS_QUEUE = 'bl:product:image:process:queue'
REDIS_CRAWL_VERSION = 'bl:crawl:version'
REDIS_CRAWL_VERSION_LATEST = 'latest'

options = {
  'REDIS_SERVER': REDIS_SERVER,
  'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-object-classifier')
# decode_responses=False keeps queue payloads as raw bytes for pickle.loads.
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD,
                          decode_responses=False)

storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)

heart_bit = True

product_api = Products()
object_api = Objects()
image_api = Images()
version_id = None

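This excerpt sets up the queue names, the Redis connection, and the API clients, but the consuming loop itself is not included. A minimal sketch, assuming products are pushed onto REDIS_PRODUCT_CLASSIFY_QUEUE as pickled bytes (as pickle.loads in analyze_product below suggests); run_worker is a hypothetical name:

def run_worker():
  # Block on the classify queue and hand each payload to analyze_product.
  while True:
    item = rconn.blpop(REDIS_PRODUCT_CLASSIFY_QUEUE, timeout=10)
    if item is None:
      continue                    # timed out on an empty queue; poll again
    _queue, p_data = item         # blpop returns a (queue_name, payload) tuple
    analyze_product(p_data)       # payload stays as pickled bytes
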
def analyze_product(p_data):
  log.info('analyze_product')
  # p_data arrives from the Redis queue as pickled bytes.
  product = pickle.loads(p_data)

  try:
    main_class_code, main_objects = analyze_main_image(product)
  except Exception as e: