COLOR_GRPC_HOST = os.environ['COLOR_GRPC_HOST']
COLOR_GRPC_PORT = os.environ['COLOR_GRPC_PORT']
OD_GRPC_HOST = os.environ['COLOR_GRPC_HOST']
OD_GRPC_PORT = os.environ['COLOR_GRPC_PORT']
#FEATURE_GRPC_HOST = os.environ['FEATURE_GRPC_HOST']
#FEATURE_GRPC_PORT = os.environ['FEATURE_GRPC_PORT']
#OD_GRPC_HOST = os.environ['FEATURE_GRPC_HOST']
#OD_GRPC_PORT = os.environ['FEATURE_GRPC_PORT']
OD_SCORE_MIN = float(os.environ['OD_SCORE_MIN'])
GPU_NUM = os.environ['GPU_NUM']
GPU = '/device:GPU:' + GPU_NUM

MODEL_FILE = 'frozen_inference_graph.pb'
LABEL_MAP_FILE = 'label_map.pbtxt'
options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-detect:BottomObjectDetect')
storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)


class BottomObjectDetect(object):
    def __init__(self):
        label_map_file = self.load_labelemap()
        label_map = label_map_util.load_labelmap(label_map_file)
        log.debug(label_map)
        categories = label_map_util.convert_label_map_to_categories(
            label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
        self.__category_index = label_map_util.create_category_index(
            categories)
        self.__detection_graph = tf.Graph()
        #self.__feature_extractor = ExtractFeature(use_gpu=True)
        self.__color_extractor = ExtractColor(use_gpu=True)
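
# Illustrative sketch (not from the original source): a frozen graph such as
# MODEL_FILE is typically imported with the standard TF1 Object Detection API
# pattern below. The project's actual load_model() is not shown in the snippet
# above, so treat this helper as an assumption-based example.
def load_frozen_graph(model_file):
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        graph_def = tf.GraphDef()  # tf.compat.v1.GraphDef on TensorFlow 2
        with tf.gfile.GFile(model_file, 'rb') as f:
            graph_def.ParseFromString(f.read())
        tf.import_graph_def(graph_def, name='')
    return detection_graph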
Example #2
REDIS_OBJECT_FEATURE_QUEUE = 'bl:object:feature:queue'
REDIS_OBJECT_INDEX_QUEUE = 'bl:object:index:queue'
REDIS_OBJECT_LIST = 'bl:object:list'
REDIS_OBJECT_HASH = 'bl:object:hash'
REDIS_CRAWL_VERSION = 'bl:crawl:version'
REDIS_CRAWL_VERSION_LATEST = 'latest'

REDIS_INDEX_RESTART_QUEUE = 'bl:index:restart:queue'

INTERVAL_TIME = 60

AWS_BUCKET = 'bluelens-style-index'
INDEX_FILE = 'faiss.index'

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-object-index')
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)
storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)

object_api = None
feature_api = None
product_api = None
crawl_api = None
version_id = None


def spawn(uuid):

    pool = spawning_pool.SpawningPool()

    project_name = 'bl-object-indexer-' + uuid
Example #3
SPAWN_ID = os.environ['SPAWN_ID']
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']
RELEASE_MODE = os.environ['RELEASE_MODE']
# DATA_SOURCE = os.environ['DATA_SOURCE']
DATA_SOURCE_QUEUE = 'REDIS_QUEUE'
DATA_SOURCE_DB = 'DB'

REDIS_OBJECT_FEATURE_QUEUE = 'bl:object:feature:queue'
REDIS_OBJECT_INDEX_QUEUE = 'bl:object:index:queue'

options = {
  'REDIS_SERVER': REDIS_SERVER,
  'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-image-indexer')
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)
feature_extractor = ExtractFeature(use_gpu=True)
storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)

heart_bit = True

def start_index():
  log.info('start_index')
  def items():
    while True:
      yield rconn.blpop([REDIS_OBJECT_INDEX_QUEUE])

  def request_stop(signum, frame):
    log.info('stopping')
    rconn.connection_pool.disconnect()
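
# Illustrative sketch (assumption): how a BLPOP-driven worker such as
# start_index() is commonly wired together. The signal registration and the
# handle_item callback are placeholders, not code from this project.
import signal

def run_queue_worker(rconn, queue_key, handle_item):
  def request_stop(signum, frame):
    rconn.connection_pool.disconnect()
    raise SystemExit()

  signal.signal(signal.SIGTERM, request_stop)
  signal.signal(signal.SIGINT, request_stop)
  while True:
    key, value = rconn.blpop([queue_key])  # blocks until an item is pushed
    handle_item(value)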
Example #4
import pickle

from bl_db_product_amz_best.products import Products

# from stylelens_product.products import Products
# product_api = Products()
#
# HEALTH_CHECK_TIME = 60*20
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

REDIS_AMZ_BEST_ASIN_QUEUE = "bl:amz:best:asin:queue"
REDIS_AMZ_BEST_ASIN_QUEUE_TEST = "bl:amz:best:asin:queue:test"

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-amazon-best-crawler')
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)

amazon = bottlenose.Amazon(
    AWSAccessKeyId=os.environ['AWS_ACCESS_KEY_ID'],
    AWSSecretAccessKey=os.environ['AWS_SECRET_ACCESS_KEY'],
    AssociateTag=os.environ['AWS_ASSOCIATE_TAG'],
    Parser=lambda text: BeautifulSoup(text, 'xml'))


def add_product(product):
    api_instance = Products()
    res = api_instance.add_product(product=product)
    print(res)
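
# Illustrative only (not from the original source): how the bottlenose client
# configured above is typically queried. The ASIN and ResponseGroup values are
# placeholders.
def lookup_title(asin):
    result = amazon.ItemLookup(ItemId=asin, ResponseGroup='ItemAttributes')
    # With the BeautifulSoup XML parser above, `result` is a parsed tree.
    title = result.find('Title')
    return title.text if title else None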

Example #5
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']
RELEASE_MODE = os.environ['RELEASE_MODE']
DB_PRODUCT_HOST = os.environ['DB_PRODUCT_HOST']
DB_PRODUCT_PORT = os.environ['DB_PRODUCT_PORT']
DB_PRODUCT_USER = os.environ['DB_PRODUCT_USER']
DB_PRODUCT_PASSWORD = os.environ['DB_PRODUCT_PASSWORD']
DB_PRODUCT_NAME = os.environ['DB_PRODUCT_NAME']
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)

options = {
  'REDIS_SERVER': REDIS_SERVER,
  'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-crawl')

crawl_api = Crawls()

def spawn_crawler(host_code, version_id):
  pool = spawning_pool.SpawningPool()
  id = host_code.lower()

  project_name = 'bl-crawler-' + id
  log.debug('spawn_crawler: ' + project_name)

  pool.setServerUrl(REDIS_SERVER)
  pool.setServerPassword(REDIS_PASSWORD)
  pool.setApiVersion('v1')
  pool.setKind('Pod')
  pool.setMetadataName(project_name)
Example #6
AWS_ACCESS_KEY = os.environ['AWS_ACCESS_KEY'].replace('"', '')
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY'].replace('"', '')
AWS_MODEL_BUCKET = 'bluelens-style-model'
AWS_BUCKET_CLASSIFICATION_TEXT_PATH = 'classification/text/' + RELEASE_MODE + '/'


def get_latest_crawl_version():
    value = rconn.hget(REDIS_CRAWL_VERSION, REDIS_CRAWL_VERSION_LATEST)
    return value
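# Note: rconn below is created with decode_responses=True, so hget() returns a
# str (or None) here; no .decode('utf-8') step is needed, unlike the
# byte-returning Redis clients used in other snippets.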


# REDIS_PRODUCT_TEXT_MODEL_PROCESS_QUEUE = 'bl:product:text:model:process:queue'

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-text-classification-modeler')
rconn = redis.StrictRedis(REDIS_SERVER,
                          decode_responses=True,
                          port=6379,
                          password=REDIS_PASSWORD)

storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)

text_api = Texts()
product_api = Products()
model_api = Models()
PRODUCT_MODELS_TYPE = 'text-classification'

TEXT_CLASSIFICATION_MODEL = 'text_classification_model'
DATASET_LABEL_PREFIX = '__label__'
generated_datasets = []
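
# The '__label__' prefix above matches the fastText supervised-training
# convention, so a generated dataset line would presumably look like:
#   __label__<category> <tokenized product text>
# (illustrative; the real category names and text come from the product data).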
Example #7
DB_DATASET_NAME = os.environ['DB_DATASET_NAME']
DB_DATASET_PORT = os.environ['DB_DATASET_PORT']
DB_DATASET_USER = os.environ['DB_DATASET_USER']
DB_DATASET_PASSWORD = os.environ['DB_DATASET_PASSWORD']

REDIS_PRODUCT_TEXT_MODEL_PROCESS_QUEUE = 'bl:product:text:model:process:queue'
REDIS_CRAWL_VERSION = 'bl:crawl:version'
REDIS_CRAWL_VERSION_LATEST = 'latest'

TEXT_CLASSIFICATION_MODEL_TYPE = 'text-classification'

SPAWNING_CRITERIA = 50
PROCESSING_TERM = 60

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-model')
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)

model_api = None


def get_latest_crawl_version(rconn):
    value = rconn.hget(REDIS_CRAWL_VERSION, REDIS_CRAWL_VERSION_LATEST)
    if value is None:
        return None

    log.debug(value)
    try:
        version_id = value.decode("utf-8")
    except Exception as e:
        log.error(str(e))
Example #8
import json
import os
from bluelens_log import Logging
import subprocess
import yaml

TMP_CONFIG_FILE = "config.yaml"

REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']
options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-spawning-pool')


class PodManager:
    def __init__(self):
        log.debug('init PodManager')

    def create(self, data):
        log.debug('%s' % (data))

        # Write the pod spec to a temporary YAML file and close (flush) it
        # before kubectl reads it back.
        with open(TMP_CONFIG_FILE, 'w') as outfile:
            yaml.dump(data, outfile, default_flow_style=False)

        cmd = ('cat ' + TMP_CONFIG_FILE + ' && kubectl --namespace=' +
               data['metadata']['namespace'] + ' create -f ' + TMP_CONFIG_FILE)
        self.runInBash(cmd)

    def delete(self, data):
        log.debug(data)
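
# Hypothetical usage sketch (the pod name, namespace, and image are
# placeholders): the kind of manifest dict that PodManager.create() serialises
# to YAML and hands to kubectl.
example_pod = {
    'apiVersion': 'v1',
    'kind': 'Pod',
    'metadata': {'name': 'bl-example', 'namespace': 'default'},
    'spec': {
        'containers': [
            {'name': 'main', 'image': 'busybox', 'command': ['sleep', '60']},
        ],
    },
}
PodManager().create(example_pod)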
Example #9
import os
from bson.objectid import ObjectId
from orm.database import DataBase
from swagger_server.models.image import Image
from swagger_server.models.add_image_response import AddImageResponse
from swagger_server.models.add_image_response_data import AddImageResponseData

from bluelens_log import Logging

REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-db-product:Images')


class Images(DataBase):
    def __init__(self):
        super().__init__()
        self.images = self.db.images

    @staticmethod
    def add_image(connexion):
        log.debug('add_image')
        orm = Images()
        res = AddImageResponse()
        data = AddImageResponseData()
        response_status = 200
        if connexion.request.is_json:
            body = Image.from_dict(connexion.request.get_json())
Example #10
OD_GRPC_PORT = os.environ['COLOR_GRPC_PORT']
#FEATURE_GRPC_HOST = os.environ['FEATURE_GRPC_HOST']
#FEATURE_GRPC_PORT = os.environ['FEATURE_GRPC_PORT']
#OD_GRPC_HOST = os.environ['FEATURE_GRPC_HOST']
#OD_GRPC_PORT = os.environ['FEATURE_GRPC_PORT']
OD_SCORE_MIN = float(os.environ['OD_SCORE_MIN'])
GPU_NUM = os.environ['GPU_NUM']
GPU = '/device:GPU:' + GPU_NUM

MODEL_FILE = 'frozen_inference_graph.pb'
LABEL_MAP_FILE = 'label_map.pbtxt'
options = {
  'REDIS_SERVER': REDIS_SERVER,
  'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-detect:AllObjectDetect')
storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)

class TopFullObjectDetect(object):
  def __init__(self):
    label_map_file = self.load_labelemap()
    label_map = label_map_util.load_labelmap(label_map_file)
    log.debug(label_map)
    categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES,
                                                                use_display_name=True)
    self.__category_index = label_map_util.create_category_index(categories)
    self.__detection_graph = tf.Graph()
    #self.__feature_extractor = ExtractFeature(use_gpu=True)
    self.__color_extractor = ExtractColor(use_gpu=True)
    model_file = self.load_model()
    with self.__detection_graph.as_default():
Example #11
from bluelens_log import Logging
from bluelens_k8s.pod import Pod

REDIS_TICKER_KEY = os.environ['TICKER_KEY']
REDIS_TICKER_VALUE = int(os.environ['TICKER_VALUE'])

REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)

options = {
  'REDIS_SERVER': REDIS_SERVER,
  'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-ticker')

class Ticker(Pod):
  def __init__(self):
    super().__init__(REDIS_SERVER, REDIS_PASSWORD, rconn, log)

  def run(self):
    while True:
      time.sleep(REDIS_TICKER_VALUE)
      rconn.lrem(REDIS_TICKER_KEY, count=0, value='@')
      rconn.lpush(REDIS_TICKER_KEY, '@')
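      # The lrem/lpush pair keeps at most one '@' token on REDIS_TICKER_KEY, so
      # consumers blocking on this key wake up roughly once every
      # REDIS_TICKER_VALUE seconds.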

if __name__ == '__main__':
  log.info('Start bl-ticker:1')

  try:
Example #12
REDIS_LOG_SEARCH_IMAGE_FILE_QUEUE = 'bl:log:search:image:file'
REDIS_LOG_SEARCH_IMAGE_ID_QUEUE = 'bl:log:search:image:id'
REDIS_LOG_SEARCH_OBJECT_ID_QUEUE = 'bl:log:search:object:id'

REDIS_SERVER = os.environ['REDIS_SEARCH_SERVER']
REDIS_PASSWORD = os.environ['REDIS_SEARCH_PASSWORD']

CACHE_MAX_NUM = 7000

rconn = redis.StrictRedis(REDIS_SERVER,
                          decode_responses=False,
                          port=6379,
                          password=REDIS_PASSWORD)

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='style-api:Products')


class Images(object):
    def __init__(self):
        super().__init__()

    @staticmethod
    def get_images(image_id, offset=0, limit=10):
        log.info('get_images:' + image_id)
        index_image_api = IndexImages()
        res = GetImageResponse()

        try:
            image_d = rconn.hget(REDIS_INDEXED_SIM_IMAGES_HASH, image_id)
Example #13
import os
import time
from orm.database import DataBase
from swagger_server.models.add_index_response import AddIndexResponse
from swagger_server.models.add_index_response_data import AddIndexResponseData
from swagger_server.models.get_index_response import GetIndexResponse
from swagger_server.models.index import Index

from bluelens_log import Logging

REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-db-index:Indexes')


class Indexes(DataBase):
    def __init__(self):
        super().__init__()
        self.indexes = self.db.indexes

    @staticmethod
    def add_index(connexion):
        start_time = time.time()
        orm = Indexes()
        res = AddIndexResponse()
        data = AddIndexResponseData()
        response_status = 200
        if connexion.request.is_json:
            index = connexion.request.get_json()
Example #14
REDIS_SERVER = os.environ['REDIS_SEARCH_SERVER']
REDIS_PASSWORD = os.environ['REDIS_SEARCH_PASSWORD']

DB_USER_LOG_USER = os.environ['DB_USER_LOG_USER']
DB_USER_LOG_HOST = os.environ['DB_USER_LOG_HOST']
DB_USER_LOG_PORT = os.environ['DB_USER_LOG_PORT']
DB_USER_LOG_NAME = os.environ['DB_USER_LOG_NAME']
DB_USER_LOG_PASSWORD = os.environ['DB_USER_LOG_PASSWORD']

rconn = redis.StrictRedis(REDIS_SERVER,
                          decode_responses=False,
                          port=6379,
                          password=REDIS_PASSWORD)
options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-user-profile')


def profile_image_file_search(rconn):
    while True:
        key, value = rconn.blpop([REDIS_USER_PROFILE_IMAGE_FILE_SEARCH_QUEUE])
        if value is not None:
            image = pickle.loads(value)
            # objects = image.get('objects')
            # if objects is not None:
            #   for o in objects:


if __name__ == '__main__':
    try:
        log.info("start bl-user-profile:1")
Example #15
DB_IMAGE_HOST = os.environ['DB_IMAGE_HOST']
DB_IMAGE_PORT = os.environ['DB_IMAGE_PORT']
DB_IMAGE_NAME = os.environ['DB_IMAGE_NAME']
DB_IMAGE_USER = os.environ['DB_IMAGE_USER']
DB_IMAGE_PASSWORD = os.environ['DB_IMAGE_PASSWORD']

AWS_ACCESS_KEY = os.environ['AWS_ACCESS_KEY']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']

rconn = redis.StrictRedis(REDIS_SERVER, decode_responses=True, port=6379, password=REDIS_PASSWORD)
options = {
  'REDIS_SERVER': REDIS_SERVER,
  'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-classify')

product_api = None

def spawn_classifier(uuid):

  pool = spawning_pool.SpawningPool()

  project_name = 'bl-object-classifier-' + uuid
  log.debug('spawn_classifier: ' + project_name)

  pool.setServerUrl(REDIS_SERVER)
  pool.setServerPassword(REDIS_PASSWORD)
  pool.setApiVersion('v1')
  pool.setKind('Pod')
  pool.setMetadataName(project_name)
Example #16
import os
import time
from bson.objectid import ObjectId
from orm.database import DataBase
from swagger_server.models.object import Object
from swagger_server.models.add_object_response import AddObjectResponse
from swagger_server.models.add_object_response_data import AddObjectResponseData
from swagger_server.models.get_object_response import GetObjectResponse

from bluelens_log import Logging

REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-db-product:Products')


class Objects(DataBase):
    def __init__(self):
        super().__init__()
        self.objects = self.db.objects

    @staticmethod
    def add_object(connexion):
        start_time = time.time()
        orm = Objects()
        res = AddObjectResponse()
        data = AddObjectResponseData()
        response_status = 200
        if connexion.request.is_json:
Example #17
DB_PRODUCT_NAME = os.environ['DB_PRODUCT_NAME']
AWS_ACCESS_KEY = os.environ['AWS_ACCESS_KEY'].replace('"', '')
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY'].replace('"', '')

MAX_PROCESS_NUM = int(os.environ['MAX_PROCESS_NUM'])

REDIS_HOST_CLASSIFY_QUEUE = 'bl:host:classify:queue'
REDIS_PRODUCT_IMAGE_PROCESS_QUEUE = 'bl:product:image:process:queue'
REDIS_CRAWL_VERSION = 'bl:crawl:version'
REDIS_CRAWL_VERSION_LATEST = 'latest'

SPAWNING_CRITERIA = 50
PROCESSING_TERM = 60

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-image-process')
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)


def get_latest_crawl_version(rconn):
    value = rconn.hget(REDIS_CRAWL_VERSION, REDIS_CRAWL_VERSION_LATEST)
    if value is None:
        return None

    log.debug(value)
    try:
        version_id = value.decode("utf-8")
    except Exception as e:
        log.error(str(e))
        version_id = None
    return version_id
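
# Illustrative counterpart (assumption, not shown in the original source): the
# producer side would record the latest crawl version in the same Redis hash.
def set_latest_crawl_version(rconn, version_id):
    rconn.hset(REDIS_CRAWL_VERSION, REDIS_CRAWL_VERSION_LATEST, version_id)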
Example #18
import numpy as np
import os
from util import s3
import faiss
from bluelens_log import Logging

AWS_BUCKET = 'bluelens-style-index'
AWS_ACCESS_KEY = os.environ['AWS_ACCESS_KEY']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']
RELEASE_MODE = os.environ['RELEASE_MODE']
INDEX_FILE = 'faiss.index'
options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-search-vector')
storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)


class SearchVector(object):
    def __init__(self):
        log.info('Init')
        try:
            file = self.load_index_file()
            self.index = faiss.read_index(file)
        except Exception as e:
            log.error(str(e))

    def load_index_file(self):
        log.info('load_index_file')
        file = os.path.join(os.getcwd(), INDEX_FILE)
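
# Illustrative sketch (assumption): querying the loaded faiss index. The query
# vector and its dimensionality are placeholders; the real feature size comes
# from the feature extractor that built the index.
def search_similar(index, query_vector, top_k=5):
    query = np.asarray([query_vector], dtype='float32')  # faiss wants float32, shape (n, d)
    distances, ids = index.search(query, top_k)
    return list(zip(ids[0].tolist(), distances[0].tolist()))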
Example #19
REDIS_INDEXED_OBJECT_HASH = 'bl_indexed_object_hash'
REDIS_INDEXED_OBJECT_LIST = 'bl:indexed:object:list'

REDIS_USER_OBJECT_HASH = 'bl:user:object:hash'
REDIS_USER_OBJECT_QUEUE = 'bl:user:object:queue'
REDIS_USER_IMAGE_HASH = 'bl:user:image:hash'
REDIS_FEED_IMAGE_HASH = 'bl:feed:image:hash'

REDIS_LOG_SEARCH_IMAGE_FILE_QUEUE = 'bl:log:search:image:file'
REDIS_LOG_SEARCH_IMAGE_ID_QUEUE = 'bl:log:search:image:id'
REDIS_LOG_SEARCH_OBJECT_ID_QUEUE = 'bl:log:search:object:id'

ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='style-api:Objects')

rconn = redis.StrictRedis(REDIS_SERVER,
                          decode_responses=False,
                          port=6379,
                          password=REDIS_PASSWORD)

TMP_IMG = 'tmp.jpg'


def allowed_file(filename):
    return '.' in filename and \
           filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
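# For example, allowed_file('look.jpeg') is True while allowed_file('look.JPEG')
# is False, because the extension check above is case-sensitive.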


class Objects(object):
Example #20
REDIS_HOST_CLASSIFY_QUEUE = 'bl:host:classify:queue'
REDIS_HOST_CRAWL_QUEUE = 'bl:host:crawl:queue'
REDIS_PRODUCT_IMAGE_PROCESS_QUEUE = 'bl:product:image:process:queue'
REDIS_CRAWL_VERSION = 'bl:crawl:version'
REDIS_CRAWL_VERSION_LATEST = 'latest'

SPAWN_ID = os.environ['SPAWN_ID']
HOST_CODE = os.environ['HOST_CODE']
RELEASE_MODE = os.environ['RELEASE_MODE']
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-crawler')

product_api = None

# heart_bit = True

# def check_health():
#   global  heart_bit
#   log.info('check_health: ' + str(heart_bit))
#   if heart_bit == True:
#     heart_bit = False
#     Timer(HEALTH_CHECK_TIME, check_health, ()).start()
#   else:
#     delete_pod()

Example #21
SPAWN_ID = os.environ['SPAWN_ID']
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']
RELEASE_MODE = os.environ['RELEASE_MODE']
# DATA_SOURCE = os.environ['DATA_SOURCE']
DATA_SOURCE_QUEUE = 'REDIS_QUEUE'
DATA_SOURCE_DB = 'DB'
FEATURE_GRPC_HOST = os.environ['FEATURE_GRPC_HOST']
FEATURE_GRPC_PORT = os.environ['FEATURE_GRPC_PORT']

# REDIS_OBJECT_FEATURE_QUEUE = 'bl:object:feature:queue'
REDIS_OBJECT_INDEX_QUEUE = 'bl:object:index:queue'

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-object-indexer')
rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)
feature_extractor = ExtractFeature(use_gpu=True)
storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)

heart_bit = True
object_api = Objects()


def start_index():
    log.info('start_index')

    def items():
        count = 0
        while True:
            if count > MAX_INDEX_NUM:
Example #22
import os
import time
from bluelens_log import Logging
from swagger_server.models.get_feed_response import GetFeedResponse
from .feed import Feed

REDIS_SERVER = os.environ['REDIS_SEARCH_SERVER']
REDIS_PASSWORD = os.environ['REDIS_SEARCH_PASSWORD']

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='style-api:Feeds')


class Feeds(object):
    def __init__(self):
        super().__init__()

    @staticmethod
    def get_feeds(offset=None, limit=None):
        feed = Feed(log)
        res = GetFeedResponse()
        start_time = time.time()

        try:
            feeds = feed.feeds(offset, limit)
            res.message = 'Successful'
            res.data = feeds

            response_status = 200
        except Exception as e:
            log.error(str(e))
Example #23
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY'].replace('"', '')

MAX_PROCESS_NUM = int(os.environ['MAX_PROCESS_NUM'])

REDIS_PRODUCT_CLASSIFY_QUEUE = 'bl_product_classify_queue'
REDIS_OBJECT_INDEX_QUEUE = 'bl:object:index:queue'
REDIS_PRODUCT_HASH = 'bl:product:hash'
REDIS_PRODUCT_IMAGE_PROCESS_QUEUE = 'bl:product:image:process:queue'
REDIS_CRAWL_VERSION = 'bl:crawl:version'
REDIS_CRAWL_VERSION_LATEST = 'latest'

options = {
  'REDIS_SERVER': REDIS_SERVER,
  'REDIS_PASSWORD': REDIS_PASSWORD
}
log = Logging(options, tag='bl-object-classifier')
rconn = redis.StrictRedis(REDIS_SERVER, decode_responses=False, port=6379, password=REDIS_PASSWORD)

storage = s3.S3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)

heart_bit = True

product_api = Products()
object_api = Objects()
image_api = Images()
version_id = None

def analyze_product(p_data):
  log.info('analyze_product')
  product = pickle.loads(p_data)
Example #24
STATUS_TODO = 'todo'
STATUS_DOING = 'doing'
STATUS_DONE = 'done'

SPAWN_ID = os.environ['SPAWN_ID']
HOST_CODE = os.environ['HOST_CODE']
HOST_GROUP = os.environ['HOST_GROUP']
VERSION_ID = os.environ['VERSION_ID']
RELEASE_MODE = os.environ['RELEASE_MODE']
REDIS_SERVER = os.environ['REDIS_SERVER']
REDIS_PASSWORD = os.environ['REDIS_PASSWORD']

rconn = redis.StrictRedis(REDIS_SERVER, port=6379, password=REDIS_PASSWORD)

options = {'REDIS_SERVER': REDIS_SERVER, 'REDIS_PASSWORD': REDIS_PASSWORD}
log = Logging(options, tag='bl-crawler')

product_api = Products()
host_api = Hosts()
crawl_api = Crawls()

# heart_bit = True

# def check_health():
#   global  heart_bit
#   log.info('check_health: ' + str(heart_bit))
#   if heart_bit == True:
#     heart_bit = False
#     Timer(HEALTH_CHECK_TIME, check_health, ()).start()
#   else:
#     delete_pod()