Example #1
        def wrapped(*args, **kwargs):
            # Decode the bearer token from the Authorization header
            auth_header = request.headers.get('authorization', None)
            token = extract_token_from_header(auth_header)
            auth = decode_token(token)

            if auth.get('user_type') not in user_types:
                raise UnauthorizedError()

            # Confirm that the account named in the token still exists in the meta store
            metastore = MetaStore()
            with metastore:
                user = metastore.get_user_by_id(user_id=auth.get("user_id"))
                if user is None:
                    raise UnauthorizedError("user does not exist")

            return f(auth, *args, **kwargs)
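The snippet above is only the inner `wrapped` function of an authorization decorator: `f`, `user_types`, and `request` are free variables supplied by the enclosing decorator factory and the web framework. A minimal, self-contained sketch of that factory pattern follows; the `auth` name and the example payload are illustrative assumptions, and the header parsing and token decoding from the original are elided.

from functools import wraps


class UnauthorizedError(Exception):
    """Raised when the caller's token payload is missing or not permitted."""


def auth(user_types):
    """Hypothetical decorator factory: only callers whose decoded token
    carries an allowed 'user_type' may invoke the wrapped function."""
    def decorator(f):
        @wraps(f)
        def wrapped(token_payload, *args, **kwargs):
            if token_payload.get('user_type') not in user_types:
                raise UnauthorizedError()
            # Pass the decoded payload through, as the original wrapped() does
            return f(token_payload, *args, **kwargs)
        return wrapped
    return decorator


@auth(user_types=['ADMIN'])
def delete_model(auth, model_id):
    return 'user {} deleted {}'.format(auth['user_id'], model_id)


print(delete_model({'user_id': 'u1', 'user_type': 'ADMIN'}, 'm1'))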
Example #2
    def __init__(self,
                 meta_store=None,
                 container_manager=None,
                 var_autoforward=None):

        if var_autoforward is None:
            var_autoforward = ENVIRONMENT_VARIABLES_AUTOFORWARD
        self._meta_store: MetaStore = meta_store or MetaStore()
        self._container_manager: ContainerManager = container_manager
        self.service_app_name = None

        # Ensure that each auto-forwarded environment variable exists, failing fast
        for x in var_autoforward:
            if x not in os.environ:
                raise ServiceDeploymentError(
                    '{} is not in environment variables'.format(x))

        self._var_autoforward = var_autoforward

        version = os.environ['SINGA_AUTO_VERSION']
        self._data_dir_path = os.environ['DATA_DIR_PATH']
        self._logs_dir_path = os.environ['LOGS_DIR_PATH']
        self._params_dir_path = os.environ['PARAMS_DIR_PATH']
        self._host_workdir_path = os.environ['HOST_WORKDIR_PATH']
        self._docker_workdir_path = os.environ['DOCKER_WORKDIR_PATH']
        self._predictor_image = f"{os.environ['SINGA_AUTO_IMAGE_PREDICTOR']}:{version}"
        self._predictor_port = os.environ['PREDICTOR_PORT']
        self._app_mode = os.environ['APP_MODE']
        self._singa_auto_addr = os.environ['SINGA_AUTO_ADDR']
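The loop above checks every variable named in `var_autoforward` before any of the `os.environ[...]` reads, so a misconfigured deployment fails at construction time with a clear message instead of partway through. A self-contained sketch of the same fail-fast pattern, using placeholder names (`ConfigError`, `read_required_env`) that are not part of the project:

import os


class ConfigError(Exception):
    """Raised when a required environment variable is missing."""


def read_required_env(names):
    # Validate everything first so the error surfaces at startup,
    # naming all missing variables at once.
    missing = [name for name in names if name not in os.environ]
    if missing:
        raise ConfigError('{} is not in environment variables'.format(', '.join(missing)))
    return {name: os.environ[name] for name in names}


# Usage sketch:
# paths = read_required_env(['DATA_DIR_PATH', 'LOGS_DIR_PATH', 'PARAMS_DIR_PATH'])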
Example #3
 def __init__(self, service_id: str, meta_store: MetaStore = None):
     self.sub_train_job_id = None
     self.budget = None
     self.model_class = None
     self._num_trials = None
     self._meta_store = meta_store or MetaStore()
     self._service_id = service_id
     self._model_id = None
Example #4
File: train.py Project: pinpom/singa-auto
 def __init__(self, service_id: str, meta_store: MetaStore = None):
     self.sub_train_job_id = None
     self.model_class = None
     self.train_dataset_path = None
     self.val_dataset_path = None
     self.train_args = None
     self._meta_store = meta_store or MetaStore()
     self._service_id = service_id
     self._data_store = FileDataStore()
Example #5
    def __init__(self, service_id, meta_store=None):
        self._service_id = service_id
        self._meta_store = meta_store or MetaStore()
        self._redis_host = os.getenv('REDIS_HOST', 'singa_auto_redis')
        self._redis_port = os.getenv('REDIS_PORT', 6379)
        self._kafka_host = os.getenv('KAFKA_HOST', 'singa_auto_kafka')
        self._kafka_port = os.getenv('KAFKA_PORT', 9092)
        self._ensemble_method: Callable[[List[Any]], Any] = None

        self._pull_job_info()
        self._redis_cache = RedisInferenceCache(self._inference_job_id,
                                                self._redis_host,
                                                self._redis_port)
        self._kafka_cache = KafkaInferenceCache()
        logger.info(
            f'Initialized predictor for inference job "{self._inference_job_id}"'
        )
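`self._ensemble_method` is annotated as `Callable[[List[Any]], Any]`, i.e. a function that folds the predictions from several inference workers into one answer. The project's actual choice is not shown in this snippet; a majority-vote placeholder that fits the annotation could look like this:

from collections import Counter
from typing import Any, Callable, List


def majority_vote(predictions: List[Any]) -> Any:
    """Illustrative ensemble method: return the prediction most workers agree on."""
    return Counter(predictions).most_common(1)[0][0]


ensemble_method: Callable[[List[Any]], Any] = majority_vote
print(ensemble_method(['cat', 'dog', 'cat']))  # -> cat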
Example #6
 def __init__(self,
              meta_store=None,
              container_manager=None,
              data_store=None,
              param_store=None):
     self._meta_store = meta_store or MetaStore()
     if os.getenv('CONTAINER_MODE', 'SWARM') == 'SWARM':
         # Default to Docker Swarm; any other CONTAINER_MODE falls back to Kubernetes
         container_manager = container_manager or DockerSwarmContainerManager()
     else:
         container_manager = container_manager or KubernetesContainerManager()
     self._data_store: DataStore = data_store or FileDataStore()
     self._param_store: ParamStore = param_store or FileParamStore()
     self._base_worker_image = '{}:{}'.format(
         os.environ['SINGA_AUTO_IMAGE_WORKER'],
         os.environ['SINGA_AUTO_VERSION'])
     self._services_manager = ServicesManager(self._meta_store,
                                              container_manager)
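Every dependency in this constructor defaults via the `x or Default()` idiom, and the container manager is only chosen from `CONTAINER_MODE` when none is injected (Docker Swarm by default, Kubernetes otherwise). That makes the class easy to exercise without a real container runtime; a hedged sketch, with a hypothetical stub class that is not part of the project:

import os


class StubContainerManager:
    """Hypothetical stand-in for DockerSwarmContainerManager / KubernetesContainerManager."""

    def __init__(self):
        self.created_services = []


# With a manager injected (e.g. container_manager=StubContainerManager()),
# the CONTAINER_MODE branch above is never consulted.
# Without one, the branch picks the runtime:
mode = os.getenv('CONTAINER_MODE', 'SWARM')  # 'SWARM' -> Docker Swarm, anything else -> Kubernetes
print('would use', 'Docker Swarm' if mode == 'SWARM' else 'Kubernetes')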
Example #7
 def __init__(self,
              service_id,
              worker_id,
              meta_store=None,
              param_store=None):
     self._service_id = service_id
     self._worker_id = worker_id
     self._meta_store = meta_store or MetaStore()
     self._param_store = param_store or FileParamStore()
     self._redis_host = os.getenv('REDIS_HOST', 'singa_auto_redis')
     self._redis_port = os.getenv('REDIS_PORT', 6379)
     self._kafka_host = os.getenv('KAFKA_HOST', 'singa_auto_kafka')
     self._kafka_port = os.getenv('KAFKA_PORT', 9092)
     self._batch_size = PREDICT_BATCH_SIZE
     self._redis_cache: RedisInferenceCache = None
     self._inference_job_id = None
     self._model_inst: BaseModel = None
     self._proposal: Proposal = None
     self._store_params_id = None
     self._py_model_class: Type[BaseModel] = None
     self._kafka_cache = KafkaInferenceCache()
Example #8
from singa_auto.meta_store import MetaStore
# ServiceType and run_worker are used below; module paths assumed from the project layout
from singa_auto.constants import ServiceType
from singa_auto.utils.service import run_worker

# Module-level handle so stop_worker() can reach whatever start_worker() created
worker = None


def start_worker(service_id, service_type, container_id):
    global worker

    if service_type == ServiceType.TRAIN:
        from singa_auto.worker.train import TrainWorker
        worker = TrainWorker(service_id, container_id)
        worker.start()
    elif service_type == ServiceType.INFERENCE:
        from singa_auto.worker.inference import InferenceWorker
        worker = InferenceWorker(service_id, container_id)
        worker.start()
    elif service_type == ServiceType.ADVISOR:
        from singa_auto.worker.advisor import AdvisorWorker
        worker = AdvisorWorker(service_id)
        worker.start()
    else:
        raise Exception('Invalid service type: {}'.format(service_type))


def stop_worker():
    global worker
    if worker is not None:
        worker.stop()


meta_store = MetaStore()
run_worker(meta_store, start_worker, stop_worker)
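
`run_worker` itself is not shown here; judging by the call, it wires the two callbacks into a long-running process. A rough, self-contained sketch of what such a harness typically does is below; the environment variable names and the SIGTERM handling are assumptions rather than the project's implementation, and in the real code `service_type` is a `ServiceType` value rather than a raw string.

import os
import signal
import sys


def run_worker_sketch(meta_store, start_worker, stop_worker):
    """Illustrative harness: read the worker's identity from the environment,
    start it, and stop it cleanly when the container receives SIGTERM.
    meta_store is accepted to mirror the call above but unused in this sketch."""
    service_id = os.environ['WORKER_SERVICE_ID']        # assumed variable name
    service_type = os.environ['WORKER_SERVICE_TYPE']    # assumed variable name
    container_id = os.environ.get('HOSTNAME', 'local')

    def handle_sigterm(signum, frame):
        stop_worker()
        sys.exit(0)

    signal.signal(signal.SIGTERM, handle_sigterm)

    try:
        start_worker(service_id, service_type, container_id)
    except Exception:
        stop_worker()
        raise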