def get_obj(self, key):
    # Serve repeated lookups from the in-memory cache
    if key in self._obj_cache:
        return self._obj_cache[key]

    # Download to the local cache dir, deserialize, and memoize
    cache_path = os.path.join(self.cache_dir, key)
    self.download_file(key, cache_path)
    self._obj_cache[key] = util.read_msgpack(cache_path)
    return self._obj_cache[key]
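# Usage sketch for get_obj (the key below is a hypothetical example value,
# not one from this codebase):
#
#   obj = ctx.get_obj("aggregates/mean_age.msgpack")  # downloads, then caches
#   obj = ctx.get_obj("aggregates/mean_age.msgpack")  # served from self._obj_cache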
def __init__(self, **kwargs): if "cache_dir" in kwargs: self.cache_dir = kwargs["cache_dir"] elif "local_path" in kwargs: local_path_dir = os.path.dirname( os.path.abspath(kwargs["local_path"])) self.cache_dir = os.path.join(local_path_dir, "cache") else: raise ValueError( "cache_dir must be specified (or inferred from local_path)") util.mkdir_p(self.cache_dir) if "local_path" in kwargs: ctx_raw = util.read_msgpack(kwargs["local_path"]) self.ctx = _deserialize_raw_ctx(ctx_raw) elif "obj" in kwargs: self.ctx = kwargs["obj"] elif "raw_obj" in kwargs: ctx_raw = kwargs["raw_obj"] self.ctx = _deserialize_raw_ctx(ctx_raw) elif "s3_path": local_ctx_path = os.path.join(self.cache_dir, "context.msgpack") bucket, key = S3.deconstruct_s3_path(kwargs["s3_path"]) S3(bucket, client_config={}).download_file(key, local_ctx_path) ctx_raw = util.read_msgpack(local_ctx_path) self.ctx = _deserialize_raw_ctx(ctx_raw) else: raise ValueError("invalid context args: " + kwargs) self.workload_id = kwargs.get("workload_id") self.id = self.ctx["id"] self.key = self.ctx["key"] self.cortex_config = self.ctx["cortex_config"] self.dataset_version = self.ctx["dataset_version"] self.root = self.ctx["root"] self.raw_dataset = self.ctx["raw_dataset"] self.status_prefix = self.ctx["status_prefix"] self.app = self.ctx["app"] self.environment = self.ctx["environment"] self.python_packages = self.ctx["python_packages"] self.raw_columns = self.ctx["raw_columns"] self.transformed_columns = self.ctx["transformed_columns"] self.transformers = self.ctx["transformers"] self.aggregators = self.ctx["aggregators"] self.aggregates = self.ctx["aggregates"] self.constants = self.ctx["constants"] self.models = self.ctx["models"] self.apis = self.ctx["apis"] self.training_datasets = { k: v["dataset"] for k, v in self.models.items() } self.api_version = self.cortex_config["api_version"] if "local_storage_path" in kwargs: self.storage = LocalStorage(base_dir=kwargs["local_storage_path"]) else: self.storage = S3( bucket=self.cortex_config["bucket"], region=self.cortex_config["region"], client_config={}, ) if self.api_version != consts.CORTEX_VERSION: raise ValueError( "API version mismatch (Context: {}, Image: {})".format( self.api_version, consts.CORTEX_VERSION)) self.columns = util.merge_dicts_overwrite( self.raw_columns, self.transformed_columns # self.aggregates ) self.values = util.merge_dicts_overwrite(self.aggregates, self.constants) self.raw_column_names = list(self.raw_columns.keys()) self.transformed_column_names = list(self.transformed_columns.keys()) self.column_names = list(self.columns.keys()) # Internal caches self._transformer_impls = {} self._aggregator_impls = {} self._model_impls = {} # This affects Tensorflow S3 access os.environ["AWS_REGION"] = self.cortex_config.get("region", "") # Id map self.pp_id_map = ResourceMap(self.python_packages) self.rf_id_map = ResourceMap(self.raw_columns) self.ag_id_map = ResourceMap(self.aggregates) self.tf_id_map = ResourceMap(self.transformed_columns) self.td_id_map = ResourceMap(self.training_datasets) self.models_id_map = ResourceMap(self.models) self.apis_id_map = ResourceMap(self.apis) self.constants_id_map = ResourceMap(self.constants) self.id_map = util.merge_dicts_overwrite( self.pp_id_map, self.rf_id_map, self.ag_id_map, self.tf_id_map, self.td_id_map, self.models_id_map, self.apis_id_map, self.constants_id_map, )