Example #1
0
    def save(self, model_path):
        """Persist this ensemble under the directory *model_path*.

        Each estimator is written as ``<i>.pkl`` (and additionally as
        ``<i>.model`` when it exposes its own save/load), then a stub
        copy of the ensemble — estimator slots nulled out but the list
        length preserved — is pickled as ``ensemble.pkl``.
        """
        # Normalize to a directory path ending with the fs separator.
        if not model_path.endswith(fs.sep):
            model_path += fs.sep
        if not fs.exists(model_path):
            fs.mkdirs(model_path, exist_ok=True)

        stub = copy.copy(self)
        estimators = self.estimators
        if estimators is not None:
            # Placeholders keep the stub's pickle small while recording size.
            stub.estimators = [None] * len(estimators)

            for idx, estimator in enumerate(estimators):
                pkl_path = f'{model_path}{idx}.pkl'
                native_path = f'{model_path}{idx}.model'
                # Remove stale artifacts left by a previous save.
                for stale in (pkl_path, native_path):
                    if fs.exists(stale):
                        fs.rm(stale)

                if estimator is None:
                    continue
                with fs.open(pkl_path, 'wb') as f:
                    pickle.dump(estimator, f, protocol=pickle.HIGHEST_PROTOCOL)

                # Estimators with native persistence also get a .model dump.
                if hasattr(estimator, 'save') and hasattr(estimator, 'load'):
                    estimator.save(native_path)

        with fs.open(f'{model_path}ensemble.pkl', 'wb') as f:
            pickle.dump(stub, f, protocol=pickle.HIGHEST_PROTOCOL)
Example #2
0
def clear(cache_dir=None, fn=None):
    """Remove cached data under *cache_dir* (default: ``cfg.cache_dir``).

    When *fn* is a callable, only the sub-directory belonging to it
    (``<module>.<qualname>``) is cleared.  An existing directory is
    removed recursively and recreated empty.
    """
    assert fn is None or callable(fn)

    target = cfg.cache_dir if cache_dir is None else cache_dir
    if callable(fn):
        qualified = '.'.join([fn.__module__, fn.__qualname__])
        target = f'{target}{fs.sep}{qualified}'

    if fs.exists(target):
        fs.rm(target, recursive=True)
        fs.mkdirs(target, exist_ok=True)
Example #3
0
 def load_transformers_from_cache(self):
     """Restore this object's state from the cached transformers pickle.

     Returns True when the cache exists and was applied; on any failure
     the (presumably corrupt) cache file is deleted and False is
     returned.
     """
     path = f'{self.cache_dir}/transformers.pkl'
     if not fs.exists(path):
         return False
     try:
         with fs.open(path, 'rb') as f:
             cached = pickle.load(f)
             # Adopt every attribute of the cached preprocessor.
             self.__dict__.update(cached.__dict__)
             return True
     except Exception as e:
         logger.error(e)
         # Drop the unreadable cache so the next run rebuilds it.
         fs.rm(path)
     return False
Example #4
0
 def save_transformed_X_y_to_cache(self, sign, X, y):
     """Cache transformed features *X* together with target *y*.

     The target is stored as an extra ``saved__y__`` column so both can
     be restored with a single read (see get_transformed_X_y_from_cache).

     Returns True on success; on failure the partially written file is
     removed and False is returned.
     """
     filepath = f'{self.cache_dir}/X_y_{sign}.pkl.gz'
     try:
         # Fix: insert into a copy so the caller's DataFrame is not
         # mutated (the original inserted into X directly, leaving a
         # stray 'saved__y__' column in the caller's data).
         df = X.copy(deep=True)
         df.insert(0, 'saved__y__', y)
         with fs.open(filepath, mode='wb') as f:
             df.to_pickle(f, compression='gzip')
         return True
     except Exception as e:
         logger.error(e)
         if fs.exists(filepath):
             fs.rm(filepath)
     return False
Example #5
0
 def get_transformed_X_y_from_cache(self, sign):
     """Load cached X/y previously written by save_transformed_X_y_to_cache.

     Returns a ``(X, y)`` pair; both are None when the cache file is
     missing or unreadable (an unreadable file is deleted).
     """
     filepath = f'{self.cache_dir}/X_y_{sign}.pkl.gz'
     X_t, y_t = None, None
     if fs.exists(filepath):
         try:
             with fs.open(filepath, mode='rb') as f:
                 frame = pd.read_pickle(f, compression='gzip')
             # The target travels as a synthetic column; split it back out.
             y_t = frame.pop('saved__y__')
             X_t = frame
         except Exception as e:
             logger.error(e)
             # Drop the unreadable cache so the next run rebuilds it.
             fs.rm(filepath)
     return X_t, y_t
Example #6
0
    def _prepare_cache_dir(self, cache_home, clear_cache=False):
        """Ensure the per-signature cache directory exists and return it.

        Parameters
        ----------
        cache_home : str or None
            Root cache directory; defaults to ``'cache'``.  A trailing
            ``'/'`` is stripped and ``'~'`` is expanded.
        clear_cache : bool
            When True and cache_home already exists, wipe it before
            recreating it.

        Returns
        -------
        str
            ``<cache_home>/<self.signature>``, created if absent.
        """
        if cache_home is None:
            cache_home = 'cache'
        # Fix: endswith() is safe on an empty string, whereas the
        # original cache_home[-1] raised IndexError for ''.
        if cache_home.endswith('/'):
            cache_home = cache_home[:-1]

        cache_home = os.path.expanduser(cache_home)
        if not fs.exists(cache_home):
            fs.makedirs(cache_home, exist_ok=True)
        elif clear_cache:
            fs.rm(cache_home, recursive=True)
            fs.mkdirs(cache_home, exist_ok=True)

        cache_dir = f'{cache_home}/{self.signature}'
        if not fs.exists(cache_dir):
            fs.makedirs(cache_dir, exist_ok=True)
        return cache_dir
Example #7
0
 def teardown_class(cls):
     """Recursively delete the scratch directory named after this test class."""
     class_dir = f'/{cls.__name__}'
     fs.rm(class_dir, recursive=True)
Example #8
0
 def clear_cache(self):
     """Wipe the cache directory and recreate it empty."""
     target = self.cache_dir
     fs.rm(target, recursive=True)
     fs.makedirs(target, exist_ok=True)