def save(self, path, aws_credentials=None):
    """
    Save the predictive object to the given path.

    Parameters
    ----------
    path : str
        The location to save the predictive object to. May be a local,
        HDFS, or S3 path.
    aws_credentials : dict, optional
        AWS credentials, required when saving to S3.
    """
    aws_credentials = aws_credentials or {}

    # Only local, HDFS, and S3 paths are supported for now.
    if not (fu.is_s3_path(path) or
            fu.is_local_path(path) or
            fu.is_hdfs_path(path)):
        raise RuntimeError(
            "Only local, HDFS, and S3 paths are supported; "
            "cannot save predictive object to path '%s'." % path)

    if fu.is_local_path(path) and os.path.exists(path):
        _logger.warning(
            "Overwriting existing file '%s' when saving predictive object" % path)
        # Remove a plain file or a directory tree, whichever is present.
        rm_fn = os.remove if os.path.isfile(path) else shutil.rmtree
        rm_fn(path)

    if fu.is_local_path(path):
        self._save_local(path)
    else:
        self._save_remote(path, aws_credentials)
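# A hedged, self-contained sketch of the path-scheme checks save() relies on.
# These helpers are assumptions about what fu.is_s3_path / fu.is_hdfs_path /
# fu.is_local_path do (simple prefix tests), not their real implementations;
# they exist only to make the dispatch above concrete and testable.
def _is_s3_path_sketch(path):
    return path.startswith('s3://')

def _is_hdfs_path_sketch(path):
    return path.startswith('hdfs://')

def _is_local_path_sketch(path):
    # anything that is neither S3 nor HDFS is treated as local
    return not (_is_s3_path_sketch(path) or _is_hdfs_path_sketch(path))

assert _is_local_path_sketch('/tmp/my_po')
assert _is_s3_path_sketch('s3://my-bucket/my_po')
assert _is_hdfs_path_sketch('hdfs://namenode:8020/models/my_po')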
def list_file(self, remote_path):
    if file_util.is_local_path(remote_path):
        if os.path.isdir(remote_path):
            return os.listdir(remote_path)
        else:
            return []
    elif file_util.is_s3_path(remote_path):
        files = file_util.list_s3(remote_path,
                                  aws_credentials=self.aws_credentials)
    else:
        files = file_util.list_hdfs(remote_path)

    if not files:
        return []

    return_value = set()

    # Strip the path prefix and keep only first-level children.
    for f in files:
        rel_path = os.path.relpath(f['path'], remote_path)
        if rel_path not in ('.', '..'):
            # On Windows, relpath may use '\\' as the separator.
            return_value.add(rel_path.replace('\\', '/').split('/')[0])

    return sorted(return_value)
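# Self-contained sketch of the "first-level children" computation used by
# list_file() above, runnable without the file_util helpers. The sample
# paths below are made up for illustration.
import os

def _first_level_children_sketch(paths, root):
    children = set()
    for p in paths:
        rel = os.path.relpath(p, root)
        if rel not in ('.', '..'):
            # normalize Windows separators before taking the first component
            children.add(rel.replace('\\', '/').split('/')[0])
    return sorted(children)

# prints ['a', 'b'] -- nested files collapse to their top-level directory
print(_first_level_children_sketch(
    ['/models/a/1.bin', '/models/a/2.bin', '/models/b'], '/models'))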
def save(self, path, aws_credentials=None):
    '''
    Persist the policy to the given path.
    '''
    if _file_util.is_local_path(path):
        self._save_local(path)
    else:
        self._save_remote(path, aws_credentials)
@classmethod
def load(cls, path, schema_version, aws_credentials=None):
    '''
    Load the policy from the given path.
    '''
    if _file_util.is_local_path(path):
        return cls._load_local(path)
    return cls._load_remote(path, schema_version, aws_credentials or {})
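# Toy, self-contained illustration of the save()/load() round trip that the
# policy pair above implements for the local case. ToyPolicy, the JSON
# format, and the path are invented for this sketch; the real _save_local /
# _load_local live elsewhere and may use a different serialization.
import json

class ToyPolicy(object):
    def __init__(self, params):
        self.params = params

    def save(self, path):
        with open(path, 'w') as f:
            json.dump(self.params, f)

    @classmethod
    def load(cls, path):
        with open(path) as f:
            return cls(json.load(f))

p = ToyPolicy({'epsilon': 0.1})
p.save('/tmp/toy_policy.json')
assert ToyPolicy.load('/tmp/toy_policy.json').params == {'epsilon': 0.1}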
@classmethod
def load(cls, path, schema_version, aws_credentials=None):
    """
    Load a predictive object from the given path.
    """
    if fu.is_local_path(path):
        new_po = cls._load_local(path)
    else:
        new_po = cls._load_remote(path, schema_version,
                                  aws_credentials or {})
    _logger.info('Loaded predictive object "%s" successfully'
                 % type(new_po).__name__)
    return new_po
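# Hedged sketch of what a _load_remote() implementation typically looks like
# for the two load() classmethods above: download the artifact into a temp
# directory, then delegate to the local loader. The download_* calls mirror
# the file_util signatures used later in this module; everything else here
# (names, is_dir=True) is an assumption.
import os, shutil, tempfile

def _load_remote_sketch(cls, path, schema_version, aws_credentials):
    tmp_dir = tempfile.mkdtemp(prefix='load_remote')
    try:
        local_path = os.path.join(tmp_dir, 'temp_po_file')
        if fu.is_s3_path(path):
            fu.download_from_s3(path, local_path, is_dir=True,
                                aws_credentials=aws_credentials, silent=False)
        else:
            fu.download_from_hdfs(path, local_path, is_dir=True)
        # schema_version would be consulted by the real deserializer;
        # it is ignored in this sketch.
        return cls._load_local(local_path)
    finally:
        shutil.rmtree(tmp_dir)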
def _copy_predictive_object_files(source_path, target_path, is_dir,
                                  src_credentials, tgt_credentials):
    '''
    Copy either a file or a folder from the source location to the
    target location.
    '''
    # Clean up the existing local target path, if any; use os.remove for
    # plain files since rmtree only works on directories.
    if _file_util.is_local_path(target_path) and _os.path.exists(target_path):
        if _os.path.isfile(target_path):
            _os.remove(target_path)
        else:
            _shutil.rmtree(target_path)

    if _file_util.is_s3_path(source_path) and \
            _file_util.is_s3_path(target_path):
        # Compare credentials before doing an intra-S3 copy.
        _check_aws_credentials(src_credentials, tgt_credentials, source_path)
        _file_util.intra_s3_copy_model(source_path, target_path, is_dir,
                                       tgt_credentials)
    elif _file_util.is_local_path(source_path):
        _file_util.copy_from_local(source_path, target_path, is_dir=is_dir)
    else:
        tmp_dir = _tempfile.mkdtemp(prefix='copy_predictive_object')
        try:
            # Download to a local temp location first.
            local_path = _os.path.join(tmp_dir, 'temp_po_file')
            if _file_util.is_s3_path(source_path):
                _file_util.download_from_s3(
                    source_path, local_path, is_dir=is_dir,
                    aws_credentials=src_credentials, silent=False)
            elif _file_util.is_hdfs_path(source_path):
                _file_util.download_from_hdfs(source_path, local_path,
                                              is_dir=is_dir)
            else:
                raise RuntimeError('Unsupported file system type: %s'
                                   % source_path)

            # Upload from the local temp location to the remote target.
            if _file_util.is_s3_path(target_path):
                _file_util.upload_to_s3(
                    local_path, target_path, is_dir=is_dir,
                    aws_credentials=tgt_credentials, silent=False)
            elif _file_util.is_hdfs_path(target_path):
                _file_util.hdfs_mkdir(target_path)
                _file_util.upload_to_hdfs(local_path, target_path,
                                          force=True, silent=False)
            else:
                _file_util.upload_to_local(local_path, target_path,
                                           is_dir=is_dir, silent=False)
        finally:
            _shutil.rmtree(tmp_dir)
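# Usage sketch for the copy helper above. The bucket name, HDFS URI, and
# credential dict keys are placeholders; an S3 -> HDFS copy like this one
# takes the download-then-upload branch through a local temp directory.
src_creds = {'aws_access_key_id': 'AKIA...', 'aws_secret_access_key': '...'}
_copy_predictive_object_files(
    source_path='s3://source-bucket/my_po',
    target_path='hdfs://namenode:8020/models/my_po',
    is_dir=True,
    src_credentials=src_creds,
    tgt_credentials=src_creds)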