def download_remote_path(self, remote_path, local_path):
    """Fetch the object(s) referenced by a fully qualified S3 path.

    A trailing slash marks *remote_path* as a directory, in which case
    the whole prefix is downloaded; otherwise a single file is fetched.

    Args:
        remote_path: full S3-formatted reference (e.g. ``s3://host/bucket/key``).
        local_path: local filesystem destination.

    Returns:
        Whatever the underlying download helper returns.
    """
    is_directory = remote_path.endswith('/')
    if is_directory:
        # Strip the trailing slash before parsing, then restore it on
        # the key so the directory helper sees a proper prefix.
        _, _, prefix = util.parse_s3_path(remote_path[:-1])
        return self._download_dir(prefix + '/', local_path)
    _, _, object_key = util.parse_s3_path(remote_path)
    return self.download_file(object_key, local_path)
def get_local_destination(self, remote_path: str):
    """Derive local destination name(s) from a fully qualified S3 path.

    Returns:
        For a directory path (trailing slash): ``(last_segment, None)``.
        For a file path: ``(parent_segment, file_segment)``.
    """
    if remote_path.endswith('/'):
        # Directory reference: name it after the final key segment.
        _, _, key = util.parse_s3_path(remote_path[:-1])
        segments = key.split('/')
        return segments[-1], None
    _, _, key = util.parse_s3_path(remote_path)
    segments = key.split('/')
    # NOTE(review): `len(segments) - 2` is kept verbatim on purpose —
    # for a single-segment key it evaluates to -1 and silently returns
    # the same segment twice instead of raising; a plain [-2] would
    # raise IndexError. Confirm whether that wraparound is intended.
    return segments[len(segments) - 2], segments[-1]
def _build_s3_config(self, art_dict):
    """
    For art_dict representing an external S3-based artifact, build a
    configuration suitable for constructing an S3-based storage handler
    for this artifact.

    Args:
        art_dict: artifact description dictionary; an optional
            'region' entry is copied into the configuration.

    Returns: (configuration dictionary, artifact's S3 key)
    """
    url, bucket, key = util.parse_s3_path(self.remote_path)
    config = {
        'endpoint': "http://{0}".format(url),
        'bucket': bucket,
        credentials.KEY_CREDENTIALS:
            self.credentials.to_dict() if self.credentials else {},
    }
    # Membership test on the mapping directly — no need for .keys().
    if 'region' in art_dict:
        config['region'] = art_dict['region']
    return config, key
def to_dict(self):
    """Serialize this artifact to a plain dictionary.

    Always emits 'unpack' and 'mutable'; 'key', 'local', and the
    remote-path fields are included only when set.  HTTP-backed remotes
    are stored under 'url', all others under 'qualified'; S3-backed
    remotes additionally record their bucket.  Credentials, when
    present, are serialized under the shared credentials key.
    """
    result = dict()
    result['unpack'] = self.unpack
    result['mutable'] = self.is_mutable
    # Optional fields: emit only when populated.
    if self.key is not None:
        result['key'] = self.key
    if self.local_path is not None:
        result['local'] = self.local_path
    if self.remote_path is not None:
        # HTTP remotes use 'url'; every other storage type is recorded
        # as a fully qualified path.
        if self.storage_handler.type == StorageType.storageHTTP:
            result['url'] = self.remote_path
        else:
            result['qualified'] = self.remote_path
            if self.storage_handler.type == StorageType.storageS3:
                # Get artifact bucket directly from remote_path:
                _, bucket, _ = util.parse_s3_path(self.remote_path)
                result['bucket'] = bucket
    if self.credentials is not None:
        result[credentials.KEY_CREDENTIALS] = self.credentials.to_dict()
    return result