def pull(self, api_name, dependencies=None, force=False, prefix=None, basepath=None):
    # Avoid the shared-mutable-default pitfall: a `dependencies=[]` default
    # would be created once and mutated by the append below on every call.
    dependencies = [] if dependencies is None else dependencies
    dependencies.append(api_name)
    make_dirs(self._work_tree)
    # Presumably routed through an `apiproxy_dir` property setter that
    # resolves the proxy name to a directory under the work tree.
    self.apiproxy_dir = api_name
    if not force:
        paths_exist((os.path.relpath(self._zip_file), os.path.relpath(self._apiproxy_dir)))
    export = self.export_api_proxy(
        api_name, self._revision_number, fs_write=True, output_file=self._zip_file
    )
    make_dirs(self._apiproxy_dir)
    extract_zip(self._zip_file, self._apiproxy_dir)
    os.remove(self._zip_file)
    files = self.get_apiproxy_files(self._apiproxy_dir)
    # Export the resources the proxy depends on via a name-mangled private helper.
    for resource_type in ('keyvaluemap', 'targetserver', 'cache'):
        self._Apis__get_and_export(
            resource_type, files, self._environment, dependencies=dependencies, force=force
        )
    return export, dependencies
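# Why the signature above no longer uses `dependencies=[]`: a default list is
# built once at function-definition time and shared across calls, so pulled
# proxy names would leak between unrelated pull() invocations. A minimal
# self-contained sketch of the pitfall (`pull_bad`/`pull_good` are
# hypothetical names, for illustration only):
def pull_bad(api_name, dependencies=[]):
    dependencies.append(api_name)
    return dependencies

def pull_good(api_name, dependencies=None):
    dependencies = [] if dependencies is None else dependencies
    dependencies.append(api_name)
    return dependencies

assert pull_bad('proxy-a') == ['proxy-a']
assert pull_bad('proxy-b') == ['proxy-a', 'proxy-b']  # state leaked across calls
assert pull_good('proxy-a') == ['proxy-a']
assert pull_good('proxy-b') == ['proxy-b']            # each call gets a fresh list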
def download_apis(self):
    for api, metadata in self.snapshot_data.apis.items():
        for revision in metadata['revision']:
            output_file = Backups.generate_download_path(
                self.org_path, subpaths=['apis', api, revision, f'{api}.zip']
            )
            target_directory = os.path.dirname(output_file)
            try:
                Apis(self.auth, self.org_name, None).export_api_proxy(
                    api, revision, fs_write=True, output_file=output_file
                )
                extract_zip(output_file, target_directory)
                os.remove(output_file)
            except HTTPError as e:
                # The f-prefix was missing here, so the placeholders were
                # logged verbatim instead of being interpolated.
                Backups.log_error(
                    e, append_msg=f' for API Proxy ({api}, revision {revision})'
                )
        self._Backups__progress_callback(desc='APIs')
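# The `self._Apis__get_and_export` and `self._Backups__progress_callback`
# calls above rely on Python name mangling: inside a class body, an attribute
# spelled `__name` is rewritten to `_ClassName__name`. A minimal sketch with
# a hypothetical class (not part of this codebase):
class Demo:
    def __helper(self, desc=''):
        return f'progress: {desc}'

    def run(self):
        # Both spellings resolve to the same method; the first is idiomatic
        # inside the class, the second is the explicit mangled form needed
        # from outside it.
        assert self.__helper(desc='APIs') == self._Demo__helper(desc='APIs')
        return self.__helper(desc='APIs')

assert Demo().run() == 'progress: APIs'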
def download_apis(self):
    for api, metadata in self.snapshot_data.apis.items():
        for revision in metadata['revision']:
            output_file = self._gen_download_path(
                subpaths=['apis', api, revision, f'{api}.zip']
            )
            target_directory = os.path.dirname(output_file)
            try:
                Apis(self.auth, self.org_name, None).export_api_proxy(
                    api, revision, fs_write=True, output_file=output_file
                )
                extract_zip(output_file, target_directory)
                os.remove(output_file)
            except HTTPError as e:
                console.echo(
                    f'Ignoring {type(e).__name__} {e.response.status_code} error'
                    f' for API Proxy ({api}, revision {revision})'
                )
        self._progress_callback(desc='APIs')
    return self.snapshot_data.apis
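# All three methods hand an exported bundle to `extract_zip` and then delete
# the archive. That helper's implementation is not shown here; a minimal
# sketch of the assumed behavior, using only the standard library:
import os
import zipfile

def extract_zip_sketch(zip_file, target_directory):
    """Unpack zip_file into target_directory, creating it if needed."""
    os.makedirs(target_directory, exist_ok=True)
    with zipfile.ZipFile(zip_file) as zf:
        zf.extractall(target_directory)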