def download_dataset(name, path, dataset_type, url, auth_headers):
    """Download a dataset object from S3 through the ZumoLabs backend.

    Args:
        name (str): name of dataset to download
        path (str): output directory
        dataset_type (str): type of dataset to download
        url (str): backend endpoint
        auth_headers: authentication for backend

    Returns:
        pathlib.Path: output file path (directory joined with the slugified
            dataset name) — the original docstring incorrectly said ``str``.

    Raises:
        requests.exceptions.HTTPError: if a backend request fails.
        NameError: if ``name`` does not match exactly one dataset.
    """
    endpoint = f"{url}/api/v1/{dataset_type}-data-sets/"
    r = requests.get(endpoint, params={"name": name}, headers=auth_headers)
    # raise_for_status() is already a no-op for codes < 400, so the
    # explicit `if status != 200` guard was redundant.
    r.raise_for_status()
    response = json.loads(r.text)
    if response["count"] != 1:
        raise NameError(f"found {response['count']} datasets for name {name}")
    dataset = response["results"][0]
    endpoint = (
        f"{url}/api/v1/{dataset['dataset_type']}-data-sets/{dataset['id']}/download"
    )
    r = requests.get(endpoint, headers=auth_headers)
    r.raise_for_status()
    response = json.loads(r.text)
    # e.g. "My Set" with id "abcd1234..." -> "My_Set-abcd1234.zip"
    name_slug = f"{dataset['name'].replace(' ', '_')}-{dataset['id'][:8]}.zip"
    output_path = to_pathlib_path(path) / name_slug
    download_url(response["redirect_link"], output_path)
    return output_path
def fetch_logs(resource, name, path, url, auth_headers):
    """Fetch every log file in LOG_TYPES for a backend run.

    Args:
        resource (str): resource to fetch logs for (the original docstring
            documented this parameter under the wrong name, ``type``)
        name (str): name of resource
        path (str): output directory
        url (str): backend endpoint
        auth_headers: authentication for backend

    Raises:
        requests.exceptions.HTTPError: if a backend request fails.
        NameError: if ``name`` does not match exactly one resource.
    """
    endpoint = f"{url}/api/v1/{resource}/"
    r = requests.get(endpoint, params={"name": name}, headers=auth_headers)
    # raise_for_status() is already a no-op for codes < 400, so the
    # explicit `if status != 200` guard was redundant.
    r.raise_for_status()
    response = json.loads(r.text)
    if response["count"] != 1:
        raise NameError(
            f"found {response['count']} {resource} for name {name}")
    obj = response["results"][0]
    endpoint = f"{url}/api/v1/{resource}/{obj['id']}/logs"
    r = requests.get(endpoint, headers=auth_headers)
    r.raise_for_status()
    response = json.loads(r.text)
    output_dir = to_pathlib_path(path)
    # One file per log type, e.g. <output_dir>/info.log
    for log_type in LOG_TYPES:
        output_path = output_dir / f"{log_type}.log"
        download_url(response[log_type]["redirect_link"], output_path)
def get_dataset(name, dtype, path):
    """Fetch the dataset named ``name`` of type ``dtype`` into ``path``.

    Reads endpoint and token from the CLI config; logs and bails out when
    the output directory does not exist.
    """
    config = read_config()
    dir_path = to_pathlib_path(path)
    if dir_path.exists():
        fetch_dataset(name, path, dtype, config['ENDPOINT'], config['TOKEN'])
    else:
        log.info(f'output path {dir_path} does not exist')
def upload_dataset(name, path):
    """Upload a local dataset archive to the backend under ``name``.

    Skips entirely when the input path is missing; a non-zip suffix only
    produces a warning before the upload is attempted anyway.
    """
    input_path = to_pathlib_path(path)
    if input_path.exists():
        if input_path.suffix != '.zip':
            log.warning(f'input path {input_path} not a zip file')
        create_uploaded_dataset(name, path)
    else:
        log.info(f'input path {input_path} does not exist')
def upload_dataset(name, path):
    """Upload a local dataset archive to the backend under ``name``.

    Endpoint and token come from the CLI config. A missing input path
    aborts; a non-zip suffix only warns before uploading anyway.
    """
    config = read_config()
    input_path = to_pathlib_path(path)
    if input_path.exists():
        if input_path.suffix != '.zip':
            log.warning(f'input path {input_path} not a zip file')
        create_uploaded_dataset(name, path, config['ENDPOINT'], config['TOKEN'])
    else:
        log.info(f'input path {input_path} does not exist')
def write_config(config):
    """Persist the zpy CLI configuration to CONFIG_FILE as YAML.

    Args:
        config (dict): new configuration to write
    """
    config_path = to_pathlib_path(os.path.expanduser(CONFIG_FILE))
    with config_path.open("w") as config_file:
        yaml.dump(config, config_file)
def read_config():
    """Load the zpy CLI configuration from CONFIG_FILE.

    Returns:
        dict: current configuration
    """
    config_path = to_pathlib_path(os.path.expanduser(CONFIG_FILE))
    with config_path.open() as config_file:
        return yaml.load(config_file, Loader=yaml.FullLoader)
def initialize_config():
    """Create CONFIG_FILE with prod defaults when it does not exist yet.

    Also creates the ~/.zpy folder if missing. The config holds the
    variables the CLI needs to make validated requests to the backend.
    """
    config_path = to_pathlib_path(os.path.expanduser(CONFIG_FILE))
    if config_path.exists():
        return
    config_path.parent.mkdir(parents=True, exist_ok=True)
    write_config({
        "ENVIRONMENT": "prod",
        "TOKEN": None,
        "ENDPOINT": ENDPOINTS["prod"],
    })
def initialize_config():
    """Initialize the zpy config file if it is missing.

    When a config already exists it is read and logged; otherwise a fresh
    prod-default config is written and the user is prompted to log in.
    """
    config_path = to_pathlib_path(os.path.expanduser(CONFIG_FILE))
    if config_path.exists():
        log.debug(f'found zpy config {config_path}')
        log.debug(read_config())
        return
    fresh_config = {
        'ENVIRONMENT': 'prod',
        'TOKEN': None,
        'ENDPOINT': ENDPOINTS['prod'],
    }
    log.debug(f'initializing new zpy config {config_path}...')
    config_path.parent.mkdir(parents=True, exist_ok=True)
    log.debug(fresh_config)
    write_config(fresh_config)
    log.info('please login to fetch token')
def upload_sim(name, path):
    """Upload a sim archive to the backend.

    Args:
        name (str): name of sim
        path (str): path to sim (must be a .zip file)
    """
    from cli.sims import create_sim

    if to_pathlib_path(path).suffix != ".zip":
        click.secho(f"File {path} must be of type zip", fg="red", err=True)
        # BUG FIX: previously fell through and attempted the upload anyway
        # after reporting the fatal error; abort instead.
        return
    try:
        with Loader("Uploading sim..."):
            create_sim(name, path)
        click.secho(f"Uploaded sim {path} with name '{name}'", fg="green")
    except requests.exceptions.HTTPError as e:
        click.secho(f"Failed to upload sim: {e}", fg="red", err=True)
def upload_dataset(name, path):
    """Upload a dataset archive to the backend.

    Args:
        name (str): name of dataset
        path (str): path to dataset (must be a .zip file)
    """
    from cli.datasets import create_uploaded_dataset

    if to_pathlib_path(path).suffix != ".zip":
        click.secho(f"File {path} must be of type zip", fg="red", err=True)
        # BUG FIX: previously fell through and attempted the upload anyway
        # after reporting the fatal error; abort instead.
        return
    try:
        with Loader("Uploading dataset..."):
            create_uploaded_dataset(name, path)
        click.secho(f"Uploaded dataset {path} with name '{name}'", fg="green")
    except requests.exceptions.HTTPError as e:
        click.secho(f"Failed to upload dataset: {e}", fg="red", err=True)
def fetch_dataset(name, path, dataset_type, url, token):
    """Fetch a dataset from ragnarok and download it into ``path``.

    Warns and returns early on any backend failure or when the name does
    not resolve to exactly one dataset.
    """
    list_endpoint = f'{url}/api/v1/{dataset_type}-data-sets/'
    list_resp = requests.get(
        list_endpoint, params={'name': name}, headers=auth_headers(token))
    if list_resp.status_code != 200:
        log.warning(f'Unable to fetch {dataset_type} datasets')
        return
    listing = json.loads(list_resp.text)
    if listing['count'] != 1:
        log.warning(f'Unable to find {dataset_type} dataset with name "{name}"')
        return
    dataset = listing['results'][0]
    download_endpoint = (
        f"{url}/api/v1/{dataset['dataset_type']}-data-sets/{dataset['id']}/download"
    )
    dl_resp = requests.get(download_endpoint, headers=auth_headers(token))
    if dl_resp.status_code != 200:
        log.warning(f"Unable to get download link for dataset {dataset['id']}")
        return
    link_info = json.loads(dl_resp.text)
    # e.g. "My Set" with id "abcd1234..." -> "My_Set-abcd1234.zip"
    name_slug = f"{dataset['name'].replace(' ', '_')}-{dataset['id'][:8]}.zip"
    download_url(link_info['redirect_link'], to_pathlib_path(path) / name_slug)
def fetch_scene(name, path, url, token):
    """Fetch a scene from ragnarok and download it into ``path``.

    Warns and returns early on any backend failure or when the name does
    not resolve to exactly one scene.
    """
    list_resp = requests.get(
        f'{url}/api/v1/scenes/',
        params={'name': name},
        headers=auth_headers(token))
    if list_resp.status_code != 200:
        log.warning(f'Unable to fetch scenes')
        return
    listing = json.loads(list_resp.text)
    if listing['count'] != 1:
        log.warning(f'Unable to find scene with name "{name}"')
        return
    scene = listing['results'][0]
    dl_resp = requests.get(
        f"{url}/api/v1/scenes/{scene['id']}/download",
        headers=auth_headers(token))
    if dl_resp.status_code != 200:
        log.warning(f"Unable to get download link for scene {scene['id']}")
        return
    link_info = json.loads(dl_resp.text)
    # e.g. "My Scene" with id "abcd1234..." -> "My_Scene-abcd1234.zip"
    name_slug = f"{scene['name'].replace(' ', '_')}-{scene['id'][:8]}.zip"
    download_url(link_info['redirect_link'], to_pathlib_path(path) / name_slug)
def get_sim(name, path):
    """Download the sim called ``name`` into existing directory ``path``."""
    dir_path = to_pathlib_path(path)
    if dir_path.exists():
        fetch_sim(name, path)
    else:
        log.info(f'output path {dir_path} does not exist')
def get_dataset(name, dtype, path):
    """Download the ``dtype`` dataset called ``name`` into directory ``path``."""
    dir_path = to_pathlib_path(path)
    if dir_path.exists():
        fetch_dataset(name, path, dtype)
    else:
        log.info(f'output path {dir_path} does not exist')
def write_config(config):
    """Write the zpy CLI configuration dict to CONFIG_FILE as YAML."""
    config_path = to_pathlib_path(os.path.expanduser(CONFIG_FILE))
    log.debug(f'Writing zpy config {config_path}')
    with config_path.open('w') as config_file:
        yaml.dump(config, config_file)