def test_standardize():
    r"""Test standardize."""
    # Each case: (is_singular flag, keys to standardize, suffixes,
    #             input document (mutated in place), expected result)
    cases = [
        (False, ['inputs', 'outputs'], ['_file'],
         {'input': 'inputA', 'output_file': 'outputA'},
         {'inputs': [{'name': 'inputA'}],
          'outputs': [{'name': 'outputA'}]}),
        (True, ['input', 'output'], ['_file'],
         {'inputs': 'inputA', 'output_files': 'outputA'},
         {'input': [{'name': 'inputA'}],
          'output': [{'name': 'outputA'}]}),
    ]
    for singular_flag, std_keys, std_suffixes, actual, expected in cases:
        schema.standardize(actual, std_keys, suffixes=std_suffixes,
                           is_singular=singular_flag)
        assert_equal(actual, expected)
def prep_yaml(files):
    r"""Prepare yaml to be parsed by Cerberus using schema including
    covering backwards compatible options.

    Args:
        files (str, list): Either the path to a single yaml file or a list
            of yaml files.

    Returns:
        dict: YAML ready to be parsed using schema.

    """
    # Normalize the input to a list of paths, then load every document.
    if isinstance(files, str):
        files = [files]
    loaded = [load_yaml(path) for path in files]
    comp_keys = ['models', 'connections']
    # Coerce models/connections into list form and stamp each component
    # dict with the working directory of the file that declared it.
    for doc in loaded:
        standardize(doc, comp_keys)
        for key in comp_keys:
            for comp in doc[key]:
                if isinstance(comp, dict):
                    comp.setdefault('working_dir', doc['working_dir'])
    # Merge the per-file component lists into a single mapping.
    combined = {key: [] for key in comp_keys}
    for doc in loaded:
        for key in comp_keys:
            combined[key].extend(doc[key])
    return combined
def prep_yaml(files):
    r"""Prepare yaml to be parsed by jsonschema including covering
    backwards compatible options.

    Args:
        files (str, list): Either the path to a single yaml file or a list
            of yaml files. Entries can also be opened file descriptors for
            files containing YAML documents or pre-loaded YAML documents.

    Returns:
        dict: YAML ready to be parsed using schema.

    """
    # Normalize the input to a list, then load every document.
    if not isinstance(files, list):
        files = [files]
    loaded = [load_yaml(entry) for entry in files]
    # Expand 'include' directives.  Newly loaded documents are appended to
    # the same list being scanned, so includes nested inside included
    # files are expanded as well (the scan reaches them in turn).
    idx = 0
    while idx < len(loaded):
        doc = loaded[idx]
        if 'include' in doc:
            includes = doc.pop('include')
            if not isinstance(includes, list):
                includes = [includes]
            for inc in includes:
                # Relative include paths are resolved against the
                # including file's working directory.
                if not os.path.isabs(inc):
                    inc = os.path.join(doc['working_dir'], inc)
                loaded.append(load_yaml(inc))
        idx += 1
    comp_keys = ['models', 'connections']
    # Coerce models/connections into list form and stamp each component
    # dict with the working directory of the file that declared it.
    for doc in loaded:
        standardize(doc, comp_keys)
        for key in comp_keys:
            for comp in doc[key]:
                if isinstance(comp, dict):
                    comp.setdefault('working_dir', doc['working_dir'])
    # Merge the per-file component lists into a single mapping.
    combined = {key: [] for key in comp_keys}
    for doc in loaded:
        for key in comp_keys:
            combined[key].extend(doc[key])
    return combined
def prep_yaml(files, yaml_param=None, directory_for_clones=None):
    r"""Prepare yaml to be parsed by jsonschema including covering
    backwards compatible options.

    Args:
        files (str, list): Either the path to a single yaml file or a list
            of yaml files. Entries can also be opened file descriptors for
            files containing YAML documents or pre-loaded YAML documents.
        yaml_param (dict, optional): Parameters that should be used in
            mustache formatting of YAML files. Defaults to None and is
            ignored.
        directory_for_clones (str, optional): Directory that git
            repositories should be cloned into. Defaults to None and the
            current working directory will be used.

    Returns:
        dict: YAML ready to be parsed using schema.

    """
    from yggdrasil.services import IntegrationServiceManager
    # Load each file
    if not isinstance(files, list):
        files = [files]
    yamls = [
        load_yaml(f, yaml_param=yaml_param,
                  directory_for_clones=directory_for_clones)
        for f in files
    ]
    # Load files pointed to.  Appending inside the loop is deliberate:
    # includes nested inside included files are reached by the same scan.
    for y in yamls:
        if 'include' in y:
            new_files = y.pop('include')
            if not isinstance(new_files, list):
                new_files = [new_files]
            for f in new_files:
                if not os.path.isabs(f):
                    f = os.path.join(y['working_dir'], f)
                # BUG FIX: included files previously dropped yaml_param and
                # directory_for_clones; forward them so includes are
                # processed identically to the top-level files.
                yamls.append(load_yaml(
                    f, yaml_param=yaml_param,
                    directory_for_clones=directory_for_clones))
    # Replace references to services with service descriptions
    for y in yamls:
        services = y.pop('services', [])
        if 'service' in y:
            services.append(y.pop('service'))
        if services:
            y.setdefault('models', [])
            if 'model' in y:
                y['models'].append(y.pop('model'))
            for x in services:
                # Split the service entry: request-level keys go to the
                # service manager request, the rest configure the client.
                request = {'action': 'start'}
                for k in ['name', 'yamls', 'yaml_param']:
                    if k in x:
                        request[k] = x.pop(k)
                if 'type' in x:
                    x.setdefault('service_type', x.pop('type'))
                x.setdefault('for_request', True)
                cli = IntegrationServiceManager(**x)
                response = cli.send_request(**request)
                assert (response.pop('status') == 'complete')
                y['models'].append(response)
    # Standardize format of models and connections to be lists and
    # add working_dir to each
    comp_keys = ['models', 'connections']
    for yml in yamls:
        standardize(yml, comp_keys)
        for k in comp_keys:
            for x in yml[k]:
                if isinstance(x, dict):
                    if (k == 'models') and ('repository_url' in x):
                        # Model source lives in a git repository; clone it
                        # and use the clone as the working directory.
                        repo_dir = clone_github_repo(
                            x['repository_url'],
                            commit=x.get('repository_commit', None),
                            local_directory=directory_for_clones)
                        x.setdefault('working_dir', repo_dir)
                    else:
                        x.setdefault('working_dir', yml['working_dir'])
    # Combine models & connections
    yml_all = {}
    for k in comp_keys:
        yml_all[k] = []
        for yml in yamls:
            yml_all[k] += yml[k]
    return yml_all