Example #1
    def print_options(self, opt):
        opt_dict = OrderedDict()
        grouped_k = []  # collect (name, Namespace) pairs for nested option groups
        message = ''
        message += '----------------- Options ---------------\n'
        # top level options
        for k, v in sorted(vars(opt).items()):
            if type(v) == argparse.Namespace:
                grouped_k.append((k, v))
                continue
            comment = ''
            default = self.parser.get_default(k)
            if v != default:
                comment = '\t[default: %s]' % str(default)
            message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment)
            opt_dict[k] = v
        message += '----------------- End -------------------'
        print(message)

        # make experiment directory
        if hasattr(opt, 'checkpoints_dir') and hasattr(opt, 'name'):
            if opt.name != '':
                expr_dir = os.path.join(opt.checkpoints_dir, opt.name)
            else:
                expr_dir = os.path.join(opt.checkpoints_dir)
        else:
            expr_dir = './'
        os.makedirs(expr_dir, exist_ok=True)

        # save to the disk
        file_name = os.path.join(expr_dir, 'opt.txt')
        if not opt.overwrite_config:
            assert (not os.path.isfile(file_name)
                    ), 'config file exists, use --overwrite_config'
        with open(file_name, 'wt') as opt_file:
            opt_file.write(message)
            opt_file.write('\n')

        file_name = os.path.join(expr_dir, 'opt.yml')
        if not opt.overwrite_config:
            assert (not os.path.isfile(file_name)
                    ), 'config file exists, use --overwrite_config'
        with open(file_name, 'wt') as opt_file:
            opt_dict['overwrite_config'] = False  # make it false for saving
            yaml.dump(opt_dict, opt_file, default_flow_style=False)
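A minimal standalone sketch of the same pattern (aligned text listing plus a YAML copy of the options); the option names and values below are made up for illustration:

from collections import OrderedDict

import yaml

opt_dict = OrderedDict([('batch_size', 8),
                        ('checkpoints_dir', './checkpoints'),
                        ('name', 'run1')])
message = ''.join('{:>25}: {:<30}\n'.format(k, str(v)) for k, v in opt_dict.items())
print(message)                          # human-readable listing (opt.txt)
with open('opt.yml', 'w') as opt_file:  # machine-readable copy (opt.yml)
    yaml.dump(dict(opt_dict), opt_file, default_flow_style=False)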
Example #2
    def write_to(self, filename=''):
        """
        Write to YAML file.

        Parameters
        ----------
        filename : :class:`str`
            Name of the YAML file to write to.

        Raises
        ------
        aspecd.exceptions.MissingFilenameError
            Raised if no filename is given to write to.

        """
        if not filename:
            raise aspecd.exceptions.MissingFilenameError
        with open(filename, 'w') as file:
            yaml.dump(self.dict, file)
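A self-contained sketch of the same guard-then-dump behaviour outside the class; ValueError stands in for aspecd.exceptions.MissingFilenameError here:

import yaml

def write_yaml(data, filename=''):
    if not filename:
        raise ValueError('no filename given')  # stand-in for MissingFilenameError
    with open(filename, 'w') as file:
        yaml.dump(data, file)

write_yaml({'tasks': ['baseline', 'plot']}, 'recipe.yaml')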
Example #3
def create_params(
    defn_path: str,
    *param_paths,
    error_on_unused: bool = False,
    out_path: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Entry function - given the path to the parameter definitions and files, parse and create a params dictionary.

    The defn_path and param_paths can be either a single yaml file, or a directory containing yaml files.

    args:
        defn_path: path to the parameter definitions
        *param_paths: paths to parameter files. The files will be merged in the passed order so that item 'a' in the first params will be overwritten by item 'a' in the second params.
        out_path: file path where the computed params will be saved, if passed
        error_on_unused: throw a hard error if there are unused parameters, otherwise warnings are only printed

    returns:
        computed/validated model parameters with defaults filled in where needed
    """
    defs = build_yaml(defn_path)

    params: Dict[str, Any] = {}
    for param_path in param_paths:
        cur_params = build_yaml(param_path)
        params = merge(params, cur_params)

    classes = parse_classes(defs, params)
    parsed = parse_params(defs, params, "", classes)

    if out_path is not None:
        with open(out_path, "w") as f:
            yaml.dump(parsed, f)

    print("\nChecking for unused parameters...")
    num_unused = warn_unused_params(parsed, params, "")
    print(f"{num_unused} unused parameters found")
    if error_on_unused:
        assert (
            num_unused == 0
        ), "There are unused parameters passed to the parser (see print statements)"

    return parsed
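A tiny standalone illustration of the merge order the docstring describes (later parameter files win key by key); shallow_merge below is a simplified stand-in for the merge helper used above:

def shallow_merge(base, update):
    merged = dict(base)   # simplified: shallow merge, later values win
    merged.update(update)
    return merged

params = {}
for cur_params in ({'a': 1, 'b': 2}, {'a': 10}):  # first file, then second file
    params = shallow_merge(params, cur_params)
print(params)  # {'a': 10, 'b': 2} -- item 'a' from the second file overwrites the first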
Example #4
def export(prefix, search):
    logger.info('exporting rules')
    kwargs = {"EventBusName": "default"}
    if prefix is not None:
        kwargs["NamePrefix"] = prefix
    if search is not None:
        query = "Rules[?contains(Name, '{0}')] | {{Rules: @}}".format(search)
        kwargs["query"] = query
    rules = events("list_rules", **kwargs)['Rules']
    print(yaml.dump([_export_rule(rule) for rule in rules]))
Example #5
def main(argv):
    cache_path = None
    if len(argv) >= 2:
        cache_path = argv[1]

    if cache_path is None:
        # Fetch the Mastodon API docs in an intermediate format.
        cache = OrderedDict(
            endpoint_groups=fetch_endpoint_groups(),
            entities=fetch_entities(),
            scopes=fetch_scopes(),
        )
        yaml.dump(cache, sys.stdout, indent=2, default_flow_style=False)
    else:
        # Translate the intermediate format into OpenAPI.
        with open(cache_path, 'rb') as f:
            cache = yaml.safe_load(f)
        openapi = translate_cache(cache)
        yaml.dump(openapi, sys.stdout, indent=2, default_flow_style=False)
Example #6
    def save_state(self):
        self.append_history()
        with open('autosave.yml', 'w') as f:
            f.write(
                dump({
                    'board': self.state,
                    'scores': self.scores,
                    'names': self.names,
                    'choosing': self.choosing
                }))
Example #7
def cli(output_format, entry):
    """This script combine multiple json or yaml files
     into a single readable OpenAPI specification.

    ENTRY could be a directory or a file (json or yaml).
    If it is a directory, index.json or index.yaml will be used.

    ENTRY can be omitted, current working directory will be assumed.

    All files including referenced files MUST be utf-8 encoded.
    """

    if entry is None:
        entry = os.getcwd()

    if os.path.isdir(entry):
        index_json = os.path.join(entry, "index.json")
        index_yaml = os.path.join(entry, "index.yaml")
        if os.path.isfile(index_json):
            entry = index_json
        elif os.path.isfile(index_yaml):
            entry = index_yaml
        else:
            raise click.BadArgumentUsage("index.json or index.yaml not found!")

    if output_format == "auto":
        ext = get_file_ext(entry)
        if ext == ".json":
            output_format = "json"
        elif ext == ".yaml":
            output_format = "yaml"
        else:
            raise UnknownFileTypeError(entry)

    api = bundle(entry)
    if output_format == "json":
        json.dump(api, fp=sys.stdout, ensure_ascii=False, indent=2)
    else:
        yaml.dump(api,
                  stream=sys.stdout,
                  allow_unicode=True,
                  default_flow_style=False)
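The extension-driven choice between json.dump and yaml.dump can be sketched on its own like this (the file name and API dict are illustrative; os.path.splitext stands in for get_file_ext):

import json
import os
import sys

import yaml

def dump_api(api, entry, stream=sys.stdout):
    ext = os.path.splitext(entry)[1]
    if ext == '.json':
        json.dump(api, stream, ensure_ascii=False, indent=2)
    else:
        yaml.dump(api, stream=stream, allow_unicode=True, default_flow_style=False)

dump_api({'openapi': '3.0.0', 'paths': {}}, 'index.yaml')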
Example #8
    def write_stream(self):
        """
        Write dictionary contents to a YAML string.

        Returns
        -------
        stream : :class:`str`
            string representation of YAML file

        """
        return yaml.dump(self.dict)
Example #9
    def save(self, path: Path):
        if self._pixmap is None:
            _logger.warning(f"No pixmap to save for {self}")
        else:
            self._pixmap.save(path.with_suffix(".png").__fspath__())
        if self.text:
            path.with_suffix(".txt").write_text(self.text)
        if self.data:
            path.with_suffix(".yml").write_text(
                yaml.dump(self.data, Dumper=yaml.Dumper)
            )
Example #10
def createSnippetHash(snippet):
    """
    Function that creates and returns a hash of the snippet given to the function.
        output: snippetHash (bytes)
    """
    snippetHash = hashlib.sha256()
    serializedSnippet = bytes(
        yaml.dump(snippet, default_flow_style=False).encode("UTF-8"))
    snippetHash.update(serializedSnippet)

    return snippetHash.digest()
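A short usage sketch showing that equal snippets hash to the same digest, since dump, encode, and sha256 are all deterministic (hashlib and yaml imports assumed at module level, as above):

import hashlib

import yaml

snippet = {'name': 'greeting', 'body': 'print("hi")'}
print(createSnippetHash(snippet).hex())
print(createSnippetHash(dict(snippet)).hex())  # identical digest for an equal dict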
Example #11
def lint(paths, format, nofail, logger=None, **kwargs):
    """Lint SQL files via passing a list of files or using stdin.

    PATH is the path to a sql file or directory to lint. This can be either a
    file ('path/to/file.sql'), a path ('directory/of/sql/files'), a single ('-')
    character to indicate reading from *stdin* or a dot/blank ('.'/' ') which will
    be interpreted like passing the current working directory as a path argument.

    Linting SQL files:

        sqlfluff lint path/to/file.sql
        sqlfluff lint directory/of/sql/files

    Linting a file via stdin (note the lone '-' character):

        cat path/to/file.sql | sqlfluff lint -
        echo 'select col from tbl' | sqlfluff lint -

    """
    c = get_config(**kwargs)
    lnt, formatter = get_linter_and_formatter(c, silent=format in ('json', 'yaml'))
    verbose = c.get('verbose')

    formatter.dispatch_config(lnt)

    # Set up logging.
    set_logging_level(verbosity=verbose, logger=logger)

    # add stdin if specified via lone '-'
    if ('-',) == paths:
        # TODO: Remove verbose
        result = lnt.lint_string_wrapped(sys.stdin.read(), fname='stdin')
    else:
        # Output the results as we go
        click.echo(format_linting_result_header(verbose=verbose))
        try:
            # TODO: Remove verbose
            result = lnt.lint_paths(paths, ignore_non_existent_files=False)
        except IOError:
            click.echo(colorize('The path(s) {0!r} could not be accessed. Check it/they exist(s).'.format(paths), 'red'))
            sys.exit(1)
        # Output the final stats
        click.echo(format_linting_result_footer(result, verbose=verbose))

    if format == 'json':
        click.echo(json.dumps(result.as_records()))
    elif format == 'yaml':
        click.echo(yaml.dump(result.as_records()))

    if not nofail:
        sys.exit(result.stats()['exit code'])
    else:
        sys.exit(0)
Example #12
async def make_variable_dict(server_url,
                             client_certificate=None,
                             client_private_key=None):
    sock = 'opc.tcp://'
    if not server_url.startswith(sock):
        server_url = sock + server_url
    nested_dict = await create_nested_node_dict(server_url, client_certificate,
                                                client_private_key)
    output_variables = []
    get_variables(nested_dict, output_variables=output_variables)
    with open('debug.yaml', 'w') as f:
        f.write(yaml.dump(output_variables))
Example #13
    def dump(self, path=None, **kwargs):
        result = oyaml.dump(self.to_dict(),
                            allow_unicode=True,
                            default_flow_style=False,
                            **kwargs)
        if not path:
            return result
        try:
            with open(path, "w") as fp:
                fp.write(result)
        except TypeError:
            path.write(result)
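The "accept a filename or an already-open file object" trick relies on open() raising TypeError for non-path arguments; a standalone sketch with plain yaml in place of oyaml and illustrative data:

import io

import yaml

def dump_yaml(data, path=None):
    result = yaml.dump(data, allow_unicode=True, default_flow_style=False)
    if not path:
        return result
    try:
        with open(path, "w") as fp:       # path is a filename
            fp.write(result)
    except TypeError:
        path.write(result)                # path is already a writable file object

buffer = io.StringIO()
dump_yaml({'greeting': 'héllo'}, buffer)  # exercises the file-object branch
print(buffer.getvalue())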
Example #14
def expand(out_dir, template="template.yaml", pattern="cfg.yaml", verbose=False, remove_previous=True):
    with open(os.path.join(out_dir, template), "r") as f:
        s = yaml.load(f, Loader=yaml.FullLoader)
    if remove_previous:
        shutil.rmtree(out_dir)
        os.makedirs(out_dir)
        with open(os.path.join(out_dir, template), "w") as f:
            yaml.dump(s, f)
    lst = walk(s)
    if verbose:
        print("Found {} combinations".format(len(lst)))
    for index, (a, b) in enumerate(lst):
        if verbose:
            print("Processing run-{}: \t {}".format(index, a))
        path_dir = os.path.join(out_dir, a)
        if not os.path.exists(path_dir):
            os.makedirs(path_dir)
        with open(os.path.join(path_dir, pattern), "w") as f:
            yaml.dump(b, f)
    if verbose:
        print("Done.")
Example #15
    def dict(cls,
             d,
             order=None,
             header=None,
             output="table",
             sort_keys=True,
             humanize=None,
             show_none="",
             max_width=48):
        """
        :param d: A dict with dicts of the same type.
        :type d: dict
        :param order: The order in which the columns are printed.
                      The order is specified by the key names of the dict.
        :type order: list
        :param header: The Header of each of the columns
        :type header: list or tuple of field names
        :param output: type of output (table, csv, json, yaml or dict)
        :type output: string
        :param sort_keys: if True, sort the output by the dict keys
        :type sort_keys: bool
        :param show_none: prints None if True for None values otherwise ""
        :type show_none: string
        :param max_width: maximum width for a cell
        :type max_width: int
        :return:

        """

        if output == "table":
            if d == {}:
                return None
            else:
                return cls.dict_table(d,
                                      order=order,
                                      header=header,
                                      humanize=humanize,
                                      sort_keys=sort_keys,
                                      max_width=max_width)
        elif output == "csv":
            return cls.csv(d,
                           order=order,
                           header=header,
                           humanize=humanize,
                           sort_keys=sort_keys)
        elif output == "json":
            return json.dumps(d, sort_keys=sort_keys, indent=4)
        elif output == "yaml":
            return yaml.dump(convert_from_unicode(d), default_flow_style=False)
        elif output == "dict":
            return d
        else:
            return "UNKNOWN FORMAT. Please use table, csv, json, yaml, dict."
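For the yaml branch specifically, a minimal standalone equivalent (convert_from_unicode belongs to the surrounding codebase and is skipped here because the sample data is plain):

import yaml

d = {'vm1': {'status': 'running', 'image': 'ubuntu-22.04'},
     'vm2': {'status': 'stopped', 'image': 'debian-12'}}
print(yaml.dump(d, default_flow_style=False))  # what output="yaml" would return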
Example #16
    def save_result(self, success=True, **kwa):
        """ Stores result as a yaml file """

        duration = time.time() - self.start_time
        duration_human = time_human(duration)

        # Store basic execution info
        self.t_data["end_time"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        self.t_data["duration"] = duration
        self.t_data["duration_human"] = duration_human
        self.t_data["success"] = success

        if success:
            log.success(f"{self.name} ended in {duration_human}")

        # Allow extra params like 'exception'
        self.t_data.update(**kwa)

        # Export them as an ordered yaml
        with open(self.output_filename(success), "w") as stream:
            yaml.dump(self.t_data, stream)
Example #17
def create(mapchete_file,
           process_file,
           out_format,
           out_path=None,
           pyramid_type=None,
           force=False):
    """Create an empty Mapchete and process file in a given directory."""
    if os.path.isfile(process_file) or os.path.isfile(mapchete_file):
        if not force:
            raise IOError("file(s) already exists")

    out_path = out_path if out_path else os.path.join(os.getcwd(), "output")

    # copy file template to target directory
    # Reads contents with UTF-8 encoding and returns str.
    process_template = str(
        files("mapchete.static").joinpath("process_template.py"))
    process_file = os.path.join(os.getcwd(), process_file)
    copyfile(process_template, process_file)

    # modify and copy mapchete file template to target directory
    mapchete_template = str(
        files("mapchete.static").joinpath("mapchete_template.mapchete"))

    output_options = dict(format=out_format,
                          path=out_path,
                          **FORMAT_MANDATORY[out_format])

    pyramid_options = {'grid': pyramid_type}

    substitute_elements = {
        'process_file': process_file,
        'output': dump({'output': output_options}, default_flow_style=False),
        'pyramid': dump({'pyramid': pyramid_options}, default_flow_style=False)
    }
    with open(mapchete_template, 'r') as config_template:
        config = Template(config_template.read())
        customized_config = config.substitute(substitute_elements)
    with open(mapchete_file, 'w') as target_config:
        target_config.write(customized_config)
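The substitute step can be sketched in isolation; the template text below is invented, whereas the real one ships as mapchete_template.mapchete:

from string import Template

from yaml import dump

template = Template("process_file: $process_file\n$output$pyramid")
customized = template.substitute(
    process_file='process.py',
    output=dump({'output': {'format': 'GTiff', 'path': 'out'}}, default_flow_style=False),
    pyramid=dump({'pyramid': {'grid': 'geodetic'}}, default_flow_style=False),
)
print(customized)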
Example #18
def init_yaml(
        csv_dir='/Users/koer2434/Google Drive/UST/research/instrbuilder/instruments',
        cmd_name='commands.csv',
        lookup_name='lookup.csv',
        filename='config.yaml'):
    """ expectation is that a YAML file does not already exist"""

    configs = {}
    configs['csv_directory'] = csv_dir
    configs['cmd_name'] = cmd_name
    configs['lookup_name'] = lookup_name

    try:
        os.mkdir(home)
    except FileExistsError:
        pass

    # write to YAML file
    with open(os.path.join(home, filename), 'w+') as f:
        # see: https://pyyaml.org/wiki/PyYAMLDocumentation (for default_flow_style)
        yaml.dump(configs, f, default_flow_style=False)
    return
Example #19
def write_template_to_file(stack_id, template, platform, debug=False):
    timestamp = strftime("%Y_%m_%d")
    if not os.path.exists('history'):
        os.mkdir('history')
    if debug is True:
        filename = os.path.join('output', 'debug.yaml')
    else:
        filename = os.path.join(
            'output', 'heat_stack_' + timestamp + '_' + stack_id + '.yaml')
    with open(filename, 'w') as file:
        yaml_template = yaml.dump(template)
        file.write(str(yaml_template))
    return filename
Example #20
    def spec_replace(self, book, variables):

        variable_list = dict_flatten(variables, sep=".")

        spec = yaml.dump(book)

        for variable in variable_list:
            value = variable_list[variable]
            token = "{" + variable + "}"
            spec = spec.replace(token, value)

        output = yaml.load(spec, Loader=yaml.SafeLoader)
        return output
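A self-contained sketch of the dump, token-replace, reload round trip; dict_flatten is approximated here by already-flat variable names:

import yaml

book = {'title': '{project.name} handbook', 'owner': '{project.owner}'}
variables = {'project.name': 'cloudmesh', 'project.owner': 'Gregor'}

spec = yaml.dump(book)
for variable, value in variables.items():
    spec = spec.replace('{' + variable + '}', value)
print(yaml.load(spec, Loader=yaml.SafeLoader))
# {'owner': 'Gregor', 'title': 'cloudmesh handbook'}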
Example #21
def print_dict(init_dict, file_name="test"):
    """This function prints the initialization dict as a yaml file."""

    # Transfer the init dict into an ordered one to ensure that the init file
    # is aligned appropriately
    ordered_dict = collections.OrderedDict()
    order = [
        "SIMULATION",
        "ESTIMATION",
        "TREATED",
        "UNTREATED",
        "CHOICE",
        "DIST",
        "VARTYPES",
        "SCIPY-BFGS",
        "SCIPY-POWELL",
    ]

    for key_ in order:
        if key_ in init_dict.keys():
            ordered_dict[key_] = init_dict[key_]
        else:
            pass
    for section in ["TREATED", "CHOICE", "UNTREATED", "DIST"]:
        if isinstance(ordered_dict[section]["params"], list):
            pass
        else:
            ordered_dict[section]["params"] = ordered_dict[section]["params"].tolist()

    # Print the initialization file
    with open(f"{file_name}.grmpy.yml", "w") as outfile:
        yaml.dump(
            ordered_dict,
            outfile,
            explicit_start=True,
            indent=4,
            width=99,
            default_flow_style=False,
        )
Example #22
def append_to_yaml(new_configs, filename='config.yaml'):
    """
    Append to the configuration file (after some checking) and after copying to {name}_backup.yaml

    Parameters
    ----------
    new_configs : dict
        A new instrument configuration as a dict. Should not have the key 'instruments' (i.e. lower in the tree)
    filename : str (optional)
        The name of the YAML configuration file

    Returns
    -------
    None

    """
    # read current YAML (close the handle before the file is renamed below)
    with open(os.path.join(home, filename), 'r') as yaml_config:
        configs = yaml.safe_load(yaml_config)

    if 'instruments' in new_configs.keys():
        print('Error: instruments key is in the new configuration dictionary')
        return

    # copy YAML
    os.rename(os.path.join(home, filename),
              os.path.join(home, filename).replace('.yaml', '_backup.yaml'))

    # append new dictionary to old dictionary
    if 'instruments' not in configs:
        configs['instruments'] = {}
    configs['instruments'].update(new_configs)

    # write to YAML file
    with open(os.path.join(home, filename), 'w+') as f:
        # see: https://pyyaml.org/wiki/PyYAMLDocumentation (for default_flow_style)
        yaml.dump(configs, f, default_flow_style=False)
    return
Example #23
    def post(self):
        res = request.form.to_dict()
        # print(res.keys())
        list_keys = list(res.keys())
        list_keys.remove('submit')
        group_var_dir = CONST.ansible_group_vars_dir
        sf_dir = CONST.ansible_group_vars_sf_dir

        file_path = group_var_dir + '/' + request.args.get('group') + '/' + request.args.get('filename')
        sf_file_path = sf_dir + '/' + request.args.get('group') + '/' + request.args.get('filename')
        data = load_yml_file(sf_file_path)
        for gr_data in data:
            for config in gr_data['configs']:
                if config['input_type'] == 'checkbox':
                    config['ex_value'] = False
        for key in list_keys:
            group_key = key.split("###")[0]
            # print(group_key)
            key_name = key.split("###")[1]
            value = res[key]
            for gr_data in data:
                if gr_data['name'] == group_key:
                    for config in gr_data['configs']:
                        if config['key'] == key_name:
                            if config['input_type'] == 'number':
                                config['ex_value'] = int(value)
                            elif config['input_type'] == 'checkbox':
                                config['ex_value'] = (value == 'on')
                            else:
                                config['ex_value'] = value

        with open(sf_file_path, 'w') as stream:
            yaml.dump(data, stream)
        convert_input_sf_to_origin(data, file_path)

        flash('Edit Group_vars ' + file_path + ' successfully. Check result in /tools/edit_ansible_group_vars?'+'group='+request.args.get('group')+'&filename='+request.args.get('filename'))
        return redirect('/tools/list_ansible_group_vars')
Example #24
def get_system_conf_info(json=False):
    confpath = constants.system_conf_path
    if os.path.exists(confpath):
        conf = get_system_conf()
        confexists = True
    else:
        conf = {}
        confexists = False
    if json:
        content = conf
    else:
        content = yaml.dump(conf, default_flow_style=False)
    system_conf_info = {'path': confpath, 'exists': confexists, 'content': content}
    return system_conf_info
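A small sketch of what the json flag controls (raw dict versus YAML text); the conf content below is made up:

import yaml

conf = {'modules_dir': '/opt/modules', 'max_num_concurrent_jobs': 4}
print(conf)                                       # json=True: the raw dict is returned
print(yaml.dump(conf, default_flow_style=False))  # json=False: a YAML string is returned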
Example #25
def xml_to_dct(xml_path, ordered=True, verbose=False):
    ''' return xml as ordered dct '''
    with open(xml_path, 'rb') as xml_string:
        if verbose:
            print(xml_path)
        ordered_dct = xmltodict.parse(xml_string)
        if not ordered:
            return yaml.dump(ordered_dct, default_flow_style=False)
        else:
            return ordered_dct
Example #26
    def translate_descriptor(self, descriptor_file, vnfd):
        log.info('Translating descriptor {}'.format(descriptor_file))
        with open(descriptor_file, 'r') as f:
            descriptor = yaml.load(f, Loader=yaml.FullLoader)

        descriptor.pop('descriptor_version')
        if vnfd:
            schema = self.type_mapping['application/vnd.5gtango.vnfd']
        else:
            schema = self.type_mapping['application/vnd.5gtango.nsd']
        descriptor['descriptor_schema'] = schema

        with open(descriptor_file, 'w') as f:
            f.write(yaml.dump(descriptor, default_flow_style=False))
Example #27
def ooc_cmd_save_hub(client, arg):
    """
    Save the current Hub in the server's storage/hubs/<name>.yaml file.
    If blank and you're a mod, it will save to server's config/areas_new.yaml for the server owner to approve.
    Usage: /save_hub <name>
    """
    if not client.is_mod:
        if arg == '':
            raise ArgumentError('You must be authorized to save the default hub!')
        if len(arg) < 3:
            raise ArgumentError("Filename must be at least 3 symbols long!")
    try:
        if arg != '':
            path = 'storage/hubs'
            num_files = len([f for f in os.listdir(
                path) if os.path.isfile(os.path.join(path, f))])
            if (num_files >= 1000): #yikes
                raise AreaError('Server storage full! Please contact the server host to resolve this issue.')
            try:
                arg = f'{path}/{arg}.yaml'
                if os.path.isfile(arg):
                    with open(arg, 'r', encoding='utf-8') as stream:
                        hub = yaml.safe_load(stream)
                    if 'read_only' in hub and hub['read_only'] == True:
                        raise ArgumentError(f'Hub {arg} already exists and it is read-only!')
                with open(arg, 'w', encoding='utf-8') as stream:
                    yaml.dump(client.area.area_manager.save(ignore=['can_gm', 'max_areas']), stream, default_flow_style=False)
            except ArgumentError:
                raise
            except:
                raise AreaError(f'File path {arg} is invalid!')
            client.send_ooc(f'Saving as {arg}...')
        else:
            client.server.hub_manager.save('config/areas_new.yaml')
            client.send_ooc('Saving all Hubs to areas_new.yaml. Contact the server owner to apply the changes.')
    except AreaError:
        raise
Example #28
def download(hostname, api_key, api_secret, dest, validate_certs):
    result = {"ansible_facts": {}}
    api_endpoint = 'https://{0}'.format(hostname)
    restclient = RestClient(api_endpoint,
                            api_key=api_key,
                            api_secret=api_secret,
                            verify=validate_certs)
    resp = restclient.get('/openapi/v1/filters/inventories')
    if not resp.status_code == 200:
        return (
            1,
            "Error {0}: {1} during connection attempt to {2}/openapi/v1/filters/inventories. \n"
            .format(resp.status_code, resp.reason, api_endpoint))
    filters = json.loads(resp.content)

    with open(dest, 'w') as f:
        for filter in filters:
            f.write('---\n')
            oyaml.dump(filter, f, allow_unicode=True, encoding='utf-8')
            f.write('\n')

    result["ansible_facts"] = {'Output': os.path.join(os.getcwd(), dest)}
    result['changed'] = False
    return (0, result)
Example #29
    def handle(self, **options):
        source = options['api-spec']
        with open(source, 'r') as infile:
            spec = yaml.safe_load(infile)

        for path, methods in spec['paths'].items():
            for method in methods.values():
                if 'responses' not in method:
                    continue

                for status, response in method['responses'].items():
                    if not (400 <= int(status) < 600):
                        continue

                    content = {}
                    for contenttype, _response in response['content'].items():
                        if contenttype == 'application/json':
                            contenttype = ERROR_CONTENT_TYPE
                        content[contenttype] = _response

                    response['content'] = content

        with open(source, 'w') as outfile:
            yaml.dump(spec, outfile, default_flow_style=False)
Example #30
    def get_pipelines(self):
        """
        Returns everything but secrets
        
        :return: string
        :rtype: string
        """
        pipeline_step = ""

        for i in self.yaml_data:
            if i['kind'] != "secret":
                step = OrderedDict(i)
                pipeline_step = pipeline_step + "---\n"
                pipeline_step = pipeline_step + yaml.dump(step)

        return pipeline_step
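A standalone sketch of the same filter-and-concatenate idea with invented documents; PyYAML's yaml.dump_all is an alternative way to join documents with '---' separators:

import yaml

yaml_data = [
    {'kind': 'pipeline', 'name': 'build'},
    {'kind': 'secret', 'name': 'docker_password'},
    {'kind': 'pipeline', 'name': 'deploy'},
]

pipeline_step = ""
for i in yaml_data:
    if i['kind'] != "secret":
        pipeline_step += "---\n" + yaml.dump(i)
print(pipeline_step)
# or equivalently:
# print(yaml.dump_all([i for i in yaml_data if i['kind'] != 'secret'], explicit_start=True))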
Example #31
    def profile_tpu(**input_fn_params):
        # location to save the accumulated results
        output_name = subprocess_env['OUTPUT_NAME']

        # trial-specific output in output_dir
        timestamp = str(int(time.time()))
        output_uri = os.path.join(subprocess_env['OUTPUT_DIR'], 'trials', timestamp)

        # model_dir is used only within a trial.
        # Its content is backed up in output_uri at the end of each trial.
        tpu_name = subprocess_env['TPU_NAME']
        model_dir = subprocess_env['MODEL_DIR']
        if tf.gfile.Exists(model_dir):
            tf.gfile.DeleteRecursively(model_dir)

        # create new TPU each time
        create_tpu_and_wait(subprocess_env)

        # create the scripts
        build_submit_script(input_fn_params)
        build_trace_script()

        # run task
        print('>>>>> running training task')
        p_submit = Popen(submit_command, stdout=PIPE, preexec_fn=os.setsid, env=subprocess_env)

        # wait until the job starts before starting to collect trace
        time.sleep(120)

        # run profiler
        p_submit.poll()
        returncode = p_submit.returncode

        n_scores = 3
        n_attempts = 10

        scores = []
        counter = 0
        checked_filenames = set()

        while returncode is None and len(scores) < n_scores and counter < n_attempts:
            print('>>>>> running profiler')
            p_trace = Popen(trace_command, stdout=PIPE, preexec_fn=os.setsid, env=subprocess_env)
            counter += 1

            time.sleep(45)
            kill_process(p_trace)

            print('>>>>> checking trace files')

            trace_filenames = tf.gfile.Glob('{}/plugins/profile/**/input_pipeline.json'.format(model_dir))

            if trace_filenames:
                early_stop = False
                for trace_filename in trace_filenames:
                    if trace_filename in checked_filenames:
                        continue

                    print('>>>>> reading: {}'.format(trace_filename))
                    with tf.gfile.GFile(trace_filename, 'r') as f:
                        json_str = f.read()

                    checked_filenames.add(trace_filename)
                    input_pipeline = json.loads(json_str)

                    # some trace files might not have a valid score
                    try:
                        infeed_percent_average = float(input_pipeline[0]['p']['infeed_percent_average'])

                        if infeed_percent_average > 0.0:
                            scores.append(infeed_percent_average)
                            print('>>>>> current scores: {}'.format(scores))
                    except:
                        pass

                    # This happens when each training step takes too long.
                    if 'No step time measured' in json_str:
                        early_stop = True

                if early_stop:
                    print('>>>>> early stopping')
                    break

            p_submit.poll()
            returncode = p_submit.returncode

        print('>>>>> training process finished with returncode: {}, number of attempts: {}, number of scores: {}'.format(returncode, counter, len(scores)))

        # kill processes, just in case
        print('>>>>> killing training process')
        kill_process(p_submit)

        # calculate average score
        print('>>>>> calculating score')
        if scores:
            score = sum(scores) / len(scores)
        else:
            # Give the worst possible score when no valid scores collected.
            score = 100.0

        print('>>>>> scores: {}, average score: {}'.format(scores, score))

        # write artifacts to output_uri:
        # the generated submit script, the whole model_dir, and the scores
        print('>>>>> writing trial outputs')
        tf.gfile.Copy(submit_script_name, os.path.join(output_uri, submit_script_name))

        copy_command = copy_cmd.format(model_dir, output_uri)
        copy_command = shlex.split(copy_command)
        call(copy_command)

        with tf.gfile.GFile(os.path.join(output_uri, 'scores.txt'), 'w') as f:
            f.write(str(scores))

        # Add new results to the accumulated results
        params_scores = {
            'input_fn_params': {k:int('{}'.format(v)) for k, v in input_fn_params.items()},
            'scores': scores,
            'score': score
        }
        entry = {timestamp: params_scores}

        with tf.gfile.GFile(output_name, 'a') as f:
            yaml.dump(entry, f, default_flow_style=False)

        # clean up artifacts
        print('>>>>> removing artifacts')
        os.remove(submit_script_name)
        os.remove(trace_script_name)

        # delete TPU
        delete_tpu_and_wait(subprocess_env)

        return score