Example No. 1
    def end_auction(self):
        LOGGER.info('---------------- End auction ----------------',
                    extra={
                        "JOURNAL_REQUEST_ID": self.request_id,
                        "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_AUCTION
                    })
        LOGGER.debug("Stop server",
                     extra={"JOURNAL_REQUEST_ID": self.request_id})
        if self.server:
            self.server.stop()
        delete_mapping(self.worker_defaults, self.auction_doc_id)

        LOGGER.debug("Clear mapping",
                     extra={"JOURNAL_REQUEST_ID": self.request_id})

        self.auction_document["current_stage"] = (
            len(self.auction_document["stages"]) - 1)
        self.auction_document['current_phase'] = END
        LOGGER.debug(' '.join(('Document in end_stage: \n',
                               yaml_dump(dict(self.auction_document)))),
                     extra={"JOURNAL_REQUEST_ID": self.request_id})
        self.approve_audit_info_on_announcement()
        LOGGER.info('Audit data: \n {}'.format(yaml_dump(self.audit)),
                    extra={"JOURNAL_REQUEST_ID": self.request_id})
        if self.put_auction_data():
            self.save_auction_document()
        LOGGER.debug("Fire 'stop auction worker' event",
                     extra={"JOURNAL_REQUEST_ID": self.request_id})

        self._end_auction_event.set()
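Example No. 1 serializes the auction document with yaml_dump purely to embed it in a log message. Called without a stream, yaml_dump returns the YAML text as a str, which is what makes this pattern work. A minimal sketch of the same idea (logger name and document contents are illustrative):

import logging
from yaml import dump as yaml_dump

LOGGER = logging.getLogger(__name__)

def log_document(document: dict) -> None:
    # Without a stream argument, yaml_dump returns the YAML text,
    # so it can go straight into the log record.
    LOGGER.debug("Document in end_stage:\n%s", yaml_dump(document))

log_document({"current_stage": 3, "current_phase": "END"})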
Example No. 2
    def save(self, dirname):
        self._check_if_fitted()

        mkdir(dirname)
        save_tree(self.tree, self.node_to_class, self.node_to_classes,
                  self.class_maps, join(dirname, 'tree'))

        models_dct = {}
        models_dirname = join(dirname, 'models')
        mkdir(models_dirname)
        for node_id, model in self.models.items():
            fname = f'model{node_id}'
            model.save(join(models_dirname, fname))
            models_dct[node_id] = fname
        with open(join(models_dirname, 'models_fnames.yaml'),
                  'w',
                  encoding='utf-8') as file:
            yaml_dump(models_dct, file)

        encoders_dct = {}
        encoders_dirname = join(dirname, 'encoders')
        mkdir(encoders_dirname)
        for node_id, encoder in self.encoders.items():
            fname = f'encoder{node_id}.sav'
            joblib_dump(encoder, join(encoders_dirname, fname))
            encoders_dct[node_id] = fname
        with open(join(encoders_dirname, 'encoders_fnames.yaml'),
                  'w',
                  encoding='utf-8') as file:
            yaml_dump(encoders_dct, file)
Example No. 3
    def _dump_algorithm_data(self, algorithm_data: dict,
                             stop_conditions_data: dict) -> None:
        """
        Dumps optimization algorithm data to proper file.

        :param algorithm_data: Configuration data of optimization algorithm used.
        :param stop_conditions_data: Configuration data of optimization process stop conditions.
        """
        if self.log_format == LoggingFormat.YAML:
            algorithm_file_path = path.join(self.optimization_process_dir,
                                            "algorithm.yaml")  # type: ignore
            stop_conditions_file_path = path.join(
                self.optimization_process_dir,
                "stop_conditions.yaml")  # type: ignore
            with open(algorithm_file_path, "w") as yaml_file:
                yaml_dump(algorithm_data, yaml_file, YamlDumper)
            with open(stop_conditions_file_path, "w") as yaml_file:
                yaml_dump(stop_conditions_data, yaml_file, YamlDumper)
        elif self.log_format == LoggingFormat.JSON:
            algorithm_file_path = path.join(self.optimization_process_dir,
                                            "algorithm.json")  # type: ignore
            stop_conditions_file_path = path.join(
                self.optimization_process_dir,
                "stop_conditions.json")  # type: ignore
            with open(algorithm_file_path, "w") as json_file:
                json_dump(algorithm_data, json_file)
            with open(stop_conditions_file_path, "w") as json_file:
                json_dump(stop_conditions_data, json_file)
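Examples No. 3, 20, and 34 all branch on log_format to pick a file extension and serializer. A hedged sketch of the same dispatch, with the LoggingFormat enum and output directory invented here for illustration:

from enum import Enum
from json import dump as json_dump
from os import path
from yaml import dump as yaml_dump

class LoggingFormat(Enum):  # stand-in for the enum the examples assume
    YAML = "yaml"
    JSON = "json"

def dump_by_format(data, out_dir, stem, log_format):
    # Choose extension and serializer from the configured format.
    if log_format == LoggingFormat.YAML:
        file_path = path.join(out_dir, stem + ".yaml")
        with open(file_path, "w") as f:
            yaml_dump(data, f)
    else:
        file_path = path.join(out_dir, stem + ".json")
        with open(file_path, "w") as f:
            json_dump(data, f)
    return file_path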
Example No. 4
    def log(self, node, epoch, y_pred, y, indices=None, class_map=None):

        if epoch == 0 and indices is None:
            raise ValueError('No indices to log at epoch 0')
        if epoch > 0 and class_map is None:
            raise ValueError(f'No class_map to log at epoch {epoch}')

        if self.log_output:
            current_folder = join(self.log_output_folder,
                                  '/'.join(str(n.name) for n in node.path))
            if not exists(current_folder):
                mkdir(current_folder)

            fname = join(current_folder,
                         f'predictions_epoch{format_number(epoch)}.csv')
            indices_fname = join(current_folder, 'indices.csv')
            classes_fname = join(current_folder,
                                 f'labels{format_number(epoch)}.csv')
            map_fname = join(current_folder,
                             f'class_map{format_number(epoch)}.yaml')

            savetxt(fname, y_pred, delimiter=',')
            savetxt(classes_fname, y.astype('int64'), delimiter=',')
            if indices is not None:
                savetxt(indices_fname, indices, delimiter=',')
            if class_map is not None:
                with open(map_fname, 'w', encoding='utf-8') as file:
                    yaml_dump(class_map, file)
Example No. 5
    def log_iteration(self, iteration: int, solutions: Iterable) -> None:
        """
        Logging method that will be called at each iteration of main optimization algorithm.

        :param iteration: Number of iteration of main optimization algorithm.
        :param solutions: Solutions found in this iteration.
        """
        if self.verbosity >= LoggingVerbosity.AllSolutions:
            if iteration > 0:
                mode = "a"
            else:
                mode = "w"
            data_to_log = {
                f"Iteration {iteration}":
                [solution.get_log_data() for solution in solutions]
            }
            if self.log_format == LoggingFormat.YAML:
                file_path = path.join(self.optimization_process_dir,
                                      "solutions.yaml")  # type: ignore
                with open(file_path, mode) as yaml_file:
                    yaml_dump(data_to_log, yaml_file, YamlDumper)
            elif self.log_format == LoggingFormat.JSON:
                file_path = path.join(self.optimization_process_dir,
                                      "solutions.json")  # type: ignore
                with open(file_path, mode) as json_file:
                    json_dump(data_to_log, json_file)
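Example No. 5 reopens the log file in append mode after the first iteration. The appended YAML still parses as one document only because every top-level key (Iteration N) is unique; passing explicit_start=True instead emits a '---' separator per dump, producing a multi-document stream that is read back with safe_load_all. (The same append strategy does not produce valid JSON, since concatenated top-level objects are not a JSON document.) A small sketch:

from yaml import dump as yaml_dump, safe_load_all

# Append one YAML document per iteration; '---' keeps them separable.
for iteration in range(3):
    mode = "a" if iteration > 0 else "w"
    with open("solutions.yaml", mode) as f:
        yaml_dump({f"Iteration {iteration}": [iteration]}, f, explicit_start=True)

with open("solutions.yaml") as f:
    for doc in safe_load_all(f):  # one dict per appended dump
        print(doc)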
Example No. 6
def export_config(config=None, export_path=None, format='stdout'):

    if format == 'stdout':
        pprint.pprint(config)
        return

    fp = None
    stdout = False
    try:
        if export_path and export_path != 'stdout':
            fp = open(export_path, 'wt')
        else:
            stdout = True
            fp = StringIO()

        if format == 'yaml':
            yaml_dump(config, fp, Dumper=YAMLDumper,
                      explicit_start=False, default_flow_style=False)
        elif format == 'json':
            jsonmod.dump(config, fp, indent=3)
    finally:
        if fp:
            if stdout:
                print(fp.getvalue())
            fp.close()
Example No. 7
def test_config_load(tmp_path):
    #
    # Import the module and functions relevant to this particular set of tests
    from encommon.config import config_load
    from encommon.readwrite import writefile
    from os import makedirs as os_makedirs
    from os import path as os_path
    from yaml import dump as yaml_dump
    #
    # Create the expected directory structure for the configuration validation
    os_makedirs(os_path.join(tmp_path, "folder"))
    #
    # Initial section for initializing variables expected by the remaining routine
    expect = {"base": {"k": "v"}, "folder": {"file": {"subset": {"k": "v"}}}}
    #
    # Write the initial content to the various files using temporary directory
    file_path = os_path.join(tmp_path, "base.yml")
    writefile(file_path, yaml_dump({"k": "v"}), truncate=True)
    file_path = os_path.join(tmp_path, "folder", "file.subset.yml")
    writefile(file_path, yaml_dump({"k": "v"}), truncate=True)
    #
    # Load and parse the YAML configuration enumerating additional directories
    config = config_load(tmp_path)
    #
    # Assert the relevant conditions indicating either test success or failure
    assert config["base"]["k"] == "v"
    assert config["folder"]["file"]["subset"]["k"] == "v"
Example No. 8
    def to_yaml(self, destination, flow_style=False):
        """
        Save a dictionary into a YAML file.

        :param str destination:
            A path to the file where we are going to write the
            converted dict in YAML format.
        """

        if destination:
            # Binary mode: yaml_dump writes encoded bytes when encoding= is set.
            with open(destination, "wb") as file:
                return yaml_dump(
                    self.main,
                    stream=file,
                    encoding="utf-8",
                    allow_unicode=True,
                    indent=4,
                    default_flow_style=flow_style,
                )
        return yaml_dump(
            self.main,
            encoding="utf-8",
            allow_unicode=True,
            indent=4,
            default_flow_style=flow_style,
        )
Example No. 9
    def to_yaml_file(
        self,
        file_path: str,
        encoding: str = "utf-8",
        default_flow_style: bool = False,
        indent: int = 4,
        allow_unicode: bool = True,
        sort_keys: bool = True,
    ) -> None:
        """
        Converts the given dict/list to YAML and save the result into a file.

        :param file_path: The file path.
        :param encoding: The encoding.
        :param default_flow_style: Uses the default flow style.
        :param indent: The indentation to apply.
        :param allow_unicode: Allows the  decoding of unicode chars.
        :param sort_keys: Sorts the keys.
        """

        with open(file_path, "w", encoding=encoding) as file_stream:
            yaml_dump(
                self.subject,
                stream=file_stream,
                default_flow_style=default_flow_style,
                indent=indent,
                allow_unicode=allow_unicode,
                encoding=encoding,
                sort_keys=sort_keys,
            )
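When no stream is passed, the return type of yaml_dump depends on encoding: None yields a str, anything else yields bytes (which is why Example No. 25 further down ends with .decode()). A quick illustration:

from yaml import dump as yaml_dump

text = yaml_dump({"k": "v"})                   # str: 'k: v\n'
raw = yaml_dump({"k": "v"}, encoding="utf-8")  # bytes: b'k: v\n'
assert text == raw.decode("utf-8")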
Example No. 10
    def log_lower_level_at_end(self, upper_iteration: int,
                               lower_algorithm_index: int, best_solution,
                               optimization_time: timedelta) -> None:
        """
        Logging method that will be called at the end of optimization process.

        :param upper_iteration: Upper algorithm iteration.
        :param lower_algorithm_index: Lower algorithm index.
        :param best_solution: The best solution found by the optimization algorithm.
        :param optimization_time: Optimization process duration time.
        """
        # assess data to log
        log_data = {}
        if self.verbosity >= LoggingVerbosity.OptimizationTime:
            log_data["optimization_duration"] = str(optimization_time)
        if self.verbosity >= LoggingVerbosity.BestSolution:
            log_data["best_solution"] = best_solution.get_log_data()
        # log to file
        if log_data:
            if self.log_format == LoggingFormat.YAML:
                file_path = path.join(
                    self.optimization_process_dir,  # type: ignore
                    f"iter_{upper_iteration}_alg_{lower_algorithm_index}_best_solution.yaml"
                )
                with open(file_path, "w") as yaml_file:
                    yaml_dump(log_data, yaml_file, YamlDumper)
            elif self.log_format == LoggingFormat.JSON:
                file_path = path.join(
                    self.optimization_process_dir,  # type: ignore
                    f"iter_{upper_iteration}_alg_{lower_algorithm_index}_best_solution.json"
                )
                with open(file_path, "w") as json_file:
                    json_dump(log_data, json_file)
Example No. 11
 def __save__(self):
     with open(self.MkdocsTemplateFileName,
               encoding='utf-8') as template_f, open(self.MkdocsFileName,
                                                     'w',
                                                     encoding='utf-8') as f:
         mkdocs = yaml_load(template_f, Loader=Loader)
         mkdocs[self.Nav] = self.root_nav_node[self.Nav]
         yaml_dump(mkdocs, f, default_flow_style=False)
Example No. 12
def write_data_file(path, data):
    with open(path, 'w') as data_file:
        if path.name.endswith('.json'):
            json.dump(data, data_file)
        elif path.name.endswith('.yaml') or path.name.endswith('.yml'):
            yaml_dump(data, data_file, Dumper=Dumper)
        else:
            raise NotImplementedError(
                f'Writing data to {path.name} is not implemented')
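Example No. 12 reads path.name, so it expects a pathlib.Path rather than a plain string (open() accepts either). A hypothetical call:

from pathlib import Path

write_data_file(Path("config.yaml"), {"debug": True})  # dispatches to yaml_dump
write_data_file(Path("config.json"), {"debug": True})  # dispatches to json.dump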
Example No. 13
def compact(dev_path,
            rel_path,
            versions_yaml,
            src_type,
            compactor_fn,
            merge=False):
    from yaml import dump as yaml_dump
    from turbulenz_tools.utils.hash import hash_for_file, hash_for_string

    rel_path = _posixpath(rel_path)
    dev_path = _posixpath(dev_path)
    new_versions = {}

    def _compact_directory(path):
        # Search for folders and recurse.
        for p in [
                f for f in os_listdir(path) if path_isdir(path_join(path, f))
        ]:
            _compact_directory(_join(path, p))

        # Search the development path for all src files.
        for dev_filename in iglob(_join(path, '*.%s' % src_type)):
            dev_filename = _posixpath(dev_filename)
            current_hash = hash_for_file(dev_filename)
            # Build a suitable output filename - hash.ext
            rel_filename = _join(rel_path, src_type,
                                 '%s.%s' % (current_hash, src_type))
            if not path_exists(rel_filename):
                compactor_fn(dev_filename, rel_filename)

            # Update the list of compact files, so it can be reused when generating script tags.
            new_versions[
                dev_filename[len(dev_path):]] = rel_filename[len(rel_path):]

    _compact_directory(dev_path)

    if merge:
        current_hash = hash_for_string(''.join(
            [v for _, v in new_versions.items()]))
        rel_filename = _join(rel_path, src_type,
                             '%s.%s' % (current_hash, src_type))
        if not path_exists(rel_filename):
            # Merge the compacted files.
            with open(rel_filename, 'wt') as t:
                for _, v in new_versions.items():
                    with open('%s%s' % (rel_path, v)) as f:
                        t.write(f.read())
                        t.write('\n')

        new_versions['/%s/_merged.%s' %
                     (src_type, src_type)] = rel_filename[len(rel_path):]

    # We don't catch any exceptions here - as it will be handled by the calling function.
    with open(versions_yaml, 'w') as f:
        yaml_dump(new_versions, f, default_flow_style=False)
Example No. 14
def main():
    parser = argparse_ArgumentParser("Input parameters")
    parser.add_argument("--input_file_name",
                        default="input_toy.yaml",
                        help="Input parameters file name")
    parser.add_argument("--graph_files_dir",
                        default="",
                        help="Graph files' folder path")
    parser.add_argument("--out_dir_name",
                        default="/results",
                        help="Output directory name")
    args = parser.parse_args()
    with open(args.input_file_name, 'r') as f:
        inputs = yaml_load(f, yaml_Loader)

    # Override the output directory if one was given or the default is still in use
    if args.out_dir_name or inputs['out_comp_nm'] == "/results/res":
        if not os_path.exists(inputs['dir_nm'] + args.out_dir_name):
            os_mkdir(inputs['dir_nm'] + args.out_dir_name)
        inputs['out_comp_nm'] = args.out_dir_name + "/res"

    inputs['graph_files_dir'] = ''
    if args.graph_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.graph_files_dir):
            os_mkdir(inputs['dir_nm'] + args.graph_files_dir)
        inputs['graph_files_dir'] = args.graph_files_dir

    with open(inputs['dir_nm'] + inputs['out_comp_nm'] + "_input.yaml",
              'w') as outfile:
        yaml_dump(inputs, outfile, default_flow_style=False)

    logging_basicConfig(filename=inputs['dir_nm'] + inputs['out_comp_nm'] +
                        "_logs.yaml",
                        level=logging_INFO)
    start_time_read = time_time()
    myGraph = read_graphs(inputs)
    read_time = time_time() - start_time_read

    myGraphName = inputs['dir_nm'] + inputs['graph_files_dir'] + "/res_myGraph"
    with open(myGraphName, 'wb') as f:
        pickle_dump(myGraph, f)

    tot_time = time_time() - start_time  # start_time: module-level timestamp in the original script

    out_comp_nm = inputs['dir_nm'] + inputs['out_comp_nm']
    # Write to yaml file instead
    with open(out_comp_nm + '_runtime_performance.out', "a") as fid:
        print("Read network time (s) = ",
              read_time,
              "[",
              round(100 * float(read_time) / tot_time, 2),
              "%]",
              file=fid)
        print("Total time (s) = ", tot_time, file=fid)
Example No. 15
def dump_config():
    global CONFIG
    if CONFIG.get('persistent'):
        CONFIG.update({'values': VALUES})
    try:
        with open(CONFIG_FILE, 'w') as config:
            yaml_dump(CONFIG, config, default_flow_style=False)
            return True
    except Exception as err:
        collectd.error('cloudhealth - error: {}'.format(err))
        return False
Example No. 16
 def __save__(self):
     with open(self.mkdocs_template_file_in_os,
               encoding='utf-8') as template_f, open(MkdocsFileName,
                                                     'w',
                                                     encoding='utf-8') as f:
         mkdocs = yaml_load(template_f, Loader=Loader)
         mkdocs[self.Nav] = self.root_nav_node[self.Nav]
         mkdocs["site_name"] = self.site_name
         mkdocs["site_url"] = "https://dengking.github.io/{}".format(
             self.repository_name)
         mkdocs["repo_url"] = "https://github.com/dengking/{}".format(
             self.repository_name)
         yaml_dump(mkdocs, f, default_flow_style=False)
Example No. 17
def save():
    global CONFIG

    with open(CONFIG_FILE, 'w') as f:
        f.write(yaml_dump(CONFIG))
Example No. 18
    def upload_audit_file_with_document_service(self, doc_id=None):
        files = {'file': ('audit_{}.yaml'.format(self.auction_doc_id),
                          yaml_dump(self.audit, default_flow_style=False))}
        ds_response = make_request(self.worker_defaults["DOCUMENT_SERVICE"]["url"],
                                   files=files, method='post',
                                   user=self.worker_defaults["DOCUMENT_SERVICE"]["username"],
                                   password=self.worker_defaults["DOCUMENT_SERVICE"]["password"],
                                   session=self.session_ds, retry_count=3)

        if doc_id:
            method = 'put'
            path = self.tender_url + '/documents/{}'.format(doc_id)
        else:
            method = 'post'
            path = self.tender_url + '/documents'

        response = make_request(path, data=ds_response,
                                user=self.worker_defaults["resource_api_token"],
                                method=method, request_id=self.request_id, session=self.session,
                                retry_count=2
                                )
        if response:
            doc_id = response["data"]['id']
            LOGGER.info(
                "Audit log approved. Document id: {}".format(doc_id),
                extra={"JOURNAL_REQUEST_ID": self.request_id,
                       "MESSAGE_ID": AUCTION_WORKER_API_AUDIT_LOG_APPROVED}
            )
            return doc_id
        else:
            LOGGER.warning(
                "Audit log not approved.",
                extra={"JOURNAL_REQUEST_ID": self.request_id,
                       "MESSAGE_ID": AUCTION_WORKER_API_AUDIT_LOG_NOT_APPROVED})
Example No. 19
def main():
    args = parser.parse_args()
    create_wsgi_app()
    if args.json:
        print(json.dumps(spec.to_dict(), ensure_ascii=False))
    else:
        print(yaml_dump(spec.to_dict(), Dumper=YAMLDumper, allow_unicode=True))
Example No. 20
    def _dump_problem_data(self, problem_data: dict) -> None:
        """
        Dumps optimization problem data to proper file.

        :param problem_data: Optimization problem definition.
        """
        if self.log_format == LoggingFormat.YAML:
            file_path = path.join(self.optimization_process_dir,
                                  "problem.yaml")  # type: ignore
            with open(file_path, "w") as yaml_file:
                yaml_dump(problem_data, yaml_file, YamlDumper)
        elif self.log_format == LoggingFormat.JSON:
            file_path = path.join(self.optimization_process_dir,
                                  "problem.json")  # type: ignore
            with open(file_path, "w") as json_file:
                json_dump(problem_data, json_file)
Example No. 21
    def filter_yamlfy(value: typing.Any) -> str:
        """
        Filter that emits a dump of the DSDL input as a YAML document.
        Available as ``yamlfy`` in all template environments.

        Example::

            /*
            {{ T | yamlfy }}
            */

        Result Example (truncated for brevity)::

            /*
            !!python/object:pydsdl.StructureType
            _attributes:
            - !!python/object:pydsdl.Field
            _serializable: !!python/object:pydsdl.UnsignedIntegerType
                _bit_length: 16
                _cast_mode: &id001 !!python/object/apply:pydsdl.CastMode
                - 0
            _name: value
            */

        :param value: The input value to serialize as YAML.

        :return: If a yaml parser is available, a pretty dump of the given value as yaml.
                 If a yaml parser is not available, an empty string is returned.
        """
        return str(yaml_dump(value, Dumper=YamlDumper))
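The sample output in Example No. 21 shows the !!python/object: tags that PyYAML's default (non-safe) Dumper falls back to for arbitrary Python objects; safe_dump refuses such objects instead, which is the usual guard when the output must stay portable. A quick illustration:

import yaml

class Point:
    def __init__(self):
        self.x = 1

print(yaml.dump(Point()))  # emits a '!!python/object:...Point' tag plus the attributes
try:
    yaml.safe_dump(Point())
except yaml.representer.RepresenterError as exc:
    print(exc)  # cannot represent an object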
Example No. 22
 def terminate(self, operation, temp_dir, hosts=[]):
     self.temp_dir = temp_dir
     extra_options = getattr(self, 'terminate_%s_extra_args' % operation)()
     config = getattr(self, 'get_config_for_%s' % operation)()
     logger.debug('%s' % (config, ))
     config_yaml = yaml_dump(config)
     logger.debug('Config YAML is')
     logger.debug(config_yaml)
     jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation, ),
                          temp_dir,
                          'docker-compose.yml',
                          hosts=self.all_hosts_in_orchestration(),
                          project_name=self.project_name,
                          base_path=self.base_path,
                          params=self.params,
                          api_version=self.api_version,
                          config=config_yaml,
                          env=os.environ)
     options = self.DEFAULT_COMPOSE_OPTIONS.copy()
     options.update({
         u'--verbose': self.params['debug'],
         u'--file': [os.path.join(temp_dir, 'docker-compose.yml')],
         u'COMMAND': 'stop',
         u'--project-name': 'ansible'
     })
     command_options = self.DEFAULT_COMPOSE_STOP_OPTIONS.copy()
     command_options[u'SERVICE'] = hosts
     command_options.update(extra_options)
     project = project_from_options(self.base_path, options)
     command = main.TopLevelCommand(project)
     command.stop(command_options)
Example No. 24
    def upload_audit_file_without_document_service(self, doc_id=None):
        files = {'file': ('audit_{}.yaml'.format(self.auction_doc_id),
                          yaml_dump(self.audit, default_flow_style=False))}
        if doc_id:
            method = 'put'
            path = self.tender_url + '/documents/{}'.format(doc_id)
        else:
            method = 'post'
            path = self.tender_url + '/documents'

        response = make_request(path, files=files,
                                user=self.worker_defaults["TENDERS_API_TOKEN"],
                                method=method, request_id=self.request_id, session=self.session,
                                retry_count=2
                                )
        if response:
            doc_id = response["data"]['id']
            logger.info(
                "Audit log approved. Document id: {}".format(doc_id),
                extra={"JOURNAL_REQUEST_ID": self.request_id,
                       "MESSAGE_ID": AUCTION_WORKER_API_AUDIT_LOG_APPROVED}
            )
            return doc_id
        else:
            logger.warning(
                "Audit log not approved.",
                extra={"JOURNAL_REQUEST_ID": self.request_id,
                       "MESSAGE_ID": AUCTION_WORKER_API_AUDIT_LOG_NOT_APPROVED})
Example No. 25
    def to_yaml(
        self,
        encoding: str = "utf-8",
        default_flow_style: bool = False,
        indent: int = 4,
        allow_unicode: bool = True,
        sort_keys: bool = True,
    ) -> str:
        """
        Converts the given dict/list to the YAML format and return
        the result.

        :param str encoding: The encoding to use.
        :param bool default_flow_style: Uses the default flow style.
        :param int indent: The indentation to apply.
        :param bool allow_unicode: Allows the decoding of unicode chars.
        :param bool sort_keys: Sors the keys.

        :rtype: dict|list
        """

        return yaml_dump(
            self.subject,
            default_flow_style=default_flow_style,
            indent=indent,
            allow_unicode=allow_unicode,
            encoding=encoding,
            sort_keys=sort_keys,
        ).decode()
Example No. 26
    def orchestrate(self, operation, temp_dir, hosts=[], context={}):
        """
        Execute the compose engine.

        :param operation: One of build, run, or listhosts
        :param temp_dir: A temporary directory usable as workspace
        :param hosts: (optional) A list of hosts to limit orchestration to
        :return: The exit status of the builder container (None if it wasn't run)
        """
        is_detached = False
        if self.params.get('detached'):
            is_detached = True
            del self.params['detached']

        self.temp_dir = temp_dir
        try:
            builder_img_id = self.get_image_id_by_tag(
                self.builder_container_img_tag)
        except NameError:
            image_version = '.'.join(release_version.split('.')[:2])
            builder_img_id = 'ansible/%s:%s' % (self.builder_container_img_tag,
                                                image_version)
        extra_options = getattr(self, 'orchestrate_%s_extra_args' % operation)()
        config = getattr(self, 'get_config_for_%s' % operation)()
        logger.debug('%s' % (config,))
        config_yaml = yaml_dump(config)
        logger.debug('Config YAML is')
        logger.debug(config_yaml)
        jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                             temp_dir,
                             'docker-compose.yml',
                             hosts=self.all_hosts_in_orchestration(),
                             project_name=self.project_name,
                             base_path=self.base_path,
                             params=self.params,
                             api_version=self.api_version,
                             builder_img_id=builder_img_id,
                             config=config_yaml,
                             env=os.environ,
                             **context)
        options = self.DEFAULT_COMPOSE_OPTIONS.copy()
        options.update({
            u'--verbose': self.params['debug'],
            u'--file': [
                os.path.join(temp_dir,
                             'docker-compose.yml')],
            u'COMMAND': 'up',
            u'ARGS': ['--no-build'] + hosts,
            u'--project-name': 'ansible'
        })
        command_options = self.DEFAULT_COMPOSE_UP_OPTIONS.copy()
        command_options[u'--no-build'] = True
        command_options[u'SERVICE'] = hosts
        if is_detached:
            logger.info('Deploying application in detached mode')
            command_options[u'-d'] = True
        command_options.update(extra_options)
        project = project_from_options(self.base_path, options)
        command = main.TopLevelCommand(project)
        command.up(command_options)
Example No. 27
    def __save_response_to_file(self, *, response: Response, pathfile: str,
                                save_metadata: bool,
                                expire_limit: int) -> None:
        with open(pathfile + ".json.gz", "wb") as out_file:
            shutil.copyfileobj(response.raw, out_file)

        if save_metadata:
            metadata = {
                "pandas_types":
                self.__parse_type_in_headers(response.headers),
                "expiration_time":
                datetime.now(timezone.utc) + timedelta(minutes=expire_limit),
                "expire_limit":
                expire_limit
            }
            with open(pathfile + ".yaml", "w") as file:
                yaml_dump(metadata, file, Dumper)
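Example No. 27 puts a datetime straight into the metadata dict. PyYAML represents datetime.datetime natively as a YAML timestamp, so no manual string conversion is needed on the dump side. A minimal check:

from datetime import datetime, timezone
from yaml import dump as yaml_dump

meta = {"expiration_time": datetime(2024, 1, 1, tzinfo=timezone.utc)}
print(yaml_dump(meta))  # expiration_time: 2024-01-01 00:00:00+00:00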
Example No. 28
def store_redis_config():
    import redis

    c = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)
    data = yaml_dump(SAMPLE, allow_unicode=True).encode()
    c.set('my/server/config.yml', data)
    c.set('my/server/empty.yml', '')
    return c
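Example No. 28 encodes the dump to UTF-8 bytes before storing it under a Redis key; reading it back is the mirror image. A hedged round-trip sketch (host, port, and key follow the example, the rest is illustrative):

import redis
from yaml import dump as yaml_dump, safe_load

def roundtrip(host, port, sample):
    c = redis.Redis(host=host, port=port)
    c.set('my/server/config.yml', yaml_dump(sample, allow_unicode=True).encode())
    stored = c.get('my/server/config.yml')  # bytes
    return safe_load(stored)                # parse back into a dict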
Example No. 30
def store_consul_config():
    import consul

    c = consul.Consul(host=CONSUL_HOST, port=CONSUL_PORT)
    data = yaml_dump(SAMPLE, allow_unicode=True).encode()
    c.kv.put('my/server/config.yml', data)

    c.kv.put('my/server/empty.yml', None)
    return c
Example No. 31
        def to_yaml(self, destination, flow_style=False):
            """
            Save a dictionary into a YAML file.

            Arguments:
                - destination: A string, a path to the file we are going to write.
                - flow_style: A bool, whether to follow the default flow style.
            """

            # Binary mode: yaml_dump writes encoded bytes when encoding= is set.
            with open(destination, "wb") as file:
                yaml_dump(
                    self.main_dictionnary,
                    file,
                    encoding="utf-8",
                    allow_unicode=True,
                    indent=4,
                    default_flow_style=flow_style,
                )
Example No. 32
    def bootstrap_env(self, temp_dir, behavior, operation, compose_option,
                      builder_img_id=None, context=None):
        """
        Build common Docker Compose elements required to execute orchestrate,
        terminate, restart, etc.
        
        :param temp_dir: A temporary directory usable as workspace
        :param behavior: x in x_operation_extra_args
        :param operation: Operation to perform, e.g. build, run, or listhosts
        :param compose_option: x in DEFAULT_COMPOSE_X_OPTIONS
        :param builder_img_id: Ansible Container Builder Image ID
        :param context: extra context to send to jinja_render_to_temp
        :return: options (options to pass to compose),
                 command_options (operation options to pass to compose),
                 command (compose's top level command)
        """

        if context is None:
            context = {}

        self.temp_dir = temp_dir
        extra_options = getattr(self, '{}_{}_extra_args'.format(behavior,
                                                                operation))()
        config = getattr(self, 'get_config_for_%s' % operation)()
        logger.debug('%s' % (config,))
        config_yaml = yaml_dump(config)
        logger.debug('Config YAML is')
        logger.debug(config_yaml)
        jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                             temp_dir,
                             'docker-compose.yml',
                             hosts=self.all_hosts_in_orchestration(),
                             project_name=self.project_name,
                             base_path=self.base_path,
                             params=self.params,
                             api_version=self.api_version,
                             builder_img_id=builder_img_id,
                             config=config_yaml,
                             env=os.environ,
                             **context)
        options = self.DEFAULT_COMPOSE_OPTIONS.copy()

        options.update({
            u'--verbose': self.params['debug'],
            u'--file': [
                os.path.join(temp_dir,
                             'docker-compose.yml')],
            u'--project-name': 'ansible',
        })
        command_options = getattr(self, 'DEFAULT_COMPOSE_{}_OPTIONS'.format(
            compose_option.upper())).copy()
        command_options.update(extra_options)

        project = project_from_options(self.base_path + '/ansible', options)
        command = main.TopLevelCommand(project)

        return options, command_options, command
Example No. 33
def main():
    module = AnsibleModule(
        argument_spec=dict(
            inventory_path=dict(type='str', required=True),
            device_dict=dict(type='dict', required=True)),
        supports_check_mode=True)
    inventory_path = module.params['inventory_path']
    device_dict = module.params['device_dict']

    with open(inventory_path, 'r') as hostfile:
        try:
            inventory = yaml_load(hostfile, Loader=Loader)
        except YAMLError as e:
            module.fail_json(
                changed=False,
                msg=f"Hostfile parsing error.\n Hostfile path: {module.params['inventory_path']}\nError:\n{e}"
            )

    try:
        if 'emsfp' in inventory['all']['children']:
            em_group = inventory['all']['children']['emsfp']['children']
        else:
            raise KeyError("key 'emsfp' not in inventory")
        updated_em_group = update_em_group(deepcopy(em_group), device_dict)
    except KeyError as e:
        module.fail_json(
            changed=False,
            msg=f"KeyError: key {e} not in hostfile\n Hostfile content: {inventory}"
        )

    if em_group == updated_em_group:
        module.exit_json(changed=False, msg="Nothing to change!\n")
    else:
        inventory['all']['children']['emsfp']['children'] = updated_em_group
        with open(inventory_path, 'w') as hostfile:
            yaml_dump(inventory, hostfile, explicit_start=True)
        module.exit_json(
            changed=True,
            msg=f"Updated inventory:\n{yaml_dump(inventory, default_flow_style=False, explicit_start=True)}\nPrevious hostfile content:\n{yaml_dump(em_group)}\nNew hostfile content:\n{yaml_dump(updated_em_group)}\n"
        )
Example No. 34
    def log_lower_level_iteration(self, upper_iteration: int,
                                  lower_algorithm_index: int,
                                  lower_iteration: int,
                                  solutions: Iterable) -> None:
        """
        Logging method that will be called at each iteration of lower level optimization algorithms.

        Log files naming convention: solution_iter_X_alg_Y.E,
        where:
        - X - iteration of main algorithm
        - Y - index (order number) of lower algorithm in main algorithm
        - E - extension (according to log_format attribute)

        Note: This method will only be called by adaptive algorithm!

        :param upper_iteration: Upper algorithm iteration.
        :param lower_algorithm_index: Lower algorithm index.
        :param lower_iteration: Lower algorithm iteration.
        :param solutions: Solutions found in this iteration of lower algorithm.
        """
        if self.verbosity >= LoggingVerbosity.AllSolutions:
            if lower_iteration > 0:
                mode = "a"
            else:
                mode = "w"
            data_to_log = {
                f"Iteration {lower_iteration}":
                [solution.get_log_data() for solution in solutions]
            }
            if self.log_format == LoggingFormat.YAML:
                file_path = path.join(
                    self.optimization_process_dir,  # type: ignore
                    f"iter_{upper_iteration}_alg_{lower_algorithm_index}_solutions.yaml"
                )
                with open(file_path, mode) as yaml_file:
                    yaml_dump(data_to_log, yaml_file, YamlDumper)
            elif self.log_format == LoggingFormat.JSON:
                file_path = path.join(
                    self.optimization_process_dir,  # type: ignore
                    f"iter_{upper_iteration}_alg_{lower_algorithm_index}_solutions.json"
                )
                with open(file_path, mode) as json_file:
                    json_dump(data_to_log, json_file)
Example No. 36
    def end_auction(self):
        LOGGER.info('---------------- End auction ----------------',
                    extra={
                        "JOURNAL_REQUEST_ID": self.request_id,
                        "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_AUCTION
                    })
        LOGGER.debug("Stop server",
                     extra={"JOURNAL_REQUEST_ID": self.request_id})
        if self.server:
            self.server.stop()
        LOGGER.debug("Clear mapping",
                     extra={"JOURNAL_REQUEST_ID": self.request_id})
        delete_mapping(self.worker_defaults, self.auction_doc_id)

        start_stage, end_stage = self.get_round_stages(ROUNDS)
        minimal_bids = deepcopy(
            self.auction_document["stages"][start_stage:end_stage])
        minimal_bids = self.filter_bids_keys(
            sorting_by_amount(minimal_bids, reverse=False))
        self.auction_document["results"] = []
        for item in minimal_bids:
            self.auction_document["results"].append(
                prepare_results_stage(**item))
        self.auction_document["current_stage"] = (
            len(self.auction_document["stages"]) - 1)
        LOGGER.debug(' '.join(
            ('Document in end_stage: \n',
             yaml_dump(json.loads(dumps(self.auction_document))))),
                     extra={"JOURNAL_REQUEST_ID": self.request_id})
        self.approve_audit_info_on_announcement()
        LOGGER.info('Audit data: \n {}'.format(
            yaml_dump(json.loads(dumps(self.audit)))),
                    extra={"JOURNAL_REQUEST_ID": self.request_id})
        if self.debug:
            LOGGER.debug('Debug: put_auction_data disabled !!!',
                         extra={"JOURNAL_REQUEST_ID": self.request_id})
            sleep(10)
            self.save_auction_document()
        else:
            if self.put_auction_data():
                self.save_auction_document()
        LOGGER.debug("Fire 'stop auction worker' event",
                     extra={"JOURNAL_REQUEST_ID": self.request_id})
Example No. 37
    def create(self, repo_path, override=False):
        config_file_name = posixpath.join(repo_path, CONFIG_FILE)

        if posixpath.exists(config_file_name) and not override:
            raise RepositoryAlreadyExistsError()

        try:
            os.remove(config_file_name)
        except FileNotFoundError:
            # don't care if the file exists or not
            pass

        with open(config_file_name, 'w') as f:
            f.write(yaml_dump({'settings': self._settings, 'rules': self._rules}, Dumper=YAMLDumper))
Example No. 38
    def orchestrate(self, operation, temp_dir, hosts=[], context={}):
        """
        Execute the compose engine.

        :param operation: One of build, run, or listhosts
        :param temp_dir: A temporary directory usable as workspace
        :param hosts: (optional) A list of hosts to limit orchestration to
        :return: The exit status of the builder container (None if it wasn't run)
        """
        self.temp_dir = temp_dir
        builder_img_id = self.get_image_id_by_tag(
            self.builder_container_img_tag)
        extra_options = getattr(self, 'orchestrate_%s_extra_args' % operation)()
        config = getattr(self, 'get_config_for_%s' % operation)()
        logger.debug('%s' % (config,))
        config_yaml = yaml_dump(config)
        logger.debug('Config YAML is')
        logger.debug(config_yaml)
        jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                             temp_dir,
                             'docker-compose.yml',
                             hosts=self.config.get('services', {}).keys(),
                             project_name=self.project_name,
                             base_path=self.base_path,
                             params=self.params,
                             api_version=self.api_version,
                             builder_img_id=builder_img_id,
                             config=config_yaml,
                             env=os.environ,
                             **context)
        options = self.DEFAULT_COMPOSE_OPTIONS.copy()
        options.update({
            u'--file': [
                os.path.join(temp_dir,
                             'docker-compose.yml')],
            u'COMMAND': 'up',
            u'ARGS': ['--no-build'] + hosts,
            u'--project-name': 'ansible'
        })
        command_options = self.DEFAULT_COMPOSE_UP_OPTIONS.copy()
        #command_options[u'--no-build'] = True
        command_options[u'--build'] = True
        if os.environ.get('DETACH'):
            command_options[u'-d'] = True
        command_options[u'SERVICE'] = hosts
        command_options.update(extra_options)
        project = project_from_options(self.base_path, options)
        command = main.TopLevelCommand(project)
        command.up(command_options)
Example No. 39
def main(options):
    global verbose, WIDTH
    if options:
        verbose = options.verbose
        WIDTH = options.width
    scrape_data = scrape_repos()
    if options.yaml:
        if verbose: print('Writing scraped data to rosrepos.yaml...')
        # Convert the data to YAML and write it to the output file.
        with open('rosrepos.yaml', 'w') as output:
            output.write(yaml_dump(scrape_data, default_flow_style=False))
    if options.sqlite:
        if verbose: print('Writing scraped data to rosrepos.sqlite...')
        gendb(scrape_data)
Example No. 40
    def orchestrate(self, operation, temp_dir, hosts=[], context={}):
        """
        Execute the compose engine.

        :param operation: One of build, run, or listhosts
        :param temp_dir: A temporary directory usable as workspace
        :param hosts: (optional) A list of hosts to limit orchestration to
        :return: The exit status of the builder container (None if it wasn't run)
        """
        self.temp_dir = temp_dir
        builder_img_id = self.get_image_id_by_tag(self.builder_container_img_tag)
        extra_options = getattr(self, "orchestrate_%s_extra_args" % operation)()
        config = getattr(self, "get_config_for_%s" % operation)()
        logger.debug("%s" % (config,))
        config_yaml = yaml_dump(config)
        logger.debug("Config YAML is")
        logger.debug(config_yaml)
        jinja_render_to_temp(
            "%s-docker-compose.j2.yml" % (operation,),
            temp_dir,
            "docker-compose.yml",
            hosts=self.config.get("services", {}).keys(),
            project_name=self.project_name,
            base_path=self.base_path,
            params=self.params,
            api_version=self.api_version,
            builder_img_id=builder_img_id,
            config=config_yaml,
            env=os.environ,
            **context
        )
        options = self.DEFAULT_COMPOSE_OPTIONS.copy()
        options.update(
            {
                u"--file": [os.path.join(temp_dir, "docker-compose.yml")],
                u"COMMAND": "up",
                u"ARGS": ["--no-build"] + hosts,
                u"--project-name": "ansible",
            }
        )
        command_options = self.DEFAULT_COMPOSE_UP_OPTIONS.copy()
        command_options[u"--no-build"] = True
        command_options[u"SERVICE"] = hosts
        command_options.update(extra_options)
        project = project_from_options(self.base_path, options)
        command = main.TopLevelCommand(project)
        command.up(command_options)
Example No. 41
def main(image, image_name, rows, columns, filter_list, csv_output,
         yaml_out=None, save_cell_images=False, filter_image_name=None):
    cell_w, cell_h = dims.celldims(image.size, rows, columns)
    filter_name = "-".join(map(base_name_no_ext, args.filters))
    filtered_image = apply_filters(image, filter_list)
    if filter_image_name:
        filtered_image.save(filter_image_name)
    stats_data = []
    vector_data = []
    stat_functions = dict(mean=chi_mean, stddev=np.std, peaks=count_max_peaks)
    for i in range(rows):
        stats_data.append(list())
        vector_data.append(list())
        for j in range(columns):
            stats_data[i].append(dict())
            bb = bounding_box(j * cell_w, i * cell_h, cell_w, cell_h)
            cell = filtered_image.crop(bb)
            if save_cell_images:
                cell_name = "{:s}-{:s}_y{:02d}_x{:02d}.bmp".format(image_name, filter_name, i, j)
                cell.save(cell_name)
            vectors = get_vectors(cell)
            for key in vectors:
                # union of two dict to add new keys
                stats_data[i][j].update(dict_map(vectors[key], stat_functions, key + "_"))

            vector_data[i].append(vectors)

            if yaml_out:  # the yaml lib has no representers for numpy data types
                vectors['x'] = list(map(float, vectors['x']))
                vectors['y'] = list(map(float, vectors['y']))

    if yaml_out:
        yaml_out.write(yaml_dump(vector_data))

    stat_names = stats_data[0][0].keys()
    header_names = [filter_name + "-" + stat_name for stat_name in stat_names]
    stats_writer = csv.writer(csv_output)
    stats_writer.writerow(['i', 'j', 'x', 'y'] + header_names)
    for i in range(rows):
        for j in range(columns):
            row = [i, j, float(j) / columns, float(i) / rows]
            for k in stat_names:
                row.append(stats_data[i][j][k])
            stats_writer.writerow(row)
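The float conversion guarded by yaml_out above exists because PyYAML ships no representers for NumPy scalar types: safe_dump rejects them, and the default Dumper falls back to verbose !!python/object/apply:numpy... tags. Converting to built-in floats first keeps the dump readable. A small sketch:

import numpy as np
from yaml import dump as yaml_dump

vec = np.array([0.25, 0.5])
# Convert to built-in floats; yaml_dump has no NumPy representers.
print(yaml_dump({'x': [float(v) for v in vec]}))  # x: block list of plain floats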
Example No. 42
 def end_auction(self):
     logger.info(
         '---------------- End auction ----------------',
         extra={"JOURNAL_REQUEST_ID": self.request_id,
                "MESSAGE_ID": AUCTION_WORKER_SERVICE}
     )
     start_stage, end_stage = self.get_round_stages(ROUNDS)
     minimal_bids = deepcopy(
         self.auction_document["stages"][start_stage:end_stage]
     )
     minimal_bids = self.filter_bids_keys(sorting_by_amount(minimal_bids))
     self.auction_document["results"] = []
     for item in minimal_bids:
         self.auction_document["results"].append(generate_resuls(item))
     self.auction_document["current_stage"] = (len(self.auction_document["stages"]) - 1)
     logger.debug(' '.join((
         'Document in end_stage: \n', yaml_dump(dict(self.auction_document))
     )), extra={"JOURNAL_REQUEST_ID": self.request_id})
     if self.debug:
         logger.debug(
             'Debug: put_auction_data disabled !!!',
             extra={"JOURNAL_REQUEST_ID": self.request_id}
         )
     else:
         self.put_auction_data()
     logger.debug(
         "Clear mapping", extra={"JOURNAL_REQUEST_ID": self.request_id}
     )
     delete_mapping(self.worker_defaults["REDIS_URL"],
                    self.auction_doc_id)
     logger.debug("Stop server", extra={"JOURNAL_REQUEST_ID": self.request_id})
     if self.server:
         self.server.stop()
     logger.debug(
         "Fire 'stop auction worker' event",
         extra={"JOURNAL_REQUEST_ID": self.request_id}
     )
Example No. 43
def main():

   # EXAMPLE: ONLY VALIDATE
   lconf_validate_one_section_str(lconf_section__base_example_lconf_section_raw_str)

   # EXAMPLE: ONLY PREPARE DEFAULT OBJ
   lconf_default_obj = lconf_prepare_default_obj(lconf_section__base_example_template_obj, with_comments=False)
   print('\n\n============== EXAMPLE: ONLY PREPARE DEFAULT OBJ ==============\n')
   print(lconf_default_obj)

   # EXAMPLE: VALIDATE, PREPARE, PARSE:
   # validate a `LCONF-Section string` and prepare a default lconf obj from the template obj and parse the LCONF-Section
   print('\n\n============== EXAMPLE: VALIDATE, PREPARE, PARSE ==============\n')
   lconf_parse_obj = lconf_prepare_and_parse_section(
      lconf_section__base_example_lconf_section_raw_str,
      lconf_section__base_example_template_obj,
      with_comments=True,
      validate=True
   )
   print(lconf_parse_obj)

   # EXAMPLE: EXTRACT KNOWN SECTION, VALIDATE, PREPARE, PARSE: this is also useful to extract from files
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__base_example_lconf_section_raw_str,
      'BaseEXAMPLE',
      lconf_section__base_example_template_obj,
      with_comments=True,
      validate=True
   )
   print(
      '\n\n============== EXAMPLE: EXTRACT KNOWN SECTION, VALIDATE, PREPARE, PARSE: also for files ==============\n')
   print(lconf_parse_obj)

   # EXAMPLE: ACCESS The Section-INFO
   print('\n\n============== EXAMPLE: ACCESS The Section-INFO ==============\n')
   print('  lconf_parse_obj.key_order: ', lconf_parse_obj.key_order)
   print('  lconf_parse_obj.key_empty_replacementvalue: ', lconf_parse_obj.key_empty_replacementvalue)
   print('  lconf_parse_obj.section_name: ', lconf_parse_obj.section_name)
   print('  lconf_parse_obj.is_parsed: ', lconf_parse_obj.is_parsed)
   print('  lconf_parse_obj.has_comments: ', lconf_parse_obj.has_comments)


   # EXAMPLE: EMIT DEFAULT OBJ
   lconf_section_emitted_default_obj_str = lconf_emit_default_obj(
      lconf_section__base_example_template_obj,
      'EMITTED BaseEXAMPLE',
      onelinelists=LCONF_DEFAULT,
      with_comments=True
   )
   print('\n\n============== EXAMPLE: EMIT DEFAULT OBJ ==============\n')
   print(lconf_section_emitted_default_obj_str)

   # EXAMPLE: EMIT PARSED LCONF OBJ
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__base_example_lconf_section_raw_str,
      'BaseEXAMPLE',
      lconf_section__base_example_template_obj,
      with_comments=True,
      validate=True
   )
   lconf_section_emitted_parsed_obj_str = lconf_emit(lconf_parse_obj, onelinelists=LCONF_DEFAULT, empty_key_value_pair=True)

   print('\n\n============== EXAMPLE: EMIT PARSED LCONF OBJ (empty_key_value_pair=True) ==============\n')
   print(lconf_section_emitted_parsed_obj_str)


   lconf_section_emitted_parsed_obj_str = lconf_emit(lconf_parse_obj, onelinelists=LCONF_DEFAULT, empty_key_value_pair=False)
   print('\n\n============== EXAMPLE: EMIT PARSED LCONF OBJ (empty_key_value_pair=False) ==============\n')
   print(lconf_section_emitted_parsed_obj_str)


   # EXAMPLE: EMIT TO JSON
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__base_example_lconf_section_raw_str,
      'BaseEXAMPLE',
      lconf_section__base_example_template_obj,
      with_comments=False,
      validate=True
   )
   result_ordered_native_type = lconf_to_ordered_native_type(lconf_parse_obj)
   # IMPORTANT: datetime.datetime(2014, 5, 8, 13, 39) is not JSON serializable
   result_ordered_native_type['key11value_mapping']['mapping11_key2_mapping'][
      'mapping11_key2_nested_mapping_key1'] = '2014-05-08 13:39:00'
   dump_json = json_dumps(result_ordered_native_type, indent=3)

   print('\n\n============== EXAMPLE: EMIT TO ORDERED JSON ==============\n')
   print(dump_json)

   # EXAMPLE: EMIT TO YAML
   if has_yaml:
      lconf_parse_obj = lconf_parse_section_extract_by_name(
         lconf_section__base_example_lconf_section_raw_str,
         'BaseEXAMPLE',
         lconf_section__base_example_template_obj,
         with_comments=False,
         validate=True
      )
      result_native_type = lconf_to_native_type(lconf_parse_obj)
      # NOTE: the datetime value is replaced with a string only for consistency with
      # the JSON example above; PyYAML itself can serialize datetime objects natively
      result_native_type['key11value_mapping']['mapping11_key2_mapping'][
         'mapping11_key2_nested_mapping_key1'] = '2014-05-08 13:39:00'
      dump_yaml = yaml_dump(result_native_type, indent=3, allow_unicode=True)

      print('\n\n============== EXAMPLE: EMIT TO YAML ==============\n')
      print(dump_yaml)
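
Before the next snippets: `yaml_dump` on this page is always PyYAML's `dump` function under an aliased import. A minimal hedged sketch of the emission options used above, on hypothetical data:

import datetime
from yaml import dump as yaml_dump

data = {'name': 'example', 'created': datetime.datetime(2014, 5, 8, 13, 39)}
# indent controls block indentation; allow_unicode keeps non-ASCII text readable.
# PyYAML serializes datetime natively, so no string replacement is strictly needed.
print(yaml_dump(data, indent=3, allow_unicode=True))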
Example n. 44
0
def yaml(data, types, fn):
    # NOTE: `types` is unused in the original source, and the function name
    # shadows the `yaml` module, so the dump function must be imported under
    # an alias (e.g. `from yaml import dump as yaml_dump`).
    with open(fn, "w") as f:
        yaml_dump(data, f, default_flow_style=False)
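
A hedged usage sketch for the helper above (data and output path are hypothetical; the unused `types` argument is passed as None):

config = {'retries': 3, 'hosts': ['a.example', 'b.example']}
yaml(config, None, '/tmp/nodes.yaml')  # block style because default_flow_style=False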
Example n. 45
0
def write_yaml(pynodes, cppnodes):
    # requires: import os
    # Map each node's name to the basename of its executable path.
    output = {'nodes': [{'name': n['name'], 'exec': os.path.basename(n['path'])}
                        for n in pynodes + cppnodes]}
    return yaml_dump(output, default_flow_style=False)
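
A hedged usage sketch (the node dicts are hypothetical; only the 'name' and 'path' keys the function reads are assumed). PyYAML sorts mapping keys by default, so 'exec' is emitted before 'name':

pynodes = [{'name': 'talker', 'path': '/opt/nodes/talker.py'}]
cppnodes = [{'name': 'listener', 'path': '/opt/nodes/listener'}]
print(write_yaml(pynodes, cppnodes))
# nodes:
# - exec: talker.py
#   name: talker
# - exec: listener
#   name: listener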
Example n. 46
0
File: yml.py Project: jerryjj/InGo
    def dump(self, config, target=None):
        if target:
            # BUG fixed: the original opened the file with the Python 2 `file()`
            # builtin in mode 'r'; dumping requires the file in write mode.
            target = open(target, 'w')
        return yaml_dump(config, target, Dumper=Dumper)
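
A hedged usage sketch for the fixed method above (`obj` is a hypothetical instance of the containing class):

cfg = {'debug': True, 'port': 8080}
text = obj.dump(cfg)              # no target: yaml_dump returns the YAML string
obj.dump(cfg, '/tmp/config.yml')  # with a target path: writes the file, returns None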
Example n. 47
0
def do_emit__yaml_cdumper():
   # Fragment: `result_native_type` and `yaml_CDumper` (an alias for PyYAML's
   # C dumper) are defined in the enclosing module.
   dump_yaml = yaml_dump(result_native_type, Dumper=yaml_CDumper, indent=3, allow_unicode=True)
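
PyYAML's C dumper is only available when the library was compiled against libyaml, so code like the fragment above is typically guarded. A minimal sketch of the usual fallback import (standard PyYAML names):

try:
    from yaml import CDumper as yaml_CDumper  # fast libyaml-backed dumper
except ImportError:
    from yaml import Dumper as yaml_CDumper   # pure-Python fallback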
Example n. 48
0
def main():
   # ---------------------------------- EXAMPLE 4 a ---------------------------------- #

   # EXAMPLE 4 a: ONLY VALIDATE
   lconf_validate_one_section_str(lconf_section__example_4a_lconf_section_raw_str)

   # EXAMPLE 4 a: ONLY PREPARE DEFAULT OBJ
   lconf_default_obj = lconf_prepare_default_obj(lconf_section__example_4a__template_obj, with_comments=False)
   print('\n\n============== EXAMPLE 4 a: ONLY PREPARE DEFAULT OBJ ==============\n')
   print(lconf_default_obj)

   # EXAMPLE 4 a: VALIDATE, PREPARE, PARSE:
   # validate a `LCONF-Section string` and prepare a default lconf obj from the template obj and parse the LCONF-Section
   print('\n\n============== EXAMPLE 4 a: VALIDATE, PREPARE, PARSE ==============\n')
   lconf_parse_obj = lconf_prepare_and_parse_section(
      lconf_section__example_4a_lconf_section_raw_str,
      lconf_section__example_4a__template_obj,
      with_comments=True,
      validate=True
   )
   print(lconf_parse_obj)

   # EXAMPLE 4 a: EXTRACT KNOWN SECTION, VALIDATE, PREPARE, PARSE: this is also useful to extract from files
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__example_4a_lconf_section_raw_str,
      'EXAMPLE 4 a',
      lconf_section__example_4a__template_obj,
      with_comments=True,
      validate=True
   )
   print(
      '\n\n============== EXAMPLE 4 a: EXTRACT KNOWN SECTION, VALIDATE, PREPARE, PARSE: also for files ==============\n')
   print(lconf_parse_obj)

   # EXAMPLE 4 a: ACCESS The Section-INFO
   print('\n\n============== EXAMPLE 4 a: ACCESS The Section-INFO ==============\n')
   print('  lconf_parse_obj.key_order: ', lconf_parse_obj.key_order)
   print('  lconf_parse_obj.section_name: ', lconf_parse_obj.section_name)
   print('  lconf_parse_obj.is_parsed: ', lconf_parse_obj.is_parsed)
   print('  lconf_parse_obj.has_comments: ', lconf_parse_obj.has_comments)

   # EXAMPLE 4 a: EMIT DEFAULT OBJ
   lconf_section_emitted_default_obj_str = lconf_emit_default_obj(
      lconf_section__example_4a__template_obj,
      'EMITTED EXAMPLE 4 a',
      onelinelists=LCONF_DEFAULT,
      with_comments=True
   )
   print('\n\n============== EXAMPLE 4 a: EMIT DEFAULT OBJ ==============\n')
   print(lconf_section_emitted_default_obj_str)

   # EXAMPLE 4 a: EMIT PARSED LCONF OBJ
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__example_4a_lconf_section_raw_str,
      'EXAMPLE 4 a',
      lconf_section__example_4a__template_obj,
      with_comments=True,
      validate=True
   )
   lconf_section_emitted_parsed_obj_str = lconf_emit(lconf_parse_obj, onelinelists=LCONF_DEFAULT)

   print('\n\n============== EXAMPLE 4 a: EMIT PARSED LCONF OBJ ==============\n')
   print(lconf_section_emitted_parsed_obj_str)

   # EXAMPLE 4 a: EMIT TO JSON
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__example_4a_lconf_section_raw_str,
      'EXAMPLE 4 a',
      lconf_section__example_4a__template_obj,
      with_comments=False,
      validate=True
   )
   result_ordered_native_type = lconf_to_ordered_native_type(lconf_parse_obj)
   dump_json = json_dumps(result_ordered_native_type, indent=3)

   print('\n\n============== EXAMPLE 4 a: EMIT TO ORDERED JSON ==============\n')
   print(dump_json)

   # EXAMPLE 4 a: EMIT TO YAML
   if has_yaml:
      lconf_parse_obj = lconf_parse_section_extract_by_name(
         lconf_section__example_4a_lconf_section_raw_str,
         'EXAMPLE 4 a',
         lconf_section__example_4a__template_obj,
         with_comments=False,
         validate=True
      )
      result_native_type = lconf_to_native_type(lconf_parse_obj)
      dump_yaml = yaml_dump(result_native_type, indent=3, allow_unicode=True)

      print('\n\n============== EXAMPLE 4 a: EMIT TO YAML ==============\n')
      print(dump_yaml)


   # ---------------------------------- EXAMPLE 4 b ---------------------------------- #

   # EXAMPLE 4 b: ONLY VALIDATE
   lconf_validate_one_section_str(lconf_section__example_4b_lconf_section_raw_str)

   # EXAMPLE 4 b: ONLY PREPARE DEFAULT OBJ
   lconf_default_obj = lconf_prepare_default_obj(lconf_section__example_4b__template_obj, with_comments=False)
   print('\n\n============== EXAMPLE 4 b: ONLY PREPARE DEFAULT OBJ ==============\n')
   print(lconf_default_obj)

   # EXAMPLE 4 b: VALIDATE, PREPARE, PARSE:
   # validate a `LCONF-Section string` and prepare a default lconf obj from the template obj and parse the LCONF-Section
   print('\n\n============== EXAMPLE 4 b: VALIDATE, PREPARE, PARSE ==============\n')
   lconf_parse_obj = lconf_prepare_and_parse_section(
      lconf_section__example_4b_lconf_section_raw_str,
      lconf_section__example_4b__template_obj,
      with_comments=True,
      validate=True
   )
   print(lconf_parse_obj)

   # EXAMPLE 4 b: EXTRACT KNOWN SECTION, VALIDATE, PREPARE, PARSE: this is also useful to extract from files
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__example_4b_lconf_section_raw_str,
      'EXAMPLE 4 b',
      lconf_section__example_4b__template_obj,
      with_comments=True,
      validate=True
   )
   print(
      '\n\n============== EXAMPLE 4 b: EXTRACT KNOWN SECTION, VALIDATE, PREPARE, PARSE: also for files ==============\n')
   print(lconf_parse_obj)

   # EXAMPLE 4 b: ACCESS The Section-INFO
   print('\n\n============== EXAMPLE 4 b: ACCESS The Section-INFO ==============\n')
   print('  lconf_parse_obj.key_order: ', lconf_parse_obj.key_order)
   print('  lconf_parse_obj.section_name: ', lconf_parse_obj.section_name)
   print('  lconf_parse_obj.is_parsed: ', lconf_parse_obj.is_parsed)
   print('  lconf_parse_obj.has_comments: ', lconf_parse_obj.has_comments)

   # EXAMPLE 4 b: EMIT DEFAULT OBJ
   lconf_section_emitted_default_obj_str = lconf_emit_default_obj(
      lconf_section__example_4b__template_obj,
      'EMITTED EXAMPLE 4 b',
      onelinelists=LCONF_DEFAULT,
      with_comments=True
   )
   print('\n\n============== EXAMPLE 4 b: EMIT DEFAULT OBJ ==============\n')
   print(lconf_section_emitted_default_obj_str)

   # EXAMPLE 4 b: EMIT PARSED LCONF OBJ
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__example_4b_lconf_section_raw_str,
      'EXAMPLE 4 b',
      lconf_section__example_4b__template_obj,
      with_comments=True,
      validate=True
   )
   lconf_section_emitted_parsed_obj_str = lconf_emit(lconf_parse_obj, onelinelists=LCONF_DEFAULT)

   print('\n\n============== EXAMPLE 4 b: EMIT PARSED LCONF OBJ ==============\n')
   print(lconf_section_emitted_parsed_obj_str)

   # EXAMPLE 4 b: EMIT TO JSON
   lconf_parse_obj = lconf_parse_section_extract_by_name(
      lconf_section__example_4b_lconf_section_raw_str,
      'EXAMPLE 4 b',
      lconf_section__example_4b__template_obj,
      with_comments=False,
      validate=True
   )
   result_ordered_native_type = lconf_to_ordered_native_type(lconf_parse_obj)
   dump_json = json_dumps(result_ordered_native_type, indent=3)

   print('\n\n============== EXAMPLE 4 b: EMIT TO ORDERED JSON ==============\n')
   print(dump_json)

   # EXAMPLE 4 b: EMIT TO YAML
   if has_yaml:
      lconf_parse_obj = lconf_parse_section_extract_by_name(
         lconf_section__example_4b_lconf_section_raw_str,
         'EXAMPLE 4 b',
         lconf_section__example_4b__template_obj,
         with_comments=False,
         validate=True
      )
      result_native_type = lconf_to_native_type(lconf_parse_obj)
      dump_yaml = yaml_dump(result_native_type, indent=3, allow_unicode=True)

      print('\n\n============== EXAMPLE 4 b: EMIT TO YAML ==============\n')
      print(dump_yaml)
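
Example n. 48 repeats the same parse/emit pattern for two templates. For reference, a minimal hedged sketch of the round trip with a hypothetical template and raw string, modeled on the call shapes used throughout this page (all `lconf_*` names as imported in these examples):

lconf_mini_template = Root([
   ('first', ''),
   ('interests', KVList(True, [])),
])
lconf_mini_raw_str = r'''___SECTION :: Mini EXAMPLE
first :: Joe
- interests
   soccer
   tennis
___END'''
lconf_obj = lconf_parse_section_extract_by_name(
   lconf_mini_raw_str,
   'Mini EXAMPLE',
   lconf_mini_template,
   with_comments=False,
   validate=True
)
print(lconf_emit(lconf_obj, onelinelists=LCONF_DEFAULT))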
Example n. 49
0
    def put_auction_data(self):
        doc_id = None
        self.approve_audit_info_on_announcement()

        files = {'file': ('audit.yaml', yaml_dump(self.audit, default_flow_style=False))}
        response = patch_tender_data(
            self.tender_url + '/documents', files=files,
            user=self.worker_defaults["TENDERS_API_TOKEN"],
            method='post', request_id=self.request_id,
            retry_count=2
        )
        if response:
            doc_id = response["data"]['id']
            logger.info(
                "Audit log approved. Document id: {}".format(doc_id),
                extra={"JOURNAL_REQUEST_ID": self.request_id,
                       "MESSAGE_ID": AUCTION_WORKER_API}
            )
        else:
            logger.warning(
                "Audit log not approved.",
                extra={"JOURNAL_REQUEST_ID": self.request_id,
                       "MESSAGE_ID": AUCTION_WORKER_API}
            )

        all_bids = self.auction_document["results"]
        logger.info(
            "Approved data: {}".format(all_bids),
            extra={"JOURNAL_REQUEST_ID": self.request_id,
                   "MESSAGE_ID": AUCTION_WORKER_API}
        )

        for index, bid_info in enumerate(self._auction_data["data"]["bids"]):
            auction_bid_info = get_latest_bid_for_bidder(all_bids, bid_info["id"])
            self._auction_data["data"]["bids"][index]["value"]["amount"] = auction_bid_info["amount"]
            self._auction_data["data"]["bids"][index]["date"] = auction_bid_info["time"]

        # post the updated bids back to the auction resource
        data = {'data': {'bids': self._auction_data["data"]['bids']}}
        results = patch_tender_data(
            self.tender_url + '/auction', data=data,
            user=self.worker_defaults["TENDERS_API_TOKEN"],
            method='post',
            request_id=self.request_id
        )
        if results:
            bids_dict = {bid["id"]: bid["tenderers"]
                         for bid in results["data"]["bids"]}
            for section in ['initial_bids', 'stages', 'results']:
                for index, stage in enumerate(self.auction_document[section]):
                    if 'bidder_id' in stage and stage['bidder_id'] in bids_dict:
                        # the same tenderer name is used for all three locales
                        name = bids_dict[stage['bidder_id']][0]["name"]
                        for lang in ('uk', 'ru', 'en'):
                            self.auction_document[section][index]["label"][lang] = name

            if doc_id:
                self.approve_audit_info_on_announcement(approved=bids_dict)
                files = {'file': ('audit.yaml', yaml_dump(self.audit, default_flow_style=False))}
                response = patch_tender_data(
                    self.tender_url + '/documents/{}'.format(doc_id), files=files,
                    user=self.worker_defaults["TENDERS_API_TOKEN"],
                    method='put', request_id=self.request_id,
                    retry_count=2
                )
                if response:
                    doc_id = response["data"]['id']
                    logger.info(
                        "Audit log approved. Document id: {}".format(doc_id),
                        extra={"JOURNAL_REQUEST_ID": self.request_id,
                               "MESSAGE_ID": AUCTION_WORKER_API}
                    )
                else:
                    logger.warning(
                        "Audit log not approved.",
                        extra={"JOURNAL_REQUEST_ID": self.request_id,
                               "MESSAGE_ID": AUCTION_WORKER_API}
                    )
        else:
            logger.error(
                "Auctions results not approved",
                extra={"JOURNAL_REQUEST_ID": self.request_id,
                       "MESSAGE_ID": AUCTION_WORKER_API}
            )
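
The audit document is uploaded twice as an in-memory YAML string built by `yaml_dump(self.audit, default_flow_style=False)`. A hedged sketch of what that call produces (the audit structure here is hypothetical):

from yaml import dump as yaml_dump

audit = {'id': 'UA-2014-000001', 'timeline': {'auction_start': {'time': '2014-05-08T13:39:00'}}}
print(yaml_dump(audit, default_flow_style=False))
# id: UA-2014-000001
# timeline:
#   auction_start:
#     time: '2014-05-08T13:39:00'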
Example n. 50
0
def test_lconf_to_native_type_ok0():
   """ Tests: test_lconf_to_native_type_ok0
   """
   print('::: TEST: test_lconf_to_native_type_ok0()')

   lconf_section__template_obj = Root([
      # Default Empty Line
      ('#1', ''),
      # Default Comment Line
      ('#2', '# Comment-Line: `Key :: Value Pair`'),
      ('first', ''),
      ('last', '', None, 'NOT-DEFINED'),
      ('sex', '', None, 'NOT-DEFINED'),
      ('age', '', lconf_to_int, -1),
      ('salary', ''),
      ('#3', '# Comment-Line: `Key-Value-List`'),
      ('interests', KVList(True, [])),
      ('#4', '# Comment-Line: `Key :: Value Pair`'),
      ('registered', ''),
   ])

   lconf_section_raw_str = r'''___SECTION :: EXAMPLE 1
#1 ::
#2 :: # Comment-Line: `Key :: Value Pair`
first :: Joe
last :: Smith
sex :: m
age :: 18
salary :: 12500
#3 :: # Comment-Line: `Key-Value-List`
- interests
   soccer
   tennis
#4 :: # Comment-Line: `Key :: Value Pair`
registered :: False
___END'''
   lconf_obj = lconf_parse_section_extract_by_name(
      lconf_section_raw_str,
      'EXAMPLE 1',
      lconf_section__template_obj,
      with_comments=False,
      validate=True
   )
   result_native_type = lconf_to_native_type(lconf_obj)

   ok_(isinstance(lconf_obj, LconfRoot), msg=None)
   ok_(isinstance(result_native_type, dict), msg=None)

   ok_(isinstance(lconf_obj['interests'], LconfKVList), msg=None)
   ok_(isinstance(result_native_type['interests'], list), msg=None)

   ok_(lconf_obj['last'] == result_native_type['last'] == 'Smith', msg=None)
   ok_(lconf_obj['age'] == result_native_type['age'] == 18, msg=None)


   # RE DUMP AS JSON
   re_dump_json = json_dumps(result_native_type, indent=3)

   # RE CONVERT TO LCONF
   result_reconverted_dict_to_lconf2 = lconf_dict_to_lconf(
      json_loads(re_dump_json),
      'EXAMPLE 1',
      onelinelists=False,
      skip_none_value=False
   )
   lconf_validate_one_section_str(result_reconverted_dict_to_lconf2)

   if has_yaml:
      dump_yaml = yaml_dump(result_native_type, indent=3, allow_unicode=True)
      parsed_load_yaml = yaml_load(dump_yaml)
      ok_(isinstance(parsed_load_yaml, dict), msg=None)
      ok_(isinstance(parsed_load_yaml['interests'], list), msg=None)
      ok_(parsed_load_yaml['last'] == lconf_obj['last'] == result_native_type['last'] == 'Smith', msg=None)
      ok_(parsed_load_yaml['age'] == lconf_obj['age'] == result_native_type['age'] == 18, msg=None)
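
A hedged aside on the YAML round trip above: `yaml_load` is an aliased import from the test utilities; with plain PyYAML the equivalent check would use `safe_load`, which rejects arbitrary object tags:

from yaml import safe_load

parsed = safe_load(dump_yaml)  # dump_yaml as produced inside the test above
assert parsed['last'] == 'Smith' and parsed['age'] == 18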
Example n. 51
0
def test_lconf_to_native_type_ok2():
   """ Tests: test_lconf_to_native_type_ok2
   """
   print('::: TEST: test_lconf_to_native_type_ok2()')

   lconf_section__template_obj = get_lconf_section__base_example_template_obj()

   lconf_section_raw_str = get_lconf_section__base_example_lconf_section_raw_str()
   lconf_obj = lconf_parse_section_extract_by_name(
      lconf_section_raw_str,
      'BaseEXAMPLE',
      lconf_section__template_obj,
      with_comments=False,
      validate=True)

   eq_(lconf_obj['key11value_mapping']['mapping11_key2_mapping']['mapping11_key2_nested_mapping_key2_block_identifier'][
      'sky_blue_blk_name1']['blk_item_green'],
      206,
      msg=None)

   eq_(lconf_obj.key_order,
      ['key1value_pair', 'key2value_pair', 'key3value_pair', 'key4value_pair', 'key5value_pair', 'key6value_pair',
         'key7value_pair', 'key8value_pair', 'key9value_pair', 'key10value_mapping', 'key11value_mapping', 'key12list',
         'key13value_pairlist', 'key14list_of_color_tuples', 'key15value_pairlist', 'key16value_pairlist',
         'key17list_of_tuples', 'RepeatedBlk1'],
      msg=None)

   eq_(lconf_obj['key11value_mapping']['mapping11_key2_mapping']['mapping11_key2_nested_mapping_key2_block_identifier'][
      'sky_blue_blk_name1'].key_order,
      ['blk_item_red', 'blk_item_green', 'blk_item_blue'],
      msg=None)

   eq_(lconf_obj['key11value_mapping']['mapping11_key1'], '/home/examples', msg=None)
   eq_(lconf_obj['key1value_pair'], 'NOT-DEFINED', msg=None)  # `Empty-KeyValuePair-ReplacementValue` "NOT-DEFINED"
   eq_(lconf_obj['key7value_pair'], -94599.5, msg=None)  # `Empty-KeyValuePair-ReplacementValue` "-94599.5"

   eq_(lconf_obj['key11value_mapping']['mapping11_key2_mapping']['mapping11_key2_nested_mapping_key3'], 'car', msg=None)
   # `Empty-KeyValuePair-ReplacementValue` "0"
   eq_(lconf_obj['key11value_mapping']['mapping11_key2_mapping']['mapping11_key2_nested_mapping_key2_block_identifier'][
      'sky_blue_blk_name1']['blk_item_red'], 0, msg=None)

   eq_(lconf_obj['key14list_of_color_tuples'].column_names, ('Color Name', 'Red', 'Green', 'Blue'), msg=None)
   eq_(
      lconf_obj['key14list_of_color_tuples'].column_names_idx_lookup,
      {'Color Name': 0, 'Red': 1, 'Green': 2, 'Blue': 3},
      msg=None
   )

   eq_(lconf_obj['key14list_of_color_tuples'][0], ('forestgreen', 34, 139, 34), msg=None)

   eq_(lconf_obj['RepeatedBlk1'].key_order, ['BLK_OBJ1', 'BLK_OBJ2', 'BLK_OBJ3', 'BLK_OBJ4'], msg=None)
   eq_(lconf_obj['RepeatedBlk1'].min_required_blocks, 2, msg=None)
   eq_(lconf_obj['RepeatedBlk1'].max_allowed_blocks, 5, msg=None)
   eq_(lconf_obj['RepeatedBlk1'].has_comments, False, msg=None)

   eq_(
      lconf_obj['RepeatedBlk1']['BLK_OBJ4'].key_order,
      ['MyKey1_mapping', 'MyKey2', 'MyKey3', 'MyKey4', 'MyKey5list', 'MyKey6list', 'MyKey7list', 'MyKey8'],
      msg=None
   )
   # `Empty-KeyValuePair-ReplacementValue` "-9999999999.055"
   eq_(lconf_obj['RepeatedBlk1']['BLK_OBJ1']['MyKey1_mapping']['blk_mapping_key2'], 12345.99, msg=None)
   eq_(lconf_obj['RepeatedBlk1']['BLK_OBJ2']['MyKey1_mapping']['blk_mapping_key2'], -9999999999.055, msg=None)
   eq_(lconf_obj['RepeatedBlk1']['BLK_OBJ3']['MyKey1_mapping']['blk_mapping_key2'], 9999.999, msg=None)
   eq_(lconf_obj['RepeatedBlk1']['BLK_OBJ4']['MyKey1_mapping']['blk_mapping_key2'], 9999.999, msg=None)

   eq_(lconf_obj['RepeatedBlk1']['BLK_OBJ4']['MyKey5list'], ['one item'], msg=None)
   eq_(lconf_obj['RepeatedBlk1']['BLK_OBJ4']['MyKey4'], 'GREAT LIFE', msg=None)

   result_native_type = lconf_to_native_type(lconf_obj)

   ok_(isinstance(lconf_obj, LconfRoot), msg=None)
   ok_(isinstance(result_native_type, dict), msg=None)

   ok_(lconf_obj['key1value_pair'] == result_native_type['key1value_pair'] == 'NOT-DEFINED', msg=None)
   ok_(lconf_obj['key7value_pair'] == result_native_type['key7value_pair'] == -94599.5, msg=None)

   # RE DUMP AS JSON - NOTE: special characters might not be handled correctly
   # IMPORTANT: datetime.datetime(2014, 5, 8, 13, 39) is not JSON serializable
   result_native_type['key11value_mapping']['mapping11_key2_mapping'][
      'mapping11_key2_nested_mapping_key1'] = '2014-05-08 13:39:00'
   re_dump_json = json_dumps(result_native_type, indent=3)

   # RE CONVERT TO LCONF - NOTE: there will not be any Block-Identifier, and the key order
   # might be scrambled, which does not work with comment lines
   result_reconverted_dict_to_lconf2 = lconf_dict_to_lconf(
      json_loads(re_dump_json),
      'BaseEXAMPLE',
      onelinelists=False,
      skip_none_value=False
   )

   lconf_validate_one_section_str(result_reconverted_dict_to_lconf2)

   # do yaml of result_native_type
   if has_yaml:
      dump_yaml = yaml_dump(result_native_type, indent=3, allow_unicode=True)
      parsed_load_yaml = yaml_load(dump_yaml)
      ok_(isinstance(parsed_load_yaml, dict), msg=None)
      eq_(
         parsed_load_yaml['key11value_mapping']['mapping11_key2_mapping']['mapping11_key2_nested_mapping_key1'],
         '2014-05-08 13:39:00'
      )
      eq_(parsed_load_yaml['key11value_mapping']['mapping11_key2_mapping'][
         'mapping11_key2_nested_mapping_key2_block_identifier']['sky_blue_blk_name1']['blk_item_green'],
         206,
         msg=None)

      eq_(parsed_load_yaml['key11value_mapping']['mapping11_key1'], '/home/examples', msg=None)
      eq_(parsed_load_yaml['key1value_pair'], 'NOT-DEFINED', msg=None)  # `Empty-KeyValuePair-ReplacementValue` "NOT-DEFINED"
      eq_(parsed_load_yaml['key7value_pair'], -94599.5, msg=None)  # `Empty-KeyValuePair-ReplacementValue` "-94599.5"

      eq_(parsed_load_yaml['key11value_mapping']['mapping11_key2_mapping']['mapping11_key2_nested_mapping_key3'],
         'car',
         msg=None)
      # `Empty-KeyValuePair-ReplacementValue` "0"
      eq_(parsed_load_yaml['key11value_mapping']['mapping11_key2_mapping'][
         'mapping11_key2_nested_mapping_key2_block_identifier']['sky_blue_blk_name1']['blk_item_red'],
         0,
         msg=None)

      # Tuples are changed to lists
      eq_(parsed_load_yaml['key14list_of_color_tuples'][0], ['forestgreen', 34, 139, 34], msg=None)

      # `Empty-KeyValuePair-ReplacementValue` "-9999999999.055"
      eq_(parsed_load_yaml['RepeatedBlk1']['BLK_OBJ1']['MyKey1_mapping']['blk_mapping_key2'], 12345.99, msg=None)
      eq_(parsed_load_yaml['RepeatedBlk1']['BLK_OBJ2']['MyKey1_mapping']['blk_mapping_key2'], -9999999999.055, msg=None)
      eq_(parsed_load_yaml['RepeatedBlk1']['BLK_OBJ3']['MyKey1_mapping']['blk_mapping_key2'], 9999.999, msg=None)
      eq_(parsed_load_yaml['RepeatedBlk1']['BLK_OBJ4']['MyKey1_mapping']['blk_mapping_key2'], 9999.999, msg=None)

      eq_(parsed_load_yaml['RepeatedBlk1']['BLK_OBJ4']['MyKey5list'], ['one item'], msg=None)
      eq_(parsed_load_yaml['RepeatedBlk1']['BLK_OBJ4']['MyKey4'], 'GREAT LIFE', msg=None)
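
The manual datetime-to-string substitution at the top of this test (and in the earlier examples) is needed because `json.dumps` raises `TypeError` on `datetime` objects. A hedged alternative sketch that avoids the substitution:

import datetime
import json

record = {'created': datetime.datetime(2014, 5, 8, 13, 39)}
print(json.dumps(record, indent=3, default=str))
# {
#    "created": "2014-05-08 13:39:00"
# }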
Example n. 52
0
def dump(data, stream=None):
    return yaml_dump(data, stream, Dumper=Dumper)
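
Like PyYAML's own `dump`, this wrapper returns the serialized string when `stream` is None and writes to the stream (returning None) otherwise. A short hedged usage sketch (file path hypothetical):

text = dump({'answer': 42})           # no stream: returns 'answer: 42\n'
with open('/tmp/out.yaml', 'w') as fh:
    dump({'answer': 42}, fh)          # stream given: writes to it, returns None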